├── MANIFEST.in
├── tests
├── __init__.py
├── condition_test.py
├── dlt_client_unit_test.py
├── dlt_message_performance_test.py
├── continuousness_helper_test.py
├── dlt_filter_unit_test.py
├── dlt_main_loop_unit_test.py
├── dlt_main_loop_with_dlt_client_unit_test.py
├── dlt_core_unit_test.py
├── dlt_message_handler_unit_test.py
├── dlt_main_loop_by_reading_dlt_file_unit_test.py
├── dlt_context_handler_unit_test.py
├── dlt_broker_from_file_spinner_test.py
├── utils.py
├── dlt_message_unit_test.py
├── dlt_broker_time_test.py
└── dlt_file_spinner_unit_test.py
├── .coveragerc
├── docs
├── _static
│ └── img
│ │ └── favicon.ico
├── more_docu.rst
├── requirements-docs.txt
├── index.rst
├── Makefile
├── make.bat
├── _templates
│ └── versions.html
└── conf.py
├── .flake8
├── Dockerfile
├── .github
└── workflows
│ └── python-dlt-ci.yaml
├── Makefile
├── dlt
├── __init__.py
├── helpers.py
├── py_dlt_receive.py
├── core
│ ├── __init__.py
│ ├── core_2188.py
│ └── core_21810.py
├── dlt_broker.py
└── dlt_broker_handlers.py
├── .gitignore
├── tox.ini
├── pyproject.toml
├── README.md
├── .pylintrc
└── LICENCE.txt
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENCE.txt
2 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | source = dlt
4 |
5 | [report]
6 | show_missing = True
7 |
--------------------------------------------------------------------------------
/docs/_static/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bmwcarit/python-dlt/HEAD/docs/_static/img/favicon.ico
--------------------------------------------------------------------------------
/docs/more_docu.rst:
--------------------------------------------------------------------------------
1 | More docu
2 | =========
3 |
4 | Just a dummy file
5 |
6 | on
7 | --
8 |
9 | something
10 | _________
--------------------------------------------------------------------------------
/docs/requirements-docs.txt:
--------------------------------------------------------------------------------
1 | sphinx==5.1.1
2 | sphinx-multiversion==0.2.4
3 | sphinx_rtd_theme==0.5.2 # NB: when upgrading, do not forget to check the templates in _templates!
4 | sphinxcontrib-images==0.9.4
5 | sphinxcontrib-apidoc
6 | sphinx-click
7 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 |
2 | Python DLT library
3 | ================================================
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | more_docu
9 | api/modules
10 |
11 |
12 | Purpose
13 | _______
14 |
15 | Python wrapper over libdlt2.
16 |
17 |
18 |
19 | Indices and tables
20 | ==================
21 |
22 | * :ref:`genindex`
23 | * :ref:`modindex`
24 | * :ref:`search`
25 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 119
3 | ignore =
4 | # D10*: Missing docstring
5 | D10
6 | # E203: whitespace before ':'
7 | # This error is not PEP8 complaint and should be ignored
8 | E203
9 | # W503: line break before binary operator
10 | # seems to conflict with black code formatting
11 | W503
12 | # W605: invalid escape sequence '\d'
13 | W605
14 | exclude =
15 | .git,
16 | .tox,
17 | .eggs,
18 | __pycache__,
19 | build,
20 | dist
21 |
--------------------------------------------------------------------------------
/tests/condition_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 |
3 | from dlt.helpers import LimitCondition
4 |
5 |
class TestsLimitCondition(object):
    """Tests for the LimitCondition counting helper."""

    __test__ = True

    def test_none(self):
        # None means "no limit": the condition stays truthy forever
        condition = LimitCondition(None)
        assert condition()

    def test_limit_decreasing(self):
        condition = LimitCondition(2)
        condition()
        assert condition.limit == 1
        assert condition()  # limit reaches 0 -> call still allowed
        assert not condition()  # limit goes negative -> condition fails
19 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG BASE_IMAGE=alpine:3.20
2 |
3 | FROM ${BASE_IMAGE}
4 |
5 | ARG LIBDLT_VERSION=v2.18.8
6 |
7 | RUN set -ex \
8 | && apk add --no-cache build-base musl-dev linux-headers git cmake ninja \
9 | wget curl dbus zlib zlib-dev \
10 | python3 python3-dev py3-pip py3-tox \
11 | && git clone https://github.com/GENIVI/dlt-daemon \
12 | && cd /dlt-daemon \
13 | && git checkout ${LIBDLT_VERSION} \
14 | && cd /dlt-daemon \
15 | && cmake CMakeLists.txt \
16 | && make -j \
17 | && make install \
18 | && ldconfig /usr/local/lib
19 |
20 | RUN mkdir -p /workspace
21 |
22 | WORKDIR /workspace
23 |
24 | # vim: set ft=dockerfile :
25 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/_templates/versions.html:
--------------------------------------------------------------------------------
1 | {%- if current_version %}
2 |
3 |
4 | Other Versions
5 | v: {{ current_version.name }}
6 |
7 |
8 |
9 | {%- if versions.tags %}
10 |
11 | - Tags
12 | {%- for item in versions.tags %}
13 | - {{ item.name }}
14 | {%- endfor %}
15 |
16 | {%- endif %}
17 | {%- if versions.branches %}
18 |
19 | - Branches
20 | {%- for item in versions.branches %}
21 | - {{ item.name }}
22 | {%- endfor %}
23 |
24 | {%- endif %}
25 |
26 |
27 | {%- endif %}
28 |
--------------------------------------------------------------------------------
/.github/workflows/python-dlt-ci.yaml:
--------------------------------------------------------------------------------
1 | name: python-dlt-ci-actions
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | run-test-for-python-dlt:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | LIBDLT_VERSION:
11 | - "v2.18.8"
12 | steps:
13 | - uses: actions/checkout@v2
14 | - name: Build python-dlt unit test docker image
15 | id: docker_build
16 | uses: docker/build-push-action@v2
17 | with:
18 | push: false
19 | build-args: |
20 | LIBDLT_VERSION=${{ matrix.LIBDLT_VERSION }}
21 | tags: python-dlt/python-dlt-unittest:${{ matrix.LIBDLT_VERSION }}
22 | - name: lint check for the code base
23 | uses: addnab/docker-run-action@v3
24 | with:
25 | image: python-dlt/python-dlt-unittest:${{ matrix.LIBDLT_VERSION }}
26 | options: -v ${{ github.workspace }}:/pydlt -w /pydlt
27 | run: tox -e black,ruff
28 | - name: Run unit test
29 | uses: addnab/docker-run-action@v3
30 | with:
31 | image: python-dlt/python-dlt-unittest:${{ matrix.LIBDLT_VERSION }}
32 | options: -v ${{ github.workspace }}:/pydlt -w /pydlt
33 | run: tox
34 |
--------------------------------------------------------------------------------
/tests/dlt_client_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for DLTClient class"""
3 |
4 | import unittest
5 | from unittest.mock import patch, Mock
6 |
7 | from dlt.dlt import DLTClient, DLT_RETURN_OK, DLT_RETURN_ERROR
8 |
9 |
class TestDLTClient(unittest.TestCase):
    """Tests for DLTClient.connect(timeout=...) covering failure and success paths."""

    def setUp(self):
        # - patch port so that connect fails even if dlt-daemon is running
        self.client = DLTClient(servIP="127.0.0.1", port=424242)

    def test_connect_with_timeout_failed(self):
        # - timeout error: nothing listens on the bogus port, so the TCP
        #   connection attempt itself fails within the timeout
        self.assertFalse(self.client.connect(timeout=2))

        # - dlt_receiver_init error: the socket layer "succeeds" (mocked) but
        #   libdlt's receiver initialisation reports an error, so connect()
        #   must still return False
        # NOTE(review): fileno=2000000 looks like a deliberately bogus fd
        # so nothing real is ever read -- confirm against DLTClient.connect
        with patch("socket.create_connection", return_value=Mock(fileno=Mock(return_value=2000000))), patch(
            "dlt.dlt.dltlib.dlt_receiver_init", return_value=DLT_RETURN_ERROR
        ):
            self.assertFalse(self.client.connect(timeout=2))

    def test_connect_with_timeout_success(self):
        # Both the socket connection and the receiver init succeed (mocked),
        # so connect() reports success without a real daemon running.
        with patch("socket.create_connection", return_value=Mock(fileno=Mock(return_value=2000000))), patch(
            "dlt.dlt.dltlib.dlt_receiver_init", return_value=DLT_RETURN_OK
        ):
            self.assertTrue(self.client.connect(timeout=2))
30 |
--------------------------------------------------------------------------------
/tests/dlt_message_performance_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for DLT messages"""
3 |
4 | import io
5 | import unittest
6 |
7 | from dlt.dlt import DLTFilter
8 |
9 | from .utils import create_messages
10 |
11 | stream_one = io.BytesIO(b"5\x00\x00 MGHS\xdd\xf6e\xca&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00")
12 | stream_two = io.BytesIO(b"5\x00\x00 MGHS\xdd\xf6e\xca&\x01DA1\x00DC2\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00")
13 |
14 | LOOPS = 100000
15 |
16 |
class TestsDLTMessagePerf(unittest.TestCase):
    """Rough performance exercises for DLTMessage.compare().

    Each test runs compare() LOOPS times against a different kind of
    "other" operand (dict, DLTFilter, DLTMessage); there are no assertions,
    the point is wall-clock behaviour under pytest timing.
    """

    def setUp(self):
        # 10% of the messages match ctid DC1, 90% match DC2.
        # NOTE(review): all messages are parsed from the two module-level
        # streams; create_messages presumably rewinds or copies them between
        # calls -- confirm in tests/utils.py
        self.msgs = [create_messages(stream_one) for i in range(int(LOOPS * 0.1))]
        self.msgs += [create_messages(stream_two) for i in range(int(LOOPS * 0.9))]

    def test_compare_dict(self):
        # with dict as other
        attrs = {"apid": "DA1", "ctid": "DC1"}
        for msg in self.msgs:
            msg.compare(other=attrs)

    def test_compare_filter(self):
        # with DLTFilter as other
        flt = DLTFilter()
        flt.add("DA1", "DC1")
        for msg in self.msgs:
            msg.compare(other=flt)

    def test_compare_mesage(self):
        # with another DLTMessage as other
        # NOTE(review): method name has a typo ("mesage"); kept as-is to avoid
        # changing the externally visible test id.
        other = create_messages(stream_one)
        for msg in self.msgs:
            msg.compare(other=other)
40 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Non released dlt-daemon version based on 2.18.10
2 | LIBDLT_VERSION=v2.18.10
3 |
4 | IMAGE=python-dlt/python-dlt-unittest
5 | TAG?=latest
6 | DK_CMD=docker run --rm -v $(shell pwd):/pydlt -w /pydlt
7 | TEST_ARGS?="-e py3,lint"
8 |
9 | .PHONY: all
10 | all:
11 | @echo "python-dlt testing commands, libdlt version: ${LIBDLT_VERSION}"
12 | @echo " make unit-test -- Run unit tests with tox (Run 'make build-image' the first time)"
13 | @echo " make build-image -- Build docker image for the usage of 'make unit-test'"
14 | @echo " make clean -- Remove all temporary files"
15 |
16 | .PHONY: unit-test
17 | unit-test:
18 | ${DK_CMD} ${IMAGE}:${TAG} tox ${TEST_ARGS}
19 |
20 | .PHONY: lint
21 | lint:
22 | ${DK_CMD} ${IMAGE}:${TAG} tox -e lint
23 |
24 | .PHONY: build-image
25 | build-image:
26 | docker build --build-arg LIBDLT_VERSION=${LIBDLT_VERSION} \
27 | --tag ${IMAGE}:${TAG} .
28 | docker build --build-arg LIBDLT_VERSION=${LIBDLT_VERSION} \
29 | --tag ${IMAGE}:${LIBDLT_VERSION} .
30 |
31 | .PHONY: bash
32 | bash:
33 | ${DK_CMD} -it ${IMAGE}:${TAG}
34 |
35 | .PHONY: clean
36 | clean:
37 | ifeq (,$(wildcard /.dockerenv))
38 | ${DK_CMD} ${IMAGE}:${TAG} make clean
39 | else
40 | find . -name "__pycache__" | xargs -n1 rm -rf
41 | find . -name "*.pyc" | xargs -n1 rm -rf
42 | rm -rf .coverage
43 | rm -rf *.egg-info
44 | rm -rf .eggs
45 | rm -rf junit_reports
46 | rm -rf .tox
47 | endif
48 |
--------------------------------------------------------------------------------
/dlt/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 | """DLT support module"""
3 |
4 | import collections
5 | import logging
6 | import subprocess
7 |
8 | if not hasattr(subprocess, "TimeoutExpired"):
9 | import subprocess32 as subprocess # pylint: disable=import-error
10 |
11 |
LOGGER = logging.getLogger(__name__)
# Lightweight result record mirroring subprocess.CompletedProcess fields.
ProcessResult = collections.namedtuple("ProcessResult", ("stdout", "stderr", "returncode"))


def run_command(command, timeout=60, shell=True):
    """Run command in a shell and return stdout, stderr and return code

    :param str|list command: a command to run
    :param int timeout: timeout in seconds for the command
    :param bool shell: shell switch
    :returns: process result
    :rtype: subprocess compatible ProcessResult
    :raises RuntimeError: If timeout expires.
    """
    process = subprocess.Popen(
        command, shell=shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    try:
        stdout, stderr = process.communicate(timeout=timeout)
    except subprocess.TimeoutExpired as exc:
        # Kill the child and reap it: terminate() alone left a zombie process
        # behind because nothing ever wait()ed on it (per the subprocess docs,
        # communicate() must be called again after TimeoutExpired).
        process.kill()
        process.communicate()
        raise RuntimeError("Timeout %d seconds reached for command '%s'" % (timeout, command)) from exc
    # Popen with default (binary) pipes yields bytes; normalise to text.
    if isinstance(stdout, bytes):
        stdout = stdout.decode("utf-8")
    if isinstance(stderr, bytes):
        stderr = stderr.decode("utf-8")
    return ProcessResult(stdout, stderr, process.returncode)
39 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 | .vscode/
93 |
94 | # Spyder project settings
95 | .spyderproject
96 | .spyproject
97 |
98 | # Rope project settings
99 | .ropeproject
100 |
101 | # mkdocs documentation
102 | /site
103 |
104 | # mypy
105 | .mypy_cache/
106 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py3,flake8,black,lint,ruff,bandit,docs
3 | output_dir={env:SPHINX_OUTPUT_DIR:{toxworkdir}/_build}
4 | isolated_build = True
5 | toxworkdir = {env:TOXWORKDIR:/tmp/tox}
6 |
7 | [testenv]
8 | deps =
9 | pytest
10 | pytest-cov
11 | commands =
12 | pytest \
13 | --cov=dlt \
14 | --cov-branch \
15 | --cov-report=html \
16 | --cov-report=term-missing \
17 | {posargs:tests}
18 |
19 | [pytest]
20 | filterwarnings =
21 | error
22 |
23 |
24 | [testenv:flake8]
25 | basepython = python3
26 | skip_install = true
27 | deps =
28 | flake8
29 | commands =
30 | flake8 --format=pylint --output-file=flake8.txt --max-line-length=119
31 |
32 | [testenv:ruff]
33 | basepython = python3
34 | skip_install = true
35 | deps =
36 | ruff==0.4.1
37 | mypy
38 | commands =
39 | ruff check ./dlt ./tests
40 |
41 | [testenv:black]
42 | skip_install = True
43 | skipsdist = True
44 | deps =
45 | black
46 | commands =
47 | black -l 119 --check --diff .
48 |
49 | [testenv:lint]
50 | basepython = python3
51 | skip_install = true
52 | deps =
53 | click
54 | pylint==3.2.7
55 | commands =
56 | pylint . --output-format=json:pylint.json,colorized
57 |
58 | [testenv:bandit]
59 | basepython = python3
60 | skip_install = true
61 | deps =
62 | bandit
63 | bandit[toml]==1.7.4
64 | commands =
65 | bandit -c pyproject.toml -r . -f json -o bandit.json --exit-zero
66 |
67 | [testenv:dev]
68 | basepython = python3
69 | passenv = SOURCE_DATE_EPOCH
70 | skip_install = True
71 | skipsdist = True
72 | deps =
73 | build
74 | twine
75 | wheel
76 | commands =
77 | python -m build
78 | twine upload -r software-factory-pypi-dev dist/*
79 |
80 | [testenv:release]
81 | basepython = python3
82 | passenv = SOURCE_DATE_EPOCH
83 | skip_install = True
84 | skipsdist = True
85 | deps =
86 | build
87 | twine
88 | wheel
89 | commands =
90 | python -m build
91 | twine upload -r software-factory-pypi dist/*
92 |
--------------------------------------------------------------------------------
/dlt/helpers.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 | """DLT client helpers"""
3 |
4 |
class LimitCondition(object):
    """Callable condition that becomes falsy after a fixed number of calls."""

    def __init__(self, limit):
        """Constructor

        :param int limit: the maximum number of messages, or None for "no limit"
        """
        self.limit = limit

    def __call__(self):
        # A limit of None means the condition can never run out.
        if self.limit is None:
            return True

        self.limit -= 1
        return self.limit >= 0
21 |
22 |
class ContinuousnessChecker(object):
    """Detects gaps in the per-(apid, ctid, seid) message counter sequence.

    Each stream's counter must increase by one per message, wrapping from
    255 back to 0 (unsigned char semantics); a jump raises RuntimeError.
    """

    # control message will be ignored - there is no continuation
    _ignore = ["DA1-DC1-0"]

    def __init__(self, start=0):
        self._index = start  # running index of processed messages
        self._counter = dict()  # last seen counter per stream key

    def __call__(self, message):
        key = "{}-{}-{}".format(message.apid, message.ctid, message.seid)
        self._index += 1

        if key in self._ignore:
            return

        previous = self._counter.get(key)
        # Record the new counter first; on a detected gap we resynchronise
        # to the received value before raising (matches original behavior).
        self._counter[key] = message.mcnt

        if previous is None:
            # first message of this stream - nothing to compare against
            return

        expected = (previous + 1) % 256
        if expected != message.mcnt:
            err_msg = "Missing message detected. Message"
            err_msg += " #{} (apid='%s', ctid='%s', seid='%s')" % (message.apid, message.ctid, message.seid)
            err_msg += " should have counter '{}' instead of '{}'"
            raise RuntimeError(err_msg.format(self._index - 1, expected, message.mcnt))
55 |
56 |
def bytes_to_str(byte_or_str):
    """Return string from bytes"""
    if not isinstance(byte_or_str, bytes):
        return str(byte_or_str)
    # Undecodable bytes become U+FFFD replacement characters.
    return byte_or_str.decode("utf8", errors="replace")
63 |
--------------------------------------------------------------------------------
/tests/continuousness_helper_test.py:
--------------------------------------------------------------------------------
1 | from dlt.helpers import ContinuousnessChecker
2 | import pytest
3 |
4 |
class Msg(object):
    """Minimal stand-in for a DLT message carrying only the fields
    ContinuousnessChecker reads (apid, ctid, seid, mcnt)."""

    def __init__(self, apid, ctid, seid, mcnt):
        self.apid, self.ctid, self.seid, self.mcnt = apid, ctid, seid, mcnt
11 |
12 |
def run_check(messages):
    """Replay *messages* through a fresh ContinuousnessChecker.

    Raises RuntimeError as soon as a counter gap is detected.
    """
    checker = ContinuousnessChecker()
    for message in messages:
        checker(message)
17 |
18 |
class TestsContinuousness(object):
    """Tests for ContinuousnessChecker gap detection via run_check()."""

    def test_simple(self):
        # strictly increasing counters -> no gap
        run_check([Msg("X", "Y", "99", count) for count in range(4, 9)])

    def test_simple_missing(self):
        with pytest.raises(RuntimeError):
            # counter 7 is missing from the sequence
            run_check([Msg("X", "Y", "99", count) for count in (4, 5, 6, 8, 9)])

    def test_simple_over(self):
        # message counter is an unsigned char so counts till 255 and then restarts at 0
        run_check([Msg("X", "Y", "99", count) for count in (254, 255, 0, 1)])

    def test_simple_reset(self):
        with pytest.raises(RuntimeError):
            # a jump back to 0 without reaching 255 first is a gap
            run_check([Msg("X", "Y", "99", count) for count in (230, 231, 0)])

    def test_ignore_control(self):
        # DA1-DC1-0 control messages are excluded from the check
        run_check([Msg("DA1", "DC1", "0", 0), Msg("X", "Y", "99", 231), Msg("DA1", "DC1", "0", 0)])

    def test_zeros_da1_dc1(self):
        # repeated counter 0 is fine for the ignored control stream
        run_check([Msg("DA1", "DC1", "0", 0), Msg("DA1", "DC1", "0", 0)])

    def test_zeros_non_da1_dc1(self):
        with pytest.raises(RuntimeError):
            # ...but not for any other stream
            run_check([Msg("X", "Y", "0", 0), Msg("X", "Y", "0", 0)])
64 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# Only the most common options are set here; the full reference lives at
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------
# No autodoc targets outside the repository root are needed, so sys.path is
# left untouched.

import datetime

# -- Project information -----------------------------------------------------

project = "dlt"
author = "BMW Car IT"
# NB: "copyright" intentionally shadows the builtin -- Sphinx requires this name.
copyright = "{}, {}".format(datetime.date.today().year, author)

# -- General configuration ---------------------------------------------------

# Sphinx extension modules, both builtin ('sphinx.ext.*') and third-party.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.autosectionlabel", "sphinx_click.ext", "sphinx_multiversion"]

# Paths that contain templates, relative to this directory.
templates_path = ["_templates"]

# Patterns (relative to the source directory) to skip when looking for source
# files; this also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]


# -- Options for HTML output -------------------------------------------------

# Theme for HTML and HTML Help pages.
html_theme = "sphinx_rtd_theme"

# Custom static files (such as style sheets); copied after the builtin static
# files, so a "default.css" here overrides the builtin one.
html_static_path = ["_static"]

# -- sphinx-multiversion configuration ----------------------------------------
# Format for versioned output directories inside the build directory.
smv_outputdir_format = "{ref.name}"
# Build only branches matching master.
smv_branch_whitelist = r"^master.*$"
# Determines whether remote or local git branches/tags are preferred if their
# output dirs conflict.
smv_prefer_remote_refs = False
59 |
--------------------------------------------------------------------------------
/dlt/py_dlt_receive.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2017. BMW Car IT GmbH. All rights reserved.
2 | """DLT Receive using py_dlt"""
3 |
4 | import argparse
5 | import logging
6 | import time
7 |
8 | from dlt.dlt import DLT_UDP_MULTICAST_FD_BUFFER_SIZE, DLT_UDP_MULTICAST_BUFFER_SIZE
9 | from dlt.dlt_broker import DLTBroker
10 |
11 | logging.basicConfig(format="%(asctime)s %(name)s %(levelname)-8s %(message)s")
12 | root_logger = logging.getLogger() # pylint: disable=invalid-name
13 | logger = logging.getLogger("py-dlt-receive") # pylint: disable=invalid-name
14 |
15 |
def parse_args():
    """Parse command line arguments"""
    logger.info("Parsing arguments")
    parser = argparse.ArgumentParser(description="Receive DLT messages")
    parser.add_argument("--host", required=True, help="hostname or ip address to connect to")
    parser.add_argument("--file", required=True, help="The file into which the messages will be written")

    # The two UDP buffer-size options are symmetric: declare them from a table.
    buffer_options = (
        (
            "--udp-fd-buffer-size",
            "udp_fd_buffer_size",
            DLT_UDP_MULTICAST_FD_BUFFER_SIZE,
            f"Set the socket buffer size in udp multicast mode. default: {DLT_UDP_MULTICAST_FD_BUFFER_SIZE} bytes",
        ),
        (
            "--udp-buffer-size",
            "udp_buffer_size",
            DLT_UDP_MULTICAST_BUFFER_SIZE,
            f"Set the DltReceiver buffer size in udp multicast mode. default: {DLT_UDP_MULTICAST_BUFFER_SIZE} bytes",
        ),
    )
    for flag, dest, default, help_text in buffer_options:
        parser.add_argument(flag, dest=dest, default=default, type=int, help=help_text)

    return parser.parse_args()
37 |
38 |
def dlt_receive(options):
    """Receive DLT messages via DLTBroker and write them to *options.file*.

    :param argparse.Namespace options: parsed CLI options (host, file and the
        two UDP buffer sizes from parse_args())
    """
    logger.info("Creating DLTBroker instance")
    # BUGFIX: the two buffer-size keywords were swapped, so the value of
    # --udp-fd-buffer-size was applied as the DltReceiver buffer size and
    # vice versa.  Pass each option to its matching keyword.
    broker = DLTBroker(
        ip_address=options.host,
        filename=options.file,
        udp_fd_buffer_size_bytes=options.udp_fd_buffer_size,
        udp_buffer_size_bytes=options.udp_buffer_size,
    )

    logger.info("Starting DLTBroker")
    broker.start()  # start the loop
    try:
        logger.info("Receiving messages...")
        while True:
            # Idle loop: the broker's own threads do the receiving/writing.
            time.sleep(0.1)
    except KeyboardInterrupt:
        logger.info("Interrupted...")
    finally:
        # Always stop the broker, even on unexpected errors.
        logger.info("Stopping DLT broker")
        broker.stop()
        logger.info("Stopped DLT broker")
61 |
62 |
def main():
    """Entry point: configure logging, parse options and start receiving."""
    root_logger.setLevel(level=logging.INFO)

    parsed_options = parse_args()
    logger.info("Parsed arguments: %s", parsed_options)

    dlt_receive(parsed_options)
71 |
72 |
73 | if __name__ == "__main__":
74 | main()
75 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "dlt"
3 | dynamic = ["version"]
4 | description = "Python implementation for DLT"
5 | authors = [
6 | {name = "BMW CarIT", email="carit.info@bmw.de"},
7 | ]
8 | readme = "README.md"
9 | license = {file = "LICENCE.txt"}
10 | classifiers = [
11 | "Development Status :: 5 - Production/Stable",
12 | "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
13 | "Intended Audience :: Developers",
14 | "Topic :: Software Development",
15 | "Topic :: System :: Logging",
16 | "Programming Language :: Python :: 3",
17 | ]
18 |
19 | dependencies = [
20 | ]
21 |
22 | [project.optional-dependencies]
23 | dev = [
24 | "black>=22.10",
25 | "flake8>=5",
26 | "pytest>=7.2.0",
27 | "pytest-cov>=4.0.0"
28 | ]
29 |
30 | [project.urls]
31 | "Homepage" = "https://github.com/bmwcarit/python-dlt"
32 |
33 | [project.scripts]
34 | py_dlt_receive = "dlt.py_dlt_receive:main"
35 |
36 | [tool.setuptools.packages.find]
37 | include = ["dlt*"]
38 | exclude = ["playbook*", "zuul.d*", "extracted_files*", "tests"]
39 |
40 | [build-system]
41 | requires = ["setuptools>=45", "setuptools-git-versioning"]
42 | build-backend = "setuptools.build_meta"
43 |
44 | [tool.setuptools-git-versioning]
45 | enabled = true
46 | dev_template = "{tag}.dev{ccount}+{sha}"
47 |
48 | [tool.black]
49 | line-length = 119
50 | target_version = ['py37']
51 | include = '\.pyi?$'
52 | exclude = '''
53 | (
54 | /(
55 | \.eggs # exclude a few common directories in the
56 | | \.git # root of the project
57 | | \.hg
58 | | \.mypy_cache
59 | | \.tox
60 | | \.venv
61 | | _build
62 | | buck-out
63 | | build
64 | | dist
65 | )/
66 | | foo.py # also separately exclude a file named foo.py in
67 | # the root of the project
68 | | _version.py
69 | )
70 | '''
71 |
72 | [tool.ruff]
73 | line-length = 119
74 | lint.select = ["E", "F", "Q", "D"]
75 | # the following is equivalent to --docstring-convention=pep8
76 | lint.extend-ignore = [
77 | "D100",
78 | "D107",
79 | "D105",
80 | "D401",
81 | "D101",
82 | "D102",
83 | "D103",
84 | "D104",
85 | "D200",
86 | "D400",
87 | "D203",
88 | "D205",
89 | "D212",
90 | "D213",
91 | "D214",
92 | "D215",
93 | "D404",
94 | "D405",
95 | "D406",
96 | "D407",
97 | "D408",
98 | "D409",
99 | "D410",
100 | "D411",
101 | "D413",
102 | "D415",
103 | "D416",
104 | "D417",
105 | ]
106 |
107 | # D104: Missing docstring in public package
108 | # This D104 error will be ignored only in __init__ files
109 | lint.per-file-ignores = {"__init__.py" = ["D104"]}
110 |
111 | [tool.bandit]
112 | exclude_dirs = ["tests", ".tox"]
113 |
--------------------------------------------------------------------------------
/tests/dlt_filter_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for DLTFilter definition"""
3 | import unittest
4 |
5 | import ctypes
6 |
7 | from dlt.dlt import DLTFilter
8 | from dlt.core.core_base import DLT_FILTER_MAX, DLT_ID_SIZE
9 |
10 |
class TestDLTFilter(unittest.TestCase):
    """Unit tests for the DLTFilter ctypes wrapper."""

    def setUp(self):
        self.dlt_filter = DLTFilter()

    def tearDown(self):
        del self.dlt_filter

    @staticmethod
    def _raw(entry):
        # Read exactly DLT_ID_SIZE bytes out of a ctypes id array entry.
        return ctypes.string_at(entry, DLT_ID_SIZE)

    def test_init(self):
        # A freshly constructed filter is empty and fully zero-initialized.
        assert len(self.dlt_filter.apid) == DLT_FILTER_MAX
        assert len(self.dlt_filter.ctid) == DLT_FILTER_MAX
        assert self.dlt_filter.counter == 0

        for id_array in (self.dlt_filter.apid, self.dlt_filter.ctid):
            for slot in id_array:
                assert self._raw(slot) == b"\0\0\0\0"

    def test_add0(self):
        # Short ids are NUL-padded up to DLT_ID_SIZE.
        assert self.dlt_filter.add("AAA", "BBB") == 0
        assert self.dlt_filter.counter == 1
        assert len(self.dlt_filter.apid[0]) == 4
        assert len(self.dlt_filter.ctid[0]) == 4
        assert self._raw(self.dlt_filter.apid[0]) == b"AAA\0"
        assert self._raw(self.dlt_filter.ctid[0]) == b"BBB\0"

    def test_add1(self):
        assert self.dlt_filter.add("AAA", "BBB") == 0
        assert self.dlt_filter.add("XXX", "YYY") == 0
        assert self.dlt_filter.counter == 2
        expected_pairs = [(b"AAA\0", b"BBB\0"), (b"XXX\0", b"YYY\0")]
        for index, (apid, ctid) in enumerate(expected_pairs):
            assert self._raw(self.dlt_filter.apid[index]) == apid
            assert self._raw(self.dlt_filter.ctid[index]) == ctid

    def test_add2(self):
        # Ids of exactly DLT_ID_SIZE characters are stored without a trailing NUL.
        assert self.dlt_filter.add("AAAA", "BBBB") == 0
        assert self.dlt_filter.add("XXX", "YYY") == 0
        assert self.dlt_filter.add("CCCC", "DDDD") == 0
        assert self.dlt_filter.counter == 3
        expected_pairs = [(b"AAAA", b"BBBB"), (b"XXX\0", b"YYY\0"), (b"CCCC", b"DDDD")]
        for index, (apid, ctid) in enumerate(expected_pairs):
            assert self._raw(self.dlt_filter.apid[index]) == apid
            assert self._raw(self.dlt_filter.ctid[index]) == ctid

    def test_repr(self):
        # str() of the filter renders the stored (apid, ctid) pairs as a list.
        assert self.dlt_filter.add("AAAA", "BBBB") == 0
        assert self.dlt_filter.add("XXX", "YYY") == 0
        assert self.dlt_filter.add("CCCC", "DDDD") == 0
        print(self.dlt_filter)
        assert str(self.dlt_filter) == str([(b"AAAA", b"BBBB"), (b"XXX", b"YYY"), (b"CCCC", b"DDDD")])
64 |
--------------------------------------------------------------------------------
/dlt/core/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2017. BMW Car IT GmbH. All rights reserved.
2 | """Basic ctypes binding to the DLT library"""
3 | import ctypes
4 | import os
5 |
6 | from dlt.core.core_base import * # noqa: F403
7 |
8 |
API_VER = None


def get_version(loaded_lib):
    """Return the API version string of the loaded libdlt.so library.

    The result is cached in the module-level ``API_VER`` variable, so the
    shared library is only queried once per process.
    """
    global API_VER  # pylint: disable=global-statement
    if API_VER is not None:
        return API_VER

    version_buffer = ctypes.create_string_buffer(255)
    loaded_lib.dlt_get_version(ctypes.byref(version_buffer), 255)
    # The buffer content looks like:
    # DLT Package Version: X.XX.X STABLE, Package Revision: vX.XX.XX build on Jul XX XXXX XX:XX:XX
    # -SYSTEMD -SYSTEMD_WATCHDOG -TEST -SHM
    # The fourth whitespace-separated token is the version number itself.
    API_VER = version_buffer.value.decode().split()[3]
    return API_VER
26 |
27 |
def get_api_specific_file(version):
    """Return specific version api filename, if not found fallback to first major version release.

    :param str version: dotted libdlt API version, e.g. "2.18.8"
    :returns: the basename of the matching ``core_XXXX.py`` module
    :raises ImportError: if neither the exact version nor the ``.0``
        fallback module file exists in this package directory
    """
    version_tuple = [int(num) for num in version.split(".")]
    package_dir = os.path.dirname(os.path.abspath(__file__))
    name = "core_{}.py".format("".join(str(num) for num in version_tuple))
    if os.path.exists(os.path.join(package_dir, name)):
        return name

    # The exact version does not exist; fall back to the ".0" release of the
    # same major/minor version (e.g. 2.18.7 -> core_2180.py).
    if version_tuple[-1] != 0:
        version_tuple[-1] = 0
        name = "core_{}.py".format("".join(str(num) for num in version_tuple))
        if os.path.exists(os.path.join(package_dir, name)):
            return name

    # Previously a missing file was only reported when the patch version was
    # non-zero; now a missing module always raises instead of silently
    # returning a filename that does not exist.
    raise ImportError("No module file: {}".format(name))
43 |
44 |
def check_libdlt_version(api_ver):
    """Check the version compatibility.

    python-dlt now only supports to run libdlt 2.18.5 or above.

    :param str api_ver: dotted version string reported by libdlt, e.g. "2.18.8"
    :raises ImportError: if the loaded libdlt is older than v2.18.5
    """
    ver_info = tuple(int(num) for num in api_ver.split("."))
    if ver_info < (2, 18, 5):
        # The message was previously built with a backslash continuation
        # inside the string literal, which embedded a newline and a run of
        # indentation spaces into the raised error; use implicit
        # concatenation for a clean single-line message instead.
        raise ImportError(
            "python-dlt only supports libdlt "
            "v2.18.5 (33fbad18c814e13bd7ba2053525d8959fee437d1) or above"
        )
56 |
57 |
# Query the loaded library for its version (dltlib is provided by the
# star import from dlt.core.core_base above) and refuse unsupported versions.
API_VER = get_version(dltlib)  # noqa: F405
check_libdlt_version(API_VER)

# Load version specific definitions, if such a file exists, possibly
# overriding above definitions
#
# The intent is to have version specific implementations to be able to
# provide declarations *incrementally*.
#
# For instance if version 2.17.0 introduces new changes in addition to
# retaining all changes from 2.16.0, then core_2170.py would import
# core_2160.py and declare only version specific changes/overrides. The
# loading logic here below should not require changes.
#
# This allows the implementation below to import just one final module
# (as opposed to loading multiple implementations in a specific order)
# to provide new/overriding implementations.
api_specific_file = get_api_specific_file(API_VER)
# Strip the ".py" suffix to obtain the module name, import it, and re-export
# all of its names at this package's top level (at module scope, locals() is
# the module dict, so update() effectively rebinds the package globals).
overrides = __import__("dlt.core.{}".format(api_specific_file[:-3]), globals(), locals(), ["*"])
locals().update(overrides.__dict__)
78 |
--------------------------------------------------------------------------------
/tests/dlt_main_loop_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for the py_dlt_client_main_loop function"""
3 | import ctypes
4 | import functools
5 | from io import BytesIO as StringIO
6 | import socket
7 | import unittest
8 | from unittest.mock import patch, Mock
9 |
10 | from dlt.dlt import py_dlt_client_main_loop, DLTClient, logger
11 | from dlt.core import cDltStorageHeader
12 | from tests.utils import stream_one
13 |
14 |
def mock_dlt_receiver_receive_socket(client_receiver, partial=False, Fail=False):
    """Stand-in for dltlib.dlt_receiver_receive that serves bytes from stream_one.

    :param client_receiver: ctypes byref() wrapper around a cDltReceiver
    :param bool partial: if True, deliver only the first 16 bytes (a truncated message)
    :param bool Fail: if True, simulate a failed receive by returning 0
    :returns: number of bytes placed into the receiver buffer
    """
    if Fail:
        return 0

    stream_one.seek(0)
    payload = stream_one.read()
    if partial:
        payload = payload[:16]

    receiver = client_receiver._obj
    receiver.buf = ctypes.create_string_buffer(payload)
    receiver.bytesRcvd = len(payload)
    return len(payload)
26 |
27 |
class TestMainLoop(unittest.TestCase):
    """Tests for py_dlt_client_main_loop driven through a mocked socket.

    The DLTClient is never really connected: its ``_connected_socket`` is a
    Mock, and ``dltlib.dlt_receiver_receive`` is replaced where needed so the
    loop sees canned data from ``tests.utils.stream_one``.
    """

    def setUp(self):
        self.client = DLTClient()
        # Bypass a real connection; recv() behavior is patched per test.
        self.client._connected_socket = Mock()

    def test_target_down(self):
        """A socket timeout is treated as the target closing the connection."""
        with patch.object(self.client._connected_socket, "recv", side_effect=socket.timeout):
            callback = Mock(return_value="should not be called")

            with self.assertLogs(logger=logger) as dlt_logger:
                return_value = py_dlt_client_main_loop(self.client, callback=callback)
                self.assertFalse(return_value)

            log_output = dlt_logger.output
            self.assertEqual(len(log_output), 1)
            self.assertEqual(log_output[0], "ERROR:dlt.dlt:[]: DLTLib closed connected socket")

            # the loop must bail out before ever dispatching a message
            self.assertFalse(callback.called)

    def test_target_up_nothing_to_read(self):
        """An empty recv() ends the loop after a single read attempt."""
        with patch.object(self.client._connected_socket, "recv", return_value=b"") as mock_recv:
            callback = Mock(return_value="should not be called")
            self.assertFalse(py_dlt_client_main_loop(self.client, callback=callback))
            self.assertEqual(mock_recv.call_count, 1)
            self.assertFalse(callback.called)

    @patch("dlt.dlt.dltlib.dlt_receiver_move_to_begin", return_value=0)
    def test_exit_if_callback_returns_false(self, *ignored):
        """The loop stops as soon as the user callback returns False."""
        with patch.object(self.client._connected_socket, "recv", return_value=b"X"):
            # setup dlt_receiver_receive to return a partial message
            replacement = functools.partial(mock_dlt_receiver_receive_socket, partial=True)
            with patch("dlt.dlt.dltlib.dlt_receiver_receive", new=replacement):
                self.assertFalse(py_dlt_client_main_loop(self.client, callback=lambda msg: False))

    # NOTE(review): *ignored is unused here (there is no patch decorator on
    # this test) — presumably a leftover from a removed decorator; verify.
    def test_read_message(self, *ignored):
        """A complete message is dispatched to the callback and written to dumpfile."""
        dumpfile = StringIO()

        stream_one.seek(0)
        expected = stream_one.read()

        with patch.object(self.client._connected_socket, "recv", return_value=b"X"):
            # setup dlt_receiver_receive to return a complete message
            replacement = functools.partial(mock_dlt_receiver_receive_socket)
            # process the first message, then ask the loop to stop
            callback = Mock(side_effect=[True, False, False])
            with patch("dlt.dlt.dltlib.dlt_receiver_receive", new=replacement):
                self.assertTrue(py_dlt_client_main_loop(self.client, dumpfile=dumpfile, callback=callback))
                # the dump is prefixed with a storage header; strip it before comparing
                self.assertEqual(dumpfile.getvalue()[ctypes.sizeof(cDltStorageHeader) :], expected)
75 |
--------------------------------------------------------------------------------
/tests/dlt_main_loop_with_dlt_client_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for the py_dlt_client_main_loop function"""
3 | import ctypes
4 | import functools
5 | from io import BytesIO as StringIO
6 | import socket
7 | import unittest
8 | from unittest.mock import patch, Mock
9 |
10 | from dlt.dlt import py_dlt_client_main_loop, DLTClient, logger
11 | from dlt.core import cDltStorageHeader
12 | from tests.utils import stream_one
13 |
14 |
def mock_dlt_receiver_receive_socket(client_receiver, partial=False, Fail=False):
    """Replacement for dltlib.dlt_receiver_receive feeding data from stream_one.

    Copies the canned message bytes into the receiver structure that
    ``client_receiver`` (a ctypes byref wrapper) points at.

    :param bool partial: deliver only the first 16 bytes to simulate a
        truncated message
    :param bool Fail: return 0 immediately to simulate a receive failure
    :returns: the number of bytes made available to the receiver
    """
    if Fail:
        return 0

    stream_one.seek(0)
    data = stream_one.read()
    if partial:
        data = data[:16]

    client_receiver._obj.buf = ctypes.create_string_buffer(data)
    client_receiver._obj.bytesRcvd = len(data)
    return len(data)
26 |
27 |
class TestMainLoopWithDltClient(unittest.TestCase):
    """Tests for py_dlt_client_main_loop with a mocked DLTClient socket.

    NOTE(review): this file mirrors tests/dlt_main_loop_unit_test.py almost
    line-for-line; consider consolidating the shared helpers/fixtures.
    """

    def setUp(self):
        self.client = DLTClient()
        # Bypass a real connection; recv() behavior is patched per test.
        self.client._connected_socket = Mock()

    def test_target_down(self):
        """A socket timeout is treated as the target closing the connection."""
        with patch.object(self.client._connected_socket, "recv", side_effect=socket.timeout):
            callback = Mock(return_value="should not be called")

            with self.assertLogs(logger=logger) as dlt_logger:
                return_value = py_dlt_client_main_loop(self.client, callback=callback)
                self.assertFalse(return_value)

            log_output = dlt_logger.output
            self.assertEqual(len(log_output), 1)
            self.assertEqual(log_output[0], "ERROR:dlt.dlt:[]: DLTLib closed connected socket")

            # the loop must bail out before ever dispatching a message
            self.assertFalse(callback.called)

    def test_target_up_nothing_to_read(self):
        """An empty recv() ends the loop after a single read attempt."""
        with patch.object(self.client._connected_socket, "recv", return_value=b"") as mock_recv:
            callback = Mock(return_value="should not be called")
            self.assertFalse(py_dlt_client_main_loop(self.client, callback=callback))
            self.assertEqual(mock_recv.call_count, 1)
            self.assertFalse(callback.called)

    @patch("dlt.dlt.dltlib.dlt_receiver_move_to_begin", return_value=0)
    def test_exit_if_callback_returns_false(self, *ignored):
        """The loop stops as soon as the user callback returns False."""
        with patch.object(self.client._connected_socket, "recv", return_value=b"X"):
            # setup dlt_receiver_receive to return a partial message
            replacement = functools.partial(mock_dlt_receiver_receive_socket, partial=True)
            with patch("dlt.dlt.dltlib.dlt_receiver_receive", new=replacement):
                self.assertFalse(py_dlt_client_main_loop(self.client, callback=lambda msg: False))

    # NOTE(review): *ignored is unused here (there is no patch decorator on
    # this test) — presumably a leftover from a removed decorator; verify.
    def test_read_message(self, *ignored):
        """A complete message is dispatched to the callback and written to dumpfile."""
        dumpfile = StringIO()

        stream_one.seek(0)
        expected = stream_one.read()

        with patch.object(self.client._connected_socket, "recv", return_value=b"X"):
            # setup dlt_receiver_receive to return a complete message
            replacement = functools.partial(mock_dlt_receiver_receive_socket)
            # process the first message, then ask the loop to stop
            callback = Mock(side_effect=[True, False, False])
            with patch("dlt.dlt.dltlib.dlt_receiver_receive", new=replacement):
                self.assertTrue(py_dlt_client_main_loop(self.client, dumpfile=dumpfile, callback=callback))
                # the dump is prefixed with a storage header; strip it before comparing
                self.assertEqual(dumpfile.getvalue()[ctypes.sizeof(cDltStorageHeader) :], expected)
75 |
--------------------------------------------------------------------------------
/tests/dlt_core_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2017. BMW Car IT GmbH. All rights reserved.
2 | """Basic size tests for ctype wrapper definitions, to protect against regressions"""
3 | import ctypes
4 | import importlib
5 | import os
6 | import unittest
7 | from unittest.mock import patch, MagicMock
8 |
9 | import dlt
10 |
11 |
class TestCoreStructures(unittest.TestCase):
    """Regression tests pinning the byte size of every exported ctypes wrapper."""

    def setUp(self):
        # Expected ctypes.sizeof() for each structure exported by dlt.core.
        self.size_map = {
            "cDltServiceConnectionInfo": 10,
            "cDltStorageHeader": 16,
            "cDltStandardHeader": 4,
            "cDltStandardHeaderExtra": 12,
            "cDltExtendedHeader": 10,
            "cDLTMessage": 120,
            "cDltReceiver": 72,
            "cDltClient": 144,
            "cDLTFilter": 604,
        }

    def test_sizeof(self):
        importlib.import_module("dlt.core")

        for struct_name, expected_size in self.size_map.items():
            actual_size = ctypes.sizeof(getattr(dlt.core, struct_name))
            message = "v{0}, sizeof {1}: {2} != {3}".format(
                dlt.core.get_version(dlt.core.dltlib), struct_name, actual_size, expected_size
            )
            self.assertEqual(actual_size, expected_size, msg=message)
38 |
39 |
class TestImportSpecificVersion(unittest.TestCase):
    """Tests for libdlt version detection and version-specific module lookup."""

    def setUp(self):
        self.original_api_version = dlt.core.API_VER
        self.version_answer = b"2.18.5"
        self.version_str = (
            b"DLT Package Version: 2.18.5 STABLE, Package Revision: v2.18.5_5_g33fbad1, "
            b"build on Sep 2 2020 11:55:50\n-SYSTEMD -SYSTEMD_WATCHDOG -TEST -SHM\n"
        )
        self.version_filename = "core_2185.py"
        self.version_truncate_str = "2.18.5"
        self.version_truncate_filename = "core_2180.py"

        # Clear the cache so get_version() queries the (mocked) library again.
        dlt.core.API_VER = None

    def tearDown(self):
        # Restore the cached version so other tests see the real value.
        dlt.core.API_VER = self.original_api_version

    def test_get_version(self):
        def fake_dlt_get_version(buf, buf_size):
            source = ctypes.create_string_buffer(self.version_str)
            ctypes.memmove(buf, source, len(source))

        mock_loaded_lib = MagicMock()
        mock_loaded_lib.dlt_get_version = MagicMock(side_effect=fake_dlt_get_version)

        api_version = dlt.core.get_version(mock_loaded_lib)

        self.assertEqual(mock_loaded_lib.dlt_get_version.call_count, 1)
        self.assertEqual(api_version, self.version_answer.decode())
        self.assertEqual(dlt.core.API_VER, self.version_answer.decode())

    def test_get_api_specific_file(self):
        with patch.object(os.path, "exists", return_value=True):
            self.assertEqual(
                dlt.core.get_api_specific_file(self.version_answer.decode()),
                self.version_filename,
            )

    def test_get_api_specific_file_not_found(self):
        with patch.object(os.path, "exists", side_effect=[False, False]):
            with self.assertRaises(ImportError) as err_cm:
                dlt.core.get_api_specific_file(self.version_answer.decode())

        self.assertEqual(str(err_cm.exception), "No module file: {}".format(self.version_truncate_filename))

    def test_get_api_specific_file_truncate_minor_version(self):
        # 2.18.5 is missing, so the lookup falls back to the 2.18.0 module.
        with patch.object(os.path, "exists", side_effect=[False, True]):
            self.assertEqual(
                dlt.core.get_api_specific_file(self.version_truncate_str),
                self.version_truncate_filename,
            )
87 |
--------------------------------------------------------------------------------
/tests/dlt_message_handler_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | import os
3 | import time
4 | import unittest
5 | from queue import Empty
6 | from multiprocessing import Event, Queue
7 |
8 | from dlt.dlt_broker_handlers import DLTMessageHandler
9 | from tests.utils import create_messages, stream_multiple
10 |
11 |
class TestDLTMessageHandler(unittest.TestCase):
    """Tests for DLTMessageHandler filter management and message distribution.

    The handler process is only actually started in test_run_basic; all the
    other tests call handle() synchronously in this process.
    """

    def setUp(self):
        self.filter_queue = Queue()
        self.message_queue = Queue()
        # Minimal client configuration; no real connection is made in these tests.
        self.client_cfg = {
            "ip_address": b"127.0.0.1",
            "filename": b"/dev/null",
            "verbose": 0,
            "port": "1234",
        }
        self.stop_event = Event()
        self.handler = DLTMessageHandler(self.filter_queue, self.message_queue, self.stop_event, self.client_cfg)

    def test_init(self):
        """A fresh handler is not running and both queues start empty."""
        self.assertFalse(self.handler.mp_stop_flag.is_set())
        self.assertFalse(self.handler.is_alive())
        self.assertTrue(self.handler.filter_queue.empty())
        self.assertTrue(self.handler.message_queue.empty())

    def test_run_basic(self):
        """The handler runs as a separate process and stops via the stop event."""
        self.assertFalse(self.handler.is_alive())
        self.handler.start()
        self.assertTrue(self.handler.is_alive())
        # must run in a child process, not in the test process
        self.assertNotEqual(self.handler.pid, os.getpid())
        self.stop_event.set()
        self.handler.join()
        self.assertFalse(self.handler.is_alive())

    def test_handle_add_new_filter(self):
        """Adding a filter registers the queue id under its (apid, ctid) key."""
        self.handler.filter_queue.put(("queue_id", [("SYS", "JOUR")], True))
        # give the multiprocessing queue a moment to make the item visible
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id"])

    def test_handle_remove_filter_single_entry(self):
        """Removing the only subscriber drops the (apid, ctid) key entirely."""
        self.handler.filter_queue.put(("queue_id", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id"])

        # third tuple element False means "remove this subscription"
        self.handler.filter_queue.put(("queue_id", [("SYS", "JOUR")], False))
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertNotIn(("SYS", "JOUR"), self.handler.context_map)

    def test_handle_remove_filter_multiple_entries(self):
        """Removing one of several subscribers keeps the key with the rest."""
        self.handler.filter_queue.put(("queue_id1", [("SYS", "JOUR")], True))
        self.handler.filter_queue.put(("queue_id2", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id1", "queue_id2"])

        self.handler.filter_queue.put(("queue_id1", [("SYS", "JOUR")], False))
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id2"])

    def test_handle_multiple_similar_filters(self):
        """Two subscribers for the same (apid, ctid) share one context entry."""
        self.handler.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.handler.filter_queue.put(("queue_id1", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id0", "queue_id1"])

    def test_handle_multiple_different_filters(self):
        """Different (apid, ctid) filters get independent context entries."""
        self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True))
        time.sleep(0.01)
        self.handler.handle(None)
        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertIn(("DA1", "DC1"), self.handler.context_map)
        self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id0"])
        self.assertEqual(self.handler.context_map[("DA1", "DC1")], ["queue_id1"])

    def test_handle_message_tag_and_distribute(self):
        """Messages are fanned out to every queue whose filter matches.

        None acts as a wildcard for apid and/or ctid.
        """
        self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True))
        self.filter_queue.put(("queue_id2", [("SYS", None)], True))
        self.filter_queue.put(("queue_id3", [(None, "DC1")], True))
        self.filter_queue.put(("queue_id4", [(None, None)], True))
        time.sleep(0.01)

        # - simulate receiving of messages
        for _ in range(10):
            for message in create_messages(stream_multiple, from_file=True):
                self.handler.handle(message)

        self.assertIn(("SYS", "JOUR"), self.handler.context_map)
        self.assertIn(("DA1", "DC1"), self.handler.context_map)
        self.assertIn((None, None), self.handler.context_map)
        self.assertIn(("SYS", None), self.handler.context_map)
        self.assertIn((None, "DC1"), self.handler.context_map)
        try:
            # 60 == 10 messages of each for SYS, JOUR and None combinations +
            # 10 for (None,None)
            messages = [self.message_queue.get(timeout=0.01) for _ in range(60)]

            # these queues should not get any messages from other queues
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id0"]), 10)
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id1"]), 10)
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id2"]), 10)
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id3"]), 10)
            # this queue should get all messages
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id4"]), 20)
        except Empty:
            # - we should not get an Empty for at least 40 messages
            self.fail()
124 |
--------------------------------------------------------------------------------
/dlt/core/core_2188.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2022. BMW CTW PT. All rights reserved.
2 | """v2.18.8 specific class definitions"""
import ctypes
import logging

from dlt.core.core_base import dltlib

# DltClientMode from dlt_client.h
DLT_CLIENT_MODE_UNDEFINED = -1
DLT_CLIENT_MODE_TCP = 0
DLT_CLIENT_MODE_SERIAL = 1
DLT_CLIENT_MODE_UNIX = 2
DLT_CLIENT_MODE_UDP_MULTICAST = 3

# DltReceiverType from dlt_common.h
DLT_RECEIVE_SOCKET = 0
DLT_RECEIVE_UDP_SOCKET = 1
DLT_RECEIVE_FD = 2
# Length in bytes of DLT identifiers (application id / context id)
DLT_ID_SIZE = 4
DLT_FILTER_MAX = 30  # Maximum number of filters
# Generic error return value used by libdlt functions
DLT_RETURN_ERROR = -1

# Return value for DLTFilter.add() - exceeded maximum number of filters
MAX_FILTER_REACHED = 1
# Return value for DLTFilter.add() - specified filter already exists
REPEATED_FILTER = 2

logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
29 |
30 |
class sockaddr_in(ctypes.Structure):  # pylint: disable=invalid-name
    """Auxiliary definition for cDltReceiver. Defined in netinet/in.h header"""

    _fields_ = [
        ("sa_family", ctypes.c_ushort),  # sin_family
        ("sin_port", ctypes.c_ushort),
        ("sin_addr", ctypes.c_byte * 4),
        ("__pad", ctypes.c_byte * 8),  # padding so the struct totals 16 bytes
    ]  # struct sockaddr_in is 16
40 |
41 |
class cDltReceiver(ctypes.Structure):  # pylint: disable=invalid-name
    """The structure is used to organise the receiving of data including buffer handling.
    This structure is used by the corresponding functions.

    typedef struct
    {
        int32_t lastBytesRcvd;    /**< bytes received in last receive call */
        int32_t bytesRcvd;        /**< received bytes */
        int32_t totalBytesRcvd;   /**< total number of received bytes */
        char *buffer;             /**< pointer to receiver buffer */
        char *buf;                /**< pointer to position within receiver buffer */
        char *backup_buf;         /** pointer to the buffer with partial messages if any **/
        int fd;                   /**< connection handle */
        DltReceiverType type;     /**< type of connection handle */
        int32_t buffersize;       /**< size of receiver buffer */
        struct sockaddr_in addr;  /**< socket address information */
    } DltReceiver;
    """

    _fields_ = [
        ("lastBytesRcvd", ctypes.c_int32),  # bytes received in last receive call
        ("bytesRcvd", ctypes.c_int32),  # received bytes
        ("totalBytesRcvd", ctypes.c_int32),  # total number of received bytes
        ("buffer", ctypes.POINTER(ctypes.c_char)),  # receiver buffer
        ("buf", ctypes.POINTER(ctypes.c_char)),  # current position within the buffer
        ("backup_buf", ctypes.POINTER(ctypes.c_char)),  # partial messages, if any
        ("fd", ctypes.c_int),  # connection handle
        ("type", ctypes.c_int),  # DltReceiverType of the connection handle
        ("buffersize", ctypes.c_int32),  # size of receiver buffer
        ("addr", sockaddr_in),  # socket address information
    ]
73 |
74 |
class cDltClient(ctypes.Structure):  # pylint: disable=invalid-name
    """
    typedef struct
    {
        DltReceiver receiver;      /**< receiver pointer to dlt receiver structure */
        int sock;                  /**< sock Connection handle/socket */
        char *servIP;              /**< servIP IP adress/Hostname of TCP/IP interface */
        char *hostip;              /**< IP multicast address of group */
        int port;                  /**< Port for TCP connections (optional) */
        char *serialDevice;        /**< serialDevice Devicename of serial device */
        char *socketPath;          /**< socketPath Unix socket path */
        char ecuid[4];             /**< ECUiD */
        speed_t baudrate;          /**< baudrate Baudrate of serial interface, as speed_t */
        DltClientMode mode;        /**< mode DltClientMode */
        int send_serial_header;    /**< (Boolean) Send DLT messages with serial header */
        int resync_serial_header;  /**< (Boolean) Resync to serial header on all connection */
    } DltClient;
    """

    _fields_ = [
        ("receiver", cDltReceiver),  # embedded receiver structure
        ("sock", ctypes.c_int),  # connection handle/socket
        ("servIP", ctypes.c_char_p),  # IP address/hostname of TCP/IP interface
        ("hostip", ctypes.c_char_p),  # IP multicast address of group
        ("port", ctypes.c_int),  # port for TCP connections (optional)
        ("serialDevice", ctypes.c_char_p),  # device name of serial device
        ("socketPath", ctypes.c_char_p),  # Unix socket path
        ("ecuid", ctypes.c_char * 4),  # ECU id
        ("baudrate", ctypes.c_uint),  # speed_t mapped as unsigned int
        ("mode", ctypes.c_int),  # DltClientMode
        ("send_serial_header", ctypes.c_int),  # boolean flag
        ("resync_serial_header", ctypes.c_int),  # boolean flag
    ]
108 |
109 |
class cDLTFilter(ctypes.Structure):  # pylint: disable=invalid-name
    """
    typedef struct
    {
        char apid[DLT_FILTER_MAX][DLT_ID_SIZE];  /**< application id */
        char ctid[DLT_FILTER_MAX][DLT_ID_SIZE];  /**< context id */
        int log_level[DLT_FILTER_MAX];           /**< log level */
        int32_t payload_max[DLT_FILTER_MAX];     /**< upper border for payload */
        int32_t payload_min[DLT_FILTER_MAX];     /**< lower border for payload */
        int counter;                             /**< number of filters */
    } DltFilter;
    """

    _fields_ = [
        ("apid", (ctypes.c_char * DLT_ID_SIZE) * DLT_FILTER_MAX),  # application ids
        ("ctid", (ctypes.c_char * DLT_ID_SIZE) * DLT_FILTER_MAX),  # context ids
        ("log_level", ctypes.c_int * DLT_FILTER_MAX),  # log level per filter
        ("payload_max", (ctypes.c_int32 * DLT_FILTER_MAX)),  # upper payload border
        ("payload_min", (ctypes.c_int32 * DLT_FILTER_MAX)),  # lower payload border
        ("counter", ctypes.c_int),  # number of filters in use
    ]

    # pylint: disable=too-many-arguments
    def add(self, apid, ctid, log_level=0, payload_min=0, payload_max=ctypes.c_uint32(-1).value // 2):
        """Add new filter pair.

        :param apid: application id (str or bytes)
        :param ctid: context id (str or bytes)
        :returns: 0 on success, MAX_FILTER_REACHED if DLT_FILTER_MAX filters
            already exist, REPEATED_FILTER if the pair is already registered

        NOTE(review): uses ``self.verbose``, which is not in ``_fields_`` —
        presumably set by the Python-level DLTFilter subclass; verify.
        """
        if isinstance(apid, str):
            apid = bytes(apid, "ascii")
        if isinstance(ctid, str):
            ctid = bytes(ctid, "ascii")
        if (
            dltlib.dlt_filter_add(
                ctypes.byref(self), apid or b"", ctid or b"", log_level, payload_min, payload_max, self.verbose
            )
            == DLT_RETURN_ERROR
        ):
            # libdlt reports a generic error; distinguish the two known causes.
            if self.counter >= DLT_FILTER_MAX:
                logger.error("Maximum number (%d) of allowed filters reached, ignoring filter!\n", DLT_FILTER_MAX)
                return MAX_FILTER_REACHED
            logger.debug("Filter ('%s', '%s') already exists", apid, ctid)
            return REPEATED_FILTER
        return 0
151 |
--------------------------------------------------------------------------------
/dlt/core/core_21810.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2022. BMW CTW PT. All rights reserved.
"""v2.18.10 specific class definitions"""
import ctypes
import logging

from dlt.core.core_base import dltlib

# DltClientMode from dlt_client.h
DLT_CLIENT_MODE_UNDEFINED = -1
DLT_CLIENT_MODE_TCP = 0
DLT_CLIENT_MODE_SERIAL = 1
DLT_CLIENT_MODE_UNIX = 2
DLT_CLIENT_MODE_UDP_MULTICAST = 3

# DltReceiverType from dlt_common.h
DLT_RECEIVE_SOCKET = 0
DLT_RECEIVE_UDP_SOCKET = 1
DLT_RECEIVE_FD = 2
# Length in bytes of DLT identifiers (application id / context id)
DLT_ID_SIZE = 4
DLT_FILTER_MAX = 30  # Maximum number of filters
# Generic error return value used by libdlt functions
DLT_RETURN_ERROR = -1

# Return value for DLTFilter.add() - exceeded maximum number of filters
MAX_FILTER_REACHED = 1
# Return value for DLTFilter.add() - specified filter already exists
REPEATED_FILTER = 2

logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
29 |
30 |
class sockaddr_in(ctypes.Structure):  # pylint: disable=invalid-name
    """Auxiliary definition for cDltReceiver. Defined in netinet/in.h header"""

    _fields_ = [
        ("sa_family", ctypes.c_ushort),  # sin_family
        ("sin_port", ctypes.c_ushort),
        ("sin_addr", ctypes.c_byte * 4),
        ("__pad", ctypes.c_byte * 8),  # padding so the struct totals 16 bytes
    ]  # struct sockaddr_in is 16
40 |
41 |
class cDltReceiver(ctypes.Structure):  # pylint: disable=invalid-name
    """The structure is used to organise the receiving of data including buffer handling.
    This structure is used by the corresponding functions.

    typedef struct
    {
        int32_t lastBytesRcvd;    /**< bytes received in last receive call */
        int32_t bytesRcvd;        /**< received bytes */
        int32_t totalBytesRcvd;   /**< total number of received bytes */
        char *buffer;             /**< pointer to receiver buffer */
        char *buf;                /**< pointer to position within receiver buffer */
        char *backup_buf;         /** pointer to the buffer with partial messages if any **/
        int fd;                   /**< connection handle */
        DltReceiverType type;     /**< type of connection handle */
        int32_t buffersize;       /**< size of receiver buffer */
        struct sockaddr_in addr;  /**< socket address information */
    } DltReceiver;
    """

    _fields_ = [
        ("lastBytesRcvd", ctypes.c_int32),  # bytes received in last receive call
        ("bytesRcvd", ctypes.c_int32),  # received bytes
        ("totalBytesRcvd", ctypes.c_int32),  # total number of received bytes
        ("buffer", ctypes.POINTER(ctypes.c_char)),  # receiver buffer
        ("buf", ctypes.POINTER(ctypes.c_char)),  # current position within the buffer
        ("backup_buf", ctypes.POINTER(ctypes.c_char)),  # partial messages, if any
        ("fd", ctypes.c_int),  # connection handle
        ("type", ctypes.c_int),  # DltReceiverType of the connection handle
        ("buffersize", ctypes.c_int32),  # size of receiver buffer
        ("addr", sockaddr_in),  # socket address information
    ]
73 |
74 |
class cDltClient(ctypes.Structure):  # pylint: disable=invalid-name
    """
    typedef struct
    {
        DltReceiver receiver;      /**< receiver pointer to dlt receiver structure */
        int sock;                  /**< sock Connection handle/socket */
        char *servIP;              /**< servIP IP adress/Hostname of TCP/IP interface */
        char *hostip;              /**< IP multicast address of group */
        int port;                  /**< Port for TCP connections (optional) */
        char *serialDevice;        /**< serialDevice Devicename of serial device */
        char *socketPath;          /**< socketPath Unix socket path */
        char ecuid[4];             /**< ECUiD */
        speed_t baudrate;          /**< baudrate Baudrate of serial interface, as speed_t */
        DltClientMode mode;        /**< mode DltClientMode */
        int send_serial_header;    /**< (Boolean) Send DLT messages with serial header */
        int resync_serial_header;  /**< (Boolean) Resync to serial header on all connection */
    } DltClient;
    """

    _fields_ = [
        ("receiver", cDltReceiver),  # embedded receiver structure
        ("sock", ctypes.c_int),  # connection handle/socket
        ("servIP", ctypes.c_char_p),  # IP address/hostname of TCP/IP interface
        ("hostip", ctypes.c_char_p),  # IP multicast address of group
        ("port", ctypes.c_int),  # port for TCP connections (optional)
        ("serialDevice", ctypes.c_char_p),  # device name of serial device
        ("socketPath", ctypes.c_char_p),  # Unix socket path
        ("ecuid", ctypes.c_char * 4),  # ECU id
        ("baudrate", ctypes.c_uint),  # speed_t mapped as unsigned int
        ("mode", ctypes.c_int),  # DltClientMode
        ("send_serial_header", ctypes.c_int),  # boolean flag
        ("resync_serial_header", ctypes.c_int),  # boolean flag
    ]
108 |
109 |
class cDLTFilter(ctypes.Structure):  # pylint: disable=invalid-name
    """
    typedef struct
    {
        char apid[DLT_FILTER_MAX][DLT_ID_SIZE];  /**< application id */
        char ctid[DLT_FILTER_MAX][DLT_ID_SIZE];  /**< context id */
        int log_level[DLT_FILTER_MAX];           /**< log level */
        int32_t payload_max[DLT_FILTER_MAX];     /**< upper border for payload */
        int32_t payload_min[DLT_FILTER_MAX];     /**< lower border for payload */
        int counter;                             /**< number of filters */
    } DltFilter;
    """

    _fields_ = [
        ("apid", (ctypes.c_char * DLT_ID_SIZE) * DLT_FILTER_MAX),  # application ids
        ("ctid", (ctypes.c_char * DLT_ID_SIZE) * DLT_FILTER_MAX),  # context ids
        ("log_level", ctypes.c_int * DLT_FILTER_MAX),  # log level per filter
        ("payload_max", (ctypes.c_int32 * DLT_FILTER_MAX)),  # upper payload border
        ("payload_min", (ctypes.c_int32 * DLT_FILTER_MAX)),  # lower payload border
        ("counter", ctypes.c_int),  # number of filters in use
    ]

    # pylint: disable=too-many-arguments
    def add(self, apid, ctid, log_level=0, payload_min=0, payload_max=ctypes.c_uint32(-1).value // 2):
        """Add new filter pair.

        :param apid: application id (str or bytes)
        :param ctid: context id (str or bytes)
        :returns: 0 on success, MAX_FILTER_REACHED if DLT_FILTER_MAX filters
            already exist, REPEATED_FILTER if the pair is already registered

        NOTE(review): uses ``self.verbose``, which is not in ``_fields_`` —
        presumably set by the Python-level DLTFilter subclass; verify.
        """
        if isinstance(apid, str):
            apid = bytes(apid, "ascii")
        if isinstance(ctid, str):
            ctid = bytes(ctid, "ascii")
        if (
            dltlib.dlt_filter_add(
                ctypes.byref(self), apid or b"", ctid or b"", log_level, payload_min, payload_max, self.verbose
            )
            == DLT_RETURN_ERROR
        ):
            # libdlt reports a generic error; distinguish the two known causes.
            if self.counter >= DLT_FILTER_MAX:
                logger.error("Maximum number (%d) of allowed filters reached, ignoring filter!\n", DLT_FILTER_MAX)
                return MAX_FILTER_REACHED
            logger.debug("Filter ('%s', '%s') already exists", apid, ctid)
            return REPEATED_FILTER
        return 0
151 |
--------------------------------------------------------------------------------
/tests/dlt_main_loop_by_reading_dlt_file_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2023. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for the py_dlt_file_main_loop function"""
3 | import os
4 | import unittest
5 | import tempfile
6 | from threading import Thread
7 | import time
8 |
9 | from dlt.dlt import cDLTFile, py_dlt_file_main_loop
10 | from tests.utils import (
11 | append_stream_to_file,
12 | stream_multiple,
13 | stream_with_params,
14 | create_messages,
15 | append_message_to_file,
16 | )
17 |
18 |
class TestMainLoopByReadingDltFile(unittest.TestCase):
    """Unit tests for py_dlt_file_main_loop() reading a DLT file that is written at runtime."""

    def setUp(self):
        # Empty content dlt file is created. Close the descriptor right away:
        # only the path is used afterwards, and mkstemp() leaves the fd open
        # for the caller to manage (otherwise it leaks, one fd per test).
        fd, self.dlt_file_name = tempfile.mkstemp(suffix=b".dlt")
        os.close(fd)
        self.dlt_reader = cDLTFile(filename=self.dlt_file_name, is_live=True, iterate_unblock_mode=False)
        # message_queue to store the dispatched messages from main loop
        self.message_queue = []
        # When callback() is called, then it is reset to True
        self.callback_is_called = False
        # With this variable, we could test different return value from callback()
        # If callback() returns True, then main loop keeps going; otherwise, it breaks
        self.callback_return_value = True
        # Thread for main loop, which is instantiated in test case
        self.main_loop = None

    def _callback_for_message(self, message):
        """Callback handed to py_dlt_file_main_loop(); records every dispatched message."""
        self.callback_is_called = True
        print("Called here")
        if message:
            self.message_queue.append(message)
        return self.callback_return_value

    def _start_main_loop(self):
        """Run py_dlt_file_main_loop() in a background thread and give it time to spin up."""
        self.main_loop = Thread(
            target=py_dlt_file_main_loop,
            kwargs={"dlt_reader": self.dlt_reader, "callback": self._callback_for_message},
        )
        self.main_loop.start()
        time.sleep(1)

    def tearDown(self):
        if not self.dlt_reader.stop_reading_proc.is_set():
            self.dlt_reader.stop_reading_proc.set()
        # After the stop of dlt_reader, main loop should be stopped automatically;
        # wait up to 0.5s for the thread to exit before asserting it is gone
        if self.main_loop:
            self.main_loop.join(timeout=0.5)
            self.assertFalse(self.main_loop.is_alive())
        os.remove(self.dlt_file_name)

    def test_001_empty_dlt_file(self):
        """When dlt file has empty content, then no message could be dispatched, and no return value from main loop"""
        self._start_main_loop()
        time.sleep(0.1)
        # When file has empty content, callback() will not be called by any message
        self.assertFalse(self.callback_is_called)
        self.assertEqual(0, len(self.message_queue))

    def test_002_first_write_then_read_dlt_file(self):
        """
        Simulate a real dlt file case: first write to it, and then use main loop to read it
        """
        # First write to dlt file without opening main loop
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        time.sleep(0.1)
        # Expectation: py_dlt_file_main_loop reads out the first batch messages to message_queue
        self._start_main_loop()
        time.sleep(0.1)
        self.assertTrue(self.callback_is_called)
        self.assertEqual(2, len(self.message_queue))

    def test_003_first_read_then_write_dlt_file(self):
        """
        Simulate a real dlt file case: first open main loop to read, then write to the file at opening main loop
        """
        # First only main loop to read dlt file
        self._start_main_loop()
        # Then write to dlt file
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        time.sleep(0.1)
        # Expect the written logs could be dispatched by main loop
        self.assertTrue(self.callback_is_called)
        self.assertEqual(2, len(self.message_queue))

    def test_004_read_2_writes(self):
        """
        Test main loop reads from 2 consecutive writes to dlt file
        """
        # First only main loop to read dlt file
        self._start_main_loop()
        # First write to dlt file
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        time.sleep(0.1)
        # Expect main loop could dispatch 2 logs
        self.assertTrue(self.callback_is_called)
        self.assertEqual(2, len(self.message_queue))
        # Second write to dlt file, and expect to dispatch 3 logs
        append_stream_to_file(stream_with_params, self.dlt_file_name)
        time.sleep(0.1)
        self.assertEqual(3, len(self.message_queue))

    def test_005_callback_return_false(self):
        """
        If callback returns false, then main loop should exit
        """
        # Set callback return value to False
        self.callback_return_value = False
        # Write to file
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        time.sleep(0.1)
        # Open main loop to dispatch logs
        self._start_main_loop()
        # Expect the callback to have been invoked at least once
        self.assertTrue(self.callback_is_called)
        # Callback returns False after it handles the first message, which terminates main loop
        # So, main loop wont be able to proceed the second message
        self.assertEqual(1, len(self.message_queue))
        self.assertFalse(self.main_loop.is_alive())

    def test_006_read_empty_apid_ctid_message(self):
        """
        Simulate a case to read a apid==b"" and ctid==b"" message
        """
        # Construct a message with apid==b"" and ctid==b""
        message = create_messages(stream_with_params, from_file=True)[0]
        message.extendedheader.apid = b""
        message.extendedheader.ctid = b""
        # Write this message into dlt file
        append_message_to_file(message, self.dlt_file_name)
        # Expectation: py_dlt_file_main_loop reads out the first batch messages to message_queue
        self._start_main_loop()
        time.sleep(0.1)
        self.assertTrue(self.callback_is_called)
        self.assertEqual(1, len(self.message_queue))
        # Expectation: the received message should have apid==b"" and ctid==b""
        self.assertEqual("", self.message_queue[0].apid)
        self.assertEqual("", self.message_queue[0].ctid)
149 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # python-dlt
2 |
3 | python-dlt is a thin Python ctypes wrapper around libdlt functions. It was
4 | primarily created for use with BMW's test execution framework. However,
5 | the implementation is independent and the API makes few assumptions about
6 | the intended use.
7 |
8 | Note: This is only tested with libdlt version v2.18.8 and v2.18.10,
later versions might require adaptations. As of python-dlt v2.0, the package no
longer supports earlier libdlt versions. Also only GENIVI DLT daemon produced traces
11 | have been tested.
12 |
13 | ## Design
14 |
15 | The code is split up into 3 primary components:
16 |
17 | * The `core`: This subpackage provides the major chunk of ctypes wrappers for
18 | the structures defined in libdlt. It abstracts out the libdlt structures for use
19 | by the rest of python-dlt. Classes defined here ideally should *not* be used
20 | outside of python-dlt. The module `core_base.py` provides the default
21 | implementation of the classes and the other `core_*.py` modules provide the
22 | overrides for the version specific implementations of libdlt. The correct version
23 | specific implementation will be loaded automatically at runtime. (the logic for
24 | this is in `core/__init__.py`)
25 |
26 | * The python interface classes: These are defined in `dlt.py`. Most of the
27 | classes here derive from their corresponding ctypes class definitions from
28 | `core` and provide a more python friendly api/access to the underlying C/ctypes
29 | implementations. Ideally, python code using `python-dlt` would use these classes
30 | rather than the base classes in `core`.
31 |
32 | * API for tools: This is the component that provides common interfaces required
by the tools that use `python-dlt`, like the `DLTBroker`, `DLTLifecycle` etc. These
34 | classes do not have equivalents in libdlt and were created based on usage
35 | requirements (and as such make assumptions about the manner in which they would
36 | be used).
37 |
38 | If you're reading this document to work on the core or the python classes, it
39 | would be a good idea to first understand the design of libdlt itself. This is
fairly well documented (look under the `doc/` directory of the `dlt-daemon` code
41 | base). Of course the best reference is the code itself. `dlt-daemon` is written
42 | in C and is a pretty well laid out, straight forward (ie: not many layers of
43 | abstractions), small code base. Makes for good bedtime reading.
44 |
45 | The rest of this document will describe and demonstrate some of the design of
46 | the external API of python-dlt.
47 |
48 | The classes most relevant for users of python-dlt possibly are `DLTClient`,
49 | `DLTFile`, `DLTMessage`, `DLTBroker`. The names hopefully make their purpose
50 | evident.
51 |
52 | Here are examples of some interesting ways to use these classes:
53 |
54 | * DLTFile and DLTMessage::
55 |
56 | ```python
57 | >>> from dlt import dlt
58 | >>> # DLTFile object can be obtained by loading a trace file
59 | >>> d = dlt.load("high_full_trace.dlt")
60 | >>> d.generate_index() # Read the whole trace file and generate its index
61 | >>> print(d.counter_total) # number of DLT messages in the file
62 | ...
63 | >>> print(d[0]) # messages can be indexed
64 | ...
65 | >>> for msg in d: # DLTFile object is iterable
66 | ... print(msg.apid) # DLTMessage objects have all the attrs
67 | ... print(msg.payload_decoded) # one might expect from a DLT frame
68 | ... print(msg) # The str() of the DLTMessage closely matches the
69 | ... # output of dlt-receive
70 | >>> d[0] == d[-1] # DLTMessage objects can be compared to each other
>>> d.compare(dict(apid="SYS", ctid="JOUR")) # ...or can be compared to an
72 | ... # dict of attributes
73 | >>> import pickle
74 | >>> pickle.dumps(d[0]) # DLTMessage objects are (de)serializable using
75 | ... # the pickle protocol (this is to enable sharing
76 | ... # of the DLTMessage in a multiprocessing
77 | ... # environment)
78 | ```
79 |
80 |
81 | * DLTClient and DLTBroker::
82 |
83 | ```python
84 | >>> from dlt import dlt
85 | >>> c = dlt.DLTClient(servIP="127.0.0.1") # Only initializes the client
86 | >>> c.connect() # ...this connects
>>> dlt.dltlib.dlt_receiver_receive(ctypes.byref(c.receiver), DLT_RECEIVE_SOCKET) # receives data
88 | >>> c.read_message() # reads a single DLTMessage from received data and returns it
89 | >>>
90 | >>> # more interesting is the DLTBroker class...
91 | >>> # - create an instance that initializes a DLTClient. Accepts a filename
92 | >>> # where DLT traces would be stored
93 | >>> broker = DLTBroker(ip_address="127.0.0.1", filename='/tmp/testing_log.dlt')
>>> # needs to be started and stopped explicitly and will create and run a
95 | >>> # DLTClient instance in a new *process*.
96 | >>> broker.start()
97 | >>> broker.stop()
98 | >>>
99 | >>> # Usually, used in conjunction with the DLTContext class from mtee
100 | >>> from mtee.testing.connectors.connector_dlt import DLTContext
101 | >>> broker = DLTBroker(ip_address="127.0.0.1", filename="/tmp/testing_log.dlt", verbose=True)
102 | >>> ctx = DLTContext(broker, filters=[("SYS", "JOUR")])
103 | >>> broker.start()
104 | >>> print(ctx.wait_for(count=10))
105 | >>>
106 | ```
107 |
108 | ## Design of DLTBroker
109 |
110 | The DLTBroker abstracts out the management of 2 (multiprocessing) queues:
111 |
112 | * The `message_queue`: This queue receives *all* messages from the DLT daemon
113 | (via a DLTClient instance, running as a separate process, code in
114 | `dlt.dlt_broker_handlers.DLTMessageHandler`) and stores them to a
115 | trace file.
116 |
117 | * The `filter_queue`: This queue instructs the `DLTMessageHandler` which
118 | messages would be interesting at runtime, to be filtered and returned (for
119 | example, via a request from `DLTContext`). This is run as a separate thread in
120 | the `DLTBroker` process. The code for this is in
121 | `dlt.dlt_broker_handlers.DLTContextHandler`.
122 |
123 | ## Running tox on a local machine
124 |
125 | In order to run tox command for this repository, please perform the following:
126 |
127 | 1. Build a docker image from the `Dockerfile` provided using:
128 |
129 | ```commandline
130 | $ docker build -t python-dlt -f Dockerfile .
131 | ```
132 |
133 | 2. Run the tox in the docker container using:
134 |
135 | ```commandline
136 | $ docker run -it --rm --volume $(pwd):/workspace python-dlt /bin/sh -xc "tox -e py3,lint"
137 | ```
138 |
139 | 3. [Special Case] Getting an interactive shell inside the docker container to run arbitrary commands:
140 |
141 | ```commandline
142 | $ docker run -it --rm --volume $(pwd):/workspace --entrypoint sh python-dlt
143 | ```
144 |
--------------------------------------------------------------------------------
/tests/dlt_context_handler_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | from multiprocessing import Queue as mp_queue
3 | from queue import Empty, Queue
4 | import time
5 | import unittest
6 |
7 | from dlt.dlt_broker_handlers import DLTContextHandler
8 | from tests.utils import create_messages, stream_one, stream_multiple
9 |
10 |
class TestDLTContextHandler(unittest.TestCase):
    """Unit tests for DLTContextHandler's register/unregister bookkeeping and
    its background dispatch thread (message_queue -> per-context queues)."""

    def setUp(self):
        # Real multiprocessing queues, matching how the broker wires the handler up
        self.filter_queue = mp_queue()
        self.message_queue = mp_queue()
        self.handler = DLTContextHandler(self.filter_queue, self.message_queue)

    def test_init(self):
        """A freshly constructed handler is not running and both queues are empty."""
        self.assertFalse(self.handler.stop_flag.is_set())
        self.assertFalse(self.handler.is_alive())
        self.assertTrue(self.handler.filter_queue.empty())
        self.assertTrue(self.handler.message_queue.empty())

    def test_register_no_filter(self):
        """Registering without filters installs the catch-all (None, None) filter."""
        queue = Queue()
        queue_id = id(queue)

        self.handler.register(queue)

        # When no filter is specified, filter (None, None) should be
        # added (ie: match all messages)
        self.assertIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.context_map[queue_id], (queue, [(None, None)]))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, [(None, None)], True))

    def test_register_single_filter(self):
        """Registering with an explicit (apid, ctid) filter forwards it to filter_queue."""
        queue = Queue()
        queue_id = id(queue)
        filters = ("SYS", "JOUR")

        self.handler.register(queue, filters)

        # Specified, filter should be added to filter_queue
        self.assertIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.context_map[queue_id], (queue, filters))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, True))

    def test_register_similar_filters(self):
        """Two queues with identical filters still get independent registrations."""
        queue0 = Queue()
        queue_id0 = id(queue0)
        filters0 = ("SYS", "JOUR")

        queue1 = Queue()
        queue_id1 = id(queue1)
        filters1 = ("SYS", "JOUR")

        self.handler.register(queue0, filters0)
        self.handler.register(queue1, filters1)

        # Each queue should have a unique entry in the context_map and
        # filter_queue (even if they have the same filter)
        self.assertIn(queue_id0, self.handler.context_map)
        self.assertIn(queue_id1, self.handler.context_map)
        self.assertEqual(self.handler.context_map[queue_id0], (queue0, filters0))
        self.assertEqual(self.handler.context_map[queue_id1], (queue1, filters1))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id0, filters0, True))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id1, filters1, True))

    def test_unregister(self):
        """Unregistering removes the context and emits a filter_queue entry with False."""
        queue = Queue()
        queue_id = id(queue)
        filters = ("SYS", "JOUR")

        self.handler.register(queue, filters)
        self.assertIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, True))

        self.handler.unregister(queue)
        self.assertNotIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, False))

    def test_run_no_messages(self):
        """The handler thread starts and stops cleanly with nothing to dispatch."""
        try:
            self.handler.start()
            time.sleep(0.2)
            self.handler.stop()
            self.assertTrue(self.handler.stop_flag.is_set())
            self.assertFalse(self.handler.is_alive())
        except: # noqa: E722
            self.fail()

    def test_run_single_context_queue(self):
        """All messages tagged with a queue's id are delivered to that queue."""
        queue = Queue()
        queue_id = id(queue)
        filters = ("DA1", "DC1")
        self.handler.register(queue, filters)

        self.handler.start()

        # - simulate feeding of messages into the message_queue
        for _ in range(10):
            self.handler.message_queue.put((queue_id, create_messages(stream_one)))

        try:
            for _ in range(10):
                queue.get(timeout=0.01)
        except Empty:
            # - we should not get an Empty for exactly 10 messages
            self.fail()
        finally:
            self.handler.stop()

    def test_run_multiple_context_queue(self):
        """Messages are routed to the matching context queue; an unfiltered queue sees all."""
        self.handler.start()

        queue0 = Queue()
        queue_id0 = id(queue0)
        filters0 = ("DA1", "DC1")
        self.handler.register(queue0, filters0)

        queue1 = Queue()
        queue_id1 = id(queue1)
        filters1 = ("SYS", "JOUR")
        self.handler.register(queue1, filters1)

        # - queue with no filter
        queue2 = Queue()
        queue_id2 = id(queue2)
        self.handler.register(queue2)

        # - simulate feeding of messages into the message_queue
        for _ in range(10):
            for message in create_messages(stream_multiple, from_file=True):
                queue_id = queue_id0 if message.apid == "DA1" else queue_id1
                self.handler.message_queue.put((queue_id, message))
                # - simulate feeding of all messages for the queue with
                # no filter.
                self.handler.message_queue.put((queue_id2, message))

        try:
            da1_messages = []
            sys_messages = []
            all_messages = []
            for _ in range(10):
                da1_messages.append(queue0.get(timeout=0.01))
                sys_messages.append(queue1.get(timeout=0.01))
                all_messages.append(queue2.get(timeout=0.01))

            # these queues should not get any messages from other queues
            self.assertTrue(all(msg.apid == "DA1" for msg in da1_messages))
            self.assertTrue(all(msg.apid == "SYS" for msg in sys_messages))
            # this queue should get all messages
            self.assertFalse(
                all(msg.apid == "DA1" for msg in all_messages) or all(msg.apid == "SYS" for msg in all_messages)
            )
        except Empty:
            # - we should not get an Empty for at least 10 messages
            self.fail()
        finally:
            self.handler.stop()

    def test_run_unregister_with_unread_messages(self):
        """Messages addressed to an unregistered context are drained, not delivered."""
        self.handler.start()
        queue = Queue()
        queue_id = id(queue)
        filters = ("DA1", "DC1")
        self.handler.register(queue, filters)

        self.assertIn(queue_id, self.handler.context_map)
        self.handler.unregister(queue)

        # - simulate feeding of messages into the message_queue
        for _ in range(3):
            self.handler.message_queue.put((queue_id, create_messages(stream_one)))

        try:
            self.assertNotIn(queue_id, self.handler.context_map)
            # allow some time for the thread to read all messages
            time.sleep(0.5)
            self.assertTrue(self.handler.message_queue.empty())
            self.assertTrue(queue.empty())
        finally:
            self.handler.stop()
183 |
--------------------------------------------------------------------------------
/tests/dlt_broker_from_file_spinner_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2023. BMW Car IT GmbH. All rights reserved.
2 | """Test DLTBroker with message handler DLTFileSpinner"""
3 | import os
4 | import pytest
5 | import tempfile
6 | import time
7 | import unittest
8 | from unittest.mock import ANY, patch
9 | from queue import Queue, Empty
10 |
11 | from dlt.dlt_broker import DLTBroker, logger
12 | from tests.utils import (
13 | stream_multiple,
14 | stream_with_params,
15 | append_stream_to_file,
16 | create_messages,
17 | append_message_to_file,
18 | )
19 |
20 |
class TestDLTBrokerFromDLTFileSpinnerWithNotExistingDLT(unittest.TestCase):
    """DLTBroker (file-spinner mode) tests around a DLT file that does not exist yet."""

    def setUp(self) -> None:
        self.broker = None
        # Close the descriptor immediately: only the path is needed here, and
        # mkstemp() leaves the fd open for the caller to manage (otherwise it
        # leaks, one fd per test).
        fd, self.dlt_file_name = tempfile.mkstemp(suffix=b".dlt")
        os.close(fd)

    def tearDown(self) -> None:
        if self.broker:
            self.broker.stop()
        if os.path.exists(self.dlt_file_name):
            os.remove(self.dlt_file_name)

    def test_broker_with_not_existing_dlt_file(self):
        """
        Test DLTBroker could work with not existing dlt file

        1. prepare a file name which does not exist
        2. start dlt broker to dispatch messages from this not-existing file --> no error
        3. dlt broker could not add context successfully, but encounter a warning message
        4. no message could be dispatched from not existing file and throws out Queue.Empty exception
        5. dlt_time is 0.0, because it could not be reset according to the latest timestamp of messages
        """
        # Remove the dlt file
        os.remove(self.dlt_file_name)
        # Start broker with non-existing dlt file
        self.broker = DLTBroker(
            filename=self.dlt_file_name,
            enable_dlt_time=True,
            enable_filter_set_ack=True,
            ignore_filter_set_ack_timeout=True,
        )
        self.broker.start()
        # Add context should report warning message
        queue = Queue(maxsize=0)
        with patch.object(logger, "warning") as logger_mock:
            self.broker.add_context(queue, filters=None)
            logger_mock.assert_called_with(ANY, ANY, [(None, None)], id(queue))
        # Not existing dlt file should not throw any exception out
        for _ in range(5):
            with pytest.raises(Empty):
                queue.get_nowait()
        # dlt_time is not None, even though it is not reset with latest timestamp from messages
        self.assertEqual(self.broker.dlt_time(), 0.0)

    def test_broker_with_later_created_dlt_file(self):
        """
        Simulate a scenario: first dlt file does not exist, then dlt file is created and written with messages.

        1. delete the dlt file
        2. start broker
        3. create the dlt file and write 1 sample message
        Expectation: 1 message could be dispatched from broker
        """
        # 1. delete the dlt file
        os.remove(self.dlt_file_name)
        # 2. Start broker with non-existing dlt file
        self.broker = DLTBroker(
            filename=self.dlt_file_name,
            enable_dlt_time=True,
            enable_filter_set_ack=True,
            ignore_filter_set_ack_timeout=True,
        )
        self.broker.start()
        # Add context should report warning message
        queue = Queue(maxsize=0)
        self.broker.add_context(queue, filters=None)
        # 3. Write 1 sample message to the dlt file
        append_stream_to_file(stream_with_params, self.dlt_file_name)
        # Expectation: 1 message could be dispatched from broker
        time.sleep(0.5)
        self.assertIsNotNone(queue.get_nowait())
        # If we try to dispatch for another time, exception Queue.Empty is thrown,
        # because there is no new log from dlt file
        with pytest.raises(Empty):
            queue.get_nowait()
95 |
96 |
class TestDLTBrokerFromDLTFileSpinner(unittest.TestCase):
    """DLTBroker (file-spinner mode) tests dispatching from a DLT file that grows at runtime."""

    def setUp(self):
        # Dlt file is created with empty content. Close the descriptor right
        # away: only the path is used afterwards, and mkstemp() leaves the fd
        # open for the caller to manage (otherwise it leaks, one fd per test).
        fd, self.dlt_file_name = tempfile.mkstemp(suffix=b".dlt")
        os.close(fd)
        self.dispatched_message_queue = Queue(maxsize=0)
        # Instantiate DLTBroker, ignoring filter-set ack timeouts
        # (ignore_filter_set_ack_timeout=True below)
        self.broker = DLTBroker(
            filename=self.dlt_file_name,
            enable_dlt_time=True,
            enable_filter_set_ack=True,
            ignore_filter_set_ack_timeout=True,
        )
        self.broker.start()
        self.broker.add_context(self.dispatched_message_queue, filters=None)

    def tearDown(self):
        self.broker.stop()
        os.remove(self.dlt_file_name)

    def test_001_dispatch_from_empty_dlt_file(self):
        """
        From empty file, no message could be dispatched from queue and raise Queue.Empty.
        dlt_time is 0.0, because it could not be reset according to the latest timestamp of messages
        """
        for _ in range(5):
            with pytest.raises(Empty):
                self.dispatched_message_queue.get_nowait()
        self.assertEqual(self.broker.dlt_time(), 0.0)

    def test_002_dispatch_from_real_dlt_file(self):
        """
        Test DltBroker dispatches from a run-time written dlt file

        With a running dlt broker:
        1. Write 2 sample messages to dlt file
        2. These two messages could be dispatched with the running dlt broker
           With another try to dispatch, Queue.Empty is thrown, because no more logs could be read from dlt log;
           dlt_time from dlt_broker is equal to the timestamp of 2nd message
        3. Append another 1 message to the same dlt file
        4. Total 3 messages could be dispatched with the dlt broker
           With another try to dispatch, Queue.Empty is thrown, because no more logs could be read from dlt log;
           dlt_time from dlt_broker is equal to the timestamp of 3rd message
        """
        # 1. Write 2 sample messages to dlt file
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        # 2. Dispatch 2 messages from dlt broker
        time.sleep(0.1)
        message_1 = self.dispatched_message_queue.get_nowait()
        time.sleep(0.1)
        message_2 = self.dispatched_message_queue.get_nowait()
        self.assertNotEqual(message_1, message_2)
        # If we try to dispatch for another time, exception Queue.Empty is thrown,
        # because there is no new log from dlt file
        with pytest.raises(Empty):
            self.dispatched_message_queue.get_nowait()
        # Validate dlt time from broker
        self.assertEqual(self.broker.dlt_time(), message_2.storage_timestamp)
        # 3. Append another 1 message to the same dlt file
        append_stream_to_file(stream_with_params, self.dlt_file_name)
        # 4. Total 3 messages could be dispatched with the dlt broker
        time.sleep(0.1)
        message_3 = self.dispatched_message_queue.get_nowait()
        self.assertNotEqual(message_1, message_3)
        self.assertNotEqual(message_2, message_3)
        # If try to dispatch for another time, exception Queue.Empty is thrown,
        # because there is no new log from dlt file
        with pytest.raises(Empty):
            self.dispatched_message_queue.get_nowait()
        # Validate dlt time from broker
        self.assertEqual(self.broker.dlt_time(), message_3.storage_timestamp)

    def test_003_dispatch_from_real_dlt_file(self):
        """
        Test DltBroker dispatches apid==b"" and ctid==b"" message from a run-time written dlt file

        With a running dlt broker:
        1. Write apid==b"" and ctid==b"" message to dlt file
        2. This message could be dispatched with the running dlt broker
           a. With another try to dispatch, Queue.Empty is thrown, because no more logs could be read from dlt log;
           b. dlt_time from dlt_broker is equal to the timestamp of this message
           c. the received message should have apid==b"" and ctid==b""
        """
        # 1. Write apid==b"" and ctid==b"" message to dlt file
        # Construct a message with apid==b"" and ctid==b""
        message = create_messages(stream_with_params, from_file=True)[0]
        message.extendedheader.apid = b""
        message.extendedheader.ctid = b""
        # Write this message into dlt file
        append_message_to_file(message, self.dlt_file_name)
        # 2. Dispatch from dlt broker
        time.sleep(0.5)
        message = self.dispatched_message_queue.get_nowait()
        # If we try to dispatch for another time, exception Queue.Empty is thrown,
        # because there is no new log from dlt file
        with pytest.raises(Empty):
            self.dispatched_message_queue.get_nowait()
        # Validate dlt time from broker
        self.assertEqual(self.broker.dlt_time(), message.storage_timestamp)
        # Expectation: the received message should have apid==b"" and ctid==b""
        self.assertEqual("", message.apid)
        self.assertEqual("", message.ctid)
198 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
2 | """Test helpers and data"""
3 |
4 | import atexit
5 | import ctypes
6 | import io
7 | import tempfile
8 | import os
9 |
10 | from dlt.dlt import DLTClient, load
11 |
12 |
13 | stream_one = io.BytesIO(b"5\x00\x00 MGHS\xdd\xf6e\xca&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00")
14 |
15 | stream_with_params = (
16 | b"DLT\x01\xc2<\x85W\xc7\xc5\x02\x00MGHS=r\x00\xa0MGHS\x00\x00\x02B\x00X\xd4\xf1A\x08"
17 | b"ENV\x00LVLM\x00\x02\x00\x00-\x00CLevelMonitor::notification() => commandType\x00#"
18 | b"\x00\x00\x00\x03\x00\x00\x00\x00\x02\x00\x00\t\x00deviceId\x00#\x00\x00\x00\x05\x00"
19 | b"\x00\x00\x00\x02\x00\x00\x06\x00value\x00#\x00\x00\x00\xea\x0f\x00\x00\x00\x02\x00"
20 | b"\x00\x12\x00simulation status\x00#\x00\x00\x00\x00\x00\x00\x00"
21 | )
22 |
23 | stream_multiple = (
24 | b"DLT\x01#o\xd1WD>\x0c\x00MGHS5\x00\x00YMGHS\x00\x01\x80\xd1&\x01DA1\x00DC1\x00\x03\x00\x00\x00"
25 | b"\x07\x01\x00SYS\x00\x01\x00FILE\xff\xff\x16\x00File transfer manager.\x12\x00"
26 | b"DLT System ManagerremoDLT\x01#o\xd1Wo>\x0c\x00MGHS=\x00\x01PMGHS\x00\x00\x03\xf4\x00"
27 | b"\x01i\xa6A\x05SYS\x00JOUR\x00\x02\x00\x00\x1b\x002011/11/11 11:11:18.005274\x00\x00\x02\x00\x00"
28 | b"\t\x006.005274\x00\x00\x02\x00\x00\x16\x00systemd-journal[748]:\x00\x00\x02\x00\x00\x0f\x00"
29 | b"Informational:\x00\x00\x02\x00\x00\xcf\x00Runtime journal (/run/log/journal/) is currently"
30 | b" using 8.0M.\nMaximum allowed usage is set to 385.9M.\nLeaving at least 578.8M free (of"
31 | b" currently available 3.7G of space).\nEnforced usage limit is thus 385.9M.\x00"
32 | )
33 |
# Same kind of multi-message stream, but prefixed with a malformed/truncated
# frame; used to verify that parsing skips the broken frame and resumes.
stream_multiple_with_malformed_message_at_begining = (
    b"DLT\x01\xfar\xc5c\xf7j\x03\x00\x00\x00\x00\x00\x00\x00\x00LCMFLOW WUP invalidDLT\x01"
    b"\xfar\xc5c\x0bo\x03\x00XORA'\x01\x00\x1bXORA\x16\x02\x00\x00\x00\x00\x00\x00\x00"
    b"\x00\x00\x00\x00\x11\x04\x00\x00\x00\x00DLT\x01\xfar\xc5c?o\x03"
    b"DLT\x01#o\xd1WD>\x0c\x00MGHS5\x00\x00YMGHS\x00\x01\x80\xd1&\x01DA1\x00DC1\x00\x03\x00\x00\x00"
    b"\x07\x01\x00SYS\x00\x01\x00FILE\xff\xff\x16\x00File transfer manager.\x12\x00"
    b"DLT System ManagerremoDLT\x01#o\xd1Wo>\x0c\x00MGHS=\x00\x01PMGHS\x00\x00\x03\xf4\x00"
    b"\x01i\xa6A\x05SYS\x00JOUR\x00\x02\x00\x00\x1b\x002011/11/11 11:11:18.005274\x00\x00\x02\x00\x00"
    b"\t\x006.005274\x00\x00\x02\x00\x00\x16\x00systemd-journal[748]:\x00\x00\x02\x00\x00\x0f\x00"
    b"Informational:\x00\x00\x02\x00\x00\xcf\x00Runtime journal (/run/log/journal/) is currently"
    b" using 8.0M.\nMaximum allowed usage is set to 385.9M.\nLeaving at least 578.8M free (of"
    b" currently available 3.7G of space).\nEnforced usage limit is thus 385.9M.\x00"
)
47 |
# Single captured verbose log message; used by the regexp-comparison
# regression test (test_compare_regexp_benoit).
msg_benoit = (
    b"DLT\x01\xa5\xd1\xceW\x90\xb9\r\x00MGHS=\x00\x00RMGHS\x00\x00\n[\x00\x0f\x9b#A\x01DEMODATA\x00"
    b"\x82\x00\x002\x00Logging from the constructor of a global instance\x00"
)
52 |
53 |
# Single control message; used by the control-message decoding test
# (expected to decode as a get_log_info response).
control_one = (
    b"DLT\x01#o\xd1W\x99!\x0c\x00MGHS5\x00\x00;MGHS\x00\x01\x7f\xdb&\x01DA1\x00DC1\x00\x03"
    b"\x00\x00\x00\x07\x01\x00HDDM\x01\x00CAPI\xff\xff\x04\x00CAPI\x06\x00hddmgrremo"
)
58 |
59 | # DLT file with invalid storage header and frames
60 | file_storage_clean = (
61 | b"DLT\x01\x9a\xc6\xbfW\x020\t\x00MGHS5\x00\x00 MGHS\x00\x02\x8aC&\x01DA1\x00DC1"
62 | b"\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00DLT\x01\x9a\xc6\xbfWoA\t\x00MGHS="
63 | b"\x00\x00NMGHS\x00\x00\x049\x00\x01p\n\x00MGHS5\x00\x00 MGHS" # not to buffer
117 | b"\x00\x00mj&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
118 | )
119 |
# Capture spanning lifecycles but not beginning with a lifecycle-start
# message; the inline comments mark the role of each frame for the
# lifecycle-handling tests.
file_with_lifecycles_without_start = (
    b"DLT\x01\xc5\x82\xdaX\x19\x93\r\x00XORA'\x01\x00\x1bXORA"  # trace to buffer
    b"\x16\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x04\x00\x00\x00\x00"
    b"DLT\x01\xc5\x82\xdaXQi\x0e\x00MGHS5\x00\x00 MGHS"  # trace to buffer
    b"\x00\x03U\xe0&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
    b"DLT\x01m\xc2\x91Y\xad\xe4\x07\x00MGHS=\x01\x00zMGHS"  # random trace
    b"\x00\x00\x02\xab\x00\x00@VA\x01DLTDINTM\x00\x02\x00\x00Z\x00"
    b"ApplicationID 'DBSY' registered for PID 689, Description=DBus"
    b" Logging|SysInfra|Log&Trace\n\x00"
    b"DLT\x01\xed\xc2\x91Y\x0f\xf0\x08\x00MGHS5\x00\x00 MGHS"  # trace to buffer
    b"\x00\x00\x9dC&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
    b"DLT\x01\xed\xc2\x91Y\x17.\n\x00MGHS=\x00\x00NMGHS"  # new lifecycle
    b"\x00\x00\x02\xae\x00\x00@/A\x01DLTDINTM\x00\x02\x00\x00.\x00"
    b"Daemon launched. Starting to output traces...\x00"
)
135 |
136 |
def append_stream_to_file(stream, file_name):
    """Parse *stream* into DLT messages and append each one to *file_name*."""
    for message in create_messages(stream, from_file=True):
        append_message_to_file(message, file_name)
141 |
142 |
def append_message_to_file(message, file_name):
    """Append the binary form of *message* to *file_name* (created if missing)."""
    # Append mode ("ab") so repeated calls accumulate messages instead of
    # overwriting the file contents.
    with open(file_name, "ab") as dlt_file:
        dlt_file.write(message.to_bytes())
        dlt_file.flush()
148 |
149 |
def create_messages(stream, from_file=False):
    """Create DLT message(s) from raw bytes.

    :param stream: a file-like object (when from_file is False) or a bytes
        object (when from_file is True)
    :param bool from_file: when False, feed the bytes into a DLTClient receive
        buffer and parse a single message; when True, dump the bytes into a
        temporary file and load every message it contains
    :returns: one DLTMessage (from_file=False) or the messages loaded from the
        temporary file (from_file=True)
    """
    if from_file is False:
        stream.seek(0)
        buf = stream.read()

        client = DLTClient()
        client.receiver.buf = ctypes.create_string_buffer(buf)
        client.receiver.bytesRcvd = len(buf)

        return client.read_message()

    # mkstemp() returns an open descriptor; close it right away (it was leaked
    # before) and re-open by name to write the payload.
    fd, tmpname = tempfile.mkstemp(suffix=b"")
    os.close(fd)
    with open(tmpname, "wb") as tmpfile:
        tmpfile.write(stream)

    # The temporary file must outlive this call (messages may lazily refer to
    # it), so defer its removal to interpreter exit.
    atexit.register(os.remove, tmpname)

    return load(tmpname)
172 |
173 |
class MockDLTMessage(object):
    """Mock DLT message for dltlyse plugin testing"""

    def __init__(self, ecuid="MGHS", apid="SYS", ctid="JOUR", sid="958", payload="", tmsp=0.0, sec=0, msec=0, mcnt=0):
        # Mirror the attribute names of a real DLTMessage.
        self.ecuid = ecuid
        self.apid = apid
        self.ctid = ctid
        self.sid = sid
        self.payload = payload
        self.tmsp = tmsp
        self.mcnt = mcnt
        # sec/msec are folded into a mock storage header rather than stored directly.
        self.storageheader = MockStorageHeader(sec=sec, msec=msec)

    def compare(self, target):
        """Compare DLT Message to a dictionary"""
        # Project this message's attributes onto the keys the caller asked
        # about, then require an exact match on that projection.
        projection = {key: value for key, value in vars(self).items() if key in target}
        return projection == target

    @property
    def payload_decoded(self):
        """Fake payload decoding"""
        return self.payload

    @property
    def storage_timestamp(self):
        """Fake storage timestamp built as '<seconds>.<microseconds>'"""
        header = self.storageheader
        return float("{}.{}".format(header.seconds, header.microseconds))

    def __repr__(self):
        return str(vars(self))


class MockStorageHeader(object):
    """Mock DLT storage header for plugin testing"""

    def __init__(self, msec=0, sec=0):
        # Same attribute names as the real storage header.
        self.seconds = sec
        self.microseconds = msec
211 |
--------------------------------------------------------------------------------
/tests/dlt_message_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 | """Basic unittests for DLT messages"""
3 | import io
4 | import pickle
5 | import re
6 | from unittest.mock import patch, PropertyMock
7 |
8 | import pytest
9 |
10 | from dlt.dlt import DLTMessage
11 | from tests.utils import (
12 | create_messages,
13 | stream_one,
14 | stream_with_params,
15 | stream_multiple,
16 | stream_multiple_with_malformed_message_at_begining,
17 | msg_benoit,
18 | control_one,
19 | )
20 |
21 |
class TestsDLTMessageUnit(object):
    """Unit tests for DLTMessage parsing, comparison and serialization"""

    def test_malformed_message(self):
        """A stream starting with a malformed frame still yields the valid messages"""
        msgs = create_messages(stream_multiple_with_malformed_message_at_begining, from_file=True)

        assert msgs[0].message_id == 1279675715
        assert len(msgs) == 3
        assert not msgs[0].extendedheader

    def test_compare_default_attrs(self):
        """compare() accepts dotted attribute paths such as 'extendedheader.apid'"""
        attrs = {"extendedheader.apid": "DA1", "extendedheader.ctid": "DC1"}
        msg = create_messages(stream_one)

        assert msg.compare(other=attrs)
        assert msg.compare(other={"extendedheader.ctid": "DC1"})

    def test_equal(self):
        """Two messages parsed from the same stream compare equal"""
        msg1 = create_messages(stream_one)
        msg2 = create_messages(stream_one)

        assert msg1 == msg2

    def test_easy_attributes(self):
        """Commonly used header fields are exposed directly on the message"""
        msg = create_messages(stream_one)

        assert msg.ecuid == "MGHS"
        assert msg.seid == 0
        assert msg.tmsp == 372391.26500000001
        assert msg.apid == "DA1"
        assert msg.ctid == "DC1"

    def test_compare(self):
        """compare() works against another message and against attribute dicts"""
        msg1 = create_messages(stream_one)
        msg2 = create_messages(stream_one)

        assert msg1.compare(msg2)
        assert msg1.compare(other=msg2)
        assert msg1.compare(dict(apid="DA1", ctid="DC1"))
        assert not msg1.compare(dict(apid="DA1", ctid="XX"))

    def test_compare_regexp(self):
        """compare() matches attribute values against compiled regular expressions"""
        msg1 = create_messages(stream_one)

        assert msg1.compare(dict(apid="DA1", ctid=re.compile(r"D.*")))
        assert msg1.compare(
            dict(apid="DA1", ctid=re.compile(r"D.*"), payload_decoded=re.compile(r".connection_info ok."))
        )
        assert msg1.compare(
            dict(apid="DA1", ctid=re.compile(r"D.*"), payload_decoded=re.compile(r".connection_info ok."))
        )
        assert msg1.compare(dict(apid="DA1", ctid=re.compile(r"D.*"), payload_decoded=re.compile(r".*info ok.")))
        assert msg1.compare(dict(apid="DA1", ctid="DC1", payload_decoded=re.compile(r".*info ok.")))
        assert msg1.compare(dict(apid=re.compile(r"D.")))
        assert msg1.compare(dict(apid=re.compile(r"D.+")))
        assert msg1.compare(dict(apid=re.compile(r"D.")))
        assert not msg1.compare(dict(apid=re.compile(r"X.")))

    def test_compare_regexp_nsm(self):
        """Anchored patterns distinguish 'NSM' from 'NSMA'; unanchored ones match both"""
        nsm = create_messages(
            io.BytesIO(b"5\x00\x00 MGHS\xdd\xf6e\xca&\x01NSM\x00DC1\x00\x02\x0f\x00\x00" b"\x00\x02\x00\x00\x00\x00")
        )
        nsma = create_messages(
            io.BytesIO(b"5\x00\x00 MGHS\xdd\xf6e\xca&\x01NSMADC1\x00\x02\x0f\x00\x00" b"\x00\x02\x00\x00\x00\x00")
        )

        assert nsm.compare(dict(apid=re.compile("^NSM$")))
        assert not nsma.compare(dict(apid=re.compile("^NSM$")))

        assert nsm.compare(dict(apid="NSM"))
        assert not nsma.compare(dict(apid="NSM"))

        assert nsm.compare(dict(apid=re.compile("NSM")))
        assert nsma.compare(dict(apid=re.compile("NSM")))

    def test_compare_regexp_throw(self):
        """Comparing against a bytes pattern raises instead of failing silently"""
        nsm = create_messages(
            io.BytesIO(b"5\x00\x00 MGHS\xdd\xf6e\xca&\x01NSM\x00DC1\x00\x02\x0f\x00\x00" b"\x00\x02\x00\x00\x00\x00")
        )
        with pytest.raises(Exception):
            assert nsm.compare(dict(apid=b"NSM"), regexp=True)

    def test_compare_regexp_benoit(self):
        """Regression test: regexp payload comparison on a captured message"""
        msg1 = create_messages(msg_benoit, from_file=True)[0]
        assert msg1.compare(
            {
                "apid": "DEMO",
                "ctid": "DATA",
                "payload_decoded": re.compile("Logging from the constructor of a global instance"),
            }
        )

    def test_compare_two_msgs(self):
        """Distinct messages from the same stream do not compare equal"""
        msgs = create_messages(stream_multiple, from_file=True)
        assert msgs[0] != msgs[-1]

    def test_compare_other_not_modified(self):
        """compare() must not mutate the dictionary passed as 'other'"""
        msg = create_messages(stream_one)
        other = dict(apid="XX", ctid="DC1")
        assert not msg.compare(other)
        assert other == dict(apid="XX", ctid="DC1")

    def test_compare_quick_return(self):
        """compare() short-circuits on an apid/ctid mismatch without reading ecuid"""
        msg = create_messages(stream_one)
        other = dict(apid=b"DA1", ctid=b"XX", ecuid=b"FOO")

        with patch("dlt.dlt.DLTMessage.ecuid", new_callable=PropertyMock) as ecuid:
            ecuid.return_value = b"FOO"
            assert not msg.compare(other)
            ecuid.assert_not_called()

    def test_compare_matching_apid_ctid(self):
        """ecuid is consulted only after apid/ctid have matched"""
        msg = create_messages(stream_one)
        other = dict(apid="DA1", ctid="DC1", ecuid="FOO")

        with patch("dlt.dlt.DLTMessage.ecuid", new_callable=PropertyMock) as ecuid:
            ecuid.return_value = "BAR"
            assert not msg.compare(other)
            ecuid.assert_called_once()

            ecuid.return_value = "FOO"
            assert msg.compare(other)
            assert ecuid.call_count == 2

    def test_pickle_api(self):
        """Messages survive a pickle round-trip unchanged"""
        messages = create_messages(stream_multiple, from_file=True)
        for msg in messages:
            assert msg == pickle.loads(pickle.dumps(msg))

    def test_from_bytes_control(self):
        """from_bytes() parses a control message including its storage header"""
        msg = DLTMessage.from_bytes(
            b"DLT\x011\xd9PY(<\x08\x00MGHS5\x00\x00 MGHS\x00\x00\x96\x85&\x01DA1\x00DC1"
            b"\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
        )

        assert msg.apid == "DA1"
        assert msg.ctid == "DC1"
        assert msg.ecuid == "MGHS"
        assert msg.tmsp == 3.8533
        assert msg.storage_timestamp == 1498470705.539688
        assert msg.payload_decoded == "[connection_info ok] connected "

    def test_from_bytes_log_multipayload(self):
        """from_bytes() parses a verbose log message with payload arguments"""
        msg = DLTMessage.from_bytes(
            b"DLT\x011\xd9PYfI\x08\x00MGHS=\x00\x000MGHS\x00\x00\x03\x1e\x00\x00\x94\xc8A"
            b"\x01MON\x00CPUS\x00\x02\x00\x00\x10\x004 online cores\n\x00"
        )

        assert msg.apid == "MON"
        assert msg.ctid == "CPUS"
        assert msg.ecuid == "MGHS"
        assert msg.tmsp == 3.8088
        assert msg.payload_decoded == "4 online cores"

    def test_sort_data_control(self):
        """extract_sort_data() reads timestamp/length/ids from a control frame"""
        data = (
            b"DLT\x011\xd9PY(<\x08\x00MGHS5\x00\x00 MGHS\x00\x00\x96\x85&\x01DA1\x00DC1"
            b"\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
        )
        tmsp, length, apid, ctid = DLTMessage.extract_sort_data(data)

        assert tmsp == 3.8533
        assert length == len(data)
        assert apid == "DA1"
        assert ctid == "DC1"

    def test_sort_data_log_multipayload(self):
        """extract_sort_data() also works for verbose log frames"""
        data = (
            b"DLT\x011\xd9PYfI\x08\x00MGHS=\x00\x000MGHS\x00\x00\x03\x1e\x00\x00\x94\xc8A"
            b"\x01MON\x00CPUS\x00\x02\x00\x00\x10\x004 online cores\n\x00"
        )
        tmsp, length, apid, ctid = DLTMessage.extract_sort_data(data)

        assert tmsp == 3.8088
        assert length == len(data)
        assert apid == "MON"
        assert ctid == "CPUS"

    def test_largelog(self):
        """A large single-argument log parses and round-trips through to_bytes()"""
        data = (
            b"DLT\x012\xd9PY)\x00\x01\x00MGHS=o\x02\x04MGHS\x00\x00\x03\x1e\x00\x00\x9e\xb7"
            b"A\x01MON\x00THRD\x00\x02\x00\x00\xe4\x01Process avb_streamhandl with pid: 307 "
            b'"/usr/bin/avb_streamhandler_app_someip -s pluginias-media_transport-avb_config'
            b"uration_bmw_mgu.so --bg setup --target Harman_MGU_B1 -p MGU_ICAM -k local.alsa"
            b".baseperiod=256 -k ptp.loopcount=0 -k ptp.pdelaycount=0 -k ptp.synccount=0 -k "
            b"sched.priority=20 -k tspec.vlanprio.low=3 -k tspec.presentation.time.offset.lo"
            b"w=2200000 -k tspec.interval.low=1333000 -k debug.loglevel._RXE=4 -k alsa.group"
            b'name=mgu_avbsh -n socnet0 -b 2 " started 2401 msec ago\x00'
        )

        msg = DLTMessage.from_bytes(data)
        assert msg.apid == "MON"
        assert msg.ctid == "THRD"
        assert msg.ecuid == "MGHS"
        assert msg.tmsp == 4.0631
        assert (
            msg.payload_decoded == 'Process avb_streamhandl with pid: 307 "/usr/bin/avb_streamhandler_app_someip -s '
            "pluginias-media_transport-avb_configuration_bmw_mgu.so --bg setup --target Harman_MGU_B1 -p MGU_ICAM "
            "-k local.alsa.baseperiod=256 -k ptp.loopcount=0 -k ptp.pdelaycount=0 -k ptp.synccount=0 "
            "-k sched.priority=20 -k tspec.vlanprio.low=3 -k tspec.presentation.time.offset.low=2200000 "
            "-k tspec.interval.low=1333000 -k debug.loglevel._RXE=4 -k alsa.groupname=mgu_avbsh -n socnet0 "
            '-b 2 " started 2401 msec ago'
        )

        tmsp, length, apid, ctid = DLTMessage.extract_sort_data(data)
        assert msg.tmsp == tmsp
        assert len(msg.to_bytes()) == length
        assert msg.apid == apid
        assert msg.ctid == ctid
230 |
class TestsPayload(object):
    """Unit tests for indexed access to a message's payload arguments"""

    def test_split(self):
        """Payload exposes exactly noar arguments; this stream alternates str/int"""
        msg = create_messages(stream_with_params, from_file=True)[0]
        payload = msg.payload
        assert len(payload) == msg.noar
        assert payload[0] == b"CLevelMonitor::notification() => commandType"
        assert payload[1] == 3
        assert payload[2] == b"deviceId"
        assert payload[3] == 5
        assert payload[4] == b"value"
        assert payload[5] == 4074
        assert payload[6] == b"simulation status"
        assert payload[7] == 0

        # Indexing past the last argument must raise, not wrap around
        with pytest.raises(IndexError):
            payload.__getitem__(8)
247 |
248 |
class TestsControl(object):
    """Unit tests for decoding DLT control messages"""

    def test_load(self):
        """A get_log_info control response decodes to its hex-dump representation"""
        msg = create_messages(control_one, from_file=True)[0]
        assert msg.apid == "DA1"
        assert msg.ctid == "DC1"
        assert msg.is_mode_verbose == 0
        assert (
            msg.payload_decoded == "[get_log_info 7] get_log_info, 07, 01 00 48 44 44 4d 01 00 43 41 50 49 ff"
            " ff 04 00 43 41 50 49 06 00 68 64 64 6d 67 72 72 65 6d 6f"
        )
259 |
--------------------------------------------------------------------------------
/dlt/dlt_broker.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 | """DLT Broker is running in a loop in a separate thread until stop_flag is set and adding received messages
3 | to all registered queues
4 | """
5 | from contextlib import contextmanager
6 | import ipaddress as ip
7 | import logging
8 | from multiprocessing import Event, Queue
9 | import queue as tqueue
10 |
11 | from dlt.dlt_broker_handlers import (
12 | DLT_DAEMON_TCP_PORT,
13 | DLTContextHandler,
14 | DLTFilterAckMessageHandler,
15 | DLTMessageDispatcherBase,
16 | DLTMessageHandler,
17 | DLTFileSpinner,
18 | DLTTimeValue,
19 | )
20 |
# Default DLT client timeout (seconds) used when the caller's client
# configuration does not supply one.
DLT_CLIENT_TIMEOUT = 5

logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
24 |
25 |
@contextmanager
def create_filter_ack_queue(filter_ack_msg_handler):
    """Register a fresh ack queue on *filter_ack_msg_handler* for the duration
    of the with-block, and always unregister it afterwards."""
    ack_queue = tqueue.Queue()

    try:
        filter_ack_msg_handler.register(ack_queue)
        yield ack_queue
    finally:
        filter_ack_msg_handler.unregister(ack_queue)
38 |
39 |
class DLTBroker(object):
    """DLT Broker class manages receiving and filtering of DLT Messages"""

    def __init__(
        self,
        ip_address=None,
        port=DLT_DAEMON_TCP_PORT,
        use_proxy=False,
        enable_dlt_time=False,
        enable_filter_set_ack=False,
        filter_set_ack_timeout=2.0,
        ignore_filter_set_ack_timeout=False,
        **kwargs,
    ):
        """Initialize the DLT Broker

        :param str | None ip_address: IP address of the DLT Daemon.
            If None, then dlt does not come with any ip listening, in other words, it comes from dlt log directly;
            Else, dlt comes from listening to some ip address. Defaults to TCP connection,
            unless a multicast address is used. In that case an UDP multicast connection will be used
        :param str port: Port of the DLT Daemon
        :param bool use_proxy: Ignored - compatibility option
        :param bool enable_dlt_time: Record the latest dlt message timestamp if enabled.
        :param bool enable_filter_set_ack: Wait an ack message when sending a filter-setting message
        :param float filter_set_ack_timeout: Waiting time for the ack message
        :param bool ignore_filter_set_ack_timeout: Ignore the timeout when the value is True
        :param **kwargs: All other args passed to DLTMessageHandler
        """
        # - dlt-time share memory init
        self._dlt_time_value = DLTTimeValue() if enable_dlt_time else None

        # - handlers init
        self.mp_stop_flag = Event()
        self.filter_queue = Queue()
        self.message_queue = Queue()

        # - filter ack queue setting
        self.enable_filter_set_ack = enable_filter_set_ack
        self.ignore_filter_set_ack_timeout = ignore_filter_set_ack_timeout
        self.filter_set_ack_timeout = filter_set_ack_timeout
        if enable_filter_set_ack:
            # Optional[multiprocessing.Queue[Tuple[int, bool]]]
            # int presents queue id, bool presents enable or not
            self.filter_ack_queue = Queue()
            self.filter_ack_msg_handler = DLTFilterAckMessageHandler(self.filter_ack_queue)
        else:
            self.filter_ack_queue = None
            self.filter_ack_msg_handler = None

        self.msg_handler = self.create_dlt_message_dispather(ip_address, port, kwargs)

        self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)

        self._ip_address = ip_address
        self._port = port
        self._filename = kwargs.get("filename")

    # NOTE: "dispather" is a historical misspelling kept for backward
    # compatibility with existing callers.
    def create_dlt_message_dispather(self, ip_address, port, client_cfg) -> DLTMessageDispatcherBase:
        """Create the message dispatcher matching the configured message source.

        :param str | None ip_address: DLT daemon address, or None to read from a file
        :param int port: DLT daemon port (only relevant with an ip_address)
        :param dict client_cfg: remaining configuration for the dispatcher
        :returns: the dispatcher instance (not yet started)
        """
        if ip_address:
            # If ip_address is given, then messages are retrieved from dlt client at run-time
            return self._create_dlt_message_handler(ip_address, port, client_cfg)

        # If not ip_address is given, then messages are retrieved from the given filename
        # The logs are written to the given filename from another process
        return self._create_dlt_file_spinner(client_cfg.get("filename"))

    def _create_dlt_message_handler(self, ip_address, port, client_cfg):
        """Build a DLTMessageHandler connected to a live DLT daemon."""
        client_cfg["ip_address"] = ip_address
        client_cfg["port"] = port
        # Fall back to the module default when no timeout is configured
        client_cfg["timeout"] = client_cfg.get("timeout", DLT_CLIENT_TIMEOUT)
        return DLTMessageHandler(
            self.filter_queue,
            self.message_queue,
            self.mp_stop_flag,
            client_cfg,
            dlt_time_value=self._dlt_time_value,
            filter_ack_queue=self.filter_ack_queue,
        )

    def _create_dlt_file_spinner(self, file_name):
        """Build a DLTFileSpinner that reads messages from *file_name*."""
        return DLTFileSpinner(
            self.filter_queue,
            self.message_queue,
            self.mp_stop_flag,
            file_name,
            dlt_time_value=self._dlt_time_value,
            filter_ack_queue=self.filter_ack_queue,
        )

    def start(self):
        """DLTBroker main worker method"""
        if isinstance(self.msg_handler, DLTMessageHandler):
            logger.debug(
                "Starting DLTBroker with parameters: use_proxy=%s, ip_address=%s, port=%s, filename=%s, multicast=%s",
                False,
                self._ip_address,
                self._port,
                self._filename,
                ip.ip_address(self._ip_address).is_multicast,
            )
        else:
            logger.debug("Starting DLTBroker by reading %s", self._filename)

        if self._dlt_time_value:
            logger.debug("Enable dlt time for DLTBroker.")

        self.msg_handler.start()
        self.context_handler.start()
        if self.enable_filter_set_ack:
            self.filter_ack_msg_handler.start()

        # - ensure we don't block on join_thread() in stop()
        # https://docs.python.org/2.7/library/multiprocessing.html#multiprocessing.Queue.cancel_join_thread
        self.filter_queue.cancel_join_thread()
        self.message_queue.cancel_join_thread()
        if self.enable_filter_set_ack:
            self.filter_ack_queue.cancel_join_thread()

    def _recv_filter_set_ack(self, context_filter_ack_queue, required_response):
        """Wait for one filter-setting ack on *context_filter_ack_queue*.

        :param Queue context_filter_ack_queue: queue the ack is expected on
        :param bool required_response: the ack value we expect to receive
        :returns: True when the received ack matches, False when it does not,
            None when the wait timed out but ignore_filter_set_ack_timeout is set
        :raises queue.Empty: on timeout when ignore_filter_set_ack_timeout is False
        """
        try:
            resp = context_filter_ack_queue.get(timeout=self.filter_set_ack_timeout)
        except tqueue.Empty as err:
            if self.ignore_filter_set_ack_timeout:
                logger.info(
                    "Timeout for getting filter-setting ack: %s, %s", id(context_filter_ack_queue), required_response
                )
                return None

            raise err

        if resp != required_response:
            logger.debug("Filter-setting ack response not matched: %s, expected: %s", resp, required_response)
            return False

        return True

    def add_context(self, context_queue, filters=None):
        """Register context

        :param Queue context_queue: The queue to which new messages will
                                    be added
        :param tuple filters: A list of tuples (eg: [(apid, ctid)])
                              used to filter messages that go into this
                              queue.
        """
        filters = filters or [(None, None)]

        if not isinstance(filters, (tuple, list)):
            raise RuntimeError("Context queue filters must be a tuple. Ex. (('SYS', 'JOUR'), ('AUDI', 'CAPI'))")

        if self.enable_filter_set_ack:
            logger.debug("Send a filter-setting message with requesting ack")
            with create_filter_ack_queue(self.filter_ack_msg_handler) as context_filter_ack_queue:
                self.context_handler.register(
                    context_queue, filters, context_filter_ack_queue=context_filter_ack_queue
                )

                # A missing ack is logged but deliberately not fatal: the
                # filters are applied once the dispatcher processes them.
                if not self._recv_filter_set_ack(context_filter_ack_queue, True):
                    failure_reason = ""
                    if isinstance(self.msg_handler, DLTMessageHandler):
                        failure_reason = (
                            "It's possible that DLTClient client does not start."
                            " If it's a test case, it might be an error"
                        )
                    elif isinstance(self.msg_handler, DLTFileSpinner):
                        failure_reason = (
                            f"It's possible that dlt file {self._filename} is empty now. No big issue, "
                            f"filters would be added once after new message is available in dlt file"
                        )
                    logger.warning(
                        (
                            "Could not receive filter-setting message ack. %s. For now, Run it anyway. "
                            "filters: %s, queue_id: %s"
                        ),
                        failure_reason,
                        filters,
                        id(context_queue),
                    )
        else:
            self.context_handler.register(context_queue, filters)

    def remove_context(self, context_queue):
        """Unregister context

        :param Queue context_queue: The queue to unregister.
        """
        self.context_handler.unregister(context_queue)

    def stop(self):
        """Stop the broker"""
        logger.info("Stopping DLTContextHandler and %s", type(self.msg_handler).__name__)

        self.msg_handler.break_blocking_main_loop()

        logger.debug("Stop %s", type(self.msg_handler).__name__)
        self.mp_stop_flag.set()

        logger.debug("Stop DLTContextHandler")
        self.context_handler.stop()

        logger.debug("Waiting on DLTContextHandler ending")
        self.context_handler.join()

        if self.enable_filter_set_ack:
            logger.debug("Stop DLTFilterAckMessageHandler")
            self.filter_ack_msg_handler.stop()

            logger.debug("Waiting on DLTFilterAckMessageHandler ending")
            self.filter_ack_msg_handler.join()

        logger.debug("Waiting on %s ending", type(self.msg_handler).__name__)
        # A dispatcher process still alive here did not exit on the stop flag;
        # terminate it rather than joining (a join could block forever).
        if self.msg_handler.is_alive():
            try:
                self.msg_handler.terminate()
            except OSError:
                pass
        else:
            self.msg_handler.join()

        logger.debug("DLTBroker execution done")

    # pylint: disable=invalid-name
    def isAlive(self):
        """Backwards compatibility method

        Called from mtee.testing.connectors.tools.broker_assert. Will
        need to be replaced in MTEE eventually.
        """
        return any((self.msg_handler.is_alive(), self.context_handler.is_alive()))

    def dlt_time(self):
        """Get time for the last dlt message

        The value is seconds from 1970/1/1 0:00:00

        :rtype: float
        """
        if self._dlt_time_value:
            return self._dlt_time_value.timestamp

        raise RuntimeError("Getting dlt time function is not enabled")

    @property
    def injection_support(self) -> bool:
        """True when the broker was configured with an ip_address (live daemon)."""
        return bool(self._ip_address)
286 |
--------------------------------------------------------------------------------
/tests/dlt_broker_time_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021. BMW Car IT GmbH. All rights reserved.
2 | """Test DLTBroker with enabling dlt_time"""
3 | from contextlib import contextmanager
4 | from multiprocessing import Queue
5 | import queue as tqueue
6 | import time
7 | from unittest.mock import ANY, patch, MagicMock
8 |
9 | import pytest
10 |
11 | from dlt.dlt_broker import create_filter_ack_queue, DLTBroker, logger
12 | from dlt.dlt_broker_handlers import DLTContextHandler, DLTFilterAckMessageHandler, DLTMessageHandler
13 | from tests.utils import MockDLTMessage
14 |
15 |
def fake_py_dlt_client_main_loop(client, callback, *args, **kwargs):
    """Stand-in for py_dlt_client_main_loop: report success without producing messages."""
    return True
18 |
19 |
@contextmanager
def dlt_broker(pydlt_main_func=fake_py_dlt_client_main_loop, enable_dlt_time=True, enable_filter_set_ack=False):
    """Yield a started DLTBroker whose client connection and main loop are faked"""
    patch_connect = patch("dlt.dlt_broker_handlers.DLTMessageHandler._client_connect")
    patch_main_loop = patch("dlt.dlt_broker_handlers.py_dlt_client_main_loop", side_effect=pydlt_main_func)

    with patch_connect, patch_main_loop:
        broker = DLTBroker("42.42.42.42", enable_dlt_time=enable_dlt_time, enable_filter_set_ack=enable_filter_set_ack)
        broker.msg_handler._client = MagicMock()

        try:
            broker.start()
            yield broker
        finally:
            # Always shut the broker down, even if start() or the body raised
            broker.stop()
36 |
37 |
@contextmanager
def dlt_filter_ack_msg_handler():
    """Yield a started DLTFilterAckMessageHandler together with its dispatch queue"""
    ack_queue = Queue()
    handler = DLTFilterAckMessageHandler(ack_queue)

    try:
        handler.start()
        # Avoid blocking on the queue's feeder thread during shutdown
        ack_queue.cancel_join_thread()

        yield (handler, ack_queue)
    finally:
        handler.stop()
        ack_queue.close()
51 |
52 |
def fake_dlt_msg_handler(msg, with_filter_ack_queue):
    """Create a fake DLTMessageHandler whose filter queue yields *msg* exactly once"""
    filter_queue = MagicMock()
    # First empty() call reports a pending item, the second ends the drain loop
    filter_queue.empty.side_effect = [False, True]
    filter_queue.get_nowait.return_value = msg

    ack_queue = MagicMock() if with_filter_ack_queue else None
    client_cfg = {"ip_address": b"127.0.0.1", "filename": b"/dev/null", "verbose": 0, "port": "1234"}

    return DLTMessageHandler(
        filter_queue, MagicMock(), MagicMock(), client_cfg, dlt_time_value=None, filter_ack_queue=ack_queue
    )
65 |
66 |
def test_start_stop_dlt_broker():
    """A broker started with dlt-time enabled exposes the shared time value"""
    with dlt_broker(pydlt_main_func=fake_py_dlt_client_main_loop, enable_dlt_time=True) as broker:
        assert broker._dlt_time_value
71 |
72 |
def test_start_stop_dlt_broker_without_dlt_time():
    """A broker started without dlt-time has no shared time value"""
    with dlt_broker(pydlt_main_func=fake_py_dlt_client_main_loop, enable_dlt_time=False) as broker:
        assert not broker._dlt_time_value
77 |
78 |
@pytest.mark.parametrize(
    "input_sec,input_msec,expected_value",
    [
        (42, 42, 42.42),  # normal test case
        (1618993559, 7377682, 1618993559.7377682),  # big value. The value will be truncated when type is not double
    ],
)
def test_dlt_broker_get_dlt_time(input_sec, input_msec, expected_value):
    """Test to get time from DLTBroker"""

    def handle(client, callback=None, *args, **kwargs):
        # Every loop iteration delivers one fake message with the given timestamp
        msg = MockDLTMessage(payload="test_payload", sec=input_sec, msec=input_msec)
        return callback(msg)

    with dlt_broker(handle) as broker:
        time.sleep(0.01)

        assert expected_value == broker.dlt_time()
96 |
97 |
def test_dlt_broker_get_latest_dlt_time():
    """Test to get the latest time from DLTBroker"""
    # Mutable dict instead of nonlocal for py2-era compatibility
    # ref: https://stackoverflow.com/questions/3190706/nonlocal-keyword-in-python-2-x
    time_value = {"v": 42}

    def handle(client, callback=None, *args, **kwargs):
        # Advance the fake timestamp by one second per delivered message, capped at 45
        if time_value["v"] < 45:
            time_value["v"] += 1

        time.sleep(0.01)
        return callback(MockDLTMessage(payload="test_payload", sec=time_value["v"], msec=42))

    with dlt_broker(handle) as broker:
        time_vals = set()
        for i in range(10):
            time_vals.add(broker.dlt_time())
            time.sleep(0.01)

        # The observed readings step through the advancing timestamps; 0.0 is
        # the reading taken before the first message was processed
        assert sorted(time_vals) == [0.0, 43.42, 44.42, 45.42]
117 |
118 |
def test_start_stop_dlt_broker_with_dlt_ack_msg_handler():
    """Enabling filter-set ack creates the ack message handler"""
    with dlt_broker(
        pydlt_main_func=fake_py_dlt_client_main_loop, enable_dlt_time=True, enable_filter_set_ack=True
    ) as broker:
        assert broker.filter_ack_msg_handler
123 |
124 |
def test_start_stop_dlt_broker_without_dlt_ack_msg_handler():
    """Without filter-set ack no ack message handler is created"""
    with dlt_broker(
        pydlt_main_func=fake_py_dlt_client_main_loop, enable_dlt_time=True, enable_filter_set_ack=False
    ) as broker:
        assert not broker.filter_ack_msg_handler
129 |
130 |
def test_create_filter_ack_queue():
    """The context manager registers the ack queue on entry and unregisters it on exit"""
    handler_mock = MagicMock()

    with create_filter_ack_queue(handler_mock) as ack_queue:
        ack_queue.put(True)
        assert ack_queue.get()

    handler_mock.register.assert_called_with(ack_queue)
    handler_mock.unregister.assert_called_with(ack_queue)
141 |
142 |
@pytest.mark.parametrize(
    "ack,required_ack,return_val",
    [
        (True, True, True),
        (False, False, True),
        (True, False, False),
        (False, True, False),
    ],
)
def test_recv_filter_set_ack(ack, required_ack, return_val):
    """The received ack is compared against the required one"""
    ack_queue = tqueue.Queue()
    ack_queue.put(ack)

    with dlt_broker(enable_filter_set_ack=True) as broker:
        assert broker._recv_filter_set_ack(ack_queue, required_ack) == return_val
159 |
160 |
def test_recv_filter_set_ack_timeout_ignore():
    """When the timeout is ignored, a missing ack yields a falsy result"""
    ack_queue = tqueue.Queue()

    with dlt_broker(enable_filter_set_ack=True) as broker:
        broker.filter_set_ack_timeout = 0.01
        broker.ignore_filter_set_ack_timeout = True

        result = broker._recv_filter_set_ack(ack_queue, True)
        assert not result
170 |
171 |
def test_recv_filter_set_ack_timeout_exception():
    """Test not to receive an ack value and with an exception"""
    queue = tqueue.Queue()

    with dlt_broker(enable_filter_set_ack=True) as broker:
        broker.filter_set_ack_timeout = 0.01
        broker.ignore_filter_set_ack_timeout = False

        # With timeout-ignoring disabled, the queue.Empty from get() propagates
        with pytest.raises(tqueue.Empty) as err:
            broker._recv_filter_set_ack(queue, True)

        # queue.Empty carries no message text
        assert not str(err.value)
184 |
185 |
def test_add_context_with_ack():
    """Test to send a filter-setting message with required ack"""
    queue = tqueue.Queue()

    # Force the ack to be "received" so add_context() takes the success path
    with patch("dlt.dlt_broker.DLTBroker._recv_filter_set_ack", return_value=True) as ack_mock:
        with dlt_broker(enable_filter_set_ack=True) as broker:
            ori_context_handler = broker.context_handler
            broker.context_handler = MagicMock()
            try:
                broker.add_context(queue, [("APID", "CTID")])

                broker.context_handler.register.assert_called()
                ack_mock.assert_called()
            finally:
                # Restore the real handler so broker.stop() can shut it down
                broker.context_handler = ori_context_handler
201 |
202 |
def test_add_context_with_ack_warning():
    """Test to send a filter-setting message but not received an ack"""
    queue = tqueue.Queue()

    # Force a missing ack and capture the warning that add_context() emits
    with patch("dlt.dlt_broker.DLTBroker._recv_filter_set_ack", return_value=False) as ack_mock, patch.object(
        logger, "warning"
    ) as logger_mock:
        with dlt_broker(enable_filter_set_ack=True) as broker:
            ori_context_handler = broker.context_handler
            broker.context_handler = MagicMock()
            try:
                broker.add_context(queue, [("APID", "CTID")])

                broker.context_handler.register.assert_called()
                ack_mock.assert_called()

                # Warning arguments: message, failure reason, filters, queue id
                logger_mock.assert_called_with(ANY, ANY, [("APID", "CTID")], id(queue))
            finally:
                # Restore the real handler so broker.stop() can shut it down
                broker.context_handler = ori_context_handler
222 |
223 |
def test_start_stop_dlt_filter_ack_msg_handler():
    """DLTFilterAckMessageHandler starts and then stops cleanly on context exit."""
    with dlt_filter_ack_msg_handler() as (handler, _unused_queue):
        pass

    # Leaving the context manager must have terminated the handler thread
    assert not handler.is_alive()
230 |
231 |
def test_dlt_filter_ack_msg_handler_register():
    """An ack value addressed to a registered queue is forwarded to it."""
    ack_queue = tqueue.Queue()

    with dlt_filter_ack_msg_handler() as (handler, msg_queue):
        handler.register(ack_queue)

        # Route a positive ack to the queue via its id
        msg_queue.put((id(ack_queue), True))
        assert ack_queue.get()
241 |
242 |
def test_dlt_filter_ack_msg_handler_unregister():
    """After unregistering, ack values are no longer forwarded to the queue."""
    ack_queue = tqueue.Queue()

    with dlt_filter_ack_msg_handler() as (handler, msg_queue):
        handler.register(ack_queue)

        handler.unregister(ack_queue)
        with pytest.raises(tqueue.Empty):
            # The handler drops the ack for the removed queue, so the
            # non-blocking read must come up empty
            msg_queue.put((id(ack_queue), False))
            ack_queue.get_nowait()
254 |
255 |
def test_make_send_filter_msg():
    """Without an ack queue the filter message is a (queue id, filters, flag) triple."""
    handler = DLTContextHandler(MagicMock(), MagicMock())

    ctx_queue = MagicMock()
    dlt_filters = [("APID", "CTID")]

    msg = handler._make_send_filter_msg(ctx_queue, dlt_filters, True)
    assert msg == (id(ctx_queue), dlt_filters, True)
265 |
266 |
def test_make_send_filter_msg_with_ack_queue():
    """With an ack queue, the filter message carries both queue ids."""
    handler = DLTContextHandler(MagicMock(), MagicMock())

    ctx_queue = MagicMock()
    ack_queue = MagicMock()
    dlt_filters = [("APID", "CTID")]

    msg = handler._make_send_filter_msg(ctx_queue, dlt_filters, True, context_filter_ack_queue=ack_queue)
    # 4-tuple: context queue id, ack queue id, filters, register flag
    assert msg == (id(ctx_queue), id(ack_queue), dlt_filters, True)
282 |
283 |
def test_dlt_message_handler_process_filter_queue_add():
    """Adding a filter from a 3-tuple message does not emit any ack."""
    dlt_filter = ("APID", "CTID")
    handler = fake_dlt_msg_handler(msg=(42, [dlt_filter], True), with_filter_ack_queue=True)

    handler._process_filter_queue()

    assert handler.context_map[dlt_filter] == [42]
    handler.filter_ack_queue.put.assert_not_called()
291 |
292 |
def test_dlt_message_handler_process_filter_queue_add_ack():
    """Adding a filter from a 4-tuple message emits a positive ack."""
    dlt_filter = ("APID", "CTID")
    handler = fake_dlt_msg_handler(msg=(42, 43, [dlt_filter], True), with_filter_ack_queue=True)

    handler._process_filter_queue()

    assert handler.context_map[dlt_filter] == [42]
    # The ack is keyed by the ack-queue id (43) and reports "added" (True)
    handler.filter_ack_queue.put.assert_called_with((43, True))
300 |
301 |
def test_dlt_message_handler_process_filter_queue_remove():
    """Removing a filter from a 3-tuple message does not emit any ack."""
    dlt_filter = ("APID", "CTID")
    handler = fake_dlt_msg_handler(msg=(42, [dlt_filter], False), with_filter_ack_queue=True)
    handler.context_map[dlt_filter].append(42)

    handler._process_filter_queue()

    assert dlt_filter not in handler.context_map
    handler.filter_ack_queue.put.assert_not_called()
311 |
312 |
def test_dlt_message_handler_process_filter_queue_remove_ack():
    """Removing a filter from a 4-tuple message emits a negative ack."""
    dlt_filter = ("APID", "CTID")
    handler = fake_dlt_msg_handler(msg=(42, 43, [dlt_filter], False), with_filter_ack_queue=True)
    handler.context_map[dlt_filter].append(42)

    handler._process_filter_queue()

    assert dlt_filter not in handler.context_map
    # The ack is keyed by the ack-queue id (43) and reports "removed" (False)
    handler.filter_ack_queue.put.assert_called_with((43, False))
322 |
323 |
def test_dlt_message_handler_process_filter_queue_remove_exception():
    """Removing a filter that does not exist must neither crash nor ack."""
    dlt_filter = ("APID", "CTID")
    handler = fake_dlt_msg_handler(msg=(42, [dlt_filter], False), with_filter_ack_queue=True)

    handler._process_filter_queue()

    assert not handler.context_map[dlt_filter]
    handler.filter_ack_queue.put.assert_not_called()
332 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 | # Ignore requiring existence of __init__.py to be able to have generic pylint .
3 | disable=F0010
4 | # Specify a configuration file.
5 | #rcfile=
6 |
7 | # Python code to execute, usually for sys.path manipulation such as
8 | # pygtk.require().
9 | #init-hook=
10 |
11 | # Add files or directories to the blacklist. They should be base names, not
12 | # paths.
13 | ignore=third_party,zuul.d,docs
14 |
15 | # Add files or directories matching the regex patterns to the blacklist. The
16 | # regex matches against base names, not paths.
17 | ignore-patterns=object_detection_grpc_client.py,prediction_pb2.py,prediction_pb2_grpc.py
18 |
19 | # Pickle collected data for later comparisons.
20 | persistent=no
21 |
22 | # List of plugins (as comma separated values of python modules names) to load,
23 | # usually to register additional checkers.
24 | load-plugins=
25 |
26 | # Use multiple processes to speed up Pylint.
27 | jobs=4
28 |
29 | # Allow loading of arbitrary C extensions. Extensions are imported into the
30 | # active Python interpreter and may run arbitrary code.
31 | unsafe-load-any-extension=no
32 |
33 | # A comma-separated list of package or module names from where C extensions may
34 | # be loaded. Extensions are loading into the active Python interpreter and may
35 | # run arbitrary code
36 | extension-pkg-whitelist=
37 |
38 |
39 | [MESSAGES CONTROL]
40 |
41 | # Only show warnings with the listed confidence levels. Leave empty to show
42 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
43 | confidence=
44 |
45 | # Enable the message, report, category or checker with the given id(s). You can
46 | # either give multiple identifier separated by comma (,) or put this option
47 | # multiple time (only on the command line, not in the configuration file where
48 | # it should appear only once). See also the "--disable" option for examples.
49 | #enable=
50 |
51 | # Disable the message, report, category or checker with the given id(s). You
52 | # can either give multiple identifiers separated by comma (,) or put this
53 | # option multiple times (only on the command line, not in the configuration
54 | # file where it should appear only once).You can also use "--disable=all" to
55 | # disable everything first and then reenable specific checks. For example, if
56 | # you want to run only the similarities checker, you can use "--disable=all
57 | # --enable=similarities". If you want to run only the classes checker, but have
58 | # no Warning level messages displayed, use "--disable=all --enable=classes
59 | # --disable=W"
60 | #
61 | # Kubeflow disables string-interpolation because we are starting to use f
62 | # style strings
63 |
64 | [REPORTS]
65 |
66 | # Set the output format. Available formats are text, parseable, colorized, msvs
67 | # (visual studio) and html. You can also give a reporter class, eg
68 | # mypackage.mymodule.MyReporterClass.
69 | output-format=json
70 |
71 | # Tells whether to display a full report or only the messages
72 | reports=no
73 |
74 | # Python expression which should return a note less than 10 (10 is the highest
75 | # note). You have access to the variables errors warning, statement which
76 | # respectively contain the number of errors / warnings messages and the total
77 | # number of statements analyzed. This is used by the global evaluation report
78 | # (RP0004).
79 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
80 |
81 | # Template used to display messages. This is a python new-style format string
82 | # used to format the message information. See doc for all details
83 | #msg-template=
84 |
85 |
86 | [BASIC]
87 |
88 | # Good variable names which should always be accepted, separated by a comma
89 | good-names=i,j,k,ex,Run,_
90 |
91 | # Bad variable names which should always be refused, separated by a comma
92 | bad-names=foo,bar,baz,toto,tutu,tata
93 |
94 | # Colon-delimited sets of names that determine each other's naming style when
95 | # the name regexes allow several styles.
96 | name-group=
97 |
98 | # Include a hint for the correct naming format with invalid-name
99 | include-naming-hint=no
100 |
101 | # List of decorators that produce properties, such as abc.abstractproperty. Add
102 | # to this list to register other decorators that produce valid properties.
103 | property-classes=abc.abstractproperty
104 |
105 | # Regular expression matching correct function names
106 | function-rgx=[a-z_][a-z0-9_]{2,30}$
107 |
108 | # Regular expression matching correct variable names
109 | variable-rgx=[a-z_][a-z0-9_]{2,30}$
110 |
111 | # Regular expression matching correct constant names
112 | const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
113 |
114 | # Regular expression matching correct attribute names
115 | attr-rgx=[a-z_][a-z0-9_]{2,30}$
116 |
117 | # Regular expression matching correct argument names
118 | argument-rgx=[a-z_][a-z0-9_]{2,30}$
119 |
120 | # Regular expression matching correct class attribute names
121 | class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
122 |
123 | # Regular expression matching correct inline iteration names
124 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
125 |
126 | # Regular expression matching correct class names
127 | class-rgx=[A-Z_][a-zA-Z0-9]+$
128 |
129 | # Regular expression matching correct module names
130 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
131 |
132 | # Regular expression matching correct method names
133 | method-rgx=[a-z_][a-z0-9_]{2,30}$
134 |
135 | # Regular expression which should only match function or class names that do
136 | # not require a docstring.
137 | no-docstring-rgx=^_
138 |
139 | # Minimum line length for functions/classes that require docstrings, shorter
140 | # ones are exempt.
141 | docstring-min-length=-1
142 |
143 |
144 | [ELIF]
145 |
146 | # Maximum number of nested blocks for function / method body
147 | max-nested-blocks=5
148 |
149 |
150 | [TYPECHECK]
151 |
152 | # Tells whether missing members accessed in mixin class should be ignored. A
153 | # mixin class is detected if its name ends with "mixin" (case insensitive).
154 | ignore-mixin-members=yes
155 |
156 | # List of module names for which member attributes should not be checked
157 | # (useful for modules/projects where namespaces are manipulated during runtime
158 | # and thus existing member attributes cannot be deduced by static analysis. It
159 | # supports qualified module names, as well as Unix pattern matching.
160 | ignored-modules=
161 |
162 | # List of class names for which member attributes should not be checked (useful
163 | # for classes with dynamically set attributes). This supports the use of
164 | # qualified names.
165 | ignored-classes=optparse.Values,thread._local,_thread._local
166 |
167 | # List of members which are set dynamically and missed by pylint inference
168 | # system, and so shouldn't trigger E1101 when accessed. Python regular
169 | # expressions are accepted.
170 | generated-members=
171 |
172 | # List of decorators that produce context managers, such as
173 | # contextlib.contextmanager. Add to this list to register other decorators that
174 | # produce valid context managers.
175 | contextmanager-decorators=contextlib.contextmanager
176 |
177 |
178 | [FORMAT]
179 |
180 | # Maximum number of characters on a single line.
181 | max-line-length=120
182 |
183 | # Regexp for a line that is allowed to be longer than the limit.
184 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$
185 |
186 | # Allow the body of an if to be on the same line as the test if there is no
187 | # else.
188 | single-line-if-stmt=no
189 |
190 | # Maximum number of lines in a module
191 | max-module-lines=1000
192 |
193 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
194 | # tab).
195 | # Use 2 spaces consistent with TensorFlow style.
196 | indent-string=' '
197 |
198 | # Number of spaces of indent required inside a hanging or continued line.
199 | indent-after-paren=4
200 |
201 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
202 | expected-line-ending-format=
203 |
204 |
205 | [MISCELLANEOUS]
206 |
207 | # List of note tags to take in consideration, separated by a comma.
208 | notes=FIXME,XXX,TODO
209 |
210 |
211 | [VARIABLES]
212 |
213 | # Tells whether we should check for unused import in __init__ files.
214 | init-import=no
215 |
216 | # A regular expression matching the name of dummy variables (i.e. expectedly
217 | # not used).
218 | dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
219 |
220 | # List of additional names supposed to be defined in builtins. Remember that
221 | # you should avoid to define new builtins when possible.
222 | additional-builtins=
223 |
224 | # List of strings which can identify a callback function by name. A callback
225 | # name must start or end with one of those strings.
226 | callbacks=cb_,_cb
227 |
228 | # List of qualified module names which can have objects that can redefine
229 | # builtins.
230 | redefining-builtins-modules=six.moves,future.builtins
231 |
232 |
233 | [LOGGING]
234 |
235 | # Logging modules to check that the string format arguments are in logging
236 | # function parameter format
237 | logging-modules=logging
238 |
239 |
240 | [SIMILARITIES]
241 |
242 | # Minimum lines number of a similarity.
243 | min-similarity-lines=4
244 |
245 | # Ignore comments when computing similarities.
246 | ignore-comments=yes
247 |
248 | # Ignore docstrings when computing similarities.
249 | ignore-docstrings=yes
250 |
251 | # Ignore imports when computing similarities.
252 | ignore-imports=no
253 |
254 |
255 | [SPELLING]
256 |
257 | # Spelling dictionary name. Available dictionaries: none. To make it working
258 | # install python-enchant package.
259 | spelling-dict=
260 |
261 | # List of comma separated words that should not be checked.
262 | spelling-ignore-words=
263 |
264 | # A path to a file that contains private dictionary; one word per line.
265 | spelling-private-dict-file=
266 |
267 | # Tells whether to store unknown words to indicated private dictionary in
268 | # --spelling-private-dict-file option instead of raising a message.
269 | spelling-store-unknown-words=no
270 |
271 |
272 | [IMPORTS]
273 |
274 | # Deprecated modules which should not be used, separated by a comma
275 | deprecated-modules=regsub,TERMIOS,Bastion,rexec
276 |
277 | # Create a graph of every (i.e. internal and external) dependencies in the
278 | # given file (report RP0402 must not be disabled)
279 | import-graph=
280 |
281 | # Create a graph of external dependencies in the given file (report RP0402 must
282 | # not be disabled)
283 | ext-import-graph=
284 |
285 | # Create a graph of internal dependencies in the given file (report RP0402 must
286 | # not be disabled)
287 | int-import-graph=
288 |
289 | # Force import order to recognize a module as part of the standard
290 | # compatibility libraries.
291 | known-standard-library=
292 |
293 | # Force import order to recognize a module as part of a third party library.
294 | known-third-party=enchant
295 |
296 | # Analyse import fallback blocks. This can be used to support both Python 2 and
297 | # 3 compatible code, which means that the block might have code that exists
298 | # only in one or another interpreter, leading to false positives when analysed.
299 | analyse-fallback-blocks=no
300 |
301 |
302 | [DESIGN]
303 |
304 | # Maximum number of arguments for function / method
305 | max-args=7
306 |
307 | # Argument names that match this expression will be ignored. Default to name
308 | # with leading underscore
309 | ignored-argument-names=_.*
310 |
311 | # Maximum number of locals for function / method body
312 | max-locals=15
313 |
314 | # Maximum number of return / yield for function / method body
315 | max-returns=6
316 |
317 | # Maximum number of branch for function / method body
318 | max-branches=12
319 |
320 | # Maximum number of statements in function / method body
321 | max-statements=50
322 |
323 | # Maximum number of parents for a class (see R0901).
324 | max-parents=7
325 |
326 | # Maximum number of attributes for a class (see R0902).
327 | max-attributes=7
328 |
329 | # Minimum number of public methods for a class (see R0903).
330 | min-public-methods=0
331 |
332 | # Maximum number of public methods for a class (see R0904).
333 | max-public-methods=20
334 |
335 | # Maximum number of boolean expressions in a if statement
336 | max-bool-expr=5
337 |
338 |
339 | [CLASSES]
340 |
341 | # List of method names used to declare (i.e. assign) instance attributes.
342 | defining-attr-methods=__init__,__new__,setUp
343 |
344 | # List of valid names for the first argument in a class method.
345 | valid-classmethod-first-arg=cls
346 |
347 | # List of valid names for the first argument in a metaclass class method.
348 | valid-metaclass-classmethod-first-arg=mcs
349 |
350 | # List of member names, which should be excluded from the protected access
351 | # warning.
352 | exclude-protected=_asdict,_fields,_replace,_source,_make
353 |
354 |
355 | [EXCEPTIONS]
356 |
357 | # Exceptions that will emit a warning when being caught. Defaults to
358 | # "Exception"
359 |
--------------------------------------------------------------------------------
/tests/dlt_file_spinner_unit_test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2023. BMW Car IT GmbH. All rights reserved.
2 | import logging
3 | from multiprocessing import Event, Queue
4 | import os
5 | import time
6 | import tempfile
7 | import unittest
8 | from queue import Empty
9 |
10 | from dlt.dlt_broker_handlers import DLTFileSpinner
11 | from tests.utils import (
12 | create_messages,
13 | stream_multiple,
14 | stream_with_params,
15 | append_stream_to_file,
16 | append_message_to_file,
17 | )
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 |
class TestDLTFileSpinner(unittest.TestCase):
    """Unit tests for DLTFileSpinner, which reads DLT messages from a file.

    The spinner runs in a separate process; the tests drive it via the shared
    filter_queue/message_queue and the multiprocessing stop event.
    """

    def setUp(self):
        self.filter_queue = Queue()
        self.message_queue = Queue()
        self.stop_event = Event()
        # Dlt file is created with empty content
        # NOTE(review): the fd returned by mkstemp is never closed here -
        # confirm leaking it for the test's lifetime is acceptable
        _, self.dlt_file_name = tempfile.mkstemp(suffix=b".dlt")
        self.dlt_file_spinner = DLTFileSpinner(
            self.filter_queue, self.message_queue, self.stop_event, self.dlt_file_name
        )
        # dispatched_messages from DLTFileSpinner.message_queue
        self.dispatched_messages = []

    def tearDown(self):
        # Stop the spinner process (if a test left it running) and remove the
        # temporary DLT file
        if self.dlt_file_spinner.is_alive():
            self.dlt_file_spinner.break_blocking_main_loop()
            self.stop_event.set()
            self.dlt_file_spinner.join()
        if os.path.exists(self.dlt_file_name):
            os.remove(self.dlt_file_name)

    def test_init(self):
        """A freshly constructed spinner is not running and its queues are empty."""
        self.assertFalse(self.dlt_file_spinner.mp_stop_flag.is_set())
        self.assertFalse(self.dlt_file_spinner.is_alive())
        self.assertTrue(self.dlt_file_spinner.filter_queue.empty())
        self.assertTrue(self.dlt_file_spinner.message_queue.empty())

    def test_run_basic_without_dlt_file(self):
        """The spinner starts and stops cleanly even when the DLT file is missing."""
        # Delete the created dlt file
        os.remove(self.dlt_file_name)

        self.assertFalse(self.dlt_file_spinner.is_alive())
        self.dlt_file_spinner.start()
        self.assertTrue(self.dlt_file_spinner.is_alive())
        self.assertNotEqual(self.dlt_file_spinner.pid, os.getpid())
        # DLT file does NOT exist
        self.assertFalse(os.path.exists(self.dlt_file_spinner.file_name))

        self.dlt_file_spinner.break_blocking_main_loop()
        self.stop_event.set()
        self.dlt_file_spinner.join()
        self.assertFalse(self.dlt_file_spinner.is_alive())

    def test_run_basic_with_empty_dlt_file(self):
        """An empty DLT file produces no dispatched messages."""
        self.assertFalse(self.dlt_file_spinner.is_alive())
        self.dlt_file_spinner.start()
        self.assertTrue(self.dlt_file_spinner.is_alive())
        self.assertNotEqual(self.dlt_file_spinner.pid, os.getpid())
        # dlt_reader is instantiated and keeps alive
        self.assertTrue(os.path.exists(self.dlt_file_spinner.file_name))
        # Expect no dlt log is dispatched
        time.sleep(2)
        self.assertTrue(self.dlt_file_spinner.message_queue.empty())
        # First stop dlt reader, then stop DLTFileSpinner
        self.dlt_file_spinner.break_blocking_main_loop()
        self.stop_event.set()
        self.dlt_file_spinner.join()
        self.assertFalse(self.dlt_file_spinner.is_alive())

    def test_handle_add_new_filter(self):
        """Registering a filter records the requesting queue id in the context map."""
        self.dlt_file_spinner.filter_queue.put(("queue_id", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertEqual(self.dlt_file_spinner.context_map[("SYS", "JOUR")], ["queue_id"])

    def test_handle_remove_filter_single_entry(self):
        """Unregistering the only queue for a filter removes the filter entirely."""
        self.dlt_file_spinner.filter_queue.put(("queue_id", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertEqual(self.dlt_file_spinner.context_map[("SYS", "JOUR")], ["queue_id"])

        self.dlt_file_spinner.filter_queue.put(("queue_id", [("SYS", "JOUR")], False))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertNotIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)

    def test_handle_remove_filter_multiple_entries(self):
        """Unregistering one of several queues keeps the filter for the rest."""
        self.dlt_file_spinner.filter_queue.put(("queue_id1", [("SYS", "JOUR")], True))
        self.dlt_file_spinner.filter_queue.put(("queue_id2", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertEqual(self.dlt_file_spinner.context_map[("SYS", "JOUR")], ["queue_id1", "queue_id2"])

        self.dlt_file_spinner.filter_queue.put(("queue_id1", [("SYS", "JOUR")], False))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertEqual(self.dlt_file_spinner.context_map[("SYS", "JOUR")], ["queue_id2"])

    def test_handle_multiple_similar_filters(self):
        """Two queues registering the same filter are both recorded, in order."""
        self.dlt_file_spinner.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.dlt_file_spinner.filter_queue.put(("queue_id1", [("SYS", "JOUR")], True))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertEqual(self.dlt_file_spinner.context_map[("SYS", "JOUR")], ["queue_id0", "queue_id1"])

    def test_handle_multiple_different_filters(self):
        """Different filters from different queues end up in separate entries."""
        self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True))
        time.sleep(0.01)
        self.dlt_file_spinner.handle(None)
        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertIn(("DA1", "DC1"), self.dlt_file_spinner.context_map)
        self.assertEqual(self.dlt_file_spinner.context_map[("SYS", "JOUR")], ["queue_id0"])
        self.assertEqual(self.dlt_file_spinner.context_map[("DA1", "DC1")], ["queue_id1"])

    def test_handle_message_tag_and_distribute(self):
        """Handled messages are distributed to every queue whose filter matches."""
        self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True))
        self.filter_queue.put(("queue_id2", [("SYS", None)], True))
        self.filter_queue.put(("queue_id3", [(None, "DC1")], True))
        self.filter_queue.put(("queue_id4", [(None, None)], True))
        time.sleep(0.01)

        # - simulate receiving of messages
        for _ in range(10):
            for message in create_messages(stream_multiple, from_file=True):
                self.dlt_file_spinner.handle(message)

        self.assertIn(("SYS", "JOUR"), self.dlt_file_spinner.context_map)
        self.assertIn(("DA1", "DC1"), self.dlt_file_spinner.context_map)
        self.assertIn((None, None), self.dlt_file_spinner.context_map)
        self.assertIn(("SYS", None), self.dlt_file_spinner.context_map)
        self.assertIn((None, "DC1"), self.dlt_file_spinner.context_map)
        try:
            # 60 == 10 messages each for queue_id0..queue_id3 +
            # 20 for the catch-all (None, None) queue
            messages = [self.message_queue.get(timeout=0.01) for _ in range(60)]

            # these queues should not get any messages from other queues
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id0"]), 10)
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id1"]), 10)
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id2"]), 10)
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id3"]), 10)
            # this queue should get all messages
            self.assertEqual(len([msg for qid, msg in messages if qid == "queue_id4"]), 20)
        except Empty:
            # - we should not get an Empty for the expected 60 messages
            self.fail()

    def _update_dispatch_messages_from_dlt_file_spinner(self):
        """Drain up to 60 entries from the spinner's message_queue into
        self.dispatched_messages, skipping entries whose message payload equals
        the last collected one."""
        for index in range(60):
            try:
                message = self.dlt_file_spinner.message_queue.get(timeout=0.01)
                if not self.dispatched_messages or message[1] != self.dispatched_messages[-1][1]:
                    self.dispatched_messages.append(message)
            except: # noqa: E722
                pass

    def test_run_with_writing_to_file(self):
        """
        Test with real dlt file, which is written at runtime

        1. set filter_queue properly, so that the handled messages could be added to message_queue later
        2. start DLTFileSpinner
           At this moment, no messages are written to dlt file, so no messages in DLTFileSpinner.message_queue
        3. write 2 sample messages to dlt file
           Expectation: we could dispatch 2 messages from DLTFileSpinner.message_queue
        4. stop DLTFileSpinner
        """
        # 1. set filter_queue properly, so that the handled messages could be added to message_queue later
        self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True))
        self.filter_queue.put(("queue_id2", [("SYS", None)], True))
        self.filter_queue.put(("queue_id3", [(None, "DC1")], True))
        self.filter_queue.put(("queue_id4", [(None, None)], True))
        time.sleep(0.01)
        # 2. start DLTFileSpinner
        self.assertFalse(self.dlt_file_spinner.is_alive())
        self.dlt_file_spinner.start()
        self.assertTrue(self.dlt_file_spinner.is_alive())
        self.assertNotEqual(self.dlt_file_spinner.pid, os.getpid())
        # dlt_reader is instantiated and keeps alive
        self.assertTrue(os.path.exists(self.dlt_file_spinner.file_name))
        # With empty file content, no messages are dispatched to message_queue
        time.sleep(2)
        self.assertTrue(self.dlt_file_spinner.message_queue.empty())
        # 3. write 2 sample messages to dlt file
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        # Expect the written dlt logs are dispatched to message_queue
        self._update_dispatch_messages_from_dlt_file_spinner()
        self.assertEqual(2, len(self.dispatched_messages))
        # 4. stop DLTFileSpinner
        self.dlt_file_spinner.break_blocking_main_loop()
        self.stop_event.set()
        self.dlt_file_spinner.join()
        self.assertFalse(self.dlt_file_spinner.is_alive())

    def test_run_with_writing_to_file_twice(self):
        """
        Test with real dlt file, which is written at runtime 2 times

        1. set filter_queue properly, so that the handled messages could be added to message_queue later
        2. start DLTFileSpinner
        3. write 2 sample messages to dlt file
           Expectation: we could dispatch 2 messages from DLTFileSpinner.message_queue
        4. append 1 sample message to dlt file
           Expectation: we could dispatch 3 messages from DLTFileSpinner.message_queue
        5. stop DLTFileSpinner
        """
        # 1. set filter_queue properly, so that the handled messages could be added to message_queue later
        self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True))
        self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True))
        self.filter_queue.put(("queue_id2", [("SYS", None)], True))
        self.filter_queue.put(("queue_id3", [(None, "DC1")], True))
        self.filter_queue.put(("queue_id4", [(None, None)], True))
        time.sleep(0.01)
        # 2. start DLTFileSpinner
        self.assertFalse(self.dlt_file_spinner.is_alive())
        self.dlt_file_spinner.start()
        self.assertTrue(self.dlt_file_spinner.is_alive())
        self.assertNotEqual(self.dlt_file_spinner.pid, os.getpid())
        # dlt_reader is instantiated and keeps alive
        self.assertTrue(os.path.exists(self.dlt_file_spinner.file_name))
        # With empty file content, no messages are dispatched to message_queue
        time.sleep(2)
        self.assertTrue(self.dlt_file_spinner.message_queue.empty())
        # 3. write 2 sample messages to dlt file
        append_stream_to_file(stream_multiple, self.dlt_file_name)
        # Expect the written dlt logs are dispatched to message_queue
        self._update_dispatch_messages_from_dlt_file_spinner()
        self.assertEqual(2, len(self.dispatched_messages))
        # 4. append 1 sample message to dlt file
        append_stream_to_file(stream_with_params, self.dlt_file_name)
        self._update_dispatch_messages_from_dlt_file_spinner()
        self.assertEqual(3, len(self.dispatched_messages))
        # 5. stop DLTFileSpinner
        self.dlt_file_spinner.break_blocking_main_loop()
        self.stop_event.set()
        self.dlt_file_spinner.join()
        self.assertFalse(self.dlt_file_spinner.is_alive())

    def test_run_with_writing_empty_apid_ctid_to_file(self):
        """
        Test with real dlt file, which contains message with apid=b"" and ctid=b""

        1. set filter_queue properly, so that the handled messages could be added to message_queue later
        2. start DLTFileSpinner
           At this moment, no messages are written to dlt file, so no messages in DLTFileSpinner.message_queue
        3. write message with apid=b"" and ctid=b"" to dlt file
           Expectation: we could dispatch 1 message from DLTFileSpinner.message_queue
           and, apid==b"" and ctid==b""
        4. stop DLTFileSpinner
        """
        # 1. set filter_queue properly, so that the handled messages could be added to message_queue later
        self.filter_queue.put(("queue_id0", [(None, None)], True))
        time.sleep(0.01)
        # 2. start DLTFileSpinner
        self.assertFalse(self.dlt_file_spinner.is_alive())
        self.dlt_file_spinner.start()
        self.assertTrue(self.dlt_file_spinner.is_alive())
        self.assertNotEqual(self.dlt_file_spinner.pid, os.getpid())
        # dlt_reader is instantiated and keeps alive
        self.assertTrue(os.path.exists(self.dlt_file_spinner.file_name))
        # With empty file content, no messages are dispatched to message_queue
        time.sleep(2)
        self.assertTrue(self.dlt_file_spinner.message_queue.empty())
        # 3. write a message to dlt file
        # Construct a message with apid==b"" and ctid==b""
        message = create_messages(stream_with_params, from_file=True)[0]
        message.extendedheader.apid = b""
        message.extendedheader.ctid = b""
        # Write this message into dlt file
        append_message_to_file(message, self.dlt_file_name)
        # Expect the written dlt logs are dispatched to message_queue
        self._update_dispatch_messages_from_dlt_file_spinner()
        self.assertEqual(1, len(self.dispatched_messages))
        # Expectation: the received message should have apid==b"" and ctid==b""
        self.assertEqual("", self.dispatched_messages[0][1].apid)
        self.assertEqual("", self.dispatched_messages[0][1].ctid)
        # 4. stop DLTFileSpinner
        self.dlt_file_spinner.break_blocking_main_loop()
        self.stop_event.set()
        self.dlt_file_spinner.join()
        self.assertFalse(self.dlt_file_spinner.is_alive())
301 |
--------------------------------------------------------------------------------
/LICENCE.txt:
--------------------------------------------------------------------------------
1 | Mozilla Public License Version 2.0
2 |
3 | 1. Definitions
4 |
5 | 1.1. "Contributor" means each individual or legal entity that creates, contributes
6 | to the creation of, or owns Covered Software.
7 |
8 | 1.2. "Contributor Version" means the combination of the Contributions of others
9 | (if any) used by a Contributor and that particular Contributor's Contribution.
10 |
11 | 1.3. "Contribution" means Covered Software of a particular Contributor.
12 |
13 | 1.4. "Covered Software" means Source Code Form to which the initial Contributor
14 | has attached the notice in Exhibit A, the Executable Form of such Source Code
15 | Form, and Modifications of such Source Code Form, in each case including portions
16 | thereof.
17 |
18 | 1.5. "Incompatible With Secondary Licenses" means
19 |
20 | (a) that the initial Contributor has attached the notice described in Exhibit
21 | B to the Covered Software; or
22 |
23 | (b) that the Covered Software was made available under the terms of version
24 | 1.1 or earlier of the License, but not also under the terms of a Secondary
25 | License.
26 |
27 | 1.6. "Executable Form" means any form of the work other than Source Code Form.
28 |
29 | 1.7. "Larger Work" means a work that combines Covered Software with other
30 | material, in a separate file or files, that is not Covered Software.
31 |
32 | 1.8. "License" means this document.
33 |
34 | 1.9. "Licensable" means having the right to grant, to the maximum extent possible,
35 | whether at the time of the initial grant or subsequently, any and all of the
36 | rights conveyed by this License.
37 |
38 | 1.10. "Modifications" means any of the following:
39 |
40 | (a) any file in Source Code Form that results from an addition to, deletion
41 | from, or modification of the contents of Covered Software; or
42 |
43 | (b) any new file in Source Code Form that contains any Covered Software.
44 |
45 | 1.11. "Patent Claims" of a Contributor means any patent claim(s), including
46 | without limitation, method, process, and apparatus claims, in any patent Licensable
47 | by such Contributor that would be infringed, but for the grant of the License,
48 | by the making, using, selling, offering for sale, having made, import, or
49 | transfer of either its Contributions or its Contributor Version.
50 |
51 | 1.12. "Secondary License" means either the GNU General Public License, Version
52 | 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General
53 | Public License, Version 3.0, or any later versions of those licenses.
54 |
55 | 1.13. "Source Code Form" means the form of the work preferred for making modifications.
56 |
57 | 1.14. "You" (or "Your") means an individual or a legal entity exercising rights
58 | under this License. For legal entities, "You" includes any entity that controls,
59 | is controlled by, or is under common control with You. For purposes of this
60 | definition, "control" means (a) the power, direct or indirect, to cause the
61 | direction or management of such entity, whether by contract or otherwise,
62 | or (b) ownership of more than fifty percent (50%) of the outstanding shares
63 | or beneficial ownership of such entity.
64 |
65 | 2. License Grants and Conditions
66 |
67 | 2.1. Grants
68 |
69 | Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive
70 | license:
71 |
72 | (a) under intellectual property rights (other than patent or trademark) Licensable
73 | by such Contributor to use, reproduce, make available, modify, display, perform,
74 | distribute, and otherwise exploit its Contributions, either on an unmodified
75 | basis, with Modifications, or as part of a Larger Work; and
76 |
77 | (b) under Patent Claims of such Contributor to make, use, sell, offer for
78 | sale, have made, import, and otherwise transfer either its Contributions or
79 | its Contributor Version.
80 |
81 | 2.2. Effective Date
82 |
83 | The licenses granted in Section 2.1 with respect to any Contribution become
84 | effective for each Contribution on the date the Contributor first distributes
85 | such Contribution.
86 |
87 | 2.3. Limitations on Grant Scope
88 |
89 | The licenses granted in this Section 2 are the only rights granted under this
90 | License. No additional rights or licenses will be implied from the distribution
91 | or licensing of Covered Software under this License. Notwithstanding Section
92 | 2.1(b) above, no patent license is granted by a Contributor:
93 |
94 | (a) for any code that a Contributor has removed from Covered Software; or
95 |
96 | (b) for infringements caused by: (i) Your and any other third party's modifications
97 | of Covered Software, or (ii) the combination of its Contributions with other
98 | software (except as part of its Contributor Version); or
99 |
100 | (c) under Patent Claims infringed by Covered Software in the absence of its
101 | Contributions.
102 |
103 | This License does not grant any rights in the trademarks, service marks, or
104 | logos of any Contributor (except as may be necessary to comply with the notice
105 | requirements in Section 3.4).
106 |
107 | 2.4. Subsequent Licenses
108 |
109 | No Contributor makes additional grants as a result of Your choice to distribute
110 | the Covered Software under a subsequent version of this License (see Section
111 | 10.2) or under the terms of a Secondary License (if permitted under the terms
112 | of Section 3.3).
113 |
114 | 2.5. Representation
115 |
116 | Each Contributor represents that the Contributor believes its Contributions
117 | are its original creation(s) or it has sufficient rights to grant the rights
118 | to its Contributions conveyed by this License.
119 |
120 | 2.6. Fair Use
121 |
122 | This License is not intended to limit any rights You have under applicable
123 | copyright doctrines of fair use, fair dealing, or other equivalents.
124 |
125 | 2.7. Conditions
126 |
127 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
128 | Section 2.1.
129 |
130 | 3. Responsibilities
131 |
132 | 3.1. Distribution of Source Form
133 |
134 | All distribution of Covered Software in Source Code Form, including any Modifications
135 | that You create or to which You contribute, must be under the terms of this
136 | License. You must inform recipients that the Source Code Form of the Covered
137 | Software is governed by the terms of this License, and how they can obtain
138 | a copy of this License. You may not attempt to alter or restrict the recipients'
139 | rights in the Source Code Form.
140 |
141 | 3.2. Distribution of Executable Form
142 |
143 | If You distribute Covered Software in Executable Form then:
144 |
145 | (a) such Covered Software must also be made available in Source Code Form,
146 | as described in Section 3.1, and You must inform recipients of the Executable
147 | Form how they can obtain a copy of such Source Code Form by reasonable means
148 | in a timely manner, at a charge no more than the cost of distribution to the
149 | recipient; and
150 |
151 | (b) You may distribute such Executable Form under the terms of this License,
152 | or sublicense it under different terms, provided that the license for the
153 | Executable Form does not attempt to limit or alter the recipients' rights
154 | in the Source Code Form under this License.
155 |
156 | 3.3. Distribution of a Larger Work
157 |
158 | You may create and distribute a Larger Work under terms of Your choice, provided
159 | that You also comply with the requirements of this License for the Covered
160 | Software. If the Larger Work is a combination of Covered Software with a work
161 | governed by one or more Secondary Licenses, and the Covered Software is not
162 | Incompatible With Secondary Licenses, this License permits You to additionally
163 | distribute such Covered Software under the terms of such Secondary License(s),
164 | so that the recipient of the Larger Work may, at their option, further distribute
165 | the Covered Software under the terms of either this License or such Secondary
166 | License(s).
167 |
168 | 3.4. Notices
169 |
170 | You may not remove or alter the substance of any license notices (including
171 | copyright notices, patent notices, disclaimers of warranty, or limitations
172 | of liability) contained within the Source Code Form of the Covered Software,
173 | except that You may alter any license notices to the extent required to remedy
174 | known factual inaccuracies.
175 |
176 | 3.5. Application of Additional Terms
177 |
178 | You may choose to offer, and to charge a fee for, warranty, support, indemnity
179 | or liability obligations to one or more recipients of Covered Software. However,
180 | You may do so only on Your own behalf, and not on behalf of any Contributor.
181 | You must make it absolutely clear that any such warranty, support, indemnity,
182 | or liability obligation is offered by You alone, and You hereby agree to indemnify
183 | every Contributor for any liability incurred by such Contributor as a result
184 | of warranty, support, indemnity or liability terms You offer. You may include
185 | additional disclaimers of warranty and limitations of liability specific to
186 | any jurisdiction.
187 |
188 | 4. Inability to Comply Due to Statute or Regulation
189 |
190 | If it is impossible for You to comply with any of the terms of this License
191 | with respect to some or all of the Covered Software due to statute, judicial
192 | order, or regulation then You must: (a) comply with the terms of this License
193 | to the maximum extent possible; and (b) describe the limitations and the code
194 | they affect. Such description must be placed in a text file included with
195 | all distributions of the Covered Software under this License. Except to the
196 | extent prohibited by statute or regulation, such description must be sufficiently
197 | detailed for a recipient of ordinary skill to be able to understand it.
198 |
199 | 5. Termination
200 |
201 | 5.1. The rights granted under this License will terminate automatically if
202 | You fail to comply with any of its terms. However, if You become compliant,
203 | then the rights granted under this License from a particular Contributor are
204 | reinstated (a) provisionally, unless and until such Contributor explicitly
205 | and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor
206 | fails to notify You of the non-compliance by some reasonable means prior to
207 | 60 days after You have come back into compliance. Moreover, Your grants from
208 | a particular Contributor are reinstated on an ongoing basis if such Contributor
209 | notifies You of the non-compliance by some reasonable means, this is the first
210 | time You have received notice of non-compliance with this License from such
211 | Contributor, and You become compliant prior to 30 days after Your receipt
212 | of the notice.
213 |
214 | 5.2. If You initiate litigation against any entity by asserting a patent infringement
215 | claim (excluding declaratory judgment actions, counter-claims, and cross-claims)
216 | alleging that a Contributor Version directly or indirectly infringes any patent,
217 | then the rights granted to You by any and all Contributors for the Covered
218 | Software under Section 2.1 of this License shall terminate.
219 |
220 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end
221 | user license agreements (excluding distributors and resellers) which have
222 | been validly granted by You or Your distributors under this License prior
223 | to termination shall survive termination.
224 |
225 | 6. Disclaimer of Warranty
226 |
227 | Covered Software is provided under this License on an "as is" basis, without
228 | warranty of any kind, either expressed, implied, or statutory, including,
229 | without limitation, warranties that the Covered Software is free of defects,
230 | merchantable, fit for a particular purpose or non-infringing. The entire risk
231 | as to the quality and performance of the Covered Software is with You. Should
232 | any Covered Software prove defective in any respect, You (not any Contributor)
233 | assume the cost of any necessary servicing, repair, or correction. This disclaimer
234 | of warranty constitutes an essential part of this License. No use of any Covered
235 | Software is authorized under this License except under this disclaimer.
236 |
237 | 7. Limitation of Liability
238 |
239 | Under no circumstances and under no legal theory, whether tort (including
240 | negligence), contract, or otherwise, shall any Contributor, or anyone who
241 | distributes Covered Software as permitted above, be liable to You for any
242 | direct, indirect, special, incidental, or consequential damages of any character
243 | including, without limitation, damages for lost profits, loss of goodwill,
244 | work stoppage, computer failure or malfunction, or any and all other commercial
245 | damages or losses, even if such party shall have been informed of the possibility
246 | of such damages. This limitation of liability shall not apply to liability
247 | for death or personal injury resulting from such party's negligence to the
248 | extent applicable law prohibits such limitation. Some jurisdictions do not
249 | allow the exclusion or limitation of incidental or consequential damages,
250 | so this exclusion and limitation may not apply to You.
251 |
252 | 8. Litigation
253 |
254 | Any litigation relating to this License may be brought only in the courts
255 | of a jurisdiction where the defendant maintains its principal place of business
256 | and such litigation shall be governed by laws of that jurisdiction, without
257 | reference to its conflict-of-law provisions. Nothing in this Section shall
258 | prevent a party's ability to bring cross-claims or counter-claims.
259 |
260 | 9. Miscellaneous
261 |
262 | This License represents the complete agreement concerning the subject matter
263 | hereof. If any provision of this License is held to be unenforceable, such
264 | provision shall be reformed only to the extent necessary to make it enforceable.
265 | Any law or regulation which provides that the language of a contract shall
266 | be construed against the drafter shall not be used to construe this License
267 | against a Contributor.
268 |
269 | 10. Versions of the License
270 |
271 | 10.1. New Versions
272 |
273 | Mozilla Foundation is the license steward. Except as provided in Section 10.3,
274 | no one other than the license steward has the right to modify or publish new
275 | versions of this License. Each version will be given a distinguishing version
276 | number.
277 |
278 | 10.2. Effect of New Versions
279 |
280 | You may distribute the Covered Software under the terms of the version of
281 | the License under which You originally received the Covered Software, or under
282 | the terms of any subsequent version published by the license steward.
283 |
284 | 10.3. Modified Versions
285 |
286 | If you create software not governed by this License, and you want to create
287 | a new license for such software, you may create and use a modified version
288 | of this License if you rename the license and remove any references to the
289 | name of the license steward (except to note that such modified license differs
290 | from this License).
291 |
292 | 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
293 |
294 | If You choose to distribute Source Code Form that is Incompatible With Secondary
295 | Licenses under the terms of this version of the License, the notice described
296 | in Exhibit B of this License must be attached. Exhibit A - Source Code Form
297 | License Notice
298 |
299 | This Source Code Form is subject to the terms of the Mozilla Public License,
300 | v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
301 | one at http://mozilla.org/MPL/2.0/.
302 |
303 | If it is not possible or desirable to put the notice in a particular file,
304 | then You may include the notice in a location (such as a LICENSE file in a
305 | relevant directory) where a recipient would be likely to look for such a notice.
306 |
307 | You may add additional accurate notices of copyright ownership.
308 |
309 | Exhibit B - "Incompatible With Secondary Licenses" Notice
310 |
311 | This Source Code Form is "Incompatible With Secondary Licenses", as defined
312 | by the Mozilla Public License, v. 2.0.
313 |
--------------------------------------------------------------------------------
/dlt/dlt_broker_handlers.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved.
2 | """Handlers are classes that assist dlt_broker in receiving and
3 | filtering DLT messages
4 | """
5 | from abc import ABC, abstractmethod
6 | from collections import defaultdict
7 | import ctypes
8 | import logging
9 | from multiprocessing import Lock, Process, Value
10 | from queue import Empty
11 | import socket
12 | import time
13 | from threading import Thread, Event
14 |
15 | from dlt.dlt import (
16 | DLTClient,
17 | DLT_DAEMON_TCP_PORT,
18 | DLT_UDP_MULTICAST_BUFFER_SIZE,
19 | DLT_UDP_MULTICAST_FD_BUFFER_SIZE,
20 | cDLT_FILE_NOT_OPEN_ERROR,
21 | load,
22 | py_dlt_client_main_loop,
23 | py_dlt_file_main_loop,
24 | )
25 |
26 |
27 | DLT_CLIENT_TIMEOUT = 5
28 | logger = logging.getLogger(__name__) # pylint: disable=invalid-name
29 |
30 |
class DLTTimeValue(object):
    """Shared-memory cell for passing the latest DLT timestamp between processes.

    The dlt time is stored as a C double in shared memory. Several mechanisms
    were considered for sending the value from DLTMessageHandler (a separate
    process) to DLTBroker. Because DLTMessageHandler has to publish the value
    many times per second, a lightweight solution is a must.

    khiz678 studied and evaluated the following candidates:
    - multiprocessing.Queue (Queue in the following discussion)
    - multiprocessing.Pipe (Pipe in the following discussion)
    - multiprocessing.Value (Value in the following discussion)

    Value is the final solution. Queue's implementation is based on Pipe (in
    cpython), and a Queue- or Pipe-based solution would require an extra
    thread in the DLTBroker process just to receive and store the value in a
    local variable. The Value-based solution has no such problem: it simply
    assigns into the shared memory directly.

    A simple benchmark of the Value solution showed it could receive more than
    100000 timestamps per second — about twice as fast as the Pipe-based
    implementation.
    """

    def __init__(self, default_value=0.0):
        # Shared C double, protected by the Value's built-in lock.
        self._timestamp_mem = Value(ctypes.c_double, default_value)

    @property
    def timestamp(self):
        """Seconds since 1970/1/1 0:00:00 (epoch)

        :rtype: float
        """
        lock = self._timestamp_mem.get_lock()
        with lock:
            return self._timestamp_mem.value

    @timestamp.setter
    def timestamp(self, new_timestamp):
        lock = self._timestamp_mem.get_lock()
        with lock:
            self._timestamp_mem.value = new_timestamp
71 |
72 |
class DLTFilterAckMessageHandler(Thread):
    """Thread that receives filter-set ack messages and forwards each to its registered ack queue"""

    def __init__(self, filter_ack_queue):  # (multiprocessing.Queue[Tuple[ContextQueueID, bool]]) -> None
        super(DLTFilterAckMessageHandler, self).__init__()

        self.filter_ack_queue = filter_ack_queue

        self.stop_flag = Event()

        self.context_map_lock = Lock()
        # Dict[ContextQueueID, Queue.Queue[bool]]
        self.context_map = {}

    def stop(self):
        """Stops thread execution"""
        self.stop_flag.set()
        # Wake up a run() that is blocked in filter_ack_queue.get()
        self.filter_ack_queue.put((None, None))
        if self.is_alive():
            self.join()

    def register(self, filter_ack_queue):  # (Queue.Queue[bool]) -> None
        """Register an ack queue"""
        with self.context_map_lock:
            self.context_map[id(filter_ack_queue)] = filter_ack_queue

    def unregister(self, filter_ack_queue):  # (Queue.Queue[bool]) -> None
        """Unregister an ack queue"""
        with self.context_map_lock:
            self.context_map.pop(id(filter_ack_queue), None)

    def run(self):
        """Run the thread to receive each ack message and dispatch it"""
        while not self.stop_flag.is_set():
            queue_ack_id, enable = self.filter_ack_queue.get()

            # Falsy id is the wake-up sentinel sent by stop()
            if not queue_ack_id:
                continue

            with self.context_map_lock:
                target_queue = self.context_map.get(queue_ack_id)
                if target_queue is not None:
                    target_queue.put(enable)
                else:
                    logger.warning("Could not send an ack to the queue: %s", queue_ack_id)
119 |
120 |
class DLTContextHandler(Thread):
    """Communication layer between the DLTContext instances and
    DLTMessageHandler Process.

    This class handles the transfer of messages between the process
    receiving traces from the DLT Daemon and the DLTContext queues.
    """

    def __init__(self, filter_queue, message_queue):
        """
        :param Queue filter_queue: queue on which filter (un)register requests are sent
        :param Queue message_queue: queue carrying (queue_id, message) pairs from the dispatcher
        """
        super(DLTContextHandler, self).__init__()
        self.stop_flag = Event()
        self.context_map = {}  # queue_id -> (queue, filters)
        self.lock = Lock()
        self.filter_queue = filter_queue
        self.message_queue = message_queue

    def _make_send_filter_msg(self, queue, filters, is_register, context_filter_ack_queue=None):
        """Build a filter (un)register message for the filter queue

        :param Queue queue: the context queue the filters apply to
        :param list filters: list of (apid, ctid) pairs
        :param bool is_register: True to register, False to unregister
        :param Queue context_filter_ack_queue: optional queue on which an ack is expected;
            when given, its id is included so the ack can be routed back
        :returns: tuple to put on the filter queue
        :rtype: tuple
        """
        queue_id = id(queue)

        if context_filter_ack_queue:
            return queue_id, id(context_filter_ack_queue), filters, is_register

        return queue_id, filters, is_register

    def register(self, queue, filters=None, context_filter_ack_queue=None):
        """Register a queue to collect messages matching specific filters

        :param Queue queue: The new queue to add
        :param list filters: A list of (apid, ctid) tuples used to filter
            messages that go into this queue.
        :param Queue context_filter_ack_queue: optional queue on which an
            acknowledgement of the registration is received
        """
        if filters is None:
            filters = [(None, None)]

        queue_id = id(queue)  # - unique identifier for this queue
        with self.lock:
            self.context_map[queue_id] = (queue, filters)

        # - inform the DLTMessageHandler process about this new
        # (queue, filter) pair
        self.filter_queue.put(
            self._make_send_filter_msg(queue, filters, True, context_filter_ack_queue=context_filter_ack_queue)
        )

    def unregister(self, queue, context_filter_ack_queue=None):
        """Remove a queue from set of queues being handled

        :param Queue queue: The queue to remove
        :param Queue context_filter_ack_queue: optional queue on which an
            acknowledgement of the removal is received
        """
        queue_id = id(queue)
        # Look up and delete atomically under the lock so a concurrent
        # register()/unregister() cannot interleave between the two steps.
        with self.lock:
            _, filters = self.context_map.pop(queue_id, (None, None))

        if filters:
            # - inform the DLTMessageHandler process about removal of this
            # (queue, filter) pair
            self.filter_queue.put(
                self._make_send_filter_msg(queue, filters, False, context_filter_ack_queue=context_filter_ack_queue)
            )

    def run(self):
        """The thread's main loop: route incoming messages to their context queues"""
        while not self.stop_flag.is_set():
            queue_id, message = None, None
            try:
                if self.message_queue.full():
                    logger.error("message_queue is full ! put() on this queue will block")
                queue_id, message = self.message_queue.get_nowait()
            except Empty:
                pass

            if message:
                # Unknown queue_id (context already unregistered) drops the message
                queue, _ = self.context_map.get(queue_id, (None, None))
                if queue:
                    queue.put(message)
            else:
                # Nothing pending; back off briefly to avoid busy-waiting
                time.sleep(0.01)

    def stop(self):
        """Stops thread execution"""
        self.stop_flag.set()
        self.filter_queue.close()
        if self.is_alive():
            self.join()
210 |
211 |
class DLTMessageDispatcherBase(ABC, Process):
    """Base class for different dlt message dispatchers

    The derived class could dispatch dlt messages from dlt-daemon, or from at-runtime written file.
    """

    def __init__(self, filter_queue, message_queue, mp_stop_event, dlt_time_value=None, filter_ack_queue=None):
        """
        Common members needed for common dispatching behaviours

        :param Queue filter_queue: contexts for filtering received dlt message
        :param Queue message_queue: received dlt messages after filtering against context
        :param multiprocessing.Event mp_stop_event: stop signal for this process
        :param DLTTimeValue dlt_time_value: shared memory that records the latest dlt
            message timestamp; timestamps are not recorded when None
        :param Queue filter_ack_queue: queue for acks on accepted/removed contexts
        """
        super().__init__()
        self.filter_queue = filter_queue
        self.filter_ack_queue = filter_ack_queue
        self.message_queue = message_queue
        self.mp_stop_flag = mp_stop_event
        # - dict mapping filters to queue ids
        self.context_map = defaultdict(list)
        self._dlt_time_value = dlt_time_value

    def _process_filter_queue(self):
        """Check if filters have been added or need to be removed"""
        while not self.filter_queue.empty():
            queue_ack_id = None

            msg = self.filter_queue.get_nowait()
            logger.debug("Process filter queue message: %s", msg)
            # 4-tuple messages carry an additional ack-queue id
            if isinstance(msg, tuple) and len(msg) == 4:
                queue_id, queue_ack_id, filters, add = msg
            else:
                queue_id, filters, add = msg

            if add:
                for apid_ctid in filters:
                    self.context_map[apid_ctid].append(queue_id)
            else:
                # Handle each filter independently: a missing/already-removed
                # entry must not abort the removal of the remaining filters.
                for apid_ctid in filters:
                    # Use get() to avoid defaultdict inserting a phantom
                    # empty list for filters that were never registered.
                    queue_ids = self.context_map.get(apid_ctid)
                    if queue_ids is None:
                        continue  # - filter already removed or never inserted
                    try:
                        queue_ids.remove(queue_id)
                    except ValueError:
                        continue  # - queue_id already removed or not inserted
                    if not queue_ids:
                        del self.context_map[apid_ctid]

            if self.filter_ack_queue and queue_ack_id:
                logger.debug("Send filter ack message: queue_ack_id: %s, add: %s", queue_ack_id, add)
                self.filter_ack_queue.put((queue_ack_id, add))

    @abstractmethod
    def is_valid_message(self, message):
        """Validate if the received message is a valid message according to AUTOSAR doc"""
        return True

    def handle(self, message):
        """Function to be called for every message received

        :param DLTMessage message: received new DLTMessage instance
        :returns: True if the loop should continue, False to stop the loop and exit
        :rtype: bool
        """
        self._process_filter_queue()

        if self.is_valid_message(message):
            # Dispatch the message to every queue whose filters match the
            # message's apid/ctid (wildcards via None on either side)
            msg_ctx = ((message.apid, message.ctid), (None, None), (message.apid, None), (None, message.ctid))
            qids = (
                queue_id
                for filters, queue_ids in self.context_map.items()
                for queue_id in queue_ids
                if filters in msg_ctx
            )
            for queue_id in qids:
                if self.message_queue.full():
                    logger.error("message_queue is full ! put() on this queue will block")
                self.message_queue.put((queue_id, message))

            # Send the message's timestamp
            if self._dlt_time_value:
                self._dlt_time_value.timestamp = message.storage_timestamp

        return not self.mp_stop_flag.is_set()

    @abstractmethod
    def run(self) -> None:
        pass

    def break_blocking_main_loop(self):
        """All message dispatchers need a main loop to fetch dlt messages from source.
        If it could constantly dispatch messages, then the main loop will not get into blocking state.
        Only when no more message could not be dispatched, the main loop would get into blocking state.

        Not all message dispatchers need to implement this method
        """
        pass
312 |
313 |
class DLTFileSpinner(DLTMessageDispatcherBase):
    """Process receiving the DLT messages and handing them to DLTContextHandler

    This process instance is responsible for collecting messages from
    the at-runtime written dlt log, tagging them with the correct queue id and
    placing them on the messages queue.
    """

    def __init__(
        self, filter_queue, message_queue, mp_stop_event, file_name, dlt_time_value=None, filter_ack_queue=None
    ):
        super().__init__(filter_queue, message_queue, mp_stop_event, dlt_time_value, filter_ack_queue)
        self.file_name = file_name
        # Live reader that keeps following the file as new messages are appended
        self.dlt_reader = load(filename=self.file_name, live_run=True)

    def is_valid_message(self, message):
        """According to AUTOSAR doc, message with empty apid and empty ctid is still valid"""
        return message is not None

    def run(self):
        """DLTFileSpinner worker method"""
        logger.info("Start to process dlt file %s", self.file_name)
        # Even though dlt connector for ioc should only be instantiated after successful SerialConsole with fibex,
        # the corner case of a not-yet-existing dlt file is still handled here with max 5 retries
        remaining_retries = 5

        while not self.mp_stop_flag.is_set():
            try:
                logger.debug("py_dlt_file_main_loop")
                keep_running = py_dlt_file_main_loop(self.dlt_reader, callback=self.handle)
                # An explicit False from the main loop means the reader gave up on the file
                if keep_running is False and not self.mp_stop_flag.is_set():
                    logger.error("Too many bad messages read from %s", self.file_name)
                    self.mp_stop_flag.set()
                    break
            except KeyboardInterrupt:
                logger.debug("main loop manually interrupted")
                break
            except IOError as err:
                if str(err) != cDLT_FILE_NOT_OPEN_ERROR:
                    raise err
                # Not every time of non-existing file, cDLTFile will report error
                # Sometimes, it simply works through without issue.
                # So, no unittest could be done for this error handling
                if remaining_retries == 0:
                    logger.error("After retries, dlt file %s still does not exist", self.file_name)
                    raise err
                logger.warning(
                    "DLT file %s does not exist, will try %d times again",
                    self.file_name,
                    remaining_retries,
                )
                remaining_retries -= 1
                time.sleep(1)
            except Exception:  # pylint: disable=broad-except
                logger.exception("Exception during the DLT message receive")

        logger.debug("DLTFileSpinner starts to quit...")
        if not self.dlt_reader.stop_reading_proc.is_set():
            self.dlt_reader.stop_reading_proc.set()
        self.message_queue.close()
        logger.info("DLTFileSpinner worker execution complete")

    def break_blocking_main_loop(self):
        """A big user for DLTFileSpinner is IOC dlt, which does not have so many dlt messages as HU,
        so it is quite easy for the main loop to get into blocking state,
        at the moment that no more dlt messages could be dispatched.
        """
        logger.debug("Stop iterating to file %s", self.file_name)
        self.dlt_reader.stop_reading_proc.set()
384 |
385 |
class DLTMessageHandler(DLTMessageDispatcherBase):
    """Process receiving the DLT messages and handing them to DLTContextHandler

    This process instance is responsible for collecting messages from
    the DLT daemon, tagging them with the correct queue id and placing
    them on the messages queue.
    """

    def __init__(
        self, filter_queue, message_queue, mp_stop_event, client_cfg, dlt_time_value=None, filter_ack_queue=None
    ):
        """Initialize the handler from a client configuration dict.

        :param filter_queue: queue carrying filter add/remove requests
        :param message_queue: queue onto which received messages are placed
        :param mp_stop_event: multiprocessing event used to signal shutdown
        :param dict client_cfg: DLT client settings; requires "ip_address",
            optionally "port", "filename", "verbose", "timeout" and the UDP
            buffer size overrides
        :param dlt_time_value: optional shared DLT time value
        :param filter_ack_queue: optional queue for filter acknowledgements
        """
        super().__init__(filter_queue, message_queue, mp_stop_event, dlt_time_value, filter_ack_queue)
        self._ip_address = client_cfg["ip_address"]
        self._port = client_cfg.get("port", DLT_DAEMON_TCP_PORT)
        self._filename = client_cfg.get("filename")
        self.verbose = client_cfg.get("verbose", 0)
        self.timeout = client_cfg.get("timeout", DLT_CLIENT_TIMEOUT)
        # Created lazily in run(); stays None until the first connect attempt.
        self._client = None
        # Trace dump file handle; only opened in run() when "filename" is set.
        self.tracefile = None
        self.last_connected = time.time()
        # Start 2 minutes in the past so the first "connection missing" log
        # (rate-limited to once per minute) fires promptly.
        self.last_message = time.time() - 120.0
        self._udp_fd_buffer_size_bytes = client_cfg.get("udp_fd_buffer_size_bytes", DLT_UDP_MULTICAST_FD_BUFFER_SIZE)
        self._udp_buffer_size_bytes = client_cfg.get("udp_buffer_size_bytes", DLT_UDP_MULTICAST_BUFFER_SIZE)

    def is_valid_message(self, message):
        """Return truthy when *message* is non-empty and has an APID or CTID."""
        return message and (message.apid != "" or message.ctid != "")

    def _client_connect(self):
        """Create a new DLTClient and try to connect it.

        Uses ``self.timeout`` (seconds) as the connection timeout.

        :returns: True if connected, False otherwise
        :rtype: bool
        """
        if self.verbose:
            logger.debug(
                "Creating DLTClient (ip_address='%s', Port='%s', logfile='%s')",
                self._ip_address,
                self._port,
                self._filename,
            )
        self._client = DLTClient(
            servIP=self._ip_address,
            port=self._port,
            verbose=self.verbose,
            udp_fd_buffer_size_bytes=self._udp_fd_buffer_size_bytes,
            udp_buffer_size_bytes=self._udp_buffer_size_bytes,
        )
        connected = self._client.connect(self.timeout)
        if connected:
            logger.info("DLTClient connected to %s", self._client.servIP)
        return connected

    def run(self):
        """DLTMessageHandler worker method

        Keeps (re)connecting to the DLT daemon and pumps received messages
        through :meth:`handle` until the stop flag is set.  All resources
        (message queue, client socket, trace file) are released on exit,
        even if the receive loop dies with an unexpected exception.
        """
        if self._filename is not None:
            logger.info("Opening the DLT trace file '%s'", self._filename)
            self.tracefile = open(self._filename, mode="ab", buffering=False)

        try:
            while not self.mp_stop_flag.is_set():
                exception_occurred = False
                if not self._client_connect():
                    # keep trying to reconnect, until we either successfully
                    # connect or the stop_flag is set
                    if time.time() - self.last_message > 60:
                        # Once per minute log that we still have no DLT Connection
                        logger.info(
                            "DLT connection to %s missing since %s seconds",
                            self._ip_address,
                            time.time() - self.last_connected,
                        )
                        self.last_message = time.time()
                    continue
                try:
                    if self.last_connected:
                        logger.info(
                            "DLT connection to %s re-established after %s seconds",
                            self._ip_address,
                            time.time() - self.last_connected,
                        )
                    self.last_connected = time.time()
                    res = py_dlt_client_main_loop(self._client, verbose=0, callback=self.handle, dumpfile=self.tracefile)
                    if res is False and not self.mp_stop_flag.is_set():  # main loop returned False
                        logger.warning("DLT connection to %s lost. Restarting DLT client", self._ip_address)
                        self.last_connected = time.time()
                        self.last_message = time.time()
                        exception_occurred = True
                except KeyboardInterrupt:
                    exception_occurred = True
                    logger.debug("main loop manually interrupted")
                    break
                except socket.timeout as exc:
                    exception_occurred = True
                    logger.error("socket timeout error")
                    logger.debug(exc)
                except Exception:  # pylint: disable=broad-except
                    exception_occurred = True
                    logger.exception("Exception during the DLT message receive")

                finally:
                    if exception_occurred:
                        logger.debug("Closing open socket connections.")
                        self._client.disconnect()
        finally:
            self.message_queue.close()
            # _client is still None when the stop flag was set before the
            # first connect attempt - guard against AttributeError here.
            if self._client is not None:
                self._client.disconnect()
            # Close the trace dump file opened above, if any.
            if self.tracefile is not None:
                self.tracefile.close()
            logger.info("DLTMessageHandler worker execution complete")
493 |
--------------------------------------------------------------------------------