├── docs ├── index.rst ├── requirements.txt ├── README.md ├── Makefile ├── INDEX.md ├── getting-started.md ├── code-generation.md ├── how-to.md ├── examples.md ├── python-can.md ├── conf.py ├── api-reference.md └── tutorial.md ├── pydbc ├── asam │ ├── __init__.py │ └── types.py ├── py3 │ ├── __init__.py │ ├── dbc.tokens │ ├── dbcLexer.tokens │ ├── ncf.tokens │ ├── ncfLexer.tokens │ ├── ldf.tokens │ ├── ldfLexer.tokens │ ├── ncfVisitor.py │ └── ldfVisitor.py ├── tests │ ├── __init__.py │ ├── base.py │ ├── test_types.py │ └── test_dbc_parser.py ├── scripts │ ├── __init__.py │ ├── vndb_exporter.py │ └── vndb_importer.py ├── integrations │ ├── __init__.py │ └── python_can.py ├── buildGrammars.cmd ├── buildGrammars.sh ├── version.py ├── __init__.py ├── cgen │ ├── __init__.py │ ├── templates │ │ ├── __init__.py │ │ ├── micropython_can.py.tmpl │ │ ├── ldf.tmpl │ │ └── socketcan.c.tmpl │ └── generators.py ├── exceptions.py ├── template.py ├── logger.py ├── examples │ └── create_candb.py ├── utils.py ├── db │ ├── imex.py │ └── __init__.py ├── types.py ├── ncf.g4 └── ncfListener.py ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE.md ├── ISSUE_TEMPLATE │ ├── custom.md │ ├── feature_request.md │ └── bug_report.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── pythonapp.yml ├── .bumpversion.cfg ├── .gitignore ├── MANIFEST.in ├── .travis.yml ├── .codeclimate.yml ├── .readthedocs.yml ├── pyproject.toml ├── appveyor.yml ├── CODE_OF_CONDUCT.md └── README.md /docs/index.rst: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /pydbc/asam/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydbc/py3/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /pydbc/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydbc/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydbc/integrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | myst_parser 2 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | liberapay: christoph2 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | If you just have a question, suggestion, or feature-request feel free to ignore the following list :smile: 3 | 4 | 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/custom.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Custom issue template 3 | about: Describe this issue template's purpose here. 
4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.5.1 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:pydbc/__init__.py] 7 | 8 | [bumpversion:file:pydbc/version.py] 9 | 10 | [bumpversion:file:pyproject.toml] 11 | -------------------------------------------------------------------------------- /pydbc/buildGrammars.cmd: -------------------------------------------------------------------------------- 1 | java org.antlr.v4.Tool -Dlanguage=Python3 -long-messages -visitor dbc.g4 -o ./py3/ 2 | java org.antlr.v4.Tool -Dlanguage=Python3 -long-messages -visitor ncf.g4 -o ./py3/ 3 | java org.antlr.v4.Tool -Dlanguage=Python3 -long-messages -visitor ldf.g4 -o ./py3/ 4 | -------------------------------------------------------------------------------- /pydbc/buildGrammars.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | java org.antlr.v4.Tool -Dlanguage=Python3 -long-messages -visitor dbc.g4 -o ./py3/ 3 | java org.antlr.v4.Tool -Dlanguage=Python3 -long-messages -visitor ncf.g4 -o ./py3/ 4 | java org.antlr.v4.Tool -Dlanguage=Python3 -long-messages -visitor ldf.g4 -o ./py3/ 5 | -------------------------------------------------------------------------------- /pydbc/tests/base.py: -------------------------------------------------------------------------------- 1 | 2 | import pytest 3 | 4 | from pydbc.api.db import Database 5 | from pydbc.api.db import BaseObject 6 | 7 | 8 | class BaseTest: 9 | 10 | @pytest.fixture(autouse = True) 11 | def setup_database(self, db_in_memory): 12 | self.db = db_in_memory 13 | 14 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | *.o 2 | *.bak 3 | *.obj 4 | *.o 5 | *.div 6 | *.elf 7 | *.s19 8 | *.out 9 | *.log 10 | *.dbg 11 | *.lis 12 | *.lst 13 | *.swp 14 | *.tmp 15 | *.TMP 16 | files/ 17 | test-files/ 18 | *.i 19 | *dblite 20 | *stackdump 21 | *.exe 22 | *~ 23 | archive/ 24 | build/ 25 | dist/ 26 | pydbc.egg-info/ 27 | *.pyc 28 | *.pyo 29 | *.zip 30 | *.bin 31 | *.pyd 32 | *.kpf 33 | *.html 34 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # pyDBC Docs (GFM) 2 | 3 | This folder contains the GitHub‑Flavored Markdown documentation for pyDBC. 4 | 5 | - INDEX.md — Documentation home 6 | - getting-started.md — Installation and quickstarts 7 | - tutorial.md — Step‑by‑step walkthrough 8 | - how-to.md — Task‑oriented guides 9 | - examples.md — Ready‑to‑run examples 10 | - api-reference.md — API reference 11 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | include *.txt 3 | include *.yml 4 | include .bumpversion.cfg 5 | include LICENSE 6 | recursive-include docs *.py 7 | recursive-include docs *.rst 8 | recursive-include docs *.txt 9 | recursive-include docs Makefile 10 | recursive-include pydbc *.cmd 11 | recursive-include pydbc *.g4 12 | recursive-include pydbc *.py 13 | recursive-include pydbc *.tmpl 14 | recursive-include pydbc *.tokens 15 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pyXCP 8 | SOURCEDIR = . 
9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Types of changes 2 | 3 | - [ ] Bug fix (non-breaking change which fixes an issue) 4 | - [ ] New feature (non-breaking change which adds functionality) 5 | - [ ] Breaking change (fix or feature that would cause existing functionality to change) 6 | - [ ] I have read the **CONTRIBUTING** document. 7 | - [ ] My code follows the code style of this project. 8 | - [ ] My change requires a change to the documentation. 9 | - [ ] I have updated the documentation accordingly. 10 | - [ ] I have added tests to cover my changes. 11 | - [ ] All new and existing tests passed. 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 
15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # https://travis-ci.org/christoph2/pydbc 2 | language: python 3 | sudo: required 4 | python: 5 | - 3.5 6 | - 3.6 7 | #- "pypy" 8 | 9 | os: 10 | - linux 11 | # - osx 12 | 13 | notifications: 14 | email: false 15 | 16 | 17 | install: 18 | - sudo curl -O http://www.antlr.org/download/antlr-4.7.1-complete.jar 19 | - export CLASSPATH=".:/home/travis/build/christoph2/pyA2L/antlr-4.7.1-complete.jar:$CLASSPATH" 20 | - alias antlr4='java -jar /usr/local/lib/antlr-4.7-complete.jar' 21 | - pip install -r requirements_testings.txt 22 | - travis_retry pip install pylint 23 | - travis_retry pip install pytest-cov coverage coveralls codacy-coverage 24 | 25 | 26 | script: 27 | coverage run --source=pydbc setup.py test 28 | 29 | after_success: 30 | - pylint app 31 | - coveralls 32 | - coverage xml && python-codacy-coverage -r coverage.xml 33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 
22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | engines: 4 | fixme: 5 | enabled: true 6 | pep8: 7 | enabled: false 8 | radon: 9 | enabled: true 10 | ratings: 11 | paths: 12 | - "**.py" 13 | 14 | checks: 15 | argument-count: 16 | config: 17 | threshold: 4 18 | complex-logic: 19 | config: 20 | threshold: 4 21 | file-lines: 22 | config: 23 | threshold: 1000 24 | method-complexity: 25 | config: 26 | threshold: 5 27 | method-count: 28 | config: 29 | threshold: 20 30 | method-lines: 31 | config: 32 | threshold: 25 33 | nested-control-flow: 34 | config: 35 | threshold: 4 36 | return-statements: 37 | config: 38 | threshold: 4 39 | similar-code: 40 | config: 41 | threshold: # language-specific defaults. an override will affect all languages. 42 | identical-code: 43 | config: 44 | threshold: # language-specific defaults. an override will affect all languages. 45 | 46 | exclude_patterns: 47 | - "pydbc/py3/**" 48 | - "pydbc/py2/**" 49 | - "pydbc/tests/**" -------------------------------------------------------------------------------- /docs/INDEX.md: -------------------------------------------------------------------------------- 1 | # pyDBC Documentation 2 | 3 | Welcome to the pyDBC documentation. 
This site provides a GitHub‑Flavored Markdown overview of the library, tutorials, examples, how‑to guides, and API reference. 4 | 5 | pyDBC is a Python library for creating and editing automotive network description artifacts, including: 6 | - DBC (CAN Database) 7 | - LDF (LIN Description File) 8 | - NCF (Network Configuration File) 9 | 10 | It exposes high‑level creational APIs to build networks, nodes, messages, signals, and related metadata; it also contains a parser and exporters to transform between standardized formats and an internal SQLAlchemy‑backed model. 11 | 12 | Useful links: 13 | - Getting Started: docs/getting-started.md 14 | - Tutorial: docs/tutorial.md 15 | - How‑to Guides: docs/how-to.md 16 | - Examples: docs/examples.md 17 | - API Reference: docs/api-reference.md 18 | - Code Generation: docs/code-generation.md 19 | 20 | If you view this on GitHub, use the links above; otherwise, the files are located alongside this INDEX.md in the repository under docs/. 21 | 22 | - python-can integration: docs/python-can.md — Encode/decode CAN frames and send/receive with python-can 23 | -------------------------------------------------------------------------------- /pydbc/version.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2018 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.5.1' 29 | 30 | 31 | VNDB_SCHEMA_VERSION = 1 32 | 33 | -------------------------------------------------------------------------------- /pydbc/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2017 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. 
FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.5.1' 29 | 30 | #import pkg_resources 31 | #pkg_resources.declare_namespace(__name__) 32 | 33 | -------------------------------------------------------------------------------- /pydbc/cgen/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2017 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. 
FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.1.0' 29 | 30 | #import pkg_resources 31 | #pkg_resources.declare_namespace(__name__) 32 | 33 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-24.04 10 | tools: 11 | python: "3.12" 12 | # You can also specify other tool versions: 13 | # nodejs: "20" 14 | # rust: "1.70" 15 | # golang: "1.20" 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: docs/conf.py 20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs 21 | # builder: "dirhtml" 22 | # Fail on all warnings to avoid broken references 23 | # fail_on_warning: true 24 | 25 | # Optionally build your docs in additional formats such as PDF and ePub 26 | formats: 27 | - pdf 28 | # - epub 29 | 30 | # Optional but recommended, declare the Python requirements required 31 | # to build your documentation 32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 33 | python: 34 | install: 35 | - requirements: docs/requirements.txt 36 | 37 | 38 | -------------------------------------------------------------------------------- /pydbc/exceptions.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 
6 | 7 | (C) 2010-2018 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.1.0' 29 | 30 | 31 | class DuplicateKeyError(Exception): pass 32 | 33 | class RangeError(Exception): pass 34 | 35 | -------------------------------------------------------------------------------- /pydbc/cgen/templates/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2017 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 
20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.1.0' 29 | 30 | #import pkg_resources 31 | #pkg_resources.declare_namespace(__name__) 32 | 33 | -------------------------------------------------------------------------------- /pydbc/scripts/vndb_exporter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | """ 6 | 7 | __copyright__ = """ 8 | pySART - Simplified AUTOSAR-Toolkit for Python. 9 | 10 | ( C) 2010-2020 by Christoph Schueler 11 | 12 | All Rights Reserved 13 | 14 | This program is free software; you can redistribute it and/or modify 15 | it under the terms of the GNU General Public License as published by 16 | the Free Software Foundation; either version 2 of the License, or 17 | (at your option) any later version. 18 | 19 | This program is distributed in the hope that it will be useful, 20 | but WITHOUT ANY WARRANTY; without even the implied warranty of 21 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 22 | GNU General Public License for more details. 23 | 24 | You should have received a copy of the GNU General Public License along 25 | with this program; if not, write to the Free Software Foundation, Inc., 26 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 27 | 28 | s. 
FLOSS-EXCEPTION.txt 29 | """ 30 | __author__ = 'Christoph Schueler' 31 | __version__ = '0.1.0' 32 | 33 | import argparse 34 | import io 35 | import pathlib 36 | import sys 37 | 38 | from pydbc.db.imex import DbcExporter, LdfExporter 39 | import pydbc.db.model as model 40 | 41 | def exportFile(pth): 42 | fnext = pth.suffix[ 1 : ].lower() 43 | 44 | print("Processing '{}'".format(pth)) 45 | #exporter = DbcExporter(pth) 46 | exporter = LdfExporter(pth, debug = False) 47 | 48 | exporter.run() 49 | print("OK, done.\n", flush = True) 50 | 51 | exportFile(pathlib.Path(sys.argv[1])) 52 | 53 | 54 | -------------------------------------------------------------------------------- /pydbc/py3/dbc.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | T__22=23 24 | T__23=24 25 | T__24=25 26 | T__25=26 27 | T__26=27 28 | T__27=28 29 | T__28=29 30 | T__29=30 31 | T__30=31 32 | T__31=32 33 | T__32=33 34 | T__33=34 35 | T__34=35 36 | T__35=36 37 | T__36=37 38 | T__37=38 39 | T__38=39 40 | T__39=40 41 | T__40=41 42 | T__41=42 43 | T__42=43 44 | T__43=44 45 | T__44=45 46 | T__45=46 47 | T__46=47 48 | T__47=48 49 | T__48=49 50 | DUMMY_NODE_VECTOR=50 51 | C_IDENTIFIER=51 52 | FLOAT=52 53 | INT=53 54 | WS=54 55 | STRING=55 56 | SIGN=56 57 | 'BO_TX_BU_'=1 58 | ':'=2 59 | ';'=3 60 | 'SIG_VALTYPE_'=4 61 | 'BO_'=5 62 | 'SG_'=6 63 | '|'=7 64 | '@'=8 65 | '('=9 66 | ','=10 67 | ')'=11 68 | '['=12 69 | ']'=13 70 | 'VAL_TABLE_'=14 71 | 'BU_'=15 72 | 'BS_'=16 73 | 'NS_'=17 74 | 'NS_DESC_'=18 75 | 'CM_'=19 76 | 'BA_DEF_'=20 77 | 'BA_'=21 78 | 'VAL_'=22 79 | 'CAT_DEF_'=23 80 | 'CAT_'=24 81 | 'FILTER'=25 82 | 'BA_DEF_DEF_'=26 83 | 'EV_DATA_'=27 84 | 'ENVVAR_DATA_'=28 85 | 'SGTYPE_'=29 86 
| 'SGTYPE_VAL_'=30 87 | 'BA_DEF_SGTYPE_'=31 88 | 'BA_SGTYPE_'=32 89 | 'SIG_TYPE_REF_'=33 90 | 'SIG_GROUP_'=34 91 | 'SIGTYPE_VALTYPE_'=35 92 | 'BA_DEF_REL_'=36 93 | 'BA_REL_'=37 94 | 'BA_DEF_DEF_REL_'=38 95 | 'BU_SG_REL_'=39 96 | 'BU_EV_REL_'=40 97 | 'BU_BO_REL_'=41 98 | 'SG_MUL_VAL_'=42 99 | 'VERSION'=43 100 | 'EV_'=44 101 | 'INT'=45 102 | 'HEX'=46 103 | 'FLOAT'=47 104 | 'STRING'=48 105 | 'ENUM'=49 106 | -------------------------------------------------------------------------------- /pydbc/py3/dbcLexer.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | T__22=23 24 | T__23=24 25 | T__24=25 26 | T__25=26 27 | T__26=27 28 | T__27=28 29 | T__28=29 30 | T__29=30 31 | T__30=31 32 | T__31=32 33 | T__32=33 34 | T__33=34 35 | T__34=35 36 | T__35=36 37 | T__36=37 38 | T__37=38 39 | T__38=39 40 | T__39=40 41 | T__40=41 42 | T__41=42 43 | T__42=43 44 | T__43=44 45 | T__44=45 46 | T__45=46 47 | T__46=47 48 | T__47=48 49 | T__48=49 50 | DUMMY_NODE_VECTOR=50 51 | C_IDENTIFIER=51 52 | FLOAT=52 53 | INT=53 54 | WS=54 55 | STRING=55 56 | SIGN=56 57 | 'BO_TX_BU_'=1 58 | ':'=2 59 | ';'=3 60 | 'SIG_VALTYPE_'=4 61 | 'BO_'=5 62 | 'SG_'=6 63 | '|'=7 64 | '@'=8 65 | '('=9 66 | ','=10 67 | ')'=11 68 | '['=12 69 | ']'=13 70 | 'VAL_TABLE_'=14 71 | 'BU_'=15 72 | 'BS_'=16 73 | 'NS_'=17 74 | 'NS_DESC_'=18 75 | 'CM_'=19 76 | 'BA_DEF_'=20 77 | 'BA_'=21 78 | 'VAL_'=22 79 | 'CAT_DEF_'=23 80 | 'CAT_'=24 81 | 'FILTER'=25 82 | 'BA_DEF_DEF_'=26 83 | 'EV_DATA_'=27 84 | 'ENVVAR_DATA_'=28 85 | 'SGTYPE_'=29 86 | 'SGTYPE_VAL_'=30 87 | 'BA_DEF_SGTYPE_'=31 88 | 'BA_SGTYPE_'=32 89 | 'SIG_TYPE_REF_'=33 90 | 'SIG_GROUP_'=34 91 | 'SIGTYPE_VALTYPE_'=35 92 | 'BA_DEF_REL_'=36 93 | 'BA_REL_'=37 94 | 
'BA_DEF_DEF_REL_'=38 95 | 'BU_SG_REL_'=39 96 | 'BU_EV_REL_'=40 97 | 'BU_BO_REL_'=41 98 | 'SG_MUL_VAL_'=42 99 | 'VERSION'=43 100 | 'EV_'=44 101 | 'INT'=45 102 | 'HEX'=46 103 | 'FLOAT'=47 104 | 'STRING'=48 105 | 'ENUM'=49 106 | -------------------------------------------------------------------------------- /.github/workflows/pythonapp.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: pyvndb 5 | 6 | on: 7 | push: 8 | branches: [master, develop] 9 | pull_request: 10 | branches: [master, develop] 11 | 12 | jobs: 13 | 14 | build_sdist: 15 | name: Build source distribution 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v3 19 | 20 | - name: Build sdist 21 | run: | 22 | pip install -U build poetry 23 | poetry install 24 | python -m build --sdist 25 | 26 | - uses: actions/upload-artifact@v4 27 | with: 28 | path: dist/*.tar.gz 29 | include-hidden-files: true 30 | retention-days: 1 31 | - name: Test with pytest 32 | run: | 33 | pip install pytest poetry 34 | poetry install 35 | pytest 36 | 37 | upload_pypi: 38 | needs: [build_sdist] # , build_wheels 39 | runs-on: ubuntu-latest 40 | # if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/') 41 | # alternatively, to publish when a GitHub Release is created, use the following rule: 42 | # if: github.event_name == 'release' && github.event.action == 'published' 43 | steps: 44 | - uses: actions/download-artifact@v4.1.7 45 | with: 46 | # name: artifact 47 | path: dist 48 | merge-multiple: true 49 | # pattern: dist/* 50 | 51 | - uses: pypa/gh-action-pypi-publish@v1.13.0 52 | with: 53 | user: __token__ 54 | password: ${{ secrets.PYPI_PASSWORD }} 55 | 
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "pydbc" 7 | version = "0.5.1" 8 | description = "Vehicle description file handling for Python" 9 | authors = ["Christoph Schueler "] 10 | readme = "README.md" 11 | license = "GPL-2.0-only" 12 | repository = "https://www.github.com/Christoph2/pydbc" 13 | classifiers = [ 14 | "Development Status :: 4 - Beta", 15 | "Intended Audience :: Developers", 16 | "Topic :: Software Development", 17 | "Topic :: Scientific/Engineering", 18 | "License :: OSI Approved :: GNU General Public License v2 (GPLv2)", 19 | "Programming Language :: Python :: 3.10", 20 | "Programming Language :: Python :: 3.11", 21 | "Programming Language :: Python :: 3.12", 22 | ] 23 | packages = [{include = "pydbc"}] 24 | include = ["pydbc/cgen/templates/*.tmpl"] 25 | 26 | [tool.poetry.dependencies] 27 | python = ">=3.10" 28 | antlr4-python3-runtime = "4.13.2" 29 | chardet = ">=5.0.0" 30 | mako = ">=1.2.0" 31 | colorama = ">=0.4.6" 32 | SQLAlchemy = ">=2.0.0" 33 | typing-extensions = ">=4.0.0" 34 | python-can = { version = "*", optional = true } 35 | 36 | [tool.poetry.group.dev.dependencies] 37 | pytest = ">=7.0.0" 38 | pytest-runner = ">=6.0.0" 39 | black = "^25.1.0" 40 | 41 | [tool.poetry.group.docs.dependencies] 42 | sphinx = "*" 43 | myst-parser = "*" 44 | numpydoc = "*" 45 | bumpversion = "*" 46 | 47 | [tool.poetry.scripts] 48 | vndb_importer = "pydbc.scripts.vndb_importer:main" 49 | vndb_exporter = "pydbc.scripts.vndb_exporter:main" 50 | 51 | [tool.poetry.extras] 52 | python-can = ["python-can"] 53 | 54 | [tool.pytest.ini_options] 55 | addopts = "--verbose --tb=short -o junit_family=xunit2" 56 | testpaths = ["pydbc/tests"] 57 | 
-------------------------------------------------------------------------------- /pydbc/tests/test_types.py: -------------------------------------------------------------------------------- 1 | 2 | from pydbc import types 3 | 4 | def test_j1939_address(): 5 | j0 = types.J1939Address.from_int(217056510) 6 | assert j0.priority == 3 7 | assert j0.reserved == 0 8 | assert j0.datapage == 0 9 | assert j0.pdu_format == 240 10 | assert j0.pdu_specific == 4 11 | assert j0.source_address == 254 12 | assert j0.pgn == 61444 13 | assert j0.canID == 217056510 14 | 15 | def test_j1939_set_pgn(): 16 | j0 = types.J1939Address.from_int(217056510) 17 | j0.pgn = 64850 18 | assert j0.canID == 217928446 19 | assert j0.pgn == 64850 20 | 21 | def test_j1939_str(capsys): 22 | j0 = types.J1939Address.from_int(419283454) 23 | print(str(j0)) 24 | captured = capsys.readouterr() 25 | assert captured.out == "J1939Address(priority = 6, reserved = 0, datapage = 0, pdu_format = 253, pdu_specific = 193, source_address = 254)\n" 26 | 27 | def test_j1939_repr(capsys): 28 | j0 = types.J1939Address.from_int(419283454) 29 | print(repr(j0)) 30 | captured = capsys.readouterr() 31 | assert captured.out == "J1939Address(priority = 6, reserved = 0, datapage = 0, pdu_format = 253, pdu_specific = 193, source_address = 254)\n" 32 | 33 | def test_lin_product_id(): 34 | lp = types.LinProductIdType(0x3e, 2, 4) 35 | assert lp.supplier_id == 0x3e 36 | assert lp.function_id == 2 37 | assert lp.variant == 4 38 | 39 | def test_lin_product_id_str(capsys): 40 | lp = types.LinProductIdType(0x3e, 2, 4) 41 | print(str(lp)) 42 | captured = capsys.readouterr() 43 | assert captured.out == "LinProductIdType(supplier_id = 62, function_id = 2, variant = 4)\n" 44 | 45 | def test_lin_product_id_repr(capsys): 46 | lp = types.LinProductIdType(0x3e, 2, 4) 47 | print(repr(lp)) 48 | captured = capsys.readouterr() 49 | assert captured.out == "LinProductIdType(supplier_id = 62, function_id = 2, variant = 4)\n" 50 | 51 | 
-------------------------------------------------------------------------------- /pydbc/py3/ncf.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | T__22=23 24 | T__23=24 25 | T__24=25 26 | T__25=26 27 | T__26=27 28 | T__27=28 29 | T__28=29 30 | T__29=30 31 | T__30=31 32 | T__31=32 33 | T__32=33 34 | T__33=34 35 | T__34=35 36 | T__35=36 37 | T__36=37 38 | T__37=38 39 | T__38=39 40 | T__39=40 41 | T__40=41 42 | T__41=42 43 | T__42=43 44 | T__43=44 45 | T__44=45 46 | T__45=46 47 | T__46=47 48 | T__47=48 49 | T__48=49 50 | T__49=50 51 | T__50=51 52 | T__51=52 53 | T__52=53 54 | T__53=54 55 | T__54=55 56 | T__55=56 57 | C_IDENTIFIER=57 58 | FLOAT=58 59 | INT=59 60 | HEX=60 61 | WS=61 62 | COMMENT=62 63 | STRING=63 64 | SIGN=64 65 | 'node_capability_file'=1 66 | ';'=2 67 | 'LIN_language_version'=3 68 | '='=4 69 | 'node'=5 70 | '{'=6 71 | '}'=7 72 | 'general'=8 73 | 'LIN_protocol_version'=9 74 | 'supplier'=10 75 | 'function'=11 76 | 'variant'=12 77 | 'bitrate'=13 78 | 'sends_wake_up_signal'=14 79 | 'yes'=15 80 | 'no'=16 81 | 'volt_range'=17 82 | ','=18 83 | 'temp_range'=19 84 | 'conformance'=20 85 | 'automatic'=21 86 | 'min'=22 87 | 'max'=23 88 | 'select'=24 89 | 'kbps'=25 90 | 'diagnostic'=26 91 | 'NAD'=27 92 | 'to'=28 93 | 'diagnostic_class'=29 94 | 'P2_min'=30 95 | 'ms'=31 96 | 'ST_min'=32 97 | 'N_As_timeout'=33 98 | 'N_Cr_timeout'=34 99 | 'support_sid'=35 100 | 'max_message_length'=36 101 | 'frames'=37 102 | 'publish'=38 103 | 'subscribe'=39 104 | 'length'=40 105 | 'min_period'=41 106 | 'max_period'=42 107 | 'event_triggered_frame'=43 108 | 'signals'=44 109 | 'size'=45 110 | 'offset'=46 111 | 'init_value'=47 112 | 'encoding'=48 113 | 
'logical_value'=49 114 | 'physical_value'=50 115 | 'bcd_value'=51 116 | 'ascii_value'=52 117 | 'status_management'=53 118 | 'response_error'=54 119 | 'fault_state_signals'=55 120 | 'free_text'=56 121 | -------------------------------------------------------------------------------- /pydbc/py3/ncfLexer.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | T__22=23 24 | T__23=24 25 | T__24=25 26 | T__25=26 27 | T__26=27 28 | T__27=28 29 | T__28=29 30 | T__29=30 31 | T__30=31 32 | T__31=32 33 | T__32=33 34 | T__33=34 35 | T__34=35 36 | T__35=36 37 | T__36=37 38 | T__37=38 39 | T__38=39 40 | T__39=40 41 | T__40=41 42 | T__41=42 43 | T__42=43 44 | T__43=44 45 | T__44=45 46 | T__45=46 47 | T__46=47 48 | T__47=48 49 | T__48=49 50 | T__49=50 51 | T__50=51 52 | T__51=52 53 | T__52=53 54 | T__53=54 55 | T__54=55 56 | T__55=56 57 | C_IDENTIFIER=57 58 | FLOAT=58 59 | INT=59 60 | HEX=60 61 | WS=61 62 | COMMENT=62 63 | STRING=63 64 | SIGN=64 65 | 'node_capability_file'=1 66 | ';'=2 67 | 'LIN_language_version'=3 68 | '='=4 69 | 'node'=5 70 | '{'=6 71 | '}'=7 72 | 'general'=8 73 | 'LIN_protocol_version'=9 74 | 'supplier'=10 75 | 'function'=11 76 | 'variant'=12 77 | 'bitrate'=13 78 | 'sends_wake_up_signal'=14 79 | 'yes'=15 80 | 'no'=16 81 | 'volt_range'=17 82 | ','=18 83 | 'temp_range'=19 84 | 'conformance'=20 85 | 'automatic'=21 86 | 'min'=22 87 | 'max'=23 88 | 'select'=24 89 | 'kbps'=25 90 | 'diagnostic'=26 91 | 'NAD'=27 92 | 'to'=28 93 | 'diagnostic_class'=29 94 | 'P2_min'=30 95 | 'ms'=31 96 | 'ST_min'=32 97 | 'N_As_timeout'=33 98 | 'N_Cr_timeout'=34 99 | 'support_sid'=35 100 | 'max_message_length'=36 101 | 'frames'=37 102 | 'publish'=38 103 | 'subscribe'=39 104 | 
'length'=40 105 | 'min_period'=41 106 | 'max_period'=42 107 | 'event_triggered_frame'=43 108 | 'signals'=44 109 | 'size'=45 110 | 'offset'=46 111 | 'init_value'=47 112 | 'encoding'=48 113 | 'logical_value'=49 114 | 'physical_value'=50 115 | 'bcd_value'=51 116 | 'ascii_value'=52 117 | 'status_management'=53 118 | 'response_error'=54 119 | 'fault_state_signals'=55 120 | 'free_text'=56 121 | -------------------------------------------------------------------------------- /pydbc/py3/ldf.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | T__22=23 24 | T__23=24 25 | T__24=25 26 | T__25=26 27 | T__26=27 28 | T__27=28 29 | T__28=29 30 | T__29=30 31 | T__30=31 32 | T__31=32 33 | T__32=33 34 | T__33=34 35 | T__34=35 36 | T__35=36 37 | T__36=37 38 | T__37=38 39 | T__38=39 40 | T__39=40 41 | T__40=41 42 | T__41=42 43 | T__42=43 44 | T__43=44 45 | T__44=45 46 | T__45=46 47 | T__46=47 48 | T__47=48 49 | T__48=49 50 | T__49=50 51 | T__50=51 52 | T__51=52 53 | T__52=53 54 | T__53=54 55 | T__54=55 56 | T__55=56 57 | T__56=57 58 | T__57=58 59 | C_IDENTIFIER=59 60 | FLOAT=60 61 | INT=61 62 | HEX=62 63 | WS=63 64 | COMMENT=64 65 | STRING=65 66 | SIGN=66 67 | 'LIN_description_file'=1 68 | ';'=2 69 | 'LIN_protocol_version'=3 70 | '='=4 71 | 'LIN_language_version'=5 72 | 'LDF_file_revision'=6 73 | 'LIN_speed'=7 74 | 'kbps'=8 75 | 'Channel_name'=9 76 | 'Nodes'=10 77 | '{'=11 78 | 'Master'=12 79 | ':'=13 80 | ','=14 81 | 'ms'=15 82 | 'bits'=16 83 | '%'=17 84 | 'Slaves'=18 85 | '}'=19 86 | 'Node_attributes'=20 87 | 'LIN_protocol'=21 88 | 'configured_NAD'=22 89 | 'initial_NAD'=23 90 | 'product_id'=24 91 | 'response_error'=25 92 | 'fault_state_signals'=26 93 | 'P2_min'=27 94 
| 'ST_min'=28 95 | 'N_As_timeout'=29 96 | 'N_Cr_timeout'=30 97 | 'response_tolerance'=31 98 | 'configurable_frames'=32 99 | 'composite'=33 100 | 'configuration'=34 101 | 'Signals'=35 102 | 'Diagnostic_signals'=36 103 | 'Signal_groups'=37 104 | 'Frames'=38 105 | 'Sporadic_frames'=39 106 | 'Event_triggered_frames'=40 107 | 'Diagnostic_frames'=41 108 | 'MasterReq'=42 109 | 'SlaveResp'=43 110 | 'Schedule_tables'=44 111 | 'delay'=45 112 | 'AssignNAD'=46 113 | 'ConditionalChangeNAD'=47 114 | 'DataDump'=48 115 | 'SaveConfiguration'=49 116 | 'AssignFrameIdRange'=50 117 | 'FreeFormat'=51 118 | 'AssignFrameId'=52 119 | 'Signal_encoding_types'=53 120 | 'logical_value'=54 121 | 'physical_value'=55 122 | 'bcd_value'=56 123 | 'ascii_value'=57 124 | 'Signal_representation'=58 125 | -------------------------------------------------------------------------------- /pydbc/py3/ldfLexer.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | T__22=23 24 | T__23=24 25 | T__24=25 26 | T__25=26 27 | T__26=27 28 | T__27=28 29 | T__28=29 30 | T__29=30 31 | T__30=31 32 | T__31=32 33 | T__32=33 34 | T__33=34 35 | T__34=35 36 | T__35=36 37 | T__36=37 38 | T__37=38 39 | T__38=39 40 | T__39=40 41 | T__40=41 42 | T__41=42 43 | T__42=43 44 | T__43=44 45 | T__44=45 46 | T__45=46 47 | T__46=47 48 | T__47=48 49 | T__48=49 50 | T__49=50 51 | T__50=51 52 | T__51=52 53 | T__52=53 54 | T__53=54 55 | T__54=55 56 | T__55=56 57 | T__56=57 58 | T__57=58 59 | C_IDENTIFIER=59 60 | FLOAT=60 61 | INT=61 62 | HEX=62 63 | WS=63 64 | COMMENT=64 65 | STRING=65 66 | SIGN=66 67 | 'LIN_description_file'=1 68 | ';'=2 69 | 'LIN_protocol_version'=3 70 | '='=4 71 | 'LIN_language_version'=5 72 | 
'LDF_file_revision'=6 73 | 'LIN_speed'=7 74 | 'kbps'=8 75 | 'Channel_name'=9 76 | 'Nodes'=10 77 | '{'=11 78 | 'Master'=12 79 | ':'=13 80 | ','=14 81 | 'ms'=15 82 | 'bits'=16 83 | '%'=17 84 | 'Slaves'=18 85 | '}'=19 86 | 'Node_attributes'=20 87 | 'LIN_protocol'=21 88 | 'configured_NAD'=22 89 | 'initial_NAD'=23 90 | 'product_id'=24 91 | 'response_error'=25 92 | 'fault_state_signals'=26 93 | 'P2_min'=27 94 | 'ST_min'=28 95 | 'N_As_timeout'=29 96 | 'N_Cr_timeout'=30 97 | 'response_tolerance'=31 98 | 'configurable_frames'=32 99 | 'composite'=33 100 | 'configuration'=34 101 | 'Signals'=35 102 | 'Diagnostic_signals'=36 103 | 'Signal_groups'=37 104 | 'Frames'=38 105 | 'Sporadic_frames'=39 106 | 'Event_triggered_frames'=40 107 | 'Diagnostic_frames'=41 108 | 'MasterReq'=42 109 | 'SlaveResp'=43 110 | 'Schedule_tables'=44 111 | 'delay'=45 112 | 'AssignNAD'=46 113 | 'ConditionalChangeNAD'=47 114 | 'DataDump'=48 115 | 'SaveConfiguration'=49 116 | 'AssignFrameIdRange'=50 117 | 'FreeFormat'=51 118 | 'AssignFrameId'=52 119 | 'Signal_encoding_types'=53 120 | 'logical_value'=54 121 | 'physical_value'=55 122 | 'bcd_value'=56 123 | 'ascii_value'=57 124 | 'Signal_representation'=58 125 | -------------------------------------------------------------------------------- /docs/getting-started.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | This guide helps you install pyDBC and create your first network database using its high‑level creational APIs. 4 | 5 | ## Installation 6 | 7 | pyDBC uses Poetry for development and dependency management. You can also install it into an existing environment. 
8 | 9 | - Python: 3.10+ 10 | - SQLAlchemy: 2.0+ 11 | 12 | Using Poetry in a clone of the repository: 13 | 14 | ```powershell 15 | # from the project root 16 | poetry install 17 | 18 | # run tests (if any) 19 | poetry run pytest 20 | ``` 21 | 22 | Using pip directly (when published): 23 | 24 | ```powershell 25 | pip install pydbc 26 | ``` 27 | 28 | ## Quickstart: Create a CAN DBC in memory 29 | 30 | ```python 31 | from pydbc.api.dbc import DBCCreator 32 | 33 | # create an in‑memory DB 34 | dbc = DBCCreator(":memory:") 35 | 36 | # nodes (ECUs) 37 | engine = dbc.create_node("Engine") 38 | gateway = dbc.create_node("Gateway") 39 | 40 | # message 41 | engine_data = dbc.create_message("EngineData", message_id=100, dlc=8, sender=engine) 42 | 43 | # signals 44 | petrol_level = dbc.create_signal( 45 | "PetrolLevel", bitsize=8, 46 | byteorder=1, sign=1, 47 | formula_factor=1.0, formula_offset=0.0, 48 | minimum=0, maximum=255, unit="l", 49 | ) 50 | 51 | # assign signal to message at bit offset 24 52 | dbc.add_signal_to_message(engine_data, petrol_level, offset=24) 53 | 54 | # commit to persist 55 | dbc.commit() 56 | ``` 57 | 58 | ## Quickstart: Create a LIN network (LDF) 59 | 60 | ```python 61 | from pydbc.api.ldf import LDFCreator 62 | 63 | ldf = LDFCreator(":memory:") 64 | network = ldf.create_network("LINNetwork1", protocol_version="2.1", speed=19.2) 65 | master = ldf.create_master_node("MasterECU", timebase=0.005, jitter=0.0001) 66 | signal = ldf.create_signal("MotorSpeed", signal_size=16, init_value=0, publisher=master) 67 | frame = ldf.create_unconditional_frame("MasterFrame", frame_id=0x10, size=2, publisher=master) 68 | ldf.add_signal_to_frame(frame, signal, signal_offset=0) 69 | ldf.commit() 70 | ``` 71 | 72 | ## Quickstart: Create a vehicle configuration (NCF) 73 | 74 | ```python 75 | from pydbc.api.ncf import NCFCreator 76 | 77 | ncf = NCFCreator(":memory:") 78 | vehicle = ncf.create_vehicle("TestVehicle") 79 | can_network = ncf.create_network("CANNetwork", 
protocol="CAN", speed=500) 80 | engine_ecu = ncf.create_ecu("EngineECU") 81 | 82 | ncf.add_network_to_vehicle(vehicle, can_network) 83 | ncf.add_ecu_to_vehicle(vehicle, engine_ecu) 84 | 85 | ncf.commit() 86 | ``` 87 | 88 | ## Where to go next 89 | 90 | - Tutorial: docs/tutorial.md — Build a small but complete model step‑by‑step. 91 | - How‑to Guides: docs/how-to.md — Task‑oriented recipes (import/export, parsing, querying). 92 | - API Reference: docs/api-reference.md — Classes and methods. 93 | -------------------------------------------------------------------------------- /pydbc/template.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | (C) 2010-2018 by Christoph Schueler 7 | 8 | All Rights Reserved 9 | 10 | This program is free software; you can redistribute it and/or modify 11 | it under the terms of the GNU General Public License as published by 12 | the Free Software Foundation; either version 2 of the License, or 13 | (at your option) any later version. 14 | 15 | This program is distributed in the hope that it will be useful, 16 | but WITHOUT ANY WARRANTY; without even the implied warranty of 17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 18 | GNU General Public License for more details. 19 | 20 | You should have received a copy of the GNU General Public License along 21 | with this program; if not, write to the Free Software Foundation, Inc., 22 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23 | 24 | s. FLOSS-EXCEPTION.txt 25 | """ 26 | __author__ = 'Christoph Schueler' 27 | __version__ = '0.9' 28 | 29 | ## 30 | ## Convenience functions for Mako Templates. 
31 | ## 32 | 33 | from io import StringIO 34 | import re 35 | 36 | from mako.template import Template 37 | from mako.runtime import Context 38 | from mako import exceptions 39 | 40 | 41 | indentText = lambda text, leftmargin = 0: '\n'.join(["%s%s" % ((" " * leftmargin), line, ) for line in text.splitlines()]) 42 | 43 | def renderTemplate(filename, namespace = {}, leftMargin = 0, rightMargin = 80, formatExceptions = False, encoding = 'utf-8'): 44 | # TODO: filename und text parameter ==> Nur noch EINE funktion!!! 45 | buf = StringIO() 46 | ctx = Context(buf, **namespace) 47 | try: 48 | tobj = Template(filename = filename, output_encoding = encoding, format_exceptions = formatExceptions) 49 | tobj.render_context(ctx) 50 | except: 51 | print(exceptions.text_error_template().render()) 52 | return None 53 | ##return strings.reformat(buf.getvalue(), leftMargin, rightMargin) 54 | return buf.getvalue() 55 | 56 | def renderTemplateFromText(tmpl, namespace = {}, leftMargin = 0, rightMargin = 80, formatExceptions = True, encoding = 'utf-8'): 57 | buf = StringIO() 58 | ctx = Context(buf, **namespace) 59 | try: 60 | tobj = Template(text = tmpl, output_encoding = encoding, format_exceptions = formatExceptions) 61 | tobj.render_context(ctx) 62 | except: 63 | print(exceptions.text_error_template().render()) 64 | return None 65 | return indentText(buf.getvalue(), leftMargin) #, rightMargin) 66 | 67 | 68 | def callDef(template, definition, *args, **kwargs): 69 | return template.get_def(definition).render(*args, **kwargs) 70 | 71 | -------------------------------------------------------------------------------- /pydbc/logger.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 
6 | 7 | (C) 2010-2020 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.1.0' 29 | 30 | import logging 31 | import os 32 | 33 | #logging.basicConfig() 34 | 35 | class Logger(object): 36 | 37 | LOGGER_BASE_NAME = 'pydbc' 38 | FORMAT = "[%(levelname)s (%(name)s)]: %(message)s" 39 | 40 | def __init__(self, name, level = logging.WARN): 41 | self.logger = logging.getLogger("{0}.{1}".format(self.LOGGER_BASE_NAME, name)) 42 | self.setLevel(level) 43 | self.handler = logging.StreamHandler() 44 | #self.handler.setLevel(level) 45 | self.formatter = logging.Formatter(self.FORMAT) 46 | self.handler.setFormatter(self.formatter) 47 | self.logger.addHandler(self.handler) 48 | self.lastMessage = None 49 | self.lastSeverity = None 50 | 51 | def getLastError(self): 52 | result = (self.lastSeverity, self.lastMessage) 53 | self.lastSeverity = self.lastMessage = None 54 | return result 55 | 56 | def log(self, message, level): 57 | self.lastSeverity = level 58 | self.lastMessage = message 59 | self.logger.log(level, message) 60 | 61 | def info(self, message): 62 | self.log(message, logging.INFO) 63 | 64 | def warn(self, message): 65 | self.log(message, 
logging.WARN) 66 | 67 | def debug(self, message): 68 | self.log(message, logging.DEBUG) 69 | 70 | def error(self, message): 71 | self.log(message, logging.ERROR) 72 | 73 | def critical(self, message): 74 | self.log(message, logging.CRITICAL) 75 | 76 | def verbose(self): 77 | self.logger.setLevel(logging.DEBUG) 78 | 79 | def silent(self): 80 | self.logger.setLevel(logging.CRITICAL) 81 | 82 | def setLevel(self, level): 83 | LEVEL_MAP = { 84 | "INFO": logging.INFO, 85 | "WARN": logging.WARN, 86 | "DEBUG": logging.DEBUG, 87 | "ERROR": logging.ERROR, 88 | "CRITICAL": logging.CRITICAL, 89 | } 90 | if isinstance(level, str): 91 | level = LEVEL_MAP.get(level.upper(), logging.WARN) 92 | self.logger.setLevel(level) 93 | 94 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | version: 1.0.{build} 2 | 3 | environment: 4 | 5 | matrix: 6 | 7 | # For Python versions available on Appveyor, see 8 | # http://www.appveyor.com/docs/installed-software#python 9 | # The list here is complete (excluding Python 2.6, which 10 | # isn't covered by this document) at the time of writing. 
11 | 12 |     - PYTHON: "C:\\Python35" 13 | #- PYTHON: "C:\\Python34" 14 | - PYTHON: "C:\\Python36" 15 | #- PYTHON: "C:\\Python34-x64" 16 | - PYTHON: "C:\\Python35-x64" 17 | - PYTHON: "C:\\Python36-x64" 18 | 19 | install: 20 | # We need wheel installed to build wheels 21 | - cmd: set PATH=%PATH%;%PYTHON%\Scripts 22 | - "%PYTHON%\\python.exe -m pip install --upgrade pip" 23 | - "%PYTHON%\\python.exe -m pip install wheel" 24 | - "%PYTHON%\\python.exe -m pip install -r requirements_testings.txt" 25 | - "%PYTHON%\\python.exe -m pip install pytest-cov coverage coveralls codacy-coverage" 26 | - cmd: curl -O https://www.antlr.org/download/antlr-4.7.2-complete.jar 27 | - cmd: set CLASSPATH = "%APPVEYOR_BUILD_FOLDER%\antlr-4.7.2-complete.jar" 28 | - cmd: set CLASSPATH 29 | - cmd: dir 30 | - cmd: cd pydbc 31 | - cmd: java -jar ..\antlr-4.7.2-complete.jar -Dlanguage=Python3 -long-messages -visitor dbc.g4 -o ./py3/ 32 | - cmd: java -jar ..\antlr-4.7.2-complete.jar -Dlanguage=Python3 -long-messages -visitor ncf.g4 -o ./py3/ 33 | - cmd: java -jar ..\antlr-4.7.2-complete.jar -Dlanguage=Python3 -long-messages -visitor ldf.g4 -o ./py3/ 34 | - cmd: cd .. 35 | 36 | 37 | build: off 38 | # mode: Script 39 | # verbosity: normal 40 | 41 | test_script: 42 | # Put your test command here. 43 | # If you don't need to build C extensions on 64-bit Python 3.3 or 3.4, 44 | # you can remove "build.cmd" from the front of the command, as it's 45 | # only needed to support those cases. 46 | # Note that you must use the environment variable %PYTHON% to refer to 47 | # the interpreter you're using - Appveyor does not do anything special 48 | # to put the Python version you want to use on PATH. 49 | - "%PYTHON%\\python.exe setup.py test" 50 | #- "%PYTHON%\\scripts\\coveralls" 51 | #- "%PYTHON%\\scripts\\coverage xml" 52 | #- "%PYTHON%\\scripts\\python-codacy-coverage -r coverage.xml" 53 | 54 | 55 | after_test: 56 | # This step builds your wheels. 
57 | # Again, you only need build.cmd if you're building C extensions for 58 | # 64-bit Python 3.3/3.4. And you need to use %PYTHON% to get the correct 59 | # interpreter 60 | - "%PYTHON%\\python.exe setup.py bdist_wheel" 61 | - "%PYTHON%\\python.exe setup.py sdist --formats=zip,gztar" 62 | 63 | artifacts: 64 | # bdist_wheel puts your built wheel in the dist directory 65 | - path: dist\* 66 | 67 | #on_success: 68 | # You can use this step to upload your artifacts to a public website. 69 | # See Appveyor's documentation for more details. Or you can simply 70 | # access your wheels from the Appveyor "artifacts" tab for your build. 71 | 72 | #deploy: 73 | #- provider: GitHub 74 | # artifact: '*.*' 75 | # description: Test release -- do not use 76 | # auth_token: 77 | # secure: Wiweaot1JAHzV9mMZ3Zhn/5XUuLe5XYAyfpRZmWNpLW7fMW1FhAdoRliiAhz3Sv6 78 | # draft: false 79 | # prerelease: true 80 | 81 | -------------------------------------------------------------------------------- /docs/code-generation.md: -------------------------------------------------------------------------------- 1 | # Code Generation (Mako) 2 | 3 | This guide shows how to generate standalone applications from your pyDBC session using Mako templates. 4 | The generated code embeds your CAN database (messages and signals); no runtime database is required. 5 | 6 | Currently supported generators: 7 | - MicroPython CAN application (Python script) 8 | - Linux SocketCAN C program 9 | 10 | ## Prerequisites 11 | - A pyDBC SQLAlchemy session containing your messages/signals (via DBCCreator or ParserWrapper). 
12 | 13 | ## Usage 14 | 15 | ```python 16 | from pathlib import Path 17 | from pydbc.api.dbc import DBCCreator 18 | from pydbc.cgen.generators import MicroPythonCanAppGenerator, SocketCanCGenerator 19 | 20 | # Build a tiny in-memory session (or parse an existing DBC to get a session) 21 | dbc = DBCCreator(":memory:") 22 | node = dbc.create_node("Engine") 23 | dbc.create_message("EngineData", message_id=0x100, dlc=8, sender=node) 24 | dbc.commit() 25 | session = dbc.session 26 | 27 | mpy = MicroPythonCanAppGenerator(session) 28 | py_code = mpy.render(app_name="mpy_app", only=["EngineData"]) # optional filter by message names 29 | Path("micropython_app.py").write_text(py_code, encoding="utf-8") 30 | 31 | sc = SocketCanCGenerator(session) 32 | c_code = sc.render(program_name="socketcan_app", only=["EngineData"]) # optional filter 33 | Path("socketcan_app.c").write_text(c_code, encoding="utf-8") 34 | ``` 35 | 36 | See a ready-to-run example at: 37 | - pydbc/examples/generate_code_examples.py 38 | 39 | ## MicroPython CAN app 40 | The generated micropython_app.py includes: 41 | - Little-endian (Intel) bit packing/unpacking helpers 42 | - Embedded message/signal metadata 43 | - encode_message_by_name(name, values) -> (id, data, dlc) 44 | - decode_message_by_id(id, data) -> {"message": name, "signals": {...}} 45 | - A simple demo_send_once() that attempts to send one frame using machine.CAN or pyb.CAN 46 | 47 | Note: You may need to adapt CAN initialization parameters (prescaler, timing) to your board. 
48 | 49 | ## Linux SocketCAN C program 50 | The generated socketcan_app.c includes: 51 | - Little-endian bit packing/unpacking helpers 52 | - Embedded message/signal metadata tables 53 | - encode_message/decode_message functions using parallel double arrays 54 | - A minimal main() that opens the given CAN interface, sends the first embedded message (zeroed values), and waits for a frame to decode 55 | 56 | Build example: 57 | ```bash 58 | gcc -O2 -Wall -o socketcan_app socketcan_app.c 59 | sudo ./socketcan_app vcan0 60 | ``` 61 | 62 | To create a virtual CAN interface (Linux): 63 | ```bash 64 | sudo modprobe vcan 65 | sudo ip link add dev vcan0 type vcan 66 | sudo ip link set up vcan0 67 | ``` 68 | 69 | ## Limitations 70 | - Signals assumed little-endian (Intel). Big-endian (Motorola) signals are not yet generated. 71 | - Multiplexed signals are not yet handled specially. 72 | - DLC up to 8 supported in the simple templates (CAN FD can be added later). 73 | 74 | ## Extending 75 | The templates live under pydbc/cgen/templates. You can copy and customize them or contribute improvements: 76 | - pydbc/cgen/templates/micropython_can.py.tmpl 77 | - pydbc/cgen/templates/socketcan.c.tmpl 78 | 79 | Rendering is done via pydbc.template.renderTemplateFromText, and templates are packaged via pkgutil.get_data. 80 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at cpu12.gems@googlemail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /pydbc/examples/create_candb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2020 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 
20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. FLOSS-EXCEPTION.txt 26 | """ 27 | 28 | import pydbc 29 | from pydbc.types import BusType 30 | import pydbc.db.model as model 31 | from pydbc.db import VNDB 32 | 33 | from sqlalchemy import event 34 | 35 | @event.listens_for(model.Message.message_signals, 'append', retval = True) 36 | def my_append_listener(target, value, initiator): 37 | print("target: {} \n\tvalue: {}\n".format(target, value, initiator)) 38 | return value 39 | 40 | #event.listen(model.Message.message_signals, 'append', my_append_listener) 41 | 42 | DEBUG = False 43 | 44 | 45 | def main(): 46 | #cdb = VNDB.create("create_candb", debug = DEBUG) 47 | cdb = VNDB.create(":memory:", debug = DEBUG) 48 | #cdb = VNDB.open("create_candb", debug = DEBUG) 49 | session = cdb.session 50 | 51 | engine = model.Node(name = "Engine") 52 | gateway = model.Node(name = "Gateway") 53 | session.add_all([engine, gateway]) 54 | 55 | 56 | engine_data = model.Message(name = "EngineData", message_id = 100, dlc = 8, sender = engine.rid) 57 | session.add(engine_data) 58 | 59 | petrolLevel = model.Signal(name = "PetrolLevel", bitsize = 8, byteorder = 1, 60 | sign = +1, formula_factor = 1.0, formula_offset = 0.0, minimum = 0, maximum = 255, unit = "l" 61 | ) 62 | engPower = model.Signal(name = "EngPower", bitsize = 16, byteorder = 1, 63 | sign = +1, formula_factor = 0.01, formula_offset = 0.0, minimum = 0, maximum = 150, unit = "kw" 64 | ) 65 | engForce = model.Signal(name = "EngForce", bitsize = 16, byteorder = 1, 66 | sign = +1, formula_factor = 1.0, formula_offset = 0.0, minimum = 0, maximum = 0, unit = "N" 67 | ) 68 | session.add_all([petrolLevel, engPower, engForce]) 69 | 70 | ms0 = model.Message_Signal(message = engine_data, signal = petrolLevel, offset = 24) 71 | ms1 = 
model.Message_Signal(message = engine_data, signal = engPower, offset = 48) 72 | ms2 = model.Message_Signal(message = engine_data, signal = engForce, offset = 32) 73 | 74 | #petrolLevel.receiver.append(gateway) 75 | session.add_all([ms0, ms1, ms2]) 76 | 77 | v0 = model.Value_Description(value = 0, value_description = "Running") 78 | v1 = model.Value_Description(value = 1, value_description = "Idle") 79 | vtgIdleRunning = model.Valuetable(name = "vtgIdleRunning", values = [v0, v1]) 80 | session.add_all([v0, v1, vtgIdleRunning]) 81 | 82 | msgs = session.query(model.Message).all() 83 | for msg in msgs: 84 | print(msg.signals) 85 | 86 | session.flush() 87 | session.commit() 88 | 89 | if __name__ == '__main__': 90 | main() 91 | 92 | """ 93 | 94 | 95 | """ 96 | -------------------------------------------------------------------------------- /pydbc/asam/types.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__=""" 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2009-2018 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
import struct

#: `struct` byte-order prefix for little-endian (Intel) layout.
INTEL = "<"
#: `struct` byte-order prefix for big-endian (Motorola) layout.
MOTOROLA = ">"

"""
A_VOID: pseudo type for non-existing elements
A_BIT: one bit
A_ASCIISTRING: string, ISO-8859-1 encoded
A_UTF8STRING: string, UTF-8 encoded
A_UNICODE2STRING: string, UCS-2 encoded
A_BYTEFIELD: Field of bytes
"""

class AsamBaseType(object):
    """Base class for ASAM codecs.

    Derived classes must define ``FMT``, a single :mod:`struct` format
    character (e.g. ``"H"`` for an unsigned 16-bit integer).

    Note
    ----
    Always use derived classes.
    """

    def __init__(self, byteorder):
        """
        Parameters
        ----------
        byteorder: char {'<', '>'}
            - '<' Little-endian (Intel)
            - '>' Big-endian (Motorola)

        Raises
        ------
        ValueError
            If `byteorder` is neither '<' nor '>'.
        """
        # Idiom fix: `x not in s` instead of `not x in s` (PEP 8 / E713).
        if byteorder not in ("<", ">"):
            raise ValueError("Invalid byteorder.")
        self.byteorder = byteorder

    def encode(self, value):
        """Encode a value.

        Encode means convert a value, eg. an integer, to a byte-string.

        Parameters
        ----------
        value: data-type
            data-type is determined by derived class.

        Returns
        -------
        bytes
            Encoded value.
        """
        return struct.pack("{}{}".format(self.byteorder, self.FMT), value)

    def decode(self, value):
        """Decode a value.

        Decode means convert a byte-string to a meaningful data-type, eg. an integer.

        Parameters
        ----------
        value: bytes
            Any bytes-convertible object is accepted (hence `bytes(value)`).

        Returns
        -------
        data-type
            data-type is determined by derived class.
        """
        return struct.unpack("{}{}".format(self.byteorder, self.FMT), bytes(value))[0]


class A_Uint8(AsamBaseType):
    """ASAM A_UINT8 codec.
    """
    FMT = "B"


class A_Uint16(AsamBaseType):
    """ASAM A_UINT16 codec.
    """
    FMT = "H"


class A_Uint32(AsamBaseType):
    """ASAM A_UINT32 codec.
    """
    FMT = "I"


class A_Uint64(AsamBaseType):
    """ASAM A_UINT64 codec.
    """
    FMT = "Q"


class A_Int8(AsamBaseType):
    """ASAM A_INT8 codec.
    """
    FMT = "b"


class A_Int16(AsamBaseType):
    """ASAM A_INT16 codec.
    """
    FMT = "h"


class A_Int32(AsamBaseType):
    """ASAM A_INT32 codec.
    """
    FMT = "i"


class A_Int64(AsamBaseType):
    """ASAM A_INT64 codec.
    """
    FMT = "q"


class A_Float32(AsamBaseType):
    """ASAM A_FLOAT32 codec.
    """
    FMT = "f"


class A_Float64(AsamBaseType):
    """ASAM A_FLOAT64 codec.
    """
    FMT = "d"
33 | - The returned object is an SQLAlchemy session bound to the internal VNDB. You can attach DBCCreator to this session with DBCCreator.from_session. 34 | 35 | ## Export the current database as DBC/LDF text 36 | 37 | ```python 38 | from pydbc.db.imex import DbcExporter, LdfExporter 39 | 40 | DbcExporter(":memory:").run() # produces testfile.txt.render 41 | LdfExporter(":memory:").run() 42 | ``` 43 | 44 | To export a specific on‑disk VNDB instead of ":memory:", pass a Path to the .vndb file stem (see imex.Exporter for behavior), or adapt the exporter to your workflow. 45 | 46 | ## Create and link signals and messages (DBC) 47 | 48 | ```python 49 | from pydbc.api.dbc import DBCCreator 50 | 51 | dbc = DBCCreator(":memory:") 52 | node = dbc.create_node("Sender") 53 | msg = dbc.create_message("Msg", message_id=1, dlc=8, sender=node) 54 | sig = dbc.create_signal("Sig", bitsize=8) 55 | dbc.add_signal_to_message(msg, sig, offset=0) 56 | dbc.commit() 57 | ``` 58 | 59 | ## Add LIN subscribers to a signal (LDF) 60 | 61 | ```python 62 | from pydbc.api.ldf import LDFCreator 63 | 64 | ldf = LDFCreator(":memory:") 65 | master = ldf.create_master_node("Master", timebase=0.005, jitter=0.0001) 66 | slave = ldf.create_slave_node("Slave", configured_NAD=1) 67 | sig = ldf.create_signal("Status", signal_size=8, init_value=0, publisher=master) 68 | ldf.add_signal_subscriber(sig, slave) 69 | ldf.commit() 70 | ``` 71 | 72 | ## Add environment variables and access nodes (NCF) 73 | 74 | ```python 75 | from pydbc.api.ncf import NCFCreator 76 | 77 | ncf = NCFCreator(":memory:") 78 | node = ncf.create_node("GatewayNode") 79 | ecu = ncf.create_ecu("GatewayECU") 80 | var = ncf.create_env_var("VehicleSpeed", var_type="INT", unit="km/h", minimum=0, maximum=250, initial_value="0") 81 | 82 | ncf.add_env_var_to_ecu(ecu, var) 83 | ncf.add_access_node_to_env_var(var, node, access_type="readWrite") 84 | ncf.commit() 85 | ``` 86 | 87 | ## Query data with SQLAlchemy 88 | 89 | ```python 90 | from 
pydbc.db.model import Message, Signal 91 | from pydbc.api.dbc import DBCCreator 92 | 93 | session = DBCCreator(":memory:").session 94 | print(session.query(Message).count()) 95 | ``` 96 | 97 | ## Use python-can to send/receive 98 | 99 | See docs/python-can.md for a complete guide. Quick start: 100 | 101 | ```python 102 | from pydbc.integrations.python_can import PythonCanSender, PythonCanReceiver, encode_message 103 | 104 | # session: SQLAlchemy session after building/parsing your DBC 105 | sender = PythonCanSender(bustype='virtual') 106 | receiver = PythonCanReceiver(session, bustype='virtual') 107 | 108 | arb_id, data, dlc = encode_message(session, 'YourMessage', {'YourSignal': 1.0}) 109 | sender.send(arb_id, data) 110 | frame = receiver.recv(timeout=1.0) 111 | ``` 112 | 113 | ## Troubleshooting 114 | 115 | - If you encounter unexpected characters while parsing, verify the input encoding. You can pre‑open the file with the correct codec and call parseFromString. 116 | - Multiplexing in DBC: use add_signal_to_message with multiplexor_signal/multiplex_dependent/multiplexor_value parameters when modeling multiplexed signals. 117 | - Windows paths: In Python strings, escape backslashes (e.g., "C:\\Users\\...\\file.dbc"). 118 | -------------------------------------------------------------------------------- /docs/examples.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | The following examples mirror and extend pydbc/examples/api_examples.py and demonstrate how to use the high‑level APIs. 
4 | 5 | Run examples interactively (Windows PowerShell): 6 | 7 | ```powershell 8 | poetry run python -i pydbc\examples\api_examples.py 9 | ``` 10 | 11 | Or import and call the functions: 12 | 13 | ```python 14 | from pydbc.examples.api_examples import dbc_example, ldf_example, ncf_example 15 | 16 | dbc_example() 17 | ldf_example() 18 | ncf_example() 19 | ``` 20 | 21 | ## DBC example (CAN) 22 | 23 | ```python 24 | from pydbc.api.dbc import DBCCreator 25 | 26 | dbc = DBCCreator(":memory:") 27 | engine = dbc.create_node("Engine") 28 | gateway = dbc.create_node("Gateway") 29 | engine_data = dbc.create_message("EngineData", 100, 8, engine) 30 | 31 | petrol_level = dbc.create_signal( 32 | "PetrolLevel", 8, 33 | byteorder=1, sign=1, 34 | formula_factor=1.0, formula_offset=0.0, 35 | minimum=0, maximum=255, unit="l", 36 | ) 37 | 38 | vehicle_speed = dbc.create_signal( 39 | "VehicleSpeed", 16, 40 | byteorder=1, sign=1, 41 | formula_factor=0.1, formula_offset=0.0, 42 | minimum=0, maximum=300, unit="km/h", 43 | ) 44 | 45 | dbc.add_signal_to_message(engine_data, petrol_level, 24) 46 | dbc.add_signal_to_message(engine_data, vehicle_speed, 0) 47 | 48 | # optional: receivers 49 | # dbc.add_node_as_receiver(vehicle_speed, gateway) 50 | 51 | dbc.commit() 52 | dbc.close() 53 | ``` 54 | 55 | ## LDF example (LIN) 56 | 57 | ```python 58 | from pydbc.api.ldf import LDFCreator 59 | 60 | ldf = LDFCreator(":memory:") 61 | network = ldf.create_network("LINNetwork1", protocol_version="2.1", speed=19.2) 62 | master = ldf.create_master_node("MasterECU", timebase=0.005, jitter=0.0001) 63 | slave1 = ldf.create_slave_node("SlaveNode1", protocol_version="2.1", configured_NAD=1, initial_NAD=1) 64 | 65 | motor_speed = ldf.create_signal("MotorSpeed", signal_size=16, init_value=0, publisher=master) 66 | frame = ldf.create_unconditional_frame("MasterFrame", frame_id=0x10, size=2, publisher=master) 67 | ldf.add_signal_to_frame(frame, motor_speed, 0) 68 | 69 | ldf.commit() 70 | ldf.close() 71 | ``` 72 | 
73 | ## NCF example (Vehicle config) 74 | 75 | ```python 76 | from pydbc.api.ncf import NCFCreator 77 | 78 | ncf = NCFCreator(":memory:") 79 | vehicle = ncf.create_vehicle("TestVehicle") 80 | can_network = ncf.create_network("CANNetwork", protocol="CAN", speed=500) 81 | engine_ecu = ncf.create_ecu("EngineECU") 82 | engine_node = ncf.create_node("EngineNode") 83 | 84 | ncf.add_network_to_vehicle(vehicle, can_network) 85 | ncf.add_ecu_to_vehicle(vehicle, engine_ecu) 86 | ncf.add_node_to_network(can_network, engine_node) 87 | ncf.add_node_to_ecu(engine_ecu, engine_node) 88 | 89 | ncf.commit() 90 | ncf.close() 91 | ``` 92 | 93 | ## python-can integration example 94 | 95 | ```python 96 | # Run the dedicated example script 97 | # poetry run python pydbc\examples\python_can_examples.py 98 | ``` 99 | 100 | ## Code generation examples 101 | 102 | See pydbc/examples/generate_code_examples.py to generate: 103 | - MicroPython CAN app (micropython_app.py) 104 | - Linux SocketCAN C program (socketcan_app.c) 105 | 106 | Run: 107 | ```powershell 108 | poetry run python pydbc\examples\generate_code_examples.py 109 | ``` 110 | 111 | ## Exporters 112 | 113 | ```python 114 | from pydbc.db.imex import DbcExporter, LdfExporter 115 | 116 | DbcExporter(":memory:").run() 117 | LdfExporter(":memory:").run() 118 | ``` 119 | 120 | The exporters use templates in pydbc/cgen/templates and render a text file named testfile.txt.render. 
121 | 122 | 123 | ## Import a .dbc and query messages 124 | 125 | ```python 126 | from pydbc.api.imports import import_dbc 127 | from pydbc.db.model import Message 128 | 129 | # Parse the DBC file and get a SQLAlchemy session 130 | session = import_dbc("C:\\path\\to\\file.dbc") 131 | 132 | # Count all messages 133 | print("Messages:", session.query(Message).count()) 134 | 135 | # List message names and IDs 136 | for m in session.query(Message).order_by(Message.message_id).all(): 137 | print(f"0x{m.message_id:X} {m.name}") 138 | ``` 139 | 140 | ## Open an existing .vndb and query messages/signals 141 | 142 | ```python 143 | from pydbc.api.imports import open_vndb 144 | from pydbc.db.model import Message 145 | 146 | vndb = open_vndb("C:\\path\\to\\database.vndb") 147 | session = vndb.session 148 | 149 | # Find one message and print its signals 150 | msg = session.query(Message).filter_by(name="EngineData").first() 151 | if msg: 152 | print("Message:", msg.name, hex(msg.message_id), "dlc=", msg.dlc) 153 | for ms in msg.signals: # Message_Signal association 154 | s = ms.signal 155 | print(f" {s.name}: start={ms.offset} size={s.bitsize} unit={s.unit}") 156 | ``` 157 | -------------------------------------------------------------------------------- /pydbc/cgen/generators.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Code generators for standalone applications using Mako templates. 5 | 6 | This module provides two generators: 7 | - MicroPython CAN application (Python script) using machine.CAN or pyb.CAN depending on platform. 8 | - Linux SocketCAN C source file with simple encode/decode and send/recv demo. 9 | 10 | Both generators work from an SQLAlchemy session populated by pydbc (either 11 | created via DBCCreator or returned by ParserWrapper/DbcListener). 
No runtime 12 | DB access is required by the generated code; all message/signal metadata is 13 | embedded at generation time. 14 | """ 15 | from __future__ import annotations 16 | 17 | from dataclasses import dataclass 18 | from typing import List, Optional, Dict, Any, Iterable 19 | import pkgutil 20 | 21 | from sqlalchemy.orm import Session 22 | 23 | from pydbc.db.model import Message, Message_Signal, Signal 24 | from pydbc.template import renderTemplateFromText 25 | 26 | 27 | @dataclass 28 | class CGSignal: 29 | name: str 30 | start_bit: int 31 | size: int 32 | little_endian: bool 33 | signed: bool 34 | factor: float 35 | offset: float 36 | minimum: float 37 | maximum: float 38 | 39 | 40 | @dataclass 41 | class CGMessage: 42 | name: Optional[str] 43 | message_id: int 44 | dlc: int 45 | signals: List[CGSignal] 46 | 47 | 48 | def _collect_messages(session: Session, only: Optional[Iterable[str]] = None) -> List[CGMessage]: 49 | q = session.query(Message) 50 | if only: 51 | q = q.filter(Message.name.in_(list(only))) 52 | msgs: List[CGMessage] = [] 53 | for m in q.all(): 54 | sigs: List[CGSignal] = [] 55 | for ms in m.message_signals: 56 | s: Signal = ms.signal 57 | sigs.append( 58 | CGSignal( 59 | name=s.name, 60 | start_bit=ms.offset, 61 | size=s.bitsize, 62 | little_endian=True if (s.byteorder == 1) else False, 63 | signed=(s.sign == 0), # 0=signed, 1=unsigned in current model 64 | factor=s.formula_factor or 1.0, 65 | offset=s.formula_offset or 0.0, 66 | minimum=float(s.minimum) if s.minimum is not None else 0.0, 67 | maximum=float(s.maximum) if s.maximum is not None else 0.0, 68 | ) 69 | ) 70 | msgs.append( 71 | CGMessage( 72 | name=m.name, 73 | message_id=m.message_id or 0, 74 | dlc=m.dlc, 75 | signals=sigs, 76 | ) 77 | ) 78 | # Sort for stable output 79 | msgs.sort(key=lambda x: (x.message_id, x.name or "")) 80 | for msg in msgs: 81 | msg.signals.sort(key=lambda s: s.start_bit) 82 | return msgs 83 | 84 | 85 | class MicroPythonCanAppGenerator: 86 | """Generates a 
standalone MicroPython CAN app (.py) with encode/decode helpers. 87 | 88 | Usage: 89 | gen = MicroPythonCanAppGenerator(session) 90 | code = gen.render(only=["EngineData"]) # optional filter 91 | Path("micropython_app.py").write_text(code, encoding="utf-8") 92 | """ 93 | 94 | TEMPLATE = pkgutil.get_data("pydbc", "cgen/templates/micropython_can.py.tmpl") 95 | 96 | def __init__(self, session: Session): 97 | self._session = session 98 | 99 | def render(self, only: Optional[Iterable[str]] = None, app_name: str = "pydbc_mpy_app") -> str: 100 | msgs = _collect_messages(self._session, only) 101 | ns: Dict[str, Any] = { 102 | "app_name": app_name, 103 | "messages": msgs, 104 | } 105 | text = self.TEMPLATE.decode("utf-8") if isinstance(self.TEMPLATE, (bytes, bytearray)) else str(self.TEMPLATE) 106 | return renderTemplateFromText(text, ns, encoding="utf-8") 107 | 108 | 109 | class SocketCanCGenerator: 110 | """Generates a standalone Linux SocketCAN C program (.c) with encode/decode helpers. 111 | 112 | Usage: 113 | gen = SocketCanCGenerator(session) 114 | code = gen.render(only=["EngineData"]) # optional filter 115 | Path("socketcan_app.c").write_text(code, encoding="utf-8") 116 | """ 117 | 118 | TEMPLATE = pkgutil.get_data("pydbc", "cgen/templates/socketcan.c.tmpl") 119 | 120 | def __init__(self, session: Session): 121 | self._session = session 122 | 123 | def render(self, only: Optional[Iterable[str]] = None, program_name: str = "pydbc_socketcan_app") -> str: 124 | msgs = _collect_messages(self._session, only) 125 | ns: Dict[str, Any] = { 126 | "program_name": program_name, 127 | "messages": msgs, 128 | } 129 | text = self.TEMPLATE.decode("utf-8") if isinstance(self.TEMPLATE, (bytes, bytearray)) else str(self.TEMPLATE) 130 | return renderTemplateFromText(text, ns, encoding="utf-8") 131 | -------------------------------------------------------------------------------- /pydbc/utils.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env python
# -*- coding: utf-8 -*-

__copyright__ = """
pySART - Simplified AUTOSAR-Toolkit for Python.

(C) 2010-2021 by Christoph Schueler

All Rights Reserved

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

s. FLOSS-EXCEPTION.txt
"""
__author__ = "Christoph Schueler"
__version__ = "0.1.0"

# Standard-library imports, consolidated at the top of the module (they
# were previously scattered throughout the file body).
import ctypes
import itertools
import os
import pathlib
import subprocess
import sys
from unicodedata import normalize

if sys.version_info.major == 3:
    # NOTE: despite the name, this is a *bytes* buffer on Python 3.
    from io import BytesIO as StringIO
else:
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO


def slicer(iterable, sliceLength, converter=None):
    """Split *iterable* into consecutive chunks of *sliceLength* elements.

    Parameters
    ----------
    iterable: sequence
        Must support ``len()`` and slicing.
    sliceLength: int
        Chunk size; the final chunk may be shorter.
    converter: callable, optional
        Called with the *unpacked* elements of each chunk, e.g. a
        namedtuple class with ``sliceLength`` fields. When omitted, the
        raw slices are returned.

    Returns
    -------
    list
    """
    chunks = [
        iterable[start : start + sliceLength]
        for start in range(0, len(iterable), sliceLength)
    ]
    if converter is None:
        # BUG FIX: the old default (`converter = type(iterable)`) unpacked
        # each chunk into the sequence constructor -- e.g. `list(1, 2)` or
        # `str('a', 'b')` -- raising TypeError for plain lists, strings and
        # tuples. With no converter we now simply return the slices.
        return chunks
    return [converter(*chunk) for chunk in chunks]


def runningOnTravis():
    """Return True when executing inside a Travis CI build environment."""
    return os.environ.get("TRAVIS") == "true"


def createStringBuffer(*args):
    """Create a string with file-like behaviour (StringIO on Python 2.x)."""
    return StringIO(*args)


CYG_PREFIX = "/cygdrive/"


def cygpathToWin(path):
    """Convert a Cygwin-style path (``/cygdrive/c/...``) to a Windows path.

    Paths without the ``/cygdrive/`` prefix are returned unchanged.
    """
    if path.startswith(CYG_PREFIX):
        path = path[len(CYG_PREFIX) :]
        driveLetter = "{0}:\\".format(path[0])
        path = path[2:].replace("/", "\\")
        path = "{0}{1}".format(driveLetter, path)
    return path


class StructureWithEnums(ctypes.Structure):
    """Add missing enum feature to ctypes Structures.

    Subclasses populate ``_map`` with ``{field_name: EnumClass}`` entries;
    attribute access then converts raw field values into enum members.
    """

    _map = {}

    def __getattribute__(self, name):
        # Go through ctypes.Structure directly to avoid infinite recursion.
        _map = ctypes.Structure.__getattribute__(self, "_map")
        value = ctypes.Structure.__getattribute__(self, name)
        if name in _map:
            EnumClass = _map[name]
            if isinstance(value, ctypes.Array):
                return [EnumClass(x) for x in value]
            else:
                return EnumClass(value)
        else:
            return value

    def __str__(self):
        result = []
        result.append("struct {0} {{".format(self.__class__.__name__))
        for field in self._fields_:
            attr, attrType = field
            if attr in self._map:
                attrType = self._map[attr]
            value = getattr(self, attr)
            result.append(
                "    {0} [{1}] = {2!r};".format(attr, attrType.__name__, value)
            )
        result.append("};")
        return "\n".join(result)

    __repr__ = __str__


def flatten(*args):
    """Recursively flatten nested lists/tuples into one flat list.

    Elements that are not lists or tuples (including strings) are kept as-is.
    """
    result = []
    for arg in list(args):
        if isinstance(arg, (list, tuple)):
            result.extend(flatten(*arg))
        else:
            result.append(arg)
    return result


class CommandError(Exception):
    """Raised by :func:`runCommand` when the command exits with a non-zero status."""
    pass


def runCommand(cmd):
    """Run *cmd* in a shell and return its captured stdout (bytes).

    Raises
    ------
    CommandError
        If the command exits with a non-zero return code; the message is
        the command's stderr output.
    """
    proc = subprocess.Popen(
        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    # communicate() already waits for process termination; the old extra
    # proc.wait() call was redundant and has been removed.
    result = proc.communicate()
    if proc.returncode:
        raise CommandError("{0}".format(result[1]))
    return result[0]


def nfc_equal(str1, str2):
    """Case-sensitive equality after Unicode NFC normalization."""
    return normalize("NFC", str1) == normalize("NFC", str2)


def fold_equal(str1, str2):
    """Case-insensitive (casefolded) equality after Unicode NFC normalization."""
    return normalize("NFC", str1).casefold() == normalize("NFC", str2).casefold()


def detect_encoding(file_name: str) -> str:
    """Detect encoding of a text file.

    Parameters
    ----------
    file_name: str or path-like

    Returns
    -------
    str: Usable as `encoding` parameter to `open`.
    """
    # Deferred import: chardet is only needed here, so the rest of the
    # module stays usable without the third-party dependency installed.
    import chardet

    # BUG FIX: the old code converted only pathlib.WindowsPath to str,
    # silently skipping PosixPath; os.fspath() handles any path-like object.
    with open(os.fspath(file_name), "rb") as inf:
        data = inf.read()
    return chardet.detect(data).get("encoding")
18 | 19 | ## 1) Create a simple DBC and encode a frame 20 | 21 | ```python 22 | from pydbc.api.dbc import DBCCreator 23 | from pydbc.integrations.python_can import encode_message, decode_message 24 | 25 | # Build a tiny CAN database in memory 26 | dbc = DBCCreator(":memory:") 27 | engine = dbc.create_node("Engine") 28 | msg = dbc.create_message("EngineData", message_id=0x100, dlc=8, sender=engine) 29 | 30 | rpm = dbc.create_signal("EngineRPM", bitsize=16, byteorder=1, sign=1, formula_factor=0.25, formula_offset=0.0) 31 | speed = dbc.create_signal("VehicleSpeed", bitsize=16, byteorder=1, sign=1, formula_factor=0.1, formula_offset=0.0) 32 | temp = dbc.create_signal("CoolantTemp", bitsize=8, byteorder=1, sign=0, formula_factor=1.0, formula_offset=-40.0) 33 | 34 | dbc.add_signal_to_message(msg, rpm, 0) 35 | dbc.add_signal_to_message(msg, speed, 16) 36 | dbc.add_signal_to_message(msg, temp, 32) 37 | 38 | dbc.commit() 39 | session = dbc.session 40 | 41 | # Encode physical values -> CAN payload 42 | can_id, data, dlc = encode_message(session, "EngineData", { 43 | "EngineRPM": 3000.0, 44 | "VehicleSpeed": 123.4, 45 | "CoolantTemp": 90.0, 46 | }) 47 | print(f"Encoded: id=0x{can_id:X}, dlc={dlc}, data={data.hex()}") 48 | 49 | # Decode back to physical values 50 | decoded = decode_message(session, can_id, data) 51 | print(decoded) 52 | ``` 53 | 54 | Notes: 55 | - Currently, little-endian (Intel) signals are supported. Big-endian (Motorola) raises NotImplementedError. 56 | - sign=1 is treated as unsigned, sign=0 as signed. 57 | - Physical conversion: phys = raw * factor + offset. 
58 | 59 | ## 2) Send/receive with python-can virtual bus 60 | 61 | ```python 62 | from pydbc.integrations.python_can import PythonCanSender, PythonCanReceiver 63 | 64 | sender = PythonCanSender(bustype='virtual') 65 | receiver = PythonCanReceiver(session, bustype='virtual') 66 | 67 | arb_id, data, dlc = encode_message(session, 'EngineData', { 68 | 'EngineRPM': 2000.0, 69 | 'VehicleSpeed': 50.0, 70 | 'CoolantTemp': 85.0, 71 | }) 72 | 73 | sender.send(arb_id, data) 74 | frame = receiver.recv(timeout=1.0) 75 | if frame is not None: 76 | print(receiver.decode_frame(frame)) 77 | 78 | sender.shutdown() 79 | receiver.shutdown() 80 | ``` 81 | 82 | If python-can is not installed, PythonCanSender/Receiver will raise a RuntimeError with guidance. 83 | 84 | ## 3) Parse an existing DBC, extend it, then use python-can 85 | 86 | If you have a DBC file, you can parse it with pyDBC, optionally create additional 87 | messages/signals on top of the parsed database, and then use the same encoding/decoding APIs. 

```python
from pydbc.parser import ParserWrapper
from pydbc.dbcListener import DbcListener
from pydbc.api.dbc import DBCCreator
from pydbc.integrations.python_can import encode_message, decode_message

# Parse the DBC into the internal database and get a SQLAlchemy session
wrapper = ParserWrapper(
    grammarName='dbc', startSymbol='dbcfile', listenerClass=DbcListener
)
session = wrapper.parseFromFile('C:\\path\\to\\file.dbc')

# Attach a creator to the existing session to add new content
creator = DBCCreator.from_session(session)
creator.index_existing()  # optional: preload existing items for convenience

# Example: add a new message and signal next to the parsed ones
node = creator.create_node('MyNewNode')
msg = creator.create_message('AppStatus', message_id=0x555, dlc=8, sender=node)
sig = creator.create_signal('AliveCounter', bitsize=4, byteorder=1, sign=1)
creator.add_signal_to_message(msg, sig, offset=0)
creator.commit()

# Now encode a frame using either parsed or newly created messages
can_id, data, dlc = encode_message(session, 'AppStatus', {'AliveCounter': 7})
```

## 4) Example script

A ready-to-run example is provided:
- pydbc/examples/python_can_examples.py

Run the encode/decode cycle without a bus:

```powershell
poetry run python pydbc\examples\python_can_examples.py
```

Try the virtual bus demo by uncommenting the call in that script and ensuring python-can is installed.

## 5) Limitations and next steps

- Only little-endian signals are supported at the moment. Support for big-endian (Motorola) bit packing can be added next.
- Multiplexed signals are not handled specifically here; if present in the DB, the current simple encoder will just place bits.
For complex multiplexing behavior, extend the adapter to respect multiplexor rules. 133 | - This module intentionally keeps python-can as an optional dependency. 134 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | pyDBC 2 | ===== 3 | 4 | [![Build status](https://ci.appveyor.com/api/projects/status/6lf6kt2vle4jjou7?svg=true)](https://ci.appveyor.com/project/christoph2/pydbc) 5 | [![Maintainability](https://api.codeclimate.com/v1/badges/ee1e493f62896f3fea61/maintainability)](https://codeclimate.com/github/christoph2/pydbc/maintainability) 6 | [![Build Status](https://travis-ci.org/christoph2/pydbc.svg)](https://travis-ci.org/christoph2/pydbc) 7 | 8 | [![GPL License](http://img.shields.io/badge/license-GPL-blue.svg)](http://opensource.org/licenses/GPL-2.0) 9 | 10 | pyDBC is a library for creating and editing automotive network description files, including: 11 | - DBC (CAN Database) files for CAN bus systems 12 | - LDF (LIN Description File) for LIN networks 13 | - NCF (Network Configuration File) for vehicle network configurations 14 | 15 | --- 16 | 17 | ## Installation 18 | 19 | pyDBC is hosted on Github, get the latest release: [https://github.com/christoph2/pydbc](https://github.com/christoph2/pydbc) 20 | 21 | ### Using Poetry (recommended) 22 | 23 | This project uses [Poetry](https://python-poetry.org/) for dependency management and packaging. If you don't have Poetry installed, you can install it by following the instructions on the [Poetry website](https://python-poetry.org/docs/#installation). 
24 | 25 | ```bash 26 | # Install the package 27 | poetry install 28 | 29 | # Run the tests 30 | poetry run pytest 31 | ``` 32 | 33 | ### Requirements 34 | 35 | - Python >= 3.10 36 | - SQLAlchemy >= 2.0.0 37 | - Other dependencies are managed by Poetry 38 | 39 | ## First steps 40 | 41 | pyDBC provides high-level creational APIs for working with automotive network description files. These APIs make it easy to create and manipulate DBC, LDF, and NCF components. 42 | 43 | ### Creating a CAN database (DBC) 44 | 45 | ```python 46 | from pydbc.api.dbc import DBCCreator 47 | 48 | # Create a new DBC creator with an in-memory database 49 | dbc = DBCCreator(":memory:") 50 | 51 | # Create nodes (ECUs) 52 | engine = dbc.create_node("Engine") 53 | gateway = dbc.create_node("Gateway") 54 | 55 | # Create a message 56 | engine_data = dbc.create_message("EngineData", 100, 8, engine) 57 | 58 | # Create signals 59 | petrol_level = dbc.create_signal( 60 | "PetrolLevel", 8, byteorder=1, sign=1, 61 | formula_factor=1.0, formula_offset=0.0, 62 | minimum=0, maximum=255, unit="l" 63 | ) 64 | 65 | # Add signals to messages 66 | dbc.add_signal_to_message(engine_data, petrol_level, 24) 67 | 68 | # Add signal receivers 69 | dbc.add_node_as_receiver(petrol_level, gateway) 70 | 71 | # Commit changes to the database 72 | dbc.commit() 73 | ``` 74 | 75 | ### Creating a LIN network (LDF) 76 | 77 | ```python 78 | from pydbc.api.ldf import LDFCreator 79 | 80 | # Create a new LDF creator with an in-memory database 81 | ldf = LDFCreator(":memory:") 82 | 83 | # Create a LIN network 84 | network = ldf.create_network( 85 | "LINNetwork1", 86 | protocol_version="2.1", 87 | speed=19.2 88 | ) 89 | 90 | # Create master and slave nodes 91 | master = ldf.create_master_node("MasterECU", timebase=0.005, jitter=0.0001) 92 | slave = ldf.create_slave_node("SlaveNode1", configured_NAD=1) 93 | 94 | # Create signals and frames 95 | signal = ldf.create_signal("MotorSpeed", signal_size=16, init_value=0, publisher=master) 96 
| frame = ldf.create_unconditional_frame("MasterFrame", frame_id=0x10, size=2, publisher=master) 97 | 98 | # Add signals to frames 99 | ldf.add_signal_to_frame(frame, signal, 0) 100 | 101 | # Commit changes to the database 102 | ldf.commit() 103 | ``` 104 | 105 | ### Creating a network configuration (NCF) 106 | 107 | ```python 108 | from pydbc.api.ncf import NCFCreator 109 | 110 | # Create a new NCF creator with an in-memory database 111 | ncf = NCFCreator(":memory:") 112 | 113 | # Create a vehicle 114 | vehicle = ncf.create_vehicle("TestVehicle") 115 | 116 | # Create networks and ECUs 117 | can_network = ncf.create_network("CANNetwork", protocol="CAN", speed=500) 118 | engine_ecu = ncf.create_ecu("EngineECU") 119 | 120 | # Add networks to vehicle 121 | ncf.add_network_to_vehicle(vehicle, can_network) 122 | 123 | # Add ECUs to vehicle 124 | ncf.add_ecu_to_vehicle(vehicle, engine_ecu) 125 | 126 | # Commit changes to the database 127 | ncf.commit() 128 | ``` 129 | 130 | For more detailed examples, see the `pydbc/examples/api_examples.py` file. 
131 | 132 | ## Features 133 | 134 | - High-level creational APIs for DBC, LDF, and NCF components 135 | - SQLAlchemy-based database model for storing network configurations 136 | - Support for all major components of automotive network description files 137 | - Comprehensive examples demonstrating API usage 138 | 139 | ## Documentation 140 | 141 | The full documentation in GitHub‑Flavored Markdown is available in the docs/ folder: 142 | - docs/INDEX.md — Documentation home 143 | - docs/getting-started.md — Installation and quickstarts 144 | - docs/tutorial.md — Step‑by‑step walkthrough 145 | - docs/how-to.md — Task‑oriented guides 146 | - docs/examples.md — Ready‑to‑run examples 147 | - docs/api-reference.md — API reference 148 | - docs/python-can.md — Using pyDBC with python-can (send/receive CAN frames) 149 | 150 | ## License 151 | 152 | GNU General Public License v2.0 153 | -------------------------------------------------------------------------------- /pydbc/db/imex.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ """ 5 | 6 | __copyright__ = """ 7 | pySART - Simplified AUTOSAR-Toolkit for Python. 8 | 9 | ( C) 2010-2020 by Christoph Schueler 10 | 11 | All Rights Reserved 12 | 13 | This program is free software; you can redistribute it and/or modify 14 | it under the terms of the GNU General Public License as published by 15 | the Free Software Foundation; either version 2 of the License, or 16 | (at your option) any later version. 17 | 18 | This program is distributed in the hope that it will be useful, 19 | but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 21 | GNU General Public License for more details. 
22 | 23 | You should have received a copy of the GNU General Public License along 24 | with this program; if not, write to the Free Software Foundation, Inc., 25 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 26 | 27 | s. FLOSS-EXCEPTION.txt 28 | """ 29 | __author__ = "Christoph Schueler" 30 | __version__ = "0.1.0" 31 | 32 | import io 33 | import os 34 | import pkgutil 35 | 36 | import sqlalchemy as sa 37 | 38 | from pydbc.logger import Logger 39 | from pydbc.db import VNDB 40 | from pydbc import parser 41 | from pydbc.dbcListener import DbcListener 42 | 43 | # from pydbc.ldfListener import LdfListener 44 | # from pydbc.ncfListener import NcfListener 45 | from pydbc.template import renderTemplateFromText 46 | import pydbc.db.model as model 47 | 48 | 49 | def fetch_attributes(db): 50 | # Attribute structure is currently to inconvenient for ad-hoc queries, so pre-fetch them. 51 | 52 | from collections import defaultdict 53 | from itertools import groupby 54 | from operator import itemgetter 55 | 56 | from pydbc.types import AttributeType, ValueType, CategoryType 57 | 58 | data = ( 59 | db.session.query( 60 | model.Attribute_Value.num_value, 61 | model.Attribute_Value.string_value, 62 | model.Attribute_Definition.valuetype, 63 | model.Attribute_Definition.array, 64 | model.Attribute_Definition.objecttype, 65 | model.Attribute_Value.object_id, 66 | model.Attribute_Definition.name, 67 | ) 68 | .join(model.Attribute_Definition) 69 | .order_by(model.Attribute_Definition.objecttype) 70 | ) 71 | result = {} 72 | groups = [] 73 | keyfunc = itemgetter(4) 74 | data = sorted(data, key=keyfunc) 75 | for k, g in groupby(data, keyfunc): 76 | group = list(g) 77 | result[AttributeType(k).name] = defaultdict(dict) 78 | groups.append(list(group)) 79 | keyfunc = itemgetter(5) 80 | group2 = sorted(group, key=keyfunc) 81 | for k2, g2 in groupby(group2, keyfunc): 82 | for ( 83 | num_value, 84 | string_value, 85 | value_type, 86 | array, 87 | object_type, 88 | object_id, 89 
| name, 90 | ) in list(g2): 91 | if num_value is None and string_value is None: 92 | value = None 93 | else: 94 | if array: 95 | conv = float if value_type == ValueType.FLOAT else int 96 | value = [conv(x) for x in string_value.split(";")] 97 | else: 98 | if value_type in (ValueType.INT, ValueType.FLOAT): 99 | conv = float if value_type == ValueType.FLOAT else int 100 | value = conv(num_value) 101 | else: 102 | value = string_value 103 | item = {"value": value} 104 | result[AttributeType(k).name][k2][name] = value 105 | return result 106 | 107 | 108 | class Exporter: 109 | """ """ 110 | 111 | def __init__(self, name, encoding="latin-1", debug=False): 112 | self.logger = Logger(__name__) 113 | self.encoding = encoding 114 | if name == ":memory:": 115 | self.fnbase = "testfile.txt" 116 | self.db = VNDB(":memory:", debug=debug) 117 | else: 118 | self.fname = name.parts[-1] 119 | self.fnbase = name.stem 120 | self.fnext = name.suffix[1:].lower() 121 | self.fabsolute = name.absolute() 122 | 123 | self.db = VNDB(r"{}.vndb".format(self.fnbase), debug=debug) 124 | # res = renderTemplateFromText(self.TEMPLATE, namespace, formatExceptions = True, encoding = "utf-8" if ucout else "latin-1") 125 | 126 | def run(self): 127 | 128 | # xxx = self.db.session.query(model.Attribute_Value).join(model.Attribute_Definition).\ 129 | # filter(model.Attribute_Definition.name == "LIN_is_master").one() 130 | # node = self.db.session.query(model.Node).filter(model.Node.rid == xxx.object_id).one() 131 | # print("ATTRS:", fetch_attributes(self.db)) 132 | namespace = dict( 133 | db=self.db, model=model, attributes=fetch_attributes(self.db), sa=sa 134 | ) 135 | res = renderTemplateFromText( 136 | self.TEMPLATE, namespace, formatExceptions=False, encoding=self.encoding 137 | ) 138 | # print("RES:", res) 139 | with io.open( 140 | "{}.render".format(self.fnbase), "w", encoding=self.encoding, newline="\r\n" 141 | ) as outf: 142 | outf.write(res) 143 | 144 | 145 | class DbcExporter(Exporter): 146 | 
""" """ 147 | 148 | TEMPLATE = pkgutil.get_data("pydbc", "cgen/templates/dbc.tmpl") 149 | 150 | 151 | class LdfExporter(Exporter): 152 | """ """ 153 | 154 | TEMPLATE = pkgutil.get_data("pydbc", "cgen/templates/ldf.tmpl") 155 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | import os 16 | import sys 17 | sys.path.insert(0, os.path.abspath('../pydbc')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'pydbc' 23 | copyright = '2019, Christoph Schueler' 24 | author = 'Christoph Schueler' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '0.9' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 
41 | extensions = [ 42 | 'sphinx.ext.autodoc', 43 | 'sphinx.ext.doctest', 44 | 'myst_parser', 45 | ] 46 | 47 | # Add any paths that contain templates here, relative to this directory. 48 | templates_path = ['_templates'] 49 | 50 | # The suffix(es) of source filenames. 51 | # You can specify multiple suffix as a list of string: 52 | # 53 | source_suffix = ['.md'] 54 | 55 | # The master toctree document. 56 | # Our docs are Markdown with top-level INDEX.md 57 | master_doc = 'INDEX' 58 | 59 | # The language for content autogenerated by Sphinx. Refer to documentation 60 | # for a list of supported languages. 61 | # 62 | # This is also used if you do content translation via gettext catalogs. 63 | # Usually you set "language" from the command line for these cases. 64 | language = "en" 65 | 66 | # List of patterns, relative to source directory, that match files and 67 | # directories to ignore when looking for source files. 68 | # This pattern also affects html_static_path and html_extra_path . 69 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 70 | 71 | # The name of the Pygments (syntax highlighting) style to use. 72 | pygments_style = 'sphinx' 73 | 74 | 75 | # -- Options for HTML output ------------------------------------------------- 76 | 77 | # The theme to use for HTML and HTML Help pages. See the documentation for 78 | # a list of builtin themes. 79 | # 80 | #html_theme = 'alabaster' 81 | 82 | # Theme options are theme-specific and customize the look and feel of a theme 83 | # further. For a list of options available for each theme, see the 84 | # documentation. 85 | # 86 | # html_theme_options = {} 87 | 88 | # Add any paths that contain custom static files (such as style sheets) here, 89 | # relative to this directory. They are copied after the builtin static files, 90 | # so a file named "default.css" will overwrite the builtin "default.css". 
91 | html_static_path = ['_static'] 92 | 93 | # Custom sidebar templates, must be a dictionary that maps document names 94 | # to template names. 95 | # 96 | # The default sidebars (for documents that don't match any pattern) are 97 | # defined by theme itself. Builtin themes are using these templates by 98 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 99 | # 'searchbox.html']``. 100 | # 101 | # html_sidebars = {} 102 | 103 | 104 | # -- Options for HTMLHelp output --------------------------------------------- 105 | 106 | # Output file base name for HTML help builder. 107 | htmlhelp_basename = 'pyDBCdoc' 108 | 109 | 110 | # -- Options for LaTeX output ------------------------------------------------ 111 | 112 | latex_elements = { 113 | # The paper size ('letterpaper' or 'a4paper'). 114 | # 115 | # 'papersize': 'letterpaper', 116 | 117 | # The font size ('10pt', '11pt' or '12pt'). 118 | # 119 | # 'pointsize': '10pt', 120 | 121 | # Additional stuff for the LaTeX preamble. 122 | # 123 | # 'preamble': '', 124 | 125 | # Latex figure (float) alignment 126 | # 127 | # 'figure_align': 'htbp', 128 | } 129 | 130 | # Grouping the document tree into LaTeX files. List of tuples 131 | # (source start file, target name, title, 132 | # author, documentclass [howto, manual, or own class]). 133 | #latex_documents = [ 134 | # (master_doc, 'pyDBC.tex', 'pyDBC Documentation', 135 | # 'Christoph Schueler', 'manual'), 136 | #] 137 | 138 | 139 | # -- Options for manual page output ------------------------------------------ 140 | 141 | # One entry per manual page. List of tuples 142 | # (source start file, name, description, authors, manual section). 143 | man_pages = [ 144 | (master_doc, 'pydbc', 'pyDBC Documentation', 145 | [author], 1) 146 | ] 147 | 148 | 149 | # -- Options for Texinfo output ---------------------------------------------- 150 | 151 | # Grouping the document tree into Texinfo files. 
List of tuples 152 | # (source start file, target name, title, author, 153 | # dir menu entry, description, category) 154 | #texinfo_documents = [ 155 | # (master_doc, 'pyDBC', 'pyDBC Documentation', 156 | # author, 'pyDBC', 'One line description of project.', 157 | # 'Miscellaneous'), 158 | #] 159 | 160 | 161 | # -- Extension configuration ------------------------------------------------- 162 | 163 | # -- Options for todo extension ---------------------------------------------- 164 | 165 | # If true, `todo` and `todoList` produce output, else they produce nothing. 166 | todo_include_todos = True 167 | 168 | -------------------------------------------------------------------------------- /pydbc/cgen/templates/micropython_can.py.tmpl: -------------------------------------------------------------------------------- 1 | ## -*- coding: utf-8 -*- 2 | ## Mako template: MicroPython CAN app generated by pydbc 3 | <% 4 | # messages: List[CGMessage] 5 | # app_name: str 6 | %> 7 | """ 8 | Auto-generated MicroPython CAN application: ${app_name} 9 | 10 | Generated by pydbc using Mako templates. This code is standalone and embeds 11 | all message/signal metadata (no runtime database needed). 12 | 13 | Limitations: 14 | - Only little-endian signals are handled in the helpers below. 15 | - Multiplexing not implemented in this minimal generator. 16 | """ 17 | try: 18 | from machine import CAN # ESP32, etc. 
19 | PLATFORM = "machine" 20 | except Exception: # pragma: no cover 21 | try: 22 | from pyb import CAN # Pyboard 23 | PLATFORM = "pyb" 24 | except Exception: 25 | CAN = None 26 | PLATFORM = "none" 27 | 28 | # --- Bit helpers (little-endian/Intel) --- 29 | def _ins_bits_le(buf, start_bit, size, value): 30 | if size <= 0: 31 | return 32 | mask = (1 << size) - 1 33 | value &= mask 34 | bit_pos = start_bit 35 | remaining = size 36 | while remaining > 0: 37 | byte_index = bit_pos // 8 38 | bit_index = bit_pos % 8 39 | bits_here = min(remaining, 8 - bit_index) 40 | chunk_mask = (1 << bits_here) - 1 41 | chunk = value & chunk_mask 42 | buf[byte_index] &= ~(chunk_mask << bit_index) 43 | buf[byte_index] |= (chunk << bit_index) 44 | value >>= bits_here 45 | remaining -= bits_here 46 | bit_pos += bits_here 47 | 48 | 49 | def _ext_bits_le(buf, start_bit, size, signed): 50 | if size <= 0: 51 | return 0 52 | bit_pos = start_bit 53 | remaining = size 54 | out = 0 55 | shift = 0 56 | while remaining > 0: 57 | byte_index = bit_pos // 8 58 | bit_index = bit_pos % 8 59 | bits_here = min(remaining, 8 - bit_index) 60 | chunk_mask = (1 << bits_here) - 1 61 | byte_val = buf[byte_index] 62 | chunk = (byte_val >> bit_index) & chunk_mask 63 | out |= (chunk << shift) 64 | remaining -= bits_here 65 | bit_pos += bits_here 66 | shift += bits_here 67 | if signed: 68 | sign_bit = 1 << (size - 1) 69 | if out & sign_bit: 70 | out = out - (1 << size) 71 | return out 72 | 73 | 74 | def _phys_to_raw(v, factor, offset, size, signed): 75 | if factor == 0: 76 | raw = int(round(v - offset)) 77 | else: 78 | raw = int(round((v - offset) / factor)) 79 | if signed: 80 | min_raw = -(1 << (size - 1)) 81 | max_raw = (1 << (size - 1)) - 1 82 | else: 83 | min_raw = 0 84 | max_raw = (1 << size) - 1 85 | if raw < min_raw: 86 | raw = min_raw 87 | if raw > max_raw: 88 | raw = max_raw 89 | return raw 90 | 91 | 92 | def _raw_to_phys(raw, factor, offset): 93 | return raw * factor + offset 94 | 95 | 96 | # --- Message 
metadata (embedded) --- 97 | MESSAGES = [ 98 | % for m in messages: 99 | { 100 | "name": ${repr(m.name)}, 101 | "id": ${m.message_id}, 102 | "dlc": ${m.dlc}, 103 | "signals": [ 104 | % for s in m.signals: 105 | { 106 | "name": ${repr(s.name)}, 107 | "start": ${s.start_bit}, 108 | "size": ${s.size}, 109 | "le": ${'True' if s.little_endian else 'False'}, 110 | "signed": ${'True' if s.signed else 'False'}, 111 | "factor": ${float(s.factor)}, 112 | "offset": ${float(s.offset)}, 113 | }, 114 | % endfor 115 | ], 116 | }, 117 | % endfor 118 | ] 119 | 120 | 121 | def encode_message_by_name(name, values): 122 | for m in MESSAGES: 123 | if m["name"] == name: 124 | return _encode_message(m, values) 125 | raise ValueError("unknown message: %s" % (name,)) 126 | 127 | 128 | def _encode_message(meta, values): 129 | data = bytearray([0] * meta["dlc"]) 130 | for s in meta["signals"]: 131 | if not s["le"]: 132 | raise NotImplementedError("big-endian signals not supported in this generator") 133 | phys = float(values.get(s["name"], 0.0)) 134 | raw = _phys_to_raw(phys, s["factor"], s["offset"], s["size"], s["signed"]) 135 | _ins_bits_le(data, s["start"], s["size"], raw) 136 | return meta["id"], bytes(data), meta["dlc"] 137 | 138 | 139 | def decode_message_by_id(can_id, data_bytes): 140 | for m in MESSAGES: 141 | if m["id"] == can_id: 142 | return _decode_message(m, data_bytes) 143 | raise ValueError("unknown message id: %d" % (can_id,)) 144 | 145 | 146 | def _decode_message(meta, data_bytes): 147 | res = {} 148 | for s in meta["signals"]: 149 | if not s["le"]: 150 | raise NotImplementedError("big-endian signals not supported in this generator") 151 | raw = _ext_bits_le(data_bytes, s["start"], s["size"], s["signed"]) 152 | phys = _raw_to_phys(raw, s["factor"], s["offset"]) 153 | res[s["name"]] = phys 154 | return {"message": meta["name"], "signals": res} 155 | 156 | 157 | def demo_send_once(channel=0, id_mode=CAN.STD): 158 | if CAN is None: 159 | print("No CAN class available on this 
platform.") 160 | return 161 | can = CAN(channel, mode=CAN.NORMAL, prescaler=100, sjw=1, bs1=6, bs2=8) 162 | # Send the first message with zeroed values as a demo 163 | if not MESSAGES: 164 | print("No messages embedded.") 165 | return 166 | mid, data, dlc = _encode_message(MESSAGES[0], {}) 167 | try: 168 | can.send(data, mid, timeout=100) 169 | print("Sent 0x%X -> %s" % (mid, data)) 170 | except Exception as e: 171 | print("Send failed:", e) 172 | 173 | 174 | if __name__ == "__main__": 175 | # Basic smoke-test on desktop CPython (no CAN): 176 | if MESSAGES: 177 | mid, data, dlc = _encode_message(MESSAGES[0], {}) 178 | out = _decode_message(MESSAGES[0], data) 179 | print("Encoded id=0x%X dlc=%d data=%s" % (mid, dlc, data)) 180 | print("Decoded:", out) 181 | -------------------------------------------------------------------------------- /pydbc/types.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2020 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. 
FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.1.0' 29 | 30 | 31 | import enum 32 | 33 | CAN_EXTENDED_IDENTIFIER = 0x80000000 34 | 35 | class VndbType(enum.IntEnum): 36 | SINGLE_NETWORK = 0 37 | MULTIPLE_NETWORKS = 1 38 | 39 | 40 | class BusType(enum.IntEnum): 41 | CAN = 0 42 | LIN = 1 43 | 44 | 45 | class FileType(enum.IntEnum): 46 | DBC = 0 47 | LDF = 1 48 | NCF = 2 49 | 50 | class AttributeType(enum.IntEnum): 51 | NODE = 0 52 | MESSAGE = 1 53 | SIGNAL = 2 54 | ENV_VAR = 3 55 | NETWORK = 4 56 | REL_NODE = 5 57 | REL_SIGNAL = 6 58 | REL_ENV_VAR = 7 59 | 60 | 61 | class ValueType(enum.IntEnum): 62 | INT = 0 63 | HEX = 1 64 | FLOAT = 2 65 | STRING = 3 66 | ENUM = 4 67 | 68 | 69 | class ByteOrderType(enum.IntEnum): 70 | MOTOROLA = 0 71 | INTEL = 1 72 | 73 | 74 | class SignalType(enum.IntEnum): 75 | SINT = 0 76 | FLOAT32 = 1 77 | FLOAT64 = 2 78 | UINT = 8 79 | 80 | 81 | class MultiplexingType(enum.IntEnum): 82 | NONE = 0 83 | MULTIPLEXOR = 1 84 | DEPENDENT = 2 85 | 86 | 87 | class IdentifierType(enum.IntEnum): 88 | STANDARD = 0 89 | EXTENDED = 1 90 | 91 | 92 | class EnvVarType(enum.IntEnum): 93 | INT = 0 94 | FLOAT = 1 95 | STRING = 2 96 | DATA = 3 97 | 98 | 99 | class EnvVarAccessType(enum.IntEnum): 100 | UNRESTRICTED = 0 101 | READ = 1 102 | WRITE = 2 103 | READ_WRITE = 3 104 | 105 | 106 | class ValueTableType(enum.IntEnum): 107 | SIGNAL = 0 108 | ENV_VAR = 1 109 | 110 | 111 | class CategoryType(enum.IntEnum): 112 | NODE = 0 113 | MESSAGE = 1 114 | ENV_VAR = 2 115 | 116 | 117 | EXTENDED_ID_MASK = 0x80000000 118 | 119 | 120 | class AddressBaseType: 121 | 122 | def __init__(self): 123 | pass 124 | 125 | 126 | class CANAddress(AddressBaseType): 127 | """ 128 | """ 129 | 130 | def __init__(self, rawId): 131 | if (rawId & EXTENDED_ID_MASK) == EXTENDED_ID_MASK: 132 | self.mtype = IdentifierType.EXTENDED 133 | self.value = rawId & (~0x80000000) 134 | else: 135 | self.mtype = IdentifierType.STANDARD 136 | self.value = mid = rawId 
137 | 138 | def __str__(self): 139 | """ 140 | """ 141 | return "{}({:08x} [{}])".format(self.__class__.__name__, self.value, self.mtype.name) 142 | 143 | def __int__(self): 144 | """ 145 | """ 146 | value = self.mid 147 | if self.mtype == IdentifierType.EXTENDED: 148 | value |= EXTENDED_ID_MASK 149 | return value 150 | 151 | 152 | __repr__ = __str__ 153 | 154 | 155 | class J1939Address(AddressBaseType): 156 | """ 157 | 158 | """ 159 | 160 | def __init__(self, priority, reserved, datapage, pdu_format, pdu_specific, source_address): 161 | self.priority = priority 162 | self.reserved = reserved 163 | self.datapage = datapage 164 | self.pdu_format = pdu_format 165 | self.pdu_specific = pdu_specific 166 | self.source_address = source_address 167 | 168 | @classmethod 169 | def from_int(klass, canID): 170 | # TODO: check for extID 171 | canID &= (~CAN_EXTENDED_IDENTIFIER) 172 | priority = (canID & 0x1c000000) >> 26 173 | reserved = (canID & 0x2000000) >> 25 174 | datapage = (canID & 0x1000000) >> 24 175 | pdu_format = (canID & 0xff0000) >> 16 176 | pdu_specific = (canID & 0xff00) >> 8 177 | source_address = canID & 0xff 178 | return klass(priority, reserved, datapage, pdu_format, pdu_specific, source_address) 179 | 180 | @property 181 | def pgn(self): 182 | return (self.pdu_format << 8) | self.pdu_specific 183 | 184 | @pgn.setter 185 | def pgn(self, value): 186 | """ 187 | 188 | """ 189 | self.pdu_format = (value & 0xff00) >> 8 190 | self.pdu_specific = (value & 0xff) 191 | 192 | 193 | @property 194 | def canID(self): 195 | """ 196 | """ 197 | return ((self.priority & 0x07) << 26) | ((self.reserved & 0x01) << 25) | ((self.datapage & 0x01) << 24) | \ 198 | ((self.pdu_format & 0xff) << 16) | ((self.pdu_specific & 0xff) << 8) | (self.source_address & 0xff) 199 | 200 | 201 | @canID.setter 202 | def canID(self, value): 203 | pass 204 | 205 | def __str__(self): 206 | return "{}(priority = {}, reserved = {}, datapage = {}, pdu_format = {}, pdu_specific = {}, source_address = 
{})".\ 207 | format(self.__class__.__name__, self.priority, self.reserved, self.datapage, self.pdu_format, 208 | self.pdu_specific, self.source_address 209 | ) 210 | 211 | __repr__ = __str__ 212 | 213 | 214 | class LinProductIdType(object): 215 | """ 216 | 217 | """ 218 | 219 | def __init__(self, supplier_id, function_id, variant = 0): 220 | self.supplier_id = supplier_id & 0xffff 221 | self.function_id = function_id & 0xffff 222 | self.variant = variant & 0xff 223 | 224 | def __str__(self): 225 | return "LinProductIdType(supplier_id = {}, function_id = {}, variant = {})".format( 226 | self.supplier_id, self.function_id, self.variant) 227 | 228 | __repr__ = __str__ 229 | -------------------------------------------------------------------------------- /pydbc/db/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 6 | 7 | (C) 2010-2025 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. 
FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = "Christoph Schueler" 28 | __version__ = "0.1.0" 29 | 30 | import mmap 31 | import re 32 | import sqlite3 33 | from functools import partial 34 | 35 | from sqlalchemy import ( 36 | types, 37 | orm, 38 | event, 39 | create_engine, 40 | text, 41 | ) 42 | from sqlalchemy.engine import Engine 43 | from sqlalchemy.orm import sessionmaker 44 | 45 | from pydbc.db import model 46 | from pydbc.logger import Logger 47 | 48 | DB_EXTENSION = "vndb" 49 | 50 | CACHE_SIZE = 4 # MB 51 | PAGE_SIZE = mmap.PAGESIZE 52 | 53 | 54 | def calculateCacheSize(value): 55 | return -(value // PAGE_SIZE) 56 | 57 | 58 | REGEXER_CACHE = {} 59 | 60 | 61 | def regexer(value, expr): 62 | if not REGEXER_CACHE.get(expr): 63 | REGEXER_CACHE[expr] = re.compile(expr, re.UNICODE) 64 | re_expr = REGEXER_CACHE[expr] 65 | return re_expr.match(value) is not None 66 | 67 | 68 | INITIAL_DATA = { 69 | "node": ( 70 | { 71 | "rid": 0, 72 | "node_id": 0, 73 | "name": "Vector__XXX", 74 | "comment": "Dummy node for non-existent senders/receivers.", 75 | "type_": "Node", 76 | }, 77 | ), 78 | } 79 | """ 80 | INSERT INTO message(comment,rid,name,message_id,dlc,sender,type) VALUES( 81 | 'This is a message for not used signals, created by Vector CANdb++ DBC OLE DB Provider.', 82 | 1, 83 | 'VECTOR__INDEPENDENT_SIG_MSG', 84 | 3221225472, 85 | 0, 86 | 0, 87 | 'Message'); 88 | """ 89 | 90 | 91 | def _inserter(data, target, conn, **kws): 92 | for row in data: 93 | k, v = row.keys(), row.values() 94 | keys = ", ".join([x for x in k]) 95 | values = ", ".join([repr(x) for x in v]) 96 | stmt = text("INSERT INTO {}({}) VALUES ({})".format(target.name, keys, values)) 97 | conn.execute(stmt) 98 | 99 | 100 | def loadInitialData(target): 101 | data = INITIAL_DATA[target.__table__.fullname] 102 | event.listen(target.__table__, "after_create", partial(_inserter, data)) 103 | 104 | 105 | class MyCustomEnum(types.TypeDecorator): 106 | impl = types.Integer 107 | 108 | def __init__(self, 
enum_values, *l, **kw): 109 | types.TypeDecorator.__init__(self, *l, **kw) 110 | self._enum_values = enum_values 111 | 112 | def convert_bind_param(self, value, engine): 113 | result = self.impl.convert_bind_param(value, engine) 114 | if result not in self._enum_values: 115 | raise TypeError("Value %s must be one of %s" % (result, self._enum_values)) 116 | return result 117 | 118 | def convert_result_value(self, value, engine): 119 | "Do nothing here" 120 | return self.impl.convert_result_value(value, engine) 121 | 122 | 123 | @event.listens_for(Engine, "connect") 124 | def set_sqlite3_pragmas(dbapi_connection, connection_record): 125 | dbapi_connection.create_function("REGEXP", 2, regexer) 126 | cursor = dbapi_connection.cursor() 127 | # cursor.execute("PRAGMA jornal_mode=WAL") 128 | cursor.execute("PRAGMA FOREIGN_KEYS=ON") 129 | cursor.execute("PRAGMA PAGE_SIZE={}".format(PAGE_SIZE)) 130 | cursor.execute( 131 | "PRAGMA CACHE_SIZE={}".format(calculateCacheSize(CACHE_SIZE * 1024 * 1024)) 132 | ) 133 | cursor.execute("PRAGMA SYNCHRONOUS=OFF") # FULL 134 | cursor.execute("PRAGMA LOCKING_MODE=EXCLUSIVE") # NORMAL 135 | cursor.execute("PRAGMA TEMP_STORE=MEMORY") # FILE 136 | cursor.close() 137 | 138 | 139 | class VNDB(object): 140 | """ """ 141 | 142 | def __init__(self, filename: str = ":memory:", debug: bool = False, logLevel: str = "INFO", create: bool = True, 143 | autocommit: bool = False): 144 | if filename == ":memory:": 145 | self.dbname = "" 146 | else: 147 | if not filename.lower().endswith(DB_EXTENSION): 148 | self.dbname = "{}.{}".format(filename, DB_EXTENSION) 149 | else: 150 | self.dbname = filename 151 | self._engine = create_engine( 152 | "sqlite:///{}".format(self.dbname), 153 | echo=debug, 154 | connect_args={ 155 | "detect_types": sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES, 156 | "isolation_level": "IMMEDIATE", 157 | }, 158 | native_datetime=True, 159 | # autocommit=autocommit 160 | ) 161 | SessionFactory = sessionmaker(self._engine, 
autoflush=False) # autocommit=autocommit 162 | self._session = SessionFactory() 163 | self._metadata = model.Base.metadata 164 | if create == True: 165 | model.Base.metadata.create_all(self.engine) 166 | self.session.flush() 167 | self.session.commit() 168 | self.logger = Logger(__name__, level=logLevel) 169 | 170 | @classmethod 171 | def _open_or_create( 172 | cls, filename=":memory:", debug=False, logLevel="INFO", create=True, autocommit=False, 173 | ): 174 | """ """ 175 | inst = cls(filename, debug, logLevel, create, autocommit) 176 | return inst 177 | 178 | @classmethod 179 | def create(cls, filename=":memory:", debug=False, logLevel="INFO", autocommit: bool=False): 180 | """ """ 181 | return cls._open_or_create(filename, debug, logLevel, True, autocommit) 182 | 183 | @classmethod 184 | def open(cls, filename=":memory:", debug=False, logLevel="INFO", autocommit: bool=False): 185 | """ """ 186 | return cls._open_or_create(filename, debug, logLevel, False, autocommit) 187 | 188 | def close(self): 189 | """ """ 190 | self.session.close() 191 | self.engine.dispose() 192 | 193 | @property 194 | def engine(self): 195 | return self._engine 196 | 197 | @property 198 | def metadata(self): 199 | return self._metadata 200 | 201 | @property 202 | def session(self): 203 | return self._session 204 | 205 | def begin_transaction(self): 206 | """ """ 207 | 208 | def commit_transaction(self): 209 | """ """ 210 | 211 | def rollback_transaction(self): 212 | """ """ 213 | 214 | 215 | loadInitialData(model.Node) 216 | -------------------------------------------------------------------------------- /pydbc/scripts/vndb_importer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | Vehicle Network Database Importer 6 | 7 | This script imports vehicle network description files (.dbc, .ldf, .ncf) into a database. 8 | It creates a .vndb file for each input file. 
9 | """ 10 | 11 | __copyright__ = """ 12 | pySART - Simplified AUTOSAR-Toolkit for Python. 13 | 14 | (C) 2010-2023 by Christoph Schueler 15 | 16 | All Rights Reserved 17 | 18 | This program is free software; you can redistribute it and/or modify 19 | it under the terms of the GNU General Public License as published by 20 | the Free Software Foundation; either version 2 of the License, or 21 | (at your option) any later version. 22 | 23 | This program is distributed in the hope that it will be useful, 24 | but WITHOUT ANY WARRANTY; without even the implied warranty of 25 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 26 | GNU General Public License for more details. 27 | 28 | You should have received a copy of the GNU General Public License along 29 | with this program; if not, write to the Free Software Foundation, Inc., 30 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 31 | 32 | s. FLOSS-EXCEPTION.txt 33 | """ 34 | __author__ = "Christoph Schueler" 35 | __version__ = "0.1.0" 36 | 37 | import argparse 38 | import logging 39 | import os 40 | import pathlib 41 | from typing import Optional, Union 42 | 43 | from pydbc.parser import ParserWrapper 44 | from pydbc.dbcListener import DbcListener 45 | 46 | # from pydbc.ldfListener import LdfListener 47 | # from pydbc.ncfListener import NcfListener 48 | from pydbc.types import FileType 49 | 50 | 51 | def parseFile( 52 | pth: pathlib.Path, 53 | filetype: FileType, 54 | debug: bool = False, 55 | remove_file: bool = False, 56 | logLevel: str = "WARN", 57 | ) -> Optional[object]: 58 | """Parse a vehicle network description file and create a database. 
59 | 60 | Args: 61 | pth: Path to the file to parse 62 | filetype: Type of the file (DBC, LDF, NCF) 63 | debug: Enable debug output 64 | remove_file: Remove existing database file before parsing 65 | logLevel: Logging level (WARN, INFO, ERROR, DEBUG) 66 | 67 | Returns: 68 | SQLAlchemy session object or None if parsing failed 69 | """ 70 | if filetype == FileType.DBC: 71 | grammar = "dbc" 72 | start_symbol = "dbcfile" 73 | listenerClass = DbcListener 74 | elif filetype == FileType.LDF: 75 | grammar = "ldf" 76 | start_symbol = "lin_description_file" 77 | listenerClass = LdfListener 78 | elif filetype == FileType.NCF: 79 | grammar = "ncf" 80 | start_symbol = "toplevel" 81 | listenerClass = NcfListener 82 | else: 83 | raise ValueError(f"Invalid filetype '{filetype}'") 84 | 85 | parser = ParserWrapper( 86 | grammar, start_symbol, listenerClass, debug=debug, logLevel=logLevel 87 | ) 88 | logging.info(f"Processing '{pth}'") 89 | 90 | dbfn = f"{pth.stem}.vndb" 91 | if remove_file: 92 | try: 93 | os.unlink(dbfn) 94 | except FileNotFoundError: 95 | # File doesn't exist, no need to remove 96 | pass 97 | except PermissionError as e: 98 | logging.error(f"Permission error removing {dbfn}: {e}") 99 | return None 100 | except Exception as e: 101 | logging.error(f"Error removing {dbfn}: {e}") 102 | return None 103 | 104 | try: 105 | session = parser.parseFromFile(str(pth)) 106 | logging.info(f"Successfully parsed {pth}") 107 | return session 108 | except Exception as e: 109 | logging.error(f"Error parsing {pth}: {e}") 110 | return None 111 | 112 | 113 | def get_file_type(pth: pathlib.Path) -> Optional[FileType]: 114 | """Determine the file type based on the file extension. 
115 | 116 | Args: 117 | pth: Path to the file 118 | 119 | Returns: 120 | FileType enum value or None if the file type is not supported 121 | """ 122 | suffix = pth.suffix.lower() 123 | if suffix == ".dbc": 124 | result = FileType.DBC 125 | elif suffix == ".ldf": 126 | result = FileType.LDF 127 | elif suffix == ".ncf": 128 | result = FileType.NCF 129 | else: 130 | result = None 131 | return result 132 | 133 | 134 | def importFile(pth: pathlib.Path, logLevel: str) -> Optional[object]: 135 | """Import a vehicle network description file into a database. 136 | 137 | Args: 138 | pth: Path to the file to import 139 | logLevel: Logging level (WARN, INFO, ERROR, DEBUG) 140 | 141 | Returns: 142 | SQLAlchemy session object or None if import failed 143 | """ 144 | file_type = get_file_type(pth) 145 | if file_type is None: 146 | logging.error(f"Unsupported file type: {pth.suffix}") 147 | return None 148 | 149 | return parseFile(pth, file_type, remove_file=True, logLevel=logLevel) 150 | 151 | 152 | def main() -> None: 153 | """Main entry point for the vndb_importer script.""" 154 | footer = ( 155 | "CAVEAT: In this version vndb_importer is DESTRUCTIVE, i.e. no merging happens!" 
156 | ) 157 | parser = argparse.ArgumentParser( 158 | description="Import vehicle network description files into a database.", 159 | epilog=footer, 160 | ) 161 | parser.add_argument( 162 | "vehicle_file", 163 | help=".dbc, .ldf, or .ncf file(s) to import (glob patterns supported)", 164 | nargs="+", 165 | ) 166 | parser.add_argument( 167 | "-k", 168 | dest="keepDirectory", 169 | action="store_true", 170 | default=False, 171 | help="keep directory; otherwise create db in current directory", 172 | ) 173 | parser.add_argument( 174 | "-l", 175 | dest="loglevel", 176 | type=str, 177 | choices=["debug", "info", "warn", "error"], 178 | default="warn", 179 | help="logging level [debug | info | warn | error]", 180 | ) 181 | parser.add_argument( 182 | "-w", 183 | dest="winout", 184 | action="store_true", 185 | help="Format output for Windows console.", 186 | ) 187 | parser.add_argument( 188 | "-u", 189 | dest="ucout", 190 | action="store_true", 191 | help="Generate UTF-8 encoded output (otherwise Latin-1).", 192 | ) 193 | 194 | args = parser.parse_args() 195 | 196 | # Configure logging 197 | log_level = getattr(logging, args.loglevel.upper()) 198 | logging.basicConfig(level=log_level, format="%(levelname)s: %(message)s") 199 | 200 | # Process each input file 201 | for arg in args.vehicle_file: 202 | for pth in pathlib.Path().glob(arg): 203 | if not pth.exists(): 204 | logging.error(f"File not found: {pth}") 205 | continue 206 | 207 | if not pth.is_file(): 208 | logging.error(f"Not a file: {pth}") 209 | continue 210 | 211 | # Import the file 212 | session = importFile(pth, args.loglevel) 213 | if session: 214 | logging.info(f"Successfully imported {pth}") 215 | else: 216 | logging.error(f"Failed to import {pth}") 217 | 218 | 219 | if __name__ == "__main__": 220 | main() 221 | -------------------------------------------------------------------------------- /pydbc/tests/test_dbc_parser.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env python
# -*- coding: latin-1 -*-

"""Tests for the DBC grammar: node lists, messages, and signal definitions."""

import pytest

import pydbc.db.model as model
from pydbc.parser import ParserWrapper
from pydbc.dbcListener import DbcListener


def check_dummy_node(node):
    """Every parsed database contains the implicit 'Vector__XXX' dummy node."""
    assert node.rid == 0
    assert node.comment == 'Dummy node for non-existent senders/receivers.'
    assert node.name == 'Vector__XXX'
    assert node.node_id == 0

def test_no_nodes():
    parser = ParserWrapper('dbc', 'nodes', DbcListener, debug = False)
    DATA = "BU_:"
    session = parser.parseFromString(DATA)
    res = session.query(model.Node).all()
    assert len(res) == 1
    check_dummy_node(res[0])

def test_one_nodes():
    parser = ParserWrapper('dbc', 'nodes', DbcListener, debug = False)
    DATA = "BU_: MECU"
    session = parser.parseFromString(DATA)
    res = session.query(model.Node).all()
    assert len(res) == 2
    check_dummy_node(res[0])
    assert res[1].name == "MECU"

def test_multiple_nodes():
    parser = ParserWrapper('dbc', 'nodes', DbcListener, debug = False)
    DATA = "BU_: MECU KSG BCU"
    session = parser.parseFromString(DATA)
    res = session.query(model.Node).all()
    assert len(res) == 4
    check_dummy_node(res[0])
    assert res[1].name == "MECU"
    assert res[2].name == "KSG"
    assert res[3].name == "BCU"

def test_errorous_node1():
    # A numeric token aborts the node list: only nodes before it survive.
    parser = ParserWrapper('dbc', 'nodes', DbcListener, debug = False)
    DATA = "BU_: MECU 0235 BCU"
    session = parser.parseFromString(DATA)
    res = session.query(model.Node).all()
    assert len(res) == 2
    check_dummy_node(res[0])
    assert res[1].name == "MECU"

def test_errorous_node2():
    parser = ParserWrapper('dbc', 'nodes', DbcListener, debug = False)
    DATA = "BU_: MECU BCU 0235"
    session = parser.parseFromString(DATA)
    res = session.query(model.Node).all()
    assert len(res) == 3
    check_dummy_node(res[0])
    assert res[1].name == "MECU"
    assert res[2].name == "BCU"

def test_message1():
    parser = ParserWrapper('dbc', 'messages', DbcListener, debug = False)
    DATA = """BO_ 781 VBOX_10: 8 Vector__XXX
 SG_ Link_Time : 47|24@0+ (0.01,0) [0|0] "Seconds" Vector__XXX
 SG_ Status_remote : 39|8@0+ (1,0) [0|0] "-" Vector__XXX
 SG_ Sep_Angle : 7|32@0- (1,0) [-180|180] "Deg" Vector__XXX"""
    session = parser.parseFromString(DATA)
    res = session.query(model.Message).all()
    assert len(res) == 1
    msg = res[0]
    # FIX: these message checks were bare comparisons (no ``assert``),
    # so they were evaluated and discarded without verifying anything.
    assert msg.comment is None
    assert msg.rid == 1
    assert msg.name == 'VBOX_10'
    assert msg.message_id == 781
    assert msg.dlc == 8
    assert msg.sender == 0
    assert msg.type == 'Message'
    signals = msg.signals
    assert len(signals) == 3
    s0, s1, s2 = signals
    assert s0.comment is None
    assert s0.rid == 1
    assert s0.name == 'Link_Time'
    assert s0.bitsize == 24
    assert s0.byteorder == 0
    assert s0.sign == 1
    assert s0.valuetype == 0
    assert s0.formula_factor == 0.01
    assert s0.formula_offset == 0.0
    assert s0.minimum == 0.0
    assert s0.maximum == 0.0
    assert s0.unit == 'Seconds'
    assert s1.comment is None
    assert s1.rid == 2
    assert s1.name == 'Status_remote'
    assert s1.bitsize == 8
    assert s1.byteorder == 0
    assert s1.sign == 1
    assert s1.valuetype == 0
    assert s1.formula_factor == 1.0
    assert s1.formula_offset == 0.0
    assert s1.minimum == 0.0
    assert s1.maximum == 0.0
    assert s1.unit == '-'
    assert s2.comment is None
    assert s2.rid == 3
    assert s2.name == 'Sep_Angle'
    assert s2.bitsize == 32
    assert s2.byteorder == 0
    assert s2.sign == -1
    assert s2.valuetype == 0
    assert s2.formula_factor == 1.0
    assert s2.formula_offset == 0.0
    assert s2.minimum == -180.0
    assert s2.maximum == 180.0
    assert s2.unit == 'Deg'

def test_message_no_signals():
    parser = ParserWrapper('dbc', 'messages', DbcListener, debug = False)
    DATA = """BO_ 781 VBOX_10: 8 Vector__XXX"""
    session = parser.parseFromString(DATA)
    res = session.query(model.Message).all()
    assert len(res) == 1
    assert res[0].signals == []


def test_malformed_message_no_id():
    parser = ParserWrapper('dbc', 'messages', DbcListener, debug = False)
    DATA = """BO_ VBOX_10: 8 Vector__XXX"""
    session = parser.parseFromString(DATA)
    res = session.query(model.Message).all()
    assert len(res) == 1
    msg = res[0]
    assert msg.comment is None
    assert msg.rid == 1
    assert msg.name == 'VBOX_10'
    assert msg.message_id is None
    assert msg.dlc == 8
    assert msg.sender == 0
    assert msg.type == 'Message'

def test_malformed_message_wrong_id():
    # A non-numeric ID is consumed as the message name.
    parser = ParserWrapper('dbc', 'messages', DbcListener, debug = False)
    DATA = """BO_ ABC VBOX_10: 8 Vector__XXX"""
    session = parser.parseFromString(DATA)
    res = session.query(model.Message).all()
    assert len(res) == 1
    msg = res[0]
    assert msg.comment is None
    assert msg.rid == 1
    assert msg.name == 'ABC'
    assert msg.message_id is None
    assert msg.dlc == 8
    assert msg.sender == 0
    assert msg.type == 'Message'

def test_malformed_message_no_name():
    parser = ParserWrapper('dbc', 'messages', DbcListener, debug = False)
    DATA = """BO_ 781 : 8 Vector__XXX"""
    session = parser.parseFromString(DATA)
    res = session.query(model.Message).all()
    assert len(res) == 1
    msg = res[0]
    assert msg.comment is None
    assert msg.rid == 1
    assert msg.name is None
    assert msg.message_id == 781
    assert msg.dlc == 8
    assert msg.sender == 0
    assert msg.type == 'Message'

def test_invalid_signal1():
    # '(' in a signal name truncates the name; the remainder of the
    # definition is lost, leaving default/empty fields.
    parser = ParserWrapper('dbc', 'messages', DbcListener, debug = False)
    DATA = """
BO_ 1600 Reporting_Message_640: 8 Vector__XXX
 SG_ Opto_IO_1 : 0|1@0+ (1,0) [0|0] "" Vector__XXX
 SG_ Analog_Input_1_(0-26.4V) : 15|8@0+ (0.103529,0) [0|26] "" Vector__XXX
"""
    session = parser.parseFromString(DATA)
    res = session.query(model.Message).all()
    assert len(res) == 1
    msg = res[0]
    assert len(msg.signals) == 2
    s0, s1 = msg.signals
    assert s0.comment is None
    assert s0.rid == 1
    assert s0.name == 'Opto_IO_1'
    assert s0.bitsize == 1
    assert s0.byteorder == 0
    assert s0.sign == 1
    assert s0.valuetype == 0
    assert s0.formula_factor == 1.0
    assert s0.formula_offset == 0.0
    assert s0.minimum == 0.0
    assert s0.maximum == 0.0
    assert s0.unit == ''
    assert s1.comment is None
    assert s1.rid == 2
    assert s1.name == 'Analog_Input_1_'
    assert s1.bitsize == 0
    assert s1.byteorder == 0
    assert s1.sign == 1
    assert s1.valuetype == 0
    assert s1.formula_factor == 1.0
    assert s1.formula_offset == 0.0
    assert s1.minimum == 0.0
    assert s1.maximum == 0.0
    assert s1.unit is None
4 | 5 | - DBC (CAN) Creator: pydbc.api.dbc.DBCCreator 6 | - LDF (LIN) Creator: pydbc.api.ldf.LDFCreator 7 | - NCF (Vehicle/Network Config) Creator: pydbc.api.ncf.NCFCreator 8 | - Parser Wrapper: pydbc.parser.ParserWrapper 9 | - Exporters: pydbc.db.imex.DbcExporter, pydbc.db.imex.LdfExporter 10 | 11 | ## pydbc.api.dbc — DBCCreator 12 | 13 | Primary methods: 14 | - DBCCreator(db_path=":memory:", debug=False, session=None) 15 | - DBCCreator.from_session(session) -> DBCCreator 16 | - index_existing() -> None (preload caches from current session) 17 | - create_network(name, **kwargs) -> Network 18 | - create_node(name, **kwargs) -> Node 19 | - create_message(name, message_id, dlc, sender, **kwargs) -> Message 20 | - create_signal(name, bitsize, byteorder=1, sign=1, formula_factor=1.0, formula_offset=0.0, minimum=0.0, maximum=0.0, unit="", **kwargs) -> Signal 21 | - add_signal_to_message(message, signal, offset, multiplexor_signal=None, multiplex_dependent=None, multiplexor_value=None) -> Message_Signal 22 | - create_valuetable(name, values: dict[int, str]) -> Valuetable 23 | - add_node_as_receiver(signal, node) -> Node_RxSignal 24 | - create_attribute_definition(name, object_type, value_type, **kwargs) -> Attribute_Definition 25 | - set_attribute_value(attribute_definition, object_id, value) -> Attribute_Value 26 | - commit() -> None 27 | - close() -> None 28 | 29 | Notes: 30 | - You can attach to a parsed database by passing session=... or using DBCCreator.from_session. 31 | - sender can be a Node object, a node name string, or an integer rid. 32 | - Name-based parameters are lazily resolved against the attached session if not found in local caches. 33 | - add_signal_to_message offset is in bits. 
34 | 35 | ## pydbc.api.ldf — LDFCreator 36 | 37 | Primary methods: 38 | - LDFCreator(db_path=":memory:", debug=False) 39 | - create_network(name, protocol_version=None, language_version=None, speed=None, file_revision=None, channel_name=None, **kwargs) -> LinNetwork 40 | - create_master_node(name, timebase, jitter, bit_length=None, tolerant=None, **kwargs) -> LinMasterNode 41 | - create_slave_node(name, protocol_version=None, configured_NAD=None, initial_NAD=None, product_id=(), p2_min=None, st_min=None, n_as_timeout=None, n_cr_timeout=None, response_tolerance=None, **kwargs) -> LinSlaveNode 42 | - create_signal(name, signal_size, init_value, publisher, **kwargs) -> LinSignal 43 | - add_signal_subscriber(signal, subscriber) -> LinSignalSubscriber 44 | - create_unconditional_frame(name, frame_id, size, publisher, **kwargs) -> LinUnconditionalFrame 45 | - add_signal_to_frame(frame, signal, signal_offset=0) -> LinUnconditionalFrameSignal 46 | - create_event_triggered_frame(name, frame_id, master_node, collision_resolving_schedule_table=None, **kwargs) -> LinEventTriggeredFrame 47 | - create_sporadic_frame(name, **kwargs) -> LinSporadicFrame 48 | - create_schedule_table(name, **kwargs) -> LinScheduleTable 49 | - add_frame_to_schedule_table(schedule_table, frame, frame_time) -> LinScheduleTable_Command_Frame 50 | - create_signal_encoding_type(name, **kwargs) -> LinSignalEncodingType 51 | - add_logical_value_to_encoding(encoding_type, signal_value, text_info) -> LinSignalEncodingEntry_Logical 52 | - add_physical_range_to_encoding(encoding_type, min_value, max_value, scale, offset, text_info) -> LinSignalEncodingEntry_Physical 53 | - add_signal_representation(signal, encoding_type) -> LinSignalRepresentation 54 | - commit() -> None 55 | - close() -> None 56 | 57 | Notes: 58 | - publisher/subscriber parameters can be a name string or the corresponding node object. 59 | - frame.size is in bytes; signal_offset is in bits. 
60 | 61 | ## pydbc.api.ncf — NCFCreator 62 | 63 | Primary methods: 64 | - NCFCreator(db_path=":memory:", debug=False) 65 | - create_network(name, **kwargs) -> Network 66 | - create_node(name, **kwargs) -> Node 67 | - create_ecu(name, **kwargs) -> ECU 68 | - create_vehicle(name, **kwargs) -> Vehicle 69 | - create_env_var(name, var_type, unit=None, minimum=None, maximum=None, initial_value=None, access_type=None, access_node=None, **kwargs) -> EnvVar 70 | - create_gateway_signal(source_signal, target_signal, **kwargs) -> Gateway_Signal 71 | - add_node_to_network(network, node, connector_name=None) -> Network_Node 72 | - add_ecu_to_vehicle(vehicle, ecu) -> Vehicle_ECU 73 | - add_network_to_vehicle(vehicle, network) -> Vehicle_Network 74 | - add_node_to_ecu(ecu, node) -> ECU_Node 75 | - add_env_var_to_ecu(ecu, env_var) -> ECU_EnvVar 76 | - add_access_node_to_env_var(env_var, node, access_type) -> EnvVar_AccessNode 77 | - create_env_var_data(env_var, data) -> EnvironmentVariablesData 78 | - commit() -> None 79 | - close() -> None 80 | 81 | Notes: 82 | - Many parameters accept either names or objects. Ensure referenced objects exist if passing names. 83 | 84 | ## pydbc.parser — ParserWrapper 85 | 86 | Primary methods/properties: 87 | - ParserWrapper(grammarName, startSymbol, listenerClass, debug=False, logLevel="INFO") 88 | - parse(input, trace=False) -> Session 89 | - parseFromFile(filename, encoding="ISO-8859-1", trace=False) -> Session 90 | - parseFromString(buf, encoding="ISO-8859-1", trace=False, dbname=":memory:") -> Session 91 | - stringStream(fname, encoding="ISO-8859-1") -> InputStream 92 | - numberOfSyntaxErrors (property) 93 | 94 | Notes: 95 | - grammarName is used to dynamically import generated ANTLR lexer/parser classes (e.g., "dbc" -> pydbc.py3.dbcLexer/dbcParser). 96 | - listenerClass should be a subclass of parser.BaseListener that populates the VNDB (e.g., DbcListener). 
97 | 98 | ## pydbc.db.imex — Exporters 99 | 100 | Classes: 101 | - DbcExporter 102 | - LdfExporter 103 | 104 | Usage: 105 | ```python 106 | from pydbc.db.imex import DbcExporter, LdfExporter 107 | DbcExporter(":memory:").run() 108 | LdfExporter(":memory:").run() 109 | ``` 110 | 111 | ## Code generation (Mako) 112 | 113 | Generators in pydbc.cgen.generators: 114 | - MicroPythonCanAppGenerator(session) 115 | - render(only: list[str] | None = None, app_name: str = "pydbc_mpy_app") -> str 116 | - SocketCanCGenerator(session) 117 | - render(only: list[str] | None = None, program_name: str = "pydbc_socketcan_app") -> str 118 | 119 | Notes: 120 | - The templates are embedded resources under pydbc/cgen/templates. 121 | - Both outputs include little-endian bit packing/unpacking helpers and embedded message metadata. 122 | 123 | See docs/code-generation.md for a full guide. 124 | 125 | ## python-can integration helpers 126 | 127 | Module: pydbc.integrations.python_can 128 | 129 | - encode_message(session, message_name, signal_values: dict[str, float]) -> tuple[int, bytes, int] 130 | - Returns (arbitration_id, data, dlc) 131 | - decode_message(session, can_id: int, data: bytes) -> dict 132 | - Returns {"message": name, "signals": {sig: value}} 133 | - PythonCanSender(**bus_kwargs) 134 | - send(arbitration_id: int, data: bytes, is_extended_id: bool = False) 135 | - shutdown() 136 | - PythonCanReceiver(session, **bus_kwargs) 137 | - recv(timeout: float | None) 138 | - decode_frame(frame) -> dict 139 | - shutdown() 140 | 141 | Notes: 142 | - Little-endian signals supported. Big-endian not yet implemented. 143 | - python-can is optional and imported lazily. 
144 | 145 | ## Data model (ORM) 146 | 147 | The SQLAlchemy ORM classes live in pydbc.db.model and include entities such as: 148 | - Network, Node, Message, Signal, Message_Signal 149 | - Valuetable, Value_Description, Attribute_Definition, Attribute_Value 150 | - LIN‑specific: LinNetwork, LinNode, LinMasterNode, LinSlaveNode, LinSignal, LinUnconditionalFrame, LinScheduleTable, encoding types, etc. 151 | - NCF/vehicle config entities: Vehicle, ECU, relations between vehicles, networks, nodes, ECUs, and environment variables. 152 | 153 | Refer to the source for complete field lists and relationships. 154 | -------------------------------------------------------------------------------- /docs/tutorial.md: -------------------------------------------------------------------------------- 1 | # Tutorial 2 | 3 | This tutorial walks you through creating a small but complete set of artifacts with pyDBC: 4 | - A CAN database (DBC) with nodes, messages, and signals 5 | - A LIN network (LDF) with master/slave nodes, frames, schedule tables 6 | - A vehicle configuration (NCF) with vehicle, networks, ECUs, nodes, and environment variables 7 | 8 | Where relevant, we also show how to export DBC/LDF using the provided exporters and how to parse existing DBC files. 9 | 10 | Prerequisites: 11 | - Python 3.10+ 12 | - Clone the repository and install dependencies (see docs/getting-started.md) 13 | 14 | ## 1. 
DBC: Build a small CAN database 15 | 16 | ```python 17 | from pydbc.api.dbc import DBCCreator 18 | 19 | # Create the in-memory database 20 | dbc = DBCCreator(":memory:") 21 | 22 | # Nodes 23 | engine = dbc.create_node("Engine") 24 | dashboard = dbc.create_node("Dashboard") 25 | 26 | # Messages (sender can be Node object or its name) 27 | engine_data = dbc.create_message("EngineData", message_id=0x64, dlc=8, sender=engine) 28 | 29 | # Signals with scaling and units 30 | rpm = dbc.create_signal( 31 | "EngineRPM", bitsize=16, byteorder=1, sign=1, 32 | formula_factor=0.25, formula_offset=0.0, 33 | minimum=0, maximum=8000, unit="rpm", 34 | ) 35 | 36 | speed = dbc.create_signal( 37 | "VehicleSpeed", bitsize=16, byteorder=1, sign=1, 38 | formula_factor=0.1, formula_offset=0.0, 39 | minimum=0, maximum=300, unit="km/h", 40 | ) 41 | 42 | # Map signals into the message payload 43 | dbc.add_signal_to_message(engine_data, rpm, offset=0) 44 | dbc.add_signal_to_message(engine_data, speed, offset=16) 45 | 46 | # (optional) Receivers – declare which node subscribes to which signal 47 | # dbc.add_node_as_receiver(speed, dashboard) 48 | 49 | dbc.commit() 50 | ``` 51 | 52 | Tips: 53 | - byteorder: 1 = little‑endian, 0 = big‑endian 54 | - sign: 1 = unsigned, 0 = signed 55 | - formula_factor and formula_offset define the physical conversion 56 | 57 | ## 2. 
LDF: Build a small LIN network 58 | 59 | ```python 60 | from pydbc.api.ldf import LDFCreator 61 | 62 | ldf = LDFCreator(":memory:") 63 | network = ldf.create_network("LinDemo", protocol_version="2.1", language_version="2.1", speed=19.2) 64 | 65 | # Nodes 66 | master = ldf.create_master_node("MasterECU", timebase=0.005, jitter=0.0001) 67 | slave1 = ldf.create_slave_node("Slave1", configured_NAD=1, initial_NAD=1, protocol_version="2.1") 68 | 69 | # Signals (publisher can be name or node object) 70 | temp = ldf.create_signal("Temperature", signal_size=8, init_value=20, publisher=slave1) 71 | 72 | # Frame and mapping 73 | frame = ldf.create_unconditional_frame("MasterFrame", frame_id=0x10, size=2, publisher=master) 74 | ldf.add_signal_to_frame(frame, temp, signal_offset=0) 75 | 76 | # Schedule table 77 | sched = ldf.create_schedule_table("Normal") 78 | ldf.add_frame_to_schedule_table(sched, frame, frame_time=0.01) 79 | 80 | # Encoding for temperature 81 | enc = ldf.create_signal_encoding_type("TemperatureEncoding") 82 | ldf.add_logical_value_to_encoding(enc, 0, "Error") 83 | ldf.add_logical_value_to_encoding(enc, 255, "N/A") 84 | ldf.add_physical_range_to_encoding(enc, min_value=1, max_value=254, scale=0.5, offset=-40, text_info="°C") 85 | ldf.add_signal_representation(temp, enc) 86 | 87 | ldf.commit() 88 | ``` 89 | 90 | ## 3. 
NCF: Build a small vehicle configuration 91 | 92 | ```python 93 | from pydbc.api.ncf import NCFCreator 94 | 95 | ncf = NCFCreator(":memory:") 96 | vehicle = ncf.create_vehicle("DemoVehicle") 97 | can_net = ncf.create_network("CAN1", protocol="CAN", speed=500) 98 | lin_net = ncf.create_network("LIN1", protocol="LIN", speed=19.2) 99 | 100 | engine_ecu = ncf.create_ecu("EngineECU") 101 | body_ecu = ncf.create_ecu("BodyECU") 102 | 103 | engine_node = ncf.create_node("EngineNode") 104 | body_node = ncf.create_node("BodyNode") 105 | 106 | # wire up the topology 107 | ncf.add_network_to_vehicle(vehicle, can_net) 108 | ncf.add_network_to_vehicle(vehicle, lin_net) 109 | 110 | ncf.add_ecu_to_vehicle(vehicle, engine_ecu) 111 | ncf.add_ecu_to_vehicle(vehicle, body_ecu) 112 | 113 | ncf.add_node_to_network(can_net, engine_node) 114 | ncf.add_node_to_network(lin_net, body_node) 115 | 116 | ncf.add_node_to_ecu(engine_ecu, engine_node) 117 | ncf.add_node_to_ecu(body_ecu, body_node) 118 | 119 | # environment variables 120 | coolant = ncf.create_env_var("CoolantTemp", var_type="INT", unit="°C", minimum=0, maximum=150, initial_value="90") 121 | ncf.add_env_var_to_ecu(engine_ecu, coolant) 122 | 123 | ncf.commit() 124 | ``` 125 | 126 | ## 4. Export to DBC/LDF text 127 | 128 | Use the exporters to render a template output of the current DB (for demo/testing): 129 | 130 | ```python 131 | from pydbc.db.imex import DbcExporter, LdfExporter 132 | 133 | # Export DBC 134 | dbx = DbcExporter(":memory:") 135 | dbx.run() # writes testfile.txt.render next to your working dir 136 | 137 | # Export LDF 138 | ldfx = LdfExporter(":memory:") 139 | ldfx.run() # writes testfile.txt.render 140 | ``` 141 | 142 | Note: The exporters render using packaged templates under pydbc/cgen/templates. They are intended for demonstration/round‑trip tests. 143 | 144 | ## 5. 
Import an existing DBC/LDF/NCF or open a VNDB file 145 | 146 | To simplify parsing files with the ANTLR4 grammars, use the small helper functions under pydbc.api.imports. 147 | 148 | ```python 149 | from pydbc.api.imports import import_dbc, import_ldf, import_ncf, open_vndb 150 | 151 | # DBC: parse a .dbc and get a SQLAlchemy session (DB is written next to the input as .vndb) 152 | session = import_dbc("path\\to\\your\\file.dbc") 153 | 154 | # LDF / NCF work the same (requires the respective listeners to be available) 155 | # ldf_session = import_ldf("path\\to\\your\\file.ldf") 156 | # ncf_session = import_ncf("path\\to\\your\\file.ncf") 157 | 158 | # If you already have a .vndb file, open it 159 | vndb = open_vndb("path\\to\\your\\file.vndb") 160 | session = vndb.session 161 | 162 | # Advanced: provide a custom listener (must match the grammar) 163 | # from pydbc.dbcListener import DbcListener 164 | # session = import_dbc("file.dbc", listenerClass=DbcListener, debug=False, logLevel="INFO") 165 | ``` 166 | 167 | Under the hood these functions use ParserWrapper and auto-detect file encodings using pydbc.utils.detect_encoding. 168 | 169 | ## 6. Querying your data (SQLAlchemy session) 170 | 171 | All creators and the parser provide access to the underlying SQLAlchemy session via .session. 
For example, using the DBC creator shown above: 172 | 173 | ```python 174 | from pydbc.db.model import Message, Signal 175 | 176 | # obtain the SQLAlchemy session (e.g., from a creator) 177 | from pydbc.api.dbc import DBCCreator 178 | session = DBCCreator(":memory:").session 179 | 180 | # count messages 181 | n = session.query(Message).count() 182 | print("Messages:", n) 183 | 184 | # join signals of a message 185 | msg = session.query(Message).filter_by(name="EngineData").first() 186 | for ms in msg.signals: # Message_Signal association 187 | print(ms.signal.name, ms.offset) 188 | ``` 189 | 190 | If you opened an existing .vndb file with open_vndb, use vndb.session in the same way: 191 | 192 | ```python 193 | from pydbc.api.imports import open_vndb 194 | from pydbc.db.model import Message, Signal 195 | 196 | vndb = open_vndb("C:\\path\\to\\database.vndb") 197 | session = vndb.session 198 | 199 | # list all messages and their IDs 200 | for m in session.query(Message).order_by(Message.message_id).all(): 201 | print(f"0x{m.message_id:X} {m.name} (dlc={m.dlc})") 202 | 203 | # fetch a specific message and show its signals with bit offsets 204 | msg = session.query(Message).filter_by(name="EngineData").first() 205 | if msg: 206 | for ms in msg.signals: # ms is a Message_Signal association row 207 | s = ms.signal 208 | print(f"{s.name}: start={ms.offset} size={s.bitsize} unit={s.unit}") 209 | ``` 210 | 211 | ## 7. 
Next steps 212 | 213 | - Review the Examples (docs/examples.md) which mirror and extend pydbc/examples/api_examples.py 214 | - Look through the API Reference (docs/api-reference.md) for details and parameter hints 215 | - Check the repository’s tests and example database files in the root for inspiration (e.g., *.vndb) 216 | -------------------------------------------------------------------------------- /pydbc/cgen/templates/ldf.tmpl: -------------------------------------------------------------------------------- 1 | <% 2 | network = db.session.query(model.LinNetwork).first() 3 | %>\ 4 | LIN_description_file; 5 | LIN_protocol_version = "${network.protocol_version}"; 6 | LIN_language_version = "${network.language_version}"; 7 | %if not network.file_revision is None: 8 | LDF_file_revision = "${network.file_revision}"; 9 | %endif 10 | LIN_speed = ${network.speed} kbps; 11 | %if not network.channel_name is None: 12 | Channel_name = ${network.channel_name}; 13 | %endif 14 | 15 | Nodes { 16 | <% 17 | masterNode = db.session.query(model.LinMasterNode).first() 18 | slaveNodes = db.session.query(model.LinSlaveNode).all() 19 | %>\ 20 | Master: ${masterNode.name}, ${masterNode.timebase}ms, ${masterNode.jitter}ms; 21 | Slaves: ${', '.join([s.name for s in slaveNodes])}; 22 | } 23 | 24 | Signals { 25 | %for signal in db.session.query(model.LinSignal).order_by(model.LinSignal.rid).all(): 26 | ${"{0:25s}".format(signal.name)}: ${signal.bitsize}, \ 27 | %if isinstance(signal.init_value, list) : 28 | { ${', '.join(["0x{:02x}".format(x) for x in signal.init_value])} }, \ 29 | %else: 30 | ${"0x{:02x}".format(signal.init_value)}, \ 31 | %endif 32 | ${signal.publisher.name}, ${', '.join([s.name for s in signal.subscribers])}; 33 | %endfor 34 | } 35 | 36 | Diagnostic_signals { 37 | 38 | } 39 | 40 | Frames { 41 | %for frame in db.session.query(model.LinUnconditionalFrame).filter(\ 42 | model.LinUnconditionalFrame.type == 
"LinUnconditionalFrame").order_by(model.LinUnconditionalFrame.lin_unconditional_frame_id).all(): 43 | <% senderName = db.session.query(model.Node.name).filter(model.Node.rid == frame.sender).scalar() %>\ 44 | ${frame.name}: ${"0x{:02x}".format(frame.frame_id)}, ${frame.publisher.name}, ${frame.dlc} { 45 | <% frame_signals = db.session.query(model.Signal.name, model.Message_Signal.offset).\ 46 | join(model.Message_Signal).filter(model.Message_Signal.message_id == frame.message_id).\ 47 | order_by(model.Message_Signal.offset).all() %>\ 48 | %for fs in frame.signals: 49 | <% fs_assoc = db.session.query(model.LinUnconditionalFrameSignal).filter( 50 | model.LinUnconditionalFrameSignal.unconditional_frame == frame, model.LinUnconditionalFrameSignal.signal == fs).first() %>\ 51 | ${fs.name}, ${fs_assoc.signal_offset}; 52 | %endfor 53 | } 54 | %endfor 55 | } 56 | 57 | Sporadic_frames { 58 | %for frame in db.session.query(model.LinSporadicFrame).all(): 59 | ${frame.name}: ${', '.join([f.name for f in frame.associated_frames])}; 60 | %endfor 61 | } 62 | 63 | Event_triggered_frames { 64 | %for frame in db.session.query(model.LinEventTriggeredFrame).all(): 65 | ${frame.name}: ${frame.collision_resolving_schedule_table.name}, ${frame.frame_id}, ${', '.join([f.name for f in frame.associated_frames])}; 66 | %endfor 67 | } 68 | 69 | Diagnostic_frames { 70 | 71 | } 72 | 73 | Node_attributes { 74 | %for node in slaveNodes: 75 | <% #attrs = nodeAttrs[node.rid] 76 | fault_state_signals = db.session.query(model.LinFaultStateSignal).filter(model.LinFaultStateSignal.node == node).all() 77 | response_error = db.session.query(model.LinResponseErrorSignal).filter(model.LinResponseErrorSignal.node == node).first() 78 | %>\ 79 | ${node.name} { 80 | %if not node.protocol_version is None: 81 | LIN_protocol = "${node.protocol_version}"; 82 | %endif 83 | %if not node.configured_NAD is None: 84 | configured_NAD = ${"0x{:02x}".format(node.configured_NAD)}; 85 | %endif 86 | %if not node.initial_NAD 
is None: 87 | initial_NAD = ${"0x{:02x}".format(node.initial_NAD)}; 88 | %endif 89 | %if not node.supplier_id is None: 90 | product_id = ${"0x{:04x}".format(node.supplier_id)},\ 91 | %endif 92 | %if not node.function_id is None: 93 | ${"0x{:04x}".format(node.function_id)}\ 94 | %endif 95 | %if not node.variant is None: 96 | , ${"0x{:04x}".format(node.variant)}\ 97 | %endif 98 | ; 99 | %if not response_error is None: 100 | response_error = ${response_error.signal.name}; 101 | %endif 102 | %if fault_state_signals: 103 | fault_state_signals = ${", ".join([s.signal.name for s in fault_state_signals])}; 104 | %endif 105 | %if not node.p2_min is None: 106 | P2_min = ${node.p2_min} ms; 107 | %endif 108 | %if not node.st_min is None: 109 | ST_min = ${node.st_min} ms; 110 | %endif 111 | %if not node.n_as_timeout is None: 112 | N_As_timeout = ${node.n_as_timeout} ms; 113 | %endif 114 | %if not node.n_cr_timeout is None: 115 | N_Cr_timeout = ${node.n_cr_timeout} ms; 116 | %endif 117 | <% configurable_frames = db.session.query(model.LinConfigurableFrame).\ 118 | filter(model.LinConfigurableFrame.node == node).all() %>\ 119 | %if configurable_frames: 120 | configurable_frames { 121 | %for frame in configurable_frames: 122 | ${frame.frame.name}; 123 | %endfor 124 | } 125 | %endif 126 | } 127 | %endfor 128 | 129 | Schedule_tables { 130 | %for table in db.session.query(model.LinScheduleTable).order_by(model.LinScheduleTable.rid).all(): 131 | ${table.name} { 132 | %for command in table.entries: 133 | %if command.type == "LinScheduleTable_Command_Frame": 134 | ${command.frame.name} delay ${command.frame_time} ms; 135 | %elif command.type == "LinScheduleTable_Command_MasterReq": 136 | MasterReq delay ${command.frame_time} ms; 137 | %elif command.type == "LinScheduleTable_Command_SlaveResp": 138 | SlaveResp delay ${command.frame_time} ms; 139 | %elif command.type == "LinScheduleTable_Command_AssignNad": 140 | AssignNAD { ${command.node.name} } delay ${command.frame_time} ms; 141 | 
%elif command.type == "LinScheduleTable_Command_ConditionalChangeNad": 142 | ConditionalChangeNAD { ${"0x{:02x}".format(command.nad)}, ${"0x{:02x}".format(command.id)}, \ 143 | ${"0x{:02x}".format(command.byte)}, ${"0x{:02x}".format(command.mask)}, ${"0x{:02x}".format(command.inv)}, \ 144 | ${"0x{:02x}".format(command.new_nad)} } delay ${command.frame_time} ms; 145 | %elif command.type == "LinScheduleTable_Command_DataDump": 146 | DataDump { ${command.node.name}, ${"0x{:02x}".format(command.d1)}, ${"0x{:02x}".format(command.d2)}, \ 147 | ${"0x{:02x}".format(command.d3)}, ${"0x{:02x}".format(command.d4)}, ${"0x{:02x}".format(command.d5)} } \ 148 | delay ${command.frame_time} ms; 149 | %elif command.type == "LinScheduleTable_Command_SaveConfiguration": 150 | SaveConfiguration { ${command.node.name} } delay ${command.frame_time} ms; 151 | %elif command.type == "LinScheduleTable_Command_AssignFrameIdRange": 152 | AssignFrameIdRange {${command.node.name}, ${command.frame_index} \ 153 | %if command.frame_pid1: 154 | ${"0x{:02x}".format(command.frame_pid1)} \ 155 | %endif 156 | %if command.frame_pid2: 157 | , ${"0x{:02x}".format(command.frame_pid2)} \ 158 | %endif 159 | %if command.frame_pid3: 160 | , ${"0x{:02x}".format(command.frame_pid3)} \ 161 | %endif 162 | %if command.frame_pid4: 163 | , ${"0x{:02x}".format(command.frame_pid4)} \ 164 | %endif 165 | } delay ${command.frame_time} ms; 166 | %elif command.type == "LinScheduleTable_Command_FreeFormat": 167 | FreeFormat { ${"0x{:02x}".format(command.d1)}, ${"0x{:02x}".format(command.d2)}, \ 168 | ${"0x{:02x}".format(command.d3)}, ${"0x{:02x}".format(command.d4)}, ${"0x{:02x}".format(command.d5)}, \ 169 | ${"0x{:02x}".format(command.d6)}, ${"0x{:02x}".format(command.d7)}, ${"0x{:02x}".format(command.d8)} \ 170 | } delay ${command.frame_time} ms; 171 | %elif command.type == "LinScheduleTable_Command_AssignFrameId": 172 | AssignFrameId { ${command.node.name}, ${command.frame.name} } delay ${command.frame_time} ms; 173 | %endif 174 | %endfor 175 | } 176 | 
%endfor 177 | } 178 | 179 | Signal_encoding_types { 180 | %for enc in db.session.query(model.LinSignalEncodingType).all(): 181 | ${enc.name} { 182 | %for entry in enc.entries: 183 | %if entry.type == "LinSignalEncodingEntry_Logical": 184 | logical_value, ${int(entry.signal_value)}, "${entry.text_info}"; 185 | %elif entry.type == "LinSignalEncodingEntry_Physical": 186 | physical_value, ${int(entry.min_value)}, ${int(entry.max_value)}, ${entry.scale}, \ 187 | ${entry.offset}, "${entry.text_info}"; 188 | %endif 189 | %endfor 190 | } 191 | %endfor 192 | } 193 | 194 | Signal_representation { 195 | %for enc in db.session.query(model.LinSignalEncodingType).all(): 196 | <% sr = db.session.query(model.Signal.name).join(model.LinSignalRepresentation).\ 197 | filter(model.LinSignalRepresentation.signal_encoding_type == enc).all() %>\ 198 | %if sr: 199 | ${enc.name}: ${', '.join([s[0] for s in sr])}; 200 | %endif 201 | %endfor 202 | } 203 | -------------------------------------------------------------------------------- /pydbc/cgen/templates/socketcan.c.tmpl: -------------------------------------------------------------------------------- 1 | /* 2 | * Auto-generated Linux SocketCAN application: ${program_name} 3 | * Generated by pydbc using Mako templates. Standalone: embeds all metadata. 4 | * 5 | * Build (example): 6 | * gcc -O2 -Wall -o ${program_name} ${program_name}.c 7 | * Run (example): 8 | * sudo ./${program_name} vcan0 9 | * 10 | * Limitations: 11 | * - Only little-endian signals handled in helpers below. 12 | * - Multiplexing not implemented in this minimal generator. 
13 | */ 14 | #include <stdio.h> 15 | #include <stdlib.h> 16 | #include <string.h> 17 | #include <stdint.h> 18 | #include <stdbool.h> 19 | #include <unistd.h> 20 | #include <errno.h> 21 | #include <net/if.h> 22 | #include <sys/types.h> 23 | #include <sys/socket.h> 24 | #include <sys/ioctl.h> 25 | #include <linux/can.h> 26 | #include <linux/can/raw.h> 27 | 28 | typedef struct { 29 | const char *name; 30 | uint16_t start; // bit start 31 | uint8_t size; // bit size 32 | bool le; // little-endian 33 | bool sign; // true if signed 34 | double factor; 35 | double offset; 36 | } cg_signal_t; 37 | 38 | typedef struct { 39 | const char *name; 40 | uint32_t id; 41 | uint8_t dlc; 42 | const cg_signal_t *signals; 43 | size_t num_signals; 44 | } cg_message_t; 45 | 46 | static inline void ins_bits_le(uint8_t *buf, uint16_t start, uint8_t size, int64_t value) { 47 | if (size == 0) return; 48 | uint64_t mask = (size >= 64) ? ~0ULL : ((1ULL << size) - 1ULL); 49 | uint64_t v = ((uint64_t)value) & mask; 50 | uint16_t bit_pos = start; 51 | uint8_t remaining = size; 52 | while (remaining > 0) { 53 | uint16_t byte_index = bit_pos / 8u; 54 | uint8_t bit_index = bit_pos % 8u; 55 | uint8_t bits_here = (uint8_t)((remaining < (8u - bit_index)) ? remaining : (8u - bit_index)); 56 | uint8_t chunk_mask = (uint8_t)((1u << bits_here) - 1u); 57 | uint8_t chunk = (uint8_t)(v & chunk_mask); 58 | buf[byte_index] &= (uint8_t)~(chunk_mask << bit_index); 59 | buf[byte_index] |= (uint8_t)(chunk << bit_index); 60 | v >>= bits_here; 61 | remaining -= bits_here; 62 | bit_pos += bits_here; 63 | } 64 | } 65 | 66 | static inline int64_t ext_bits_le(const uint8_t *buf, uint16_t start, uint8_t size, bool sign) { 67 | if (size == 0) return 0; 68 | uint16_t bit_pos = start; 69 | uint8_t remaining = size; 70 | uint64_t out = 0; 71 | uint8_t shift = 0; 72 | while (remaining > 0) { 73 | uint16_t byte_index = bit_pos / 8u; 74 | uint8_t bit_index = bit_pos % 8u; 75 | uint8_t bits_here = (uint8_t)((remaining < (8u - bit_index)) ?
remaining : (8u - bit_index)); 76 | uint8_t chunk_mask = (uint8_t)((1u << bits_here) - 1u); 77 | uint8_t byte_val = buf[byte_index]; 78 | uint8_t chunk = (uint8_t)((byte_val >> bit_index) & chunk_mask); 79 | out |= ((uint64_t)chunk) << shift; 80 | remaining -= bits_here; 81 | bit_pos += bits_here; 82 | shift += bits_here; 83 | } 84 | if (sign) { 85 | uint64_t sign_bit = 1ULL << (size - 1); 86 | if (out & sign_bit) { 87 | out = out - (1ULL << size); 88 | } 89 | } 90 | return (int64_t)out; 91 | } 92 | 93 | static inline int64_t phys_to_raw(double v, double factor, double offset, uint8_t size, bool sign) { 94 | double rawd = (factor == 0.0) ? (v - offset) : ((v - offset) / factor); 95 | int64_t raw = (int64_t)( (rawd >= 0.0) ? (rawd + 0.5) : (rawd - 0.5) ); 96 | if (sign) { 97 | int64_t minv = -(1LL << (size - 1)); 98 | int64_t maxv = (1LL << (size - 1)) - 1LL; 99 | if (raw < minv) raw = minv; 100 | if (raw > maxv) raw = maxv; 101 | } else { 102 | int64_t maxv = 0; /* placeholder */ } /* unused */ int64_t minv = 0; 103 | int64_t maxv = /* 1LL << size is UB for size >= 63, not only == 63 */ (size >= 63) ?
INT64_MAX : ((1LL << size) - 1LL); 104 | if (raw < minv) raw = minv; 105 | if (raw > maxv) raw = maxv; 106 | } 107 | return raw; 108 | } 109 | 110 | /* Embedded database */ 111 | % for m in messages: 112 | static const cg_signal_t sigs_${m.message_id}[] = { 113 | % for s in m.signals: 114 | { ${repr(s.name)}, ${s.start_bit}, ${s.size}, ${'true' if s.little_endian else 'false'}, ${'true' if s.signed else 'false'}, ${float(s.factor)}, ${float(s.offset)} }, 115 | % endfor 116 | }; 117 | % endfor 118 | 119 | static const cg_message_t MESSAGES[] = { 120 | % for m in messages: 121 | { ${repr(m.name)}, ${m.message_id}, ${m.dlc}, sigs_${m.message_id}, sizeof(sigs_${m.message_id})/sizeof(sigs_${m.message_id}[0]) }, 122 | % endfor 123 | }; 124 | static const size_t NUM_MESSAGES = sizeof(MESSAGES)/sizeof(MESSAGES[0]); 125 | 126 | static const cg_message_t* find_msg_by_name(const char *name) { 127 | for (size_t i = 0; i < NUM_MESSAGES; ++i) { 128 | if (MESSAGES[i].name && strcmp(MESSAGES[i].name, name) == 0) return &MESSAGES[i]; 129 | } 130 | return NULL; 131 | } 132 | 133 | static const cg_message_t* find_msg_by_id(uint32_t id) { 134 | for (size_t i = 0; i < NUM_MESSAGES; ++i) { 135 | if (MESSAGES[i].id == id) return &MESSAGES[i]; 136 | } 137 | return NULL; 138 | } 139 | 140 | /* Encode values[] (parallel arrays) into frame data */ 141 | static void encode_message(const cg_message_t *m, const double *values, uint8_t *out_data) { 142 | memset(out_data, 0, m->dlc); 143 | for (size_t i = 0; i < m->num_signals; ++i) { 144 | const cg_signal_t *s = &m->signals[i]; 145 | if (!s->le) { 146 | fprintf(stderr, "Big-endian signal not supported: %s\n", s->name); 147 | continue; 148 | } 149 | int64_t raw = phys_to_raw(values[i], s->factor, s->offset, s->size, s->sign); 150 | ins_bits_le(out_data, s->start, s->size, raw); 151 | } 152 | } 153 | 154 | /* Decode frame data into values[] */ 155 | static void decode_message(const cg_message_t *m, const uint8_t *data, double *values_out) { 156 | 
for (size_t i = 0; i < m->num_signals; ++i) { 157 | const cg_signal_t *s = &m->signals[i]; 158 | if (!s->le) { 159 | fprintf(stderr, "Big-endian signal not supported: %s\n", s->name); 160 | values_out[i] = 0.0; 161 | continue; 162 | } 163 | int64_t raw = ext_bits_le(data, s->start, s->size, s->sign); 164 | values_out[i] = raw * s->factor + s->offset; 165 | } 166 | } 167 | 168 | int main(int argc, char **argv) { 169 | if (argc < 2) { 170 | fprintf(stderr, "Usage: %s \n", argv[0]); 171 | return 1; 172 | } 173 | const char *ifname = argv[1]; 174 | 175 | int s = socket(PF_CAN, SOCK_RAW, CAN_RAW); 176 | if (s < 0) { 177 | perror("socket"); 178 | return 1; 179 | } 180 | 181 | struct ifreq ifr; 182 | memset(&ifr, 0, sizeof(ifr)); 183 | strncpy(ifr.ifr_name, ifname, IFNAMSIZ - 1); 184 | if (ioctl(s, SIOCGIFINDEX, &ifr) < 0) { 185 | perror("SIOCGIFINDEX"); 186 | close(s); 187 | return 1; 188 | } 189 | 190 | struct sockaddr_can addr; 191 | memset(&addr, 0, sizeof(addr)); 192 | addr.can_family = AF_CAN; 193 | addr.can_ifindex = ifr.ifr_ifindex; 194 | 195 | if (bind(s, (struct sockaddr *)&addr, sizeof(addr)) < 0) { 196 | perror("bind"); 197 | close(s); 198 | return 1; 199 | } 200 | 201 | if (NUM_MESSAGES == 0) { 202 | fprintf(stderr, "No embedded messages to demo.\n"); 203 | close(s); 204 | return 0; 205 | } 206 | 207 | const cg_message_t *m = &MESSAGES[0]; 208 | double *vals = calloc(m->num_signals, sizeof(double)); 209 | if (!vals) { perror("calloc"); close(s); return 1; } 210 | 211 | uint8_t data[8]; 212 | encode_message(m, vals, data); 213 | 214 | struct can_frame tx; 215 | memset(&tx, 0, sizeof(tx)); 216 | tx.can_id = m->id; 217 | tx.can_dlc = m->dlc; 218 | memcpy(tx.data, data, m->dlc); 219 | 220 | if (write(s, &tx, sizeof(tx)) != sizeof(tx)) { 221 | perror("write"); 222 | } else { 223 | printf("Sent 0x%X ->", m->id); 224 | for (int i = 0; i < m->dlc; ++i) printf(" %02X", tx.data[i]); 225 | printf("\n"); 226 | } 227 | 228 | struct can_frame rx; 229 | ssize_t n = read(s, 
&rx, sizeof(rx)); 230 | if (n == sizeof(rx)) { 231 | const cg_message_t *mr = find_msg_by_id(rx.can_id); 232 | if (mr) { 233 | double *decoded = calloc(mr->num_signals, sizeof(double)); 234 | if (decoded) { 235 | decode_message(mr, rx.data, decoded); 236 | printf("Received 0x%X, decoded %zu signals\n", rx.can_id, mr->num_signals); 237 | free(decoded); 238 | } 239 | } else { 240 | printf("Received unknown id 0x%X\n", rx.can_id); 241 | } 242 | } 243 | 244 | free(vals); 245 | close(s); 246 | return 0; 247 | } 248 | -------------------------------------------------------------------------------- /pydbc/py3/ncfVisitor.py: -------------------------------------------------------------------------------- 1 | # Generated from ncf.g4 by ANTLR 4.13.2 2 | from antlr4 import * 3 | if "." in __name__: 4 | from .ncfParser import ncfParser 5 | else: 6 | from ncfParser import ncfParser 7 | 8 | # This class defines a complete generic visitor for a parse tree produced by ncfParser. 9 | 10 | class ncfVisitor(ParseTreeVisitor): 11 | 12 | # Visit a parse tree produced by ncfParser#toplevel. 13 | def visitToplevel(self, ctx:ncfParser.ToplevelContext): 14 | return self.visitChildren(ctx) 15 | 16 | 17 | # Visit a parse tree produced by ncfParser#language_version. 18 | def visitLanguage_version(self, ctx:ncfParser.Language_versionContext): 19 | return self.visitChildren(ctx) 20 | 21 | 22 | # Visit a parse tree produced by ncfParser#node_definition. 23 | def visitNode_definition(self, ctx:ncfParser.Node_definitionContext): 24 | return self.visitChildren(ctx) 25 | 26 | 27 | # Visit a parse tree produced by ncfParser#node_name. 28 | def visitNode_name(self, ctx:ncfParser.Node_nameContext): 29 | return self.visitChildren(ctx) 30 | 31 | 32 | # Visit a parse tree produced by ncfParser#general_definition. 33 | def visitGeneral_definition(self, ctx:ncfParser.General_definitionContext): 34 | return self.visitChildren(ctx) 35 | 36 | 37 | # Visit a parse tree produced by ncfParser#protocol_version. 
38 | def visitProtocol_version(self, ctx:ncfParser.Protocol_versionContext): 39 | return self.visitChildren(ctx) 40 | 41 | 42 | # Visit a parse tree produced by ncfParser#supplier_id. 43 | def visitSupplier_id(self, ctx:ncfParser.Supplier_idContext): 44 | return self.visitChildren(ctx) 45 | 46 | 47 | # Visit a parse tree produced by ncfParser#function_id. 48 | def visitFunction_id(self, ctx:ncfParser.Function_idContext): 49 | return self.visitChildren(ctx) 50 | 51 | 52 | # Visit a parse tree produced by ncfParser#variant_id. 53 | def visitVariant_id(self, ctx:ncfParser.Variant_idContext): 54 | return self.visitChildren(ctx) 55 | 56 | 57 | # Visit a parse tree produced by ncfParser#bitrate_definition. 58 | def visitBitrate_definition(self, ctx:ncfParser.Bitrate_definitionContext): 59 | return self.visitChildren(ctx) 60 | 61 | 62 | # Visit a parse tree produced by ncfParser#bitrate. 63 | def visitBitrate(self, ctx:ncfParser.BitrateContext): 64 | return self.visitChildren(ctx) 65 | 66 | 67 | # Visit a parse tree produced by ncfParser#diagnostic_definition. 68 | def visitDiagnostic_definition(self, ctx:ncfParser.Diagnostic_definitionContext): 69 | return self.visitChildren(ctx) 70 | 71 | 72 | # Visit a parse tree produced by ncfParser#frame_definition. 73 | def visitFrame_definition(self, ctx:ncfParser.Frame_definitionContext): 74 | return self.visitChildren(ctx) 75 | 76 | 77 | # Visit a parse tree produced by ncfParser#single_frame. 78 | def visitSingle_frame(self, ctx:ncfParser.Single_frameContext): 79 | return self.visitChildren(ctx) 80 | 81 | 82 | # Visit a parse tree produced by ncfParser#frame_kind. 83 | def visitFrame_kind(self, ctx:ncfParser.Frame_kindContext): 84 | return self.visitChildren(ctx) 85 | 86 | 87 | # Visit a parse tree produced by ncfParser#frame_name. 88 | def visitFrame_name(self, ctx:ncfParser.Frame_nameContext): 89 | return self.visitChildren(ctx) 90 | 91 | 92 | # Visit a parse tree produced by ncfParser#frame_properties. 
93 | def visitFrame_properties(self, ctx:ncfParser.Frame_propertiesContext): 94 | return self.visitChildren(ctx) 95 | 96 | 97 | # Visit a parse tree produced by ncfParser#signal_definition. 98 | def visitSignal_definition(self, ctx:ncfParser.Signal_definitionContext): 99 | return self.visitChildren(ctx) 100 | 101 | 102 | # Visit a parse tree produced by ncfParser#signal_definition_entry. 103 | def visitSignal_definition_entry(self, ctx:ncfParser.Signal_definition_entryContext): 104 | return self.visitChildren(ctx) 105 | 106 | 107 | # Visit a parse tree produced by ncfParser#signal_name. 108 | def visitSignal_name(self, ctx:ncfParser.Signal_nameContext): 109 | return self.visitChildren(ctx) 110 | 111 | 112 | # Visit a parse tree produced by ncfParser#signal_properties. 113 | def visitSignal_properties(self, ctx:ncfParser.Signal_propertiesContext): 114 | return self.visitChildren(ctx) 115 | 116 | 117 | # Visit a parse tree produced by ncfParser#init_value. 118 | def visitInit_value(self, ctx:ncfParser.Init_valueContext): 119 | return self.visitChildren(ctx) 120 | 121 | 122 | # Visit a parse tree produced by ncfParser#init_value_scalar. 123 | def visitInit_value_scalar(self, ctx:ncfParser.Init_value_scalarContext): 124 | return self.visitChildren(ctx) 125 | 126 | 127 | # Visit a parse tree produced by ncfParser#init_value_array. 128 | def visitInit_value_array(self, ctx:ncfParser.Init_value_arrayContext): 129 | return self.visitChildren(ctx) 130 | 131 | 132 | # Visit a parse tree produced by ncfParser#encoding_definition. 133 | def visitEncoding_definition(self, ctx:ncfParser.Encoding_definitionContext): 134 | return self.visitChildren(ctx) 135 | 136 | 137 | # Visit a parse tree produced by ncfParser#encoding_definition_entry. 138 | def visitEncoding_definition_entry(self, ctx:ncfParser.Encoding_definition_entryContext): 139 | return self.visitChildren(ctx) 140 | 141 | 142 | # Visit a parse tree produced by ncfParser#encoding_definition_value. 
143 | def visitEncoding_definition_value(self, ctx:ncfParser.Encoding_definition_valueContext): 144 | return self.visitChildren(ctx) 145 | 146 | 147 | # Visit a parse tree produced by ncfParser#encoding_name. 148 | def visitEncoding_name(self, ctx:ncfParser.Encoding_nameContext): 149 | return self.visitChildren(ctx) 150 | 151 | 152 | # Visit a parse tree produced by ncfParser#logical_value. 153 | def visitLogical_value(self, ctx:ncfParser.Logical_valueContext): 154 | return self.visitChildren(ctx) 155 | 156 | 157 | # Visit a parse tree produced by ncfParser#physical_range. 158 | def visitPhysical_range(self, ctx:ncfParser.Physical_rangeContext): 159 | return self.visitChildren(ctx) 160 | 161 | 162 | # Visit a parse tree produced by ncfParser#bcd_value. 163 | def visitBcd_value(self, ctx:ncfParser.Bcd_valueContext): 164 | return self.visitChildren(ctx) 165 | 166 | 167 | # Visit a parse tree produced by ncfParser#ascii_value. 168 | def visitAscii_value(self, ctx:ncfParser.Ascii_valueContext): 169 | return self.visitChildren(ctx) 170 | 171 | 172 | # Visit a parse tree produced by ncfParser#signal_value. 173 | def visitSignal_value(self, ctx:ncfParser.Signal_valueContext): 174 | return self.visitChildren(ctx) 175 | 176 | 177 | # Visit a parse tree produced by ncfParser#min_value. 178 | def visitMin_value(self, ctx:ncfParser.Min_valueContext): 179 | return self.visitChildren(ctx) 180 | 181 | 182 | # Visit a parse tree produced by ncfParser#max_value. 183 | def visitMax_value(self, ctx:ncfParser.Max_valueContext): 184 | return self.visitChildren(ctx) 185 | 186 | 187 | # Visit a parse tree produced by ncfParser#scale. 188 | def visitScale(self, ctx:ncfParser.ScaleContext): 189 | return self.visitChildren(ctx) 190 | 191 | 192 | # Visit a parse tree produced by ncfParser#offset. 193 | def visitOffset(self, ctx:ncfParser.OffsetContext): 194 | return self.visitChildren(ctx) 195 | 196 | 197 | # Visit a parse tree produced by ncfParser#text_info. 
198 | def visitText_info(self, ctx:ncfParser.Text_infoContext): 199 | return self.visitChildren(ctx) 200 | 201 | 202 | # Visit a parse tree produced by ncfParser#status_management. 203 | def visitStatus_management(self, ctx:ncfParser.Status_managementContext): 204 | return self.visitChildren(ctx) 205 | 206 | 207 | # Visit a parse tree produced by ncfParser#published_signal. 208 | def visitPublished_signal(self, ctx:ncfParser.Published_signalContext): 209 | return self.visitChildren(ctx) 210 | 211 | 212 | # Visit a parse tree produced by ncfParser#free_text_definition. 213 | def visitFree_text_definition(self, ctx:ncfParser.Free_text_definitionContext): 214 | return self.visitChildren(ctx) 215 | 216 | 217 | # Visit a parse tree produced by ncfParser#intValue. 218 | def visitIntValue(self, ctx:ncfParser.IntValueContext): 219 | return self.visitChildren(ctx) 220 | 221 | 222 | # Visit a parse tree produced by ncfParser#floatValue. 223 | def visitFloatValue(self, ctx:ncfParser.FloatValueContext): 224 | return self.visitChildren(ctx) 225 | 226 | 227 | # Visit a parse tree produced by ncfParser#number. 228 | def visitNumber(self, ctx:ncfParser.NumberContext): 229 | return self.visitChildren(ctx) 230 | 231 | 232 | # Visit a parse tree produced by ncfParser#stringValue. 233 | def visitStringValue(self, ctx:ncfParser.StringValueContext): 234 | return self.visitChildren(ctx) 235 | 236 | 237 | # Visit a parse tree produced by ncfParser#identifierValue. 238 | def visitIdentifierValue(self, ctx:ncfParser.IdentifierValueContext): 239 | return self.visitChildren(ctx) 240 | 241 | 242 | 243 | del ncfParser -------------------------------------------------------------------------------- /pydbc/integrations/python_can.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """ 4 | python-can integration helpers for pyDBC. 
5 | 6 | This module provides: 7 | - encode_message: Pack physical signal values into a CAN frame payload 8 | according to the DBC data stored in the VNDB (SQLAlchemy session). 9 | - decode_message: Unpack a CAN frame payload back to physical signal values. 10 | - Optional convenience sender/receiver classes that use python-can if installed. 11 | 12 | Notes/assumptions: 13 | - Currently supports little-endian (Intel) signals (Signal.byteorder == 1). 14 | Big-endian (Motorola) signals raise NotImplementedError. 15 | - Signal.sign: In the current model, sign==1 is treated as unsigned, 0 as signed. 16 | - Physical<->raw conversion uses: phys = raw * factor + offset 17 | and raw = round((phys - offset) / factor). 18 | 19 | This module avoids a hard dependency on python-can. The Bus-related classes 20 | attempt to import python-can at runtime and fail gracefully with clear messages 21 | if it's not installed. 22 | """ 23 | from __future__ import annotations 24 | 25 | from dataclasses import dataclass 26 | from typing import Dict, Tuple, Any, Optional, List 27 | 28 | from sqlalchemy.orm import Session 29 | 30 | from pydbc.db.model import Message, Message_Signal, Signal 31 | 32 | 33 | @dataclass 34 | class CompiledSignal: 35 | name: str 36 | start_bit: int # bit offset in frame 37 | size: int # bits 38 | little_endian: bool 39 | signed: bool 40 | factor: float 41 | offset: float 42 | minimum: float 43 | maximum: float 44 | 45 | 46 | @dataclass 47 | class CompiledMessage: 48 | message_id: int 49 | dlc: int 50 | name: Optional[str] 51 | signals: List[CompiledSignal] 52 | 53 | 54 | def _compile_message(session: Session, message: Message) -> CompiledMessage: 55 | sigs: List[CompiledSignal] = [] 56 | for ms in message.message_signals: 57 | sig: Signal = ms.signal 58 | if sig.byteorder not in (0, 1): 59 | raise ValueError(f"Unsupported byteorder {sig.byteorder} for signal {sig.name}") 60 | little = sig.byteorder == 1 61 | signed = (sig.sign == 0) # treat 0 as signed, 1 as 
unsigned 62 | sigs.append( 63 | CompiledSignal( 64 | name=sig.name, 65 | start_bit=ms.offset, 66 | size=sig.bitsize, 67 | little_endian=little, 68 | signed=signed, 69 | factor=sig.formula_factor or 1.0, 70 | offset=sig.formula_offset or 0.0, 71 | minimum=sig.minimum if sig.minimum is not None else 0.0, 72 | maximum=sig.maximum if sig.maximum is not None else 0.0, 73 | ) 74 | ) 75 | return CompiledMessage( 76 | message_id=message.message_id or 0, 77 | dlc=message.dlc, 78 | name=message.name, 79 | signals=sigs, 80 | ) 81 | 82 | 83 | def get_message_by_name(session: Session, name: str) -> Message: 84 | msg = session.query(Message).filter_by(name=name).first() 85 | if not msg: 86 | raise KeyError(f"Message with name '{name}' not found") 87 | return msg 88 | 89 | 90 | def get_message_by_id(session: Session, can_id: int) -> Message: 91 | msg = session.query(Message).filter_by(message_id=can_id).first() 92 | if not msg: 93 | raise KeyError(f"Message with ID 0x{can_id:X} not found") 94 | return msg 95 | 96 | 97 | # --- Bit packing utilities (little-endian/Intel only) --- 98 | 99 | def _insert_bits_le(buf: bytearray, start_bit: int, size: int, value: int) -> None: 100 | """Insert 'size' bits of 'value' into buf at little-endian start_bit. 101 | Little-endian here means the start_bit 0 is the LSB of byte 0, increasing upwards. 
102 | """ 103 | # Mask value to size bits 104 | if size <= 0: 105 | return 106 | mask = (1 << size) - 1 107 | value &= mask 108 | 109 | bit_pos = start_bit 110 | remaining = size 111 | while remaining > 0: 112 | byte_index = bit_pos // 8 113 | bit_index = bit_pos % 8 114 | bits_in_this_byte = min(remaining, 8 - bit_index) 115 | # Extract the chunk to write at current position 116 | chunk_mask = (1 << bits_in_this_byte) - 1 117 | chunk = value & chunk_mask 118 | # Clear target bits 119 | buf[byte_index] &= ~(chunk_mask << bit_index) 120 | # Write 121 | buf[byte_index] |= (chunk << bit_index) 122 | # Advance 123 | value >>= bits_in_this_byte 124 | remaining -= bits_in_this_byte 125 | bit_pos += bits_in_this_byte 126 | 127 | 128 | def _extract_bits_le(buf: bytes, start_bit: int, size: int, signed: bool) -> int: 129 | if size <= 0: 130 | return 0 131 | bit_pos = start_bit 132 | remaining = size 133 | out = 0 134 | shift = 0 135 | while remaining > 0: 136 | byte_index = bit_pos // 8 137 | bit_index = bit_pos % 8 138 | bits_in_this_byte = min(remaining, 8 - bit_index) 139 | chunk_mask = (1 << bits_in_this_byte) - 1 140 | byte_val = buf[byte_index] 141 | chunk = (byte_val >> bit_index) & chunk_mask 142 | out |= (chunk << shift) 143 | remaining -= bits_in_this_byte 144 | bit_pos += bits_in_this_byte 145 | shift += bits_in_this_byte 146 | if signed: 147 | # sign-extend 148 | sign_bit = 1 << (size - 1) 149 | if out & sign_bit: 150 | out = out - (1 << size) 151 | return out 152 | 153 | 154 | def _phys_to_raw(value: float, factor: float, offset: float, size: int, signed: bool) -> int: 155 | if factor == 0: 156 | # Avoid division by zero; treat as identity around offset 157 | raw = int(round(value - offset)) 158 | else: 159 | raw = int(round((value - offset) / factor)) 160 | # Clamp to representable range 161 | if signed: 162 | min_raw = -(1 << (size - 1)) 163 | max_raw = (1 << (size - 1)) - 1 164 | else: 165 | min_raw = 0 166 | max_raw = (1 << size) - 1 167 | return 
max(min(raw, max_raw), min_raw) 168 | 169 | 170 | def _raw_to_phys(raw: int, factor: float, offset: float) -> float: 171 | return raw * factor + offset 172 | 173 | 174 | def encode_message(session: Session, message_name: str, signal_values: Dict[str, float]) -> Tuple[int, bytes, int]: 175 | """Encode a message by name into (can_id, data_bytes, dlc). 176 | 177 | Returns a tuple suitable for constructing a python-can Message: 178 | (arbitration_id, data, dlc) 179 | """ 180 | msg = get_message_by_name(session, message_name) 181 | cm = _compile_message(session, msg) 182 | data = bytearray([0] * cm.dlc) 183 | 184 | # Place each signal 185 | for sig in cm.signals: 186 | if not sig.little_endian: 187 | raise NotImplementedError( 188 | f"Signal '{sig.name}' is big-endian (Motorola); not supported yet." 189 | ) 190 | if sig.name not in signal_values: 191 | # Missing values default to 0 after scaling 192 | phys = 0.0 193 | else: 194 | phys = float(signal_values[sig.name]) 195 | raw = _phys_to_raw(phys, sig.factor, sig.offset, sig.size, sig.signed) 196 | _insert_bits_le(data, sig.start_bit, sig.size, raw) 197 | 198 | return cm.message_id, bytes(data), cm.dlc 199 | 200 | 201 | def decode_message(session: Session, can_id: int, data: bytes) -> Dict[str, Any]: 202 | """Decode a received CAN frame given arbitration id and data payload. 203 | 204 | Returns dict: {"message": name, "signals": {sig_name: physical_value, ...}} 205 | """ 206 | msg = get_message_by_id(session, can_id) 207 | cm = _compile_message(session, msg) 208 | result: Dict[str, float] = {} 209 | 210 | for sig in cm.signals: 211 | if not sig.little_endian: 212 | raise NotImplementedError( 213 | f"Signal '{sig.name}' is big-endian (Motorola); not supported yet." 
214 | ) 215 | raw = _extract_bits_le(data, sig.start_bit, sig.size, sig.signed) 216 | phys = _raw_to_phys(raw, sig.factor, sig.offset) 217 | result[sig.name] = phys 218 | 219 | return {"message": cm.name or f"0x{cm.message_id:X}", "signals": result} 220 | 221 | 222 | class PythonCanSender: 223 | """Simple sender using python-can Bus. 224 | 225 | Usage: 226 | sender = PythonCanSender(channel='vcan0', bustype='virtual') 227 | arb_id, data, dlc = encode_message(session, 'EngineData', {...}) 228 | sender.send(arb_id, data) 229 | """ 230 | 231 | def __init__(self, **bus_kwargs): 232 | try: 233 | import can # type: ignore 234 | except Exception as exc: 235 | raise RuntimeError( 236 | "python-can is required for PythonCanSender. Install with 'pip install python-can'." 237 | ) from exc 238 | self._can = can 239 | self._bus = can.Bus(**bus_kwargs) 240 | 241 | def send(self, arbitration_id: int, data: bytes, is_extended_id: bool = False) -> None: 242 | msg = self._can.Message( 243 | arbitration_id=arbitration_id, 244 | is_extended_id=is_extended_id, 245 | data=data, 246 | ) 247 | self._bus.send(msg) 248 | 249 | def shutdown(self) -> None: 250 | try: 251 | self._bus.shutdown() 252 | except Exception: 253 | pass 254 | 255 | 256 | class PythonCanReceiver: 257 | """Simple receiver that decodes messages with a pydbc session. 258 | 259 | Example: 260 | rx = PythonCanReceiver(session, channel='vcan0', bustype='virtual') 261 | frame = rx.recv(timeout=1.0) 262 | if frame: print(rx.decode_frame(frame)) 263 | """ 264 | 265 | def __init__(self, session: Session, **bus_kwargs): 266 | try: 267 | import can # type: ignore 268 | except Exception as exc: 269 | raise RuntimeError( 270 | "python-can is required for PythonCanReceiver. Install with 'pip install python-can'." 
271 | ) from exc 272 | self._can = can 273 | self._bus = can.Bus(**bus_kwargs) 274 | self._session = session 275 | 276 | def recv(self, timeout: Optional[float] = None): 277 | return self._bus.recv(timeout=timeout) 278 | 279 | def decode_frame(self, frame) -> Dict[str, Any]: 280 | return decode_message(self._session, frame.arbitration_id, bytes(frame.data)) 281 | 282 | def shutdown(self) -> None: 283 | try: 284 | self._bus.shutdown() 285 | except Exception: 286 | pass 287 | -------------------------------------------------------------------------------- /pydbc/ncf.g4: -------------------------------------------------------------------------------- 1 | /* 2 | pySART - Simplified AUTOSAR-Toolkit for Python. 3 | 4 | (C) 2010-2021 by Christoph Schueler 5 | 6 | All Rights Reserved 7 | 8 | This program is free software; you can redistribute it and/or modify 9 | it under the terms of the GNU General Public License as published by 10 | the Free Software Foundation; either version 2 of the License, or 11 | (at your option) any later version. 12 | 13 | This program is distributed in the hope that it will be useful, 14 | but WITHOUT ANY WARRANTY; without even the implied warranty of 15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 | GNU General Public License for more details. 17 | 18 | You should have received a copy of the GNU General Public License along 19 | with this program; if not, write to the Free Software Foundation, Inc., 20 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 21 | 22 | s. FLOSS-EXCEPTION.txt 23 | */ 24 | 25 | grammar ncf; 26 | 27 | /* 28 | ::= A name on the left of the ::= is expressed using the syntax on its right 29 | <> Used to mark objects specified later 30 | | The vertical bar indicates choice. 
Either the left-hand side or the right hand side of the vertical bar shall appear 31 | Bold The text in bold is reserved - either because it is a reserved word, or mandatory punctuation 32 | [ ] The text between the square brackets shall appear once or multiple times 33 | ( ) The text between the parenthesis are optional, i.e. shall appear once or zero times char_string 34 | char_string Any character string enclosed in quotes "like this" identifier 35 | identifier An identifier. Typically used to name objects. Identifiers shall follow the normal C rules for variable declaration 36 | integer An integer. Integers can be in decimal or hexadecimal (prefixed with 0x) format. 37 | real_or_integer A real or integer number. A real number is always in decimal and has an embedded decimal point. 38 | */ 39 | 40 | /* 41 | node_capability_file; 42 | LIN_language_version = "2.2" 43 | node step_motor { 44 | general { 45 | LIN_protocol_version = "2.2"; 46 | supplier = 0x0005; 47 | function = 0x0020; 48 | variant = 1; 49 | bitrate = automatic min 10 kbps max 20 kbps; 50 | sends_wake_up_signal = "yes"; 51 | } 52 | diagnostic { 53 | NAD = 1 to 3; 54 | diagnostic_class = 2; 55 | P2_min = 100 ms; ST_min = 40 ms; 56 | support_sid { 0xB0, 0xB2, 0xB7 }; 57 | } 58 | frames { 59 | publish node_status { 60 | length = 4; min_period = 10 ms; max_period = 100 ms; 61 | signals { 62 | state {size = 8; init_value = 0; offset = 0;} 63 | fault_state {size = 2; init_value = 0; offset = 9; fault_enc;} 64 | error_bit {size = 1; init_value = 0; offset = 8;} 65 | angle {size = 16; init_value = {0x22, 0x11}; offset = 16;} 66 | } 67 | } 68 | subscribe control { 69 | length = 1; max_period = 100 ms; 70 | signals { 71 | command {size = 8; init_value = 0; offset = 0; position;} 72 | } 73 | } 74 | } 75 | encoding { 76 | position {physical_value 0, 199, 1.8, 0, "deg";} 77 | fault_enc {logical_value, 0, "no result"; 78 | logical_value, 1, "failed"; 79 | logical_value, 2, "passed";} 80 | } 81 | status_management 
{ response_error = error_bit; 82 | fault_state_signals = fault_state; } 83 | free_text { "step_motor signal values outside 0 - 199 are ignored" } 84 | } 85 | */ 86 | 87 | toplevel: 88 | 'node_capability_file' ';' 89 | v = language_version 90 | (nodes += node_definition)* 91 | ; 92 | 93 | language_version: 94 | 'LIN_language_version' '=' s = stringValue ';' 95 | ; 96 | 97 | node_definition: 98 | 'node' name = node_name '{' 99 | g = general_definition 100 | d = diagnostic_definition 101 | f = frame_definition 102 | e = encoding_definition? // Not 2.0 103 | s = status_management 104 | t = free_text_definition? 105 | '}' 106 | ; 107 | 108 | node_name: 109 | i = identifierValue 110 | ; 111 | 112 | general_definition: 113 | 'general' '{' 114 | 'LIN_protocol_version' '=' pv = protocol_version ';' 115 | 'supplier' '=' sup = supplier_id ';' 116 | 'function' '=' fun = function_id ';' 117 | 'variant' '=' var = variant_id ';' 118 | 'bitrate' '=' br = bitrate_definition ';' 119 | ('sends_wake_up_signal' '=' tf = ('yes' | 'no') ';')? // Not 2.0 120 | ('volt_range' '=' vfrom = number ',' vto = number ';')? // optional in 2.0 121 | ('temp_range' '=' tfrom = number ',' tto = number ';')? // optional in 2.0 122 | ('conformance' '=' conf = stringValue ';')? // optional in 2.0 123 | '}' 124 | ; 125 | 126 | protocol_version: 127 | s = stringValue 128 | ; 129 | 130 | supplier_id: 131 | i = intValue 132 | ; 133 | 134 | function_id: 135 | i = intValue 136 | ; 137 | 138 | variant_id: 139 | i = intValue 140 | ; 141 | 142 | bitrate_definition: 143 | ('automatic' ('min' minBr = bitrate)? ('max' maxBr = bitrate)? ) 144 | | ('select' '{' rates += bitrate (',' rates += bitrate)* '}') 145 | | br = bitrate 146 | ; 147 | 148 | bitrate: 149 | n = number 'kbps' 150 | ; 151 | 152 | diagnostic_definition: 153 | 'diagnostic' '{' 154 | 'NAD' '=' lhs = intValue (('to' rhs = intValue) | (',' nads += intValue)*) ';' // Range (to) new in 2.2 155 | ('diagnostic_class' '=' dc = intValue ';')? 
// Required in 2.2 156 | ('P2_min' '=' p2Min = number 'ms' ';')? 157 | ('ST_min' '=' stMin = number 'ms' ';')? 158 | ('N_As_timeout' '=' nAs = number 'ms' ';')? // New in 2.2 159 | ('N_Cr_timeout' '=' nCr = number 'ms' ';')? // New in 2.2 160 | ('support_sid' '{' sids += intValue (',' sids += intValue)* '}' ';')? 161 | ('max_message_length' '=' mml = intValue ';')? 162 | '}' 163 | ; 164 | 165 | frame_definition: 166 | 'frames' '{' 167 | (frames += single_frame)* 168 | '}' 169 | ; 170 | 171 | single_frame: 172 | n = frame_kind frame_name '{' 173 | p = frame_properties 174 | s = signal_definition? 175 | '}' 176 | ; 177 | 178 | frame_kind: 179 | v = ('publish' | 'subscribe') 180 | ; 181 | 182 | frame_name: 183 | i = identifierValue 184 | ; 185 | 186 | frame_properties: 187 | // message_ID = intValue ';' // Required in 2.0 188 | 'length' '=' l = intValue ';' 189 | ('min_period' '=' minValue = intValue 'ms' ';')? 190 | ('max_period' '=' maxValue = intValue 'ms' ';')? 191 | ('event_triggered_frame' '=' etf = identifierValue)? 192 | ; 193 | 194 | signal_definition: 195 | 'signals' '{' 196 | (items += signal_definition_entry)* 197 | '}' 198 | ; 199 | 200 | signal_definition_entry: 201 | n = signal_name '{' p = signal_properties '}' 202 | ; 203 | 204 | signal_name: 205 | i = identifierValue 206 | ; 207 | 208 | signal_properties: 209 | init = init_value ';' 210 | 'size' '=' s = intValue ';' 211 | 'offset' '=' o = intValue ';' 212 | (e = encoding_name ';')? 
213 | ; 214 | 215 | 216 | init_value: 217 | s = init_value_scalar | a = init_value_array 218 | ; 219 | 220 | init_value_scalar: 221 | 'init_value' '=' i = intValue 222 | ; 223 | 224 | init_value_array: 225 | 'init_value' '=' '{' 226 | values += intValue (',' values += intValue )* 227 | '}' 228 | ; 229 | 230 | encoding_definition: 231 | 'encoding' '{' 232 | (items += encoding_definition_entry)* 233 | '}' 234 | ; 235 | 236 | encoding_definition_entry: 237 | name = encoding_name '{' (items += encoding_definition_value)* '}' 238 | ; 239 | 240 | encoding_definition_value: 241 | l = logical_value | p = physical_range | b = bcd_value | a = ascii_value 242 | ; 243 | 244 | encoding_name: 245 | i = identifierValue 246 | ; 247 | 248 | logical_value: 249 | 'logical_value' ',' s = signal_value (',' t = text_info)? ';' 250 | ; 251 | 252 | physical_range: 253 | 'physical_value' ',' minValue = min_value ',' maxValue = max_value ',' s = scale ',' o = offset (',' t = text_info)? ';' 254 | ; 255 | 256 | bcd_value: 257 | 'bcd_value' ';' 258 | ; 259 | 260 | ascii_value: 261 | 'ascii_value' ';' 262 | ; 263 | 264 | signal_value: 265 | n = intValue 266 | ; 267 | 268 | min_value: 269 | n = intValue 270 | ; 271 | 272 | max_value: 273 | n = intValue 274 | ; 275 | 276 | scale: 277 | n = number 278 | ; 279 | 280 | offset: 281 | n = number 282 | ; 283 | 284 | text_info: 285 | t = stringValue 286 | ; 287 | 288 | status_management: 289 | 'status_management' '{' 290 | 'response_error' '=' r = identifierValue ';' 291 | ('fault_state_signals' '=' values += identifierValue (',' values += identifierValue)* ';')? 
// New in 2.2 292 | '}' 293 | ; 294 | 295 | published_signal: 296 | s = identifierValue 297 | ; 298 | 299 | free_text_definition: 300 | 'free_text' '{' 301 | f = stringValue 302 | '}' 303 | ; 304 | 305 | 306 | 307 | intValue: 308 | i = INT 309 | | h = HEX 310 | ; 311 | 312 | floatValue: 313 | f = FLOAT 314 | ; 315 | 316 | number: 317 | i = intValue 318 | | f = floatValue 319 | ; 320 | 321 | stringValue: 322 | s = STRING 323 | ; 324 | 325 | identifierValue: 326 | i = C_IDENTIFIER 327 | ; 328 | 329 | 330 | C_IDENTIFIER: 331 | ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'0'..'9'|'_')* 332 | ; 333 | 334 | 335 | fragment 336 | EXPONENT: 337 | ('e'|'E') ('+'|'-')? ('0'..'9')+ 338 | ; 339 | 340 | FLOAT: 341 | SIGN? 342 | ( 343 | ('0'..'9')+ '.' ('0'..'9')* EXPONENT? 344 | | '.' ('0'..'9')+ EXPONENT? 345 | | ('0'..'9')+ EXPONENT 346 | ) 347 | ; 348 | 349 | 350 | INT: 351 | SIGN? '0'..'9'+ 352 | ; 353 | 354 | HEX: 355 | '0'('x' | 'X') HEX_DIGIT+ 356 | ; 357 | 358 | fragment 359 | HEX_DIGIT : ('0'..'9'|'a'..'f'|'A'..'F') ; 360 | 361 | fragment 362 | ESC_SEQ: 363 | '\\' ( 364 | 'b' 365 | | 't' 366 | | 'n' 367 | | 'f' 368 | | 'r' 369 | | '\u0022' 370 | | '\'' 371 | | '\\' 372 | ) 373 | ; 374 | 375 | WS: 376 | (' ' | '\t' | '\r' | '\n') -> skip 377 | ; 378 | 379 | COMMENT 380 | : ('//' ~('\n'|'\r')* '\r'? '\n' 381 | | '/*' .*? '*/') 382 | -> channel(HIDDEN) 383 | ; 384 | 385 | STRING: 386 | '"' ( ESC_SEQ | ~('\\'|'"') )* '"' 387 | ; 388 | 389 | SIGN: 390 | '+' 391 | | '-' 392 | ; 393 | 394 | 395 | -------------------------------------------------------------------------------- /pydbc/ncfListener.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __copyright__ = """ 5 | pySART - Simplified AUTOSAR-Toolkit for Python. 
6 | 7 | (C) 2010-2020 by Christoph Schueler 8 | 9 | All Rights Reserved 10 | 11 | This program is free software; you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation; either version 2 of the License, or 14 | (at your option) any later version. 15 | 16 | This program is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | 21 | You should have received a copy of the GNU General Public License along 22 | with this program; if not, write to the Free Software Foundation, Inc., 23 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24 | 25 | s. FLOSS-EXCEPTION.txt 26 | """ 27 | __author__ = 'Christoph Schueler' 28 | __version__ = '0.1.0' 29 | 30 | 31 | from sqlalchemy.sql.expression import literal, bindparam 32 | 33 | from pydbc.logger import Logger 34 | from pydbc import parser 35 | from pydbc.types import AttributeType, BusType, CategoryType, ValueType 36 | from pydbc.db.model import ( 37 | Dbc_Version, Message, Message_Signal, Network, Node, Signal, Value_Description, 38 | Valuetable, EnvironmentVariablesData, EnvVar, Attribute_Definition, Attribute_Value, 39 | Node_TxMessage, Node_RxSignal, Category_Definition, Category_Value, AttributeRel_Value, 40 | Signal_Group_Signal, Signal_Group, Node_TxSig, LinSignalEncodingType, LinSignalEncodingEntry_Value, 41 | LinSignalEncodingEntry_Logical, LinSignalEncodingEntry_Physical, LinSignalRepresentation, 42 | LinScheduleTable, LinScheduleTable_Command_Frame, LinScheduleTable_Command_MasterReq, 43 | LinScheduleTable_Command_SlaveResp, LinScheduleTable_Command_AssignNad, 44 | LinScheduleTable_Command_ConditionalChangeNad, LinScheduleTable_Command_DataDump, 45 | LinScheduleTable_Command_SaveConfiguration, LinScheduleTable_Command_AssignFrameIdRange, 
class NcfListener(parser.BaseListener):
    """ANTLR parse-tree listener that converts a LIN Node Capability File
    (NCF, LIN 2.x) parse tree into plain Python data structures.

    Every ``exit<Rule>`` callback stores its result on ``ctx.value`` so the
    enclosing rule can pick it up; the final result of a parse run is the
    dictionary stored in ``self.value`` by :meth:`exitToplevel`::

        {"version": <str>, "nodes": [<node dict>, ...]}
    """

    def __init__(self, database, logLevel = 'INFO', *args, **kws):
        super(NcfListener, self).__init__(database, logLevel, *args, **kws)
        self.logger = Logger(__name__, level = logLevel)
        #self.bake_queries()

    def exitToplevel(self, ctx):
        # Root rule: collect the language version and all node definitions.
        v = ctx.v.value
        nodes = [x.value for x in ctx.nodes]
        self.value = dict(version = v, nodes = nodes)

    def exitLanguage_version(self, ctx):
        s = ctx.s.value if ctx.s else None
        ctx.value = s

    def exitNode_definition(self, ctx):
        name = ctx.name.value
        g = ctx.g.value if ctx.g else None
        d = ctx.d.value if ctx.d else None
        # FIX: guard was `if ctx.d` (copy/paste error) -- frames were tied to
        # the presence of the diagnostic section instead of the frame section.
        f = ctx.f.value if ctx.f else None
        e = ctx.e.value if ctx.e else None  # encoding_definition is optional (not in LIN 2.0)
        s = ctx.s.value if ctx.s else None
        t = ctx.t.value if ctx.t else None  # free_text_definition is optional
        ctx.value = dict(name = name, general = g, diagnostic = d, frames = f, encodings = e, status = s, freeText = t)

    def exitNode_name(self, ctx):
        ctx.value = ctx.i.value

    def exitGeneral_definition(self, ctx):
        pv = ctx.pv.value
        sup = ctx.sup.value
        fun = ctx.fun.value
        var = ctx.var.value
        br = ctx.br.value
        # FIX: 'sends_wake_up_signal' is optional in the grammar (not in LIN
        # 2.0); dereferencing ctx.tf.text unconditionally raised
        # AttributeError for files that omit it. None now means "not given".
        tf = (ctx.tf.text == "yes") if ctx.tf else None
        vfrom = ctx.vfrom.value if ctx.vfrom else None
        vto = ctx.vto.value if ctx.vto else None
        tfrom = ctx.tfrom.value if ctx.tfrom else None
        tto = ctx.tto.value if ctx.tto else None
        conf = ctx.conf.value if ctx.conf else None
        ctx.value = dict(
            protocolVersion = pv, supplier = sup, function = fun, variant = var, bitrate = br, wakeupSignal = tf,
            voltageFrom = vfrom, voltageTo = vto, temperatureFrom = tfrom, temperatureTo = tto, conformance = conf,
        )

    def exitProtocol_version(self, ctx):
        ctx.value = ctx.s.value

    def exitSupplier_id(self, ctx):
        ctx.value = ctx.i.value

    def exitFunction_id(self, ctx):
        ctx.value = ctx.i.value

    def exitVariant_id(self, ctx):
        ctx.value = ctx.i.value

    def exitBitrate_definition(self, ctx):
        rates = br = minBr = maxBr = None
        if ctx.rates:
            rates = [x.value for x in ctx.rates]
        elif ctx.br:
            br = ctx.br.value
        else:
            # 'automatic' alternative: FIX -- min/max limits are both optional
            # in the grammar; previously a bare `ctx.minBr.value` crashed when
            # one (or both) was omitted.
            minBr = ctx.minBr.value if ctx.minBr else None
            maxBr = ctx.maxBr.value if ctx.maxBr else None
        ctx.value = dict(bitrate = br, minBr = minBr, maxBr = maxBr, rates = rates)

    def exitBitrate(self, ctx):
        ctx.value = ctx.n.value

    def exitDiagnostic_definition(self, ctx):
        lhs = ctx.lhs.value
        rhs = ctx.rhs.value if ctx.rhs else None            # NAD range upper bound ('to' form, new in 2.2)
        nads = [x.value for x in ctx.nads] if ctx.nads else []
        dc = ctx.dc.value if ctx.dc else None
        p2Min = ctx.p2Min.value if ctx.p2Min else None
        # FIX: guard was `if ctx.p2Min` (copy/paste error) -- ST_min was
        # dropped/crashed depending on whether P2_min happened to be present.
        stMin = ctx.stMin.value if ctx.stMin else None
        nAs = ctx.nAs.value if ctx.nAs else None
        nCr = ctx.nCr.value if ctx.nCr else None
        sids = [x.value for x in ctx.sids] if ctx.sids else []
        mml = ctx.mml.value if ctx.mml else None
        ctx.value = dict(
            maxMessageLength = mml, lhs = lhs, rhs = rhs, nads = nads, diagnosticClass = dc,
            p2Min = p2Min, stMin = stMin, nAs = nAs, nCr = nCr, supportedSids = sids
        )

    def exitFrame_definition(self, ctx):
        frames = [x.value for x in ctx.frames]
        ctx.value = frames

    def exitSingle_frame(self, ctx):
        n = ctx.n.value
        p = ctx.p.value if ctx.p else None
        s = ctx.s.value if ctx.s else None
        ctx.value = dict(name = n, properties = p, signal = s)

    def exitFrame_kind(self, ctx):
        # 'publish' or 'subscribe' (token text, not a sub-rule).
        text = ctx.v.text
        ctx.value = text

    def exitFrame_name(self, ctx):
        name = ctx.i.value
        ctx.value = name

    def exitFrame_properties(self, ctx):
        l = ctx.l.value
        minValue = ctx.minValue.value if ctx.minValue else None
        maxValue = ctx.maxValue.value if ctx.maxValue else None
        etf = ctx.etf.value if ctx.etf else None
        ctx.value = dict(length = l, minPeriod = minValue, maxPeriod = maxValue, eventTriggeredFrame = etf)

    def exitSignal_definition(self, ctx):
        ctx.value = [x.value for x in ctx.items]

    def exitSignal_definition_entry(self, ctx):
        n = ctx.n.value
        p = ctx.p.value if ctx.p else None
        ctx.value = dict(name = n, properties = p)

    def exitSignal_name(self, ctx):
        name = ctx.i.value
        ctx.value = name

    def exitSignal_properties(self, ctx):
        init = ctx.init.value
        s = ctx.s.value
        o = ctx.o.value if ctx.o else None
        e = ctx.e.value if ctx.e else None  # encoding name is optional
        ctx.value = dict(initValue = init, size = s, offset = o, encoding = e)

    def exitInit_value(self, ctx):
        # Exactly one of scalar/array is set; the other stays None.
        scalar = ctx.s.value if ctx.s else None
        array = ctx.a.value if ctx.a else None
        ctx.value = dict(scalar = scalar, array = array)

    def exitInit_value_scalar(self, ctx):
        ctx.value = ctx.i.value

    def exitInit_value_array(self, ctx):
        ctx.value = [x.value for x in ctx.values]

    def exitEncoding_definition(self, ctx):
        ctx.value = [x.value for x in ctx.items]

    def exitEncoding_definition_entry(self, ctx):
        name = ctx.name.value
        items = [x.value for x in ctx.items]
        ctx.value = dict(name = name, values = items)

    def exitEncoding_definition_value(self, ctx):
        # Exactly one alternative is present; initialize defensively so a
        # grammar change cannot leave `value`/`vtype` unbound (UnboundLocalError).
        value = vtype = None
        if ctx.l:
            value = ctx.l.value
            vtype = "logical"
        elif ctx.p:
            value = ctx.p.value
            vtype = "range"
        elif ctx.b:
            vtype = "bcd"
        elif ctx.a:
            vtype = "ascii"
        ctx.value = dict(value = value, valueType = vtype)

    def exitSignal_encoding_type_name(self, ctx):
        ctx.value = ctx.i.value

    def exitEncoding_name(self, ctx):
        ctx.value = ctx.i.value

    def exitLogical_value(self, ctx):
        s = ctx.s.value
        t = ctx.t.value if ctx.t else None  # text_info is optional
        ctx.value = dict(signalValue = s, text = t)

    def exitPhysical_range(self, ctx):
        #'physical_value' ',' minValue = min_value ',' maxValue = max_value ',' s = scale ',' o = offset (',' t = text_info)? ';'
        minValue = ctx.minValue.value
        maxValue = ctx.maxValue.value
        s = ctx.s.value
        o = ctx.o.value
        t = ctx.t.value if ctx.t else None
        ctx.value = dict(min = minValue, max = maxValue, scale = s, offset = o, text = t)

    def exitBcd_value(self, ctx):
        pass  # carries no payload; kind is recorded by exitEncoding_definition_value

    def exitAscii_value(self, ctx):
        pass  # carries no payload; kind is recorded by exitEncoding_definition_value

    def exitSignal_value(self, ctx):
        ctx.value = ctx.n.value

    def exitMin_value(self, ctx):
        ctx.value = ctx.n.value

    def exitMax_value(self, ctx):
        ctx.value = ctx.n.value

    def exitScale(self, ctx):
        ctx.value = ctx.n.value

    def exitOffset(self, ctx):
        ctx.value = ctx.n.value

    def exitText_info(self, ctx):
        ctx.value = ctx.t.value

    def exitStatus_management(self, ctx):
        r = ctx.r.value
        values = [x.value for x in ctx.values] if ctx.values else []  # fault_state_signals new in 2.2
        ctx.value = dict(responseError = r, faultStateSignals = values)

    def exitPublished_signal(self, ctx):
        pass  # rule currently unused by the data model

    def exitFree_text_definition(self, ctx):
        text = ctx.f.value
        ctx.value = text
# This class defines a complete generic visitor for a parse tree produced by ldfParser.

class ldfVisitor(ParseTreeVisitor):
    """Generic ANTLR visitor for LDF (LIN Description File) parse trees.

    Auto-generated from ``ldf.g4`` by ANTLR; every ``visit*`` method simply
    descends into the rule's children. Subclass and override individual
    methods to extract data for specific rules. Do not edit by hand --
    regenerate from the grammar instead.
    """

    # Visit a parse tree produced by ldfParser#lin_description_file.
    def visitLin_description_file(self, ctx:ldfParser.Lin_description_fileContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#lin_protocol_version_def.
    def visitLin_protocol_version_def(self, ctx:ldfParser.Lin_protocol_version_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#lin_language_version_def.
    def visitLin_language_version_def(self, ctx:ldfParser.Lin_language_version_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#lin_file_revision_def.
    def visitLin_file_revision_def(self, ctx:ldfParser.Lin_file_revision_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#lin_speed_def.
    def visitLin_speed_def(self, ctx:ldfParser.Lin_speed_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#channel_name_def.
    def visitChannel_name_def(self, ctx:ldfParser.Channel_name_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#node_def.
    def visitNode_def(self, ctx:ldfParser.Node_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#node_attributes_def.
    def visitNode_attributes_def(self, ctx:ldfParser.Node_attributes_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#node_attribute.
    def visitNode_attribute(self, ctx:ldfParser.Node_attributeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#attributes_def.
    def visitAttributes_def(self, ctx:ldfParser.Attributes_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#configurable_frames.
    def visitConfigurable_frames(self, ctx:ldfParser.Configurable_framesContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#configurable_frame.
    def visitConfigurable_frame(self, ctx:ldfParser.Configurable_frameContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#node_composition_def.
    def visitNode_composition_def(self, ctx:ldfParser.Node_composition_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#configuration.
    def visitConfiguration(self, ctx:ldfParser.ConfigurationContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#configuration_item.
    def visitConfiguration_item(self, ctx:ldfParser.Configuration_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_def.
    def visitSignal_def(self, ctx:ldfParser.Signal_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_item.
    def visitSignal_item(self, ctx:ldfParser.Signal_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#init_value.
    def visitInit_value(self, ctx:ldfParser.Init_valueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#init_value_scalar.
    def visitInit_value_scalar(self, ctx:ldfParser.Init_value_scalarContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#init_value_array.
    def visitInit_value_array(self, ctx:ldfParser.Init_value_arrayContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#diagnostic_signal_def.
    def visitDiagnostic_signal_def(self, ctx:ldfParser.Diagnostic_signal_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#diagnostic_item.
    def visitDiagnostic_item(self, ctx:ldfParser.Diagnostic_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_groups_def.
    def visitSignal_groups_def(self, ctx:ldfParser.Signal_groups_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_group.
    def visitSignal_group(self, ctx:ldfParser.Signal_groupContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_group_item.
    def visitSignal_group_item(self, ctx:ldfParser.Signal_group_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#frame_def.
    def visitFrame_def(self, ctx:ldfParser.Frame_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#frame_item.
    def visitFrame_item(self, ctx:ldfParser.Frame_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#frame_signal.
    def visitFrame_signal(self, ctx:ldfParser.Frame_signalContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#sporadic_frame_def.
    def visitSporadic_frame_def(self, ctx:ldfParser.Sporadic_frame_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#sporadic_frame_item.
    def visitSporadic_frame_item(self, ctx:ldfParser.Sporadic_frame_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#event_triggered_frame_def.
    def visitEvent_triggered_frame_def(self, ctx:ldfParser.Event_triggered_frame_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#event_triggered_frame_item.
    def visitEvent_triggered_frame_item(self, ctx:ldfParser.Event_triggered_frame_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#diag_frame_def.
    def visitDiag_frame_def(self, ctx:ldfParser.Diag_frame_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#diag_frame_item.
    def visitDiag_frame_item(self, ctx:ldfParser.Diag_frame_itemContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#schedule_table_def.
    def visitSchedule_table_def(self, ctx:ldfParser.Schedule_table_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#schedule_table_entry.
    def visitSchedule_table_entry(self, ctx:ldfParser.Schedule_table_entryContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#schedule_table_command.
    def visitSchedule_table_command(self, ctx:ldfParser.Schedule_table_commandContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#command.
    def visitCommand(self, ctx:ldfParser.CommandContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_encoding_type_def.
    def visitSignal_encoding_type_def(self, ctx:ldfParser.Signal_encoding_type_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_encoding_entry.
    def visitSignal_encoding_entry(self, ctx:ldfParser.Signal_encoding_entryContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_encoding_value.
    def visitSignal_encoding_value(self, ctx:ldfParser.Signal_encoding_valueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#logical_value.
    def visitLogical_value(self, ctx:ldfParser.Logical_valueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#physical_range.
    def visitPhysical_range(self, ctx:ldfParser.Physical_rangeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#bcd_value.
    def visitBcd_value(self, ctx:ldfParser.Bcd_valueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#ascii_value.
    def visitAscii_value(self, ctx:ldfParser.Ascii_valueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_representation_def.
    def visitSignal_representation_def(self, ctx:ldfParser.Signal_representation_defContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#signal_representation_entry.
    def visitSignal_representation_entry(self, ctx:ldfParser.Signal_representation_entryContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#intValue.
    def visitIntValue(self, ctx:ldfParser.IntValueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#floatValue.
    def visitFloatValue(self, ctx:ldfParser.FloatValueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#number.
    def visitNumber(self, ctx:ldfParser.NumberContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#stringValue.
    def visitStringValue(self, ctx:ldfParser.StringValueContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by ldfParser#identifierValue.
    def visitIdentifierValue(self, ctx:ldfParser.IdentifierValueContext):
        return self.visitChildren(ctx)



del ldfParser  # generated code: name was needed only for the annotations above