├── docs
├── authors.md
├── readme.md
├── changelog.md
├── _static
│ ├── .gitignore
│ └── urdf_examples.jpg
├── contributing.rst
├── license.rst
├── requirements.txt
├── index.md
├── Makefile
└── conf.py
├── AUTHORS.md
├── tests
├── conftest.py
├── test_viz.py
├── test_utils.py
├── test_urdf.py
└── models
│ └── franka
│ └── franka.urdf
├── pyproject.toml
├── .readthedocs.yml
├── .coveragerc
├── setup.py
├── .gitignore
├── LICENSE.txt
├── .github
└── workflows
│ └── python-publish.yml
├── src
└── yourdfpy
│ ├── __init__.py
│ ├── viz.py
│ └── urdf.py
├── tox.ini
├── setup.cfg
├── CHANGELOG.md
├── README.md
└── CONTRIBUTING.rst
/docs/authors.md:
--------------------------------------------------------------------------------
1 | ../AUTHORS.md
--------------------------------------------------------------------------------
/docs/readme.md:
--------------------------------------------------------------------------------
1 | ../README.md
--------------------------------------------------------------------------------
/docs/changelog.md:
--------------------------------------------------------------------------------
1 | ../CHANGELOG.md
--------------------------------------------------------------------------------
/docs/_static/.gitignore:
--------------------------------------------------------------------------------
1 | # Empty directory
2 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | # Contributors
2 |
3 | * [Clemens Eppner](https://github.com/clemense/)
4 |
--------------------------------------------------------------------------------
/docs/_static/urdf_examples.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clemense/yourdfpy/HEAD/docs/_static/urdf_examples.jpg
--------------------------------------------------------------------------------
/docs/license.rst:
--------------------------------------------------------------------------------
1 | .. _license:
2 |
3 | =======
4 | License
5 | =======
6 |
7 | .. include:: ../LICENSE.txt
8 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """
2 | Dummy conftest.py for yourdfpy.
3 |
4 | If you don't know what this is for, just leave it empty.
5 | Read more about conftest.py under:
6 | - https://docs.pytest.org/en/stable/fixture.html
7 | - https://docs.pytest.org/en/stable/writing_plugins.html
8 | """
9 |
10 | # import pytest
11 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | # AVOID CHANGING REQUIRES: IT WILL BE UPDATED BY PYSCAFFOLD!
3 | requires = ["setuptools>=46.1.0", "setuptools_scm[toml]>=5", "wheel"]
4 | build-backend = "setuptools.build_meta"
5 |
6 | [tool.setuptools_scm]
7 | # See configuration details in https://github.com/pypa/setuptools_scm
8 | version_scheme = "no-guess-dev"
9 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | # Requirements file for ReadTheDocs, check .readthedocs.yml.
2 | # To build the module reference correctly, make sure every external package
3 | # under `install_requires` in `setup.cfg` is also listed here!
4 | recommonmark
5 | sphinx>=3.2.1
6 | sphinx_rtd_theme
7 | sphinx-automodapi
8 | lxml
9 | trimesh[easy]>=3.11.2
10 | numpy
--------------------------------------------------------------------------------
/tests/test_viz.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from yourdfpy.viz import main
4 |
5 | __author__ = "Clemens Eppner"
6 | __copyright__ = "Clemens Eppner"
7 | __license__ = "MIT"
8 |
9 |
def test_main(capsys):
    """CLI smoke test placeholder.

    ``capsys`` is a pytest fixture that allows asserts against stdout/stderr:
    https://docs.pytest.org/en/stable/capture.html
    """
    # main(["-h"])
    # captured = capsys.readouterr()
    # assert "The 7-th Fibonacci number is 13" in captured.out
    assert True
18 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Build documentation in the docs/ directory with Sphinx
8 | sphinx:
9 | configuration: docs/conf.py
10 |
11 | # Build documentation with MkDocs
12 | #mkdocs:
13 | # configuration: mkdocs.yml
14 |
15 | # Optionally build your docs in additional formats such as PDF
16 | formats:
17 | - pdf
18 |
19 | python:
20 | version: 3.8
21 | install:
22 | - requirements: docs/requirements.txt
23 | - {path: ., method: pip}
24 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | # .coveragerc to control coverage.py
2 | [run]
3 | branch = True
4 | source = yourdfpy
5 | # omit = bad_file.py
6 |
7 | [paths]
8 | source =
9 | src/
10 | */site-packages/
11 |
12 | [report]
13 | # Regexes for lines to exclude from consideration
14 | exclude_lines =
15 | # Have to re-enable the standard pragma
16 | pragma: no cover
17 |
18 | # Don't complain about missing debug-only code:
19 | def __repr__
20 | if self\.debug
21 |
22 | # Don't complain if tests don't hit defensive assertion code:
23 | raise AssertionError
24 | raise NotImplementedError
25 |
26 | # Don't complain if non-runnable code isn't run:
27 | if 0:
28 | if __name__ == .__main__.:
29 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""
Setup file for yourdfpy.
Use setup.cfg to configure your project.

This file was generated with PyScaffold 4.1.
PyScaffold helps you to put up the scaffold of your new Python project.
Learn more under: https://pyscaffold.org/
"""
from setuptools import setup


def _run_setup():
    """Invoke setuptools with the setuptools_scm version configuration."""
    try:
        setup(use_scm_version={"version_scheme": "no-guess-dev"})
    except:  # noqa
        # Bare except is deliberate here: we only print a hint and re-raise,
        # so no exception is ever swallowed.
        print(
            "\n\nAn error occurred while building the project, "
            "please ensure you have the most updated version of setuptools, "
            "setuptools_scm and wheel with:\n"
            "   pip install -U setuptools setuptools_scm wheel\n\n"
        )
        raise


if __name__ == "__main__":
    _run_setup()
22 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Temporary and binary files
2 | *~
3 | *.py[cod]
4 | *.so
5 | *.cfg
6 | !.isort.cfg
7 | !setup.cfg
8 | *.orig
9 | *.log
10 | *.pot
11 | __pycache__/*
12 | .cache/*
13 | .*.swp
14 | */.ipynb_checkpoints/*
15 | .DS_Store
16 |
17 | # Project files
18 | .ropeproject
19 | .project
20 | .pydevproject
21 | .settings
22 | .idea
23 | .vscode
24 | tags
25 |
26 | # Package files
27 | *.egg
28 | *.eggs/
29 | .installed.cfg
30 | *.egg-info
31 |
32 | # Unittest and coverage
33 | htmlcov/*
34 | .coverage
35 | .coverage.*
36 | .tox
37 | junit*.xml
38 | coverage.xml
39 | .pytest_cache/
40 |
41 | # Build and docs folder/files
42 | build/*
43 | dist/*
44 | sdist/*
45 | docs/api/*
46 | docs/_rst/*
47 | docs/_build/*
48 | cover/*
49 | MANIFEST
50 |
51 | # Per-project virtualenvs
52 | .venv*/
53 | .conda*/
54 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # yourdfpy
2 |
3 | Yet anOther URDF parser for Python. Yup, it's another one. Deal with it.
4 |
5 | Yourdfpy is a simpler and easier-to-use library for loading, manipulating, validating, saving, and visualizing URDF files.
6 |
7 | ## Contents
8 |
9 | * [Overview](readme)
10 | * [License](license)
11 | * [Authors](authors)
12 | * [Changelog](changelog)
13 | * [API](api/index)
14 | * [Module Reference](api/modules)
15 |
16 | ## Indices and tables
17 |
18 | ```eval_rst
19 | * :ref:`genindex`
20 | * :ref:`modindex`
21 | * :ref:`search`
22 | ```
23 |
24 | [Sphinx]: http://www.sphinx-doc.org/
25 | [Markdown]: https://daringfireball.net/projects/markdown/
26 | [reStructuredText]: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
27 | [recommonmark]: https://recommonmark.readthedocs.io/en/latest
28 | [autostructify]: https://recommonmark.readthedocs.io/en/latest/auto_structify.html
29 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2021 Clemens Eppner
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 | AUTODOCDIR = api
11 |
12 | # User-friendly check for sphinx-build
13 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1)
14 | $(error "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://sphinx-doc.org/")
15 | endif
16 |
17 | .PHONY: help clean Makefile
18 |
19 | # Put it first so that "make" without argument is like "make help".
20 | help:
21 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
22 |
23 | clean:
24 | rm -rf $(BUILDDIR)/* $(AUTODOCDIR)
25 |
26 | # Catch-all target: route all unknown targets to Sphinx using the new
27 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
28 | %: Makefile
29 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
30 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
3 |
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 |
9 | name: Upload Python Package
10 |
11 | on:
12 | workflow_dispatch:
13 | release:
14 | types: [published]
15 |
16 | permissions:
17 | contents: read
18 |
19 | jobs:
20 | deploy:
21 |
22 | runs-on: ubuntu-latest
23 |
24 | steps:
25 | - uses: actions/checkout@v3
26 | - name: Set up Python
27 | uses: actions/setup-python@v3
28 | with:
29 | python-version: '3.x'
30 | - name: Install dependencies
31 | run: |
32 | python -m pip install --upgrade pip
33 | pip install setuptools setuptools_scm wheel
34 | pip install pytest pytest-cov # coveralls
35 | # pip install build
36 | # - name: Build package
37 | # run: python -m build
38 | - name: Build a binary wheel and a source tarball
39 | run: python setup.py sdist bdist_wheel
40 | - name: Publish package
41 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
42 | with:
43 | skip_existing: true
44 | user: __token__
45 | password: ${{ secrets.PYPI_API_TOKEN }}
46 |
--------------------------------------------------------------------------------
/src/yourdfpy/__init__.py:
--------------------------------------------------------------------------------
import sys

if sys.version_info[:2] >= (3, 8):
    # TODO: Import directly (no need for conditional) when `python_requires = >= 3.8`
    from importlib.metadata import PackageNotFoundError, version  # pragma: no cover
else:
    # Backport package for Python < 3.8.
    from importlib_metadata import PackageNotFoundError, version  # pragma: no cover

try:
    # Change here if project is renamed and does not equal the package name
    __version__ = version(__name__)
except PackageNotFoundError:  # pragma: no cover
    # Package is not installed (e.g. running from a source checkout).
    __version__ = "unknown"
finally:
    # Keep the module namespace clean.
    del version, PackageNotFoundError
17 |
18 | from .urdf import (
19 | Actuator,
20 | Box,
21 | Calibration,
22 | Collision,
23 | Color,
24 | Cylinder,
25 | Dynamics,
26 | Geometry,
27 | Inertial,
28 | Joint,
29 | Link,
30 | Limit,
31 | Material,
32 | Mesh,
33 | Mimic,
34 | Robot,
35 | SafetyController,
36 | Sphere,
37 | Texture,
38 | Transmission,
39 | TransmissionJoint,
40 | URDF,
41 | Visual,
42 | URDFError,
43 | URDFIncompleteError,
44 | URDFBrokenRefError,
45 | URDFSaveValidationError,
46 | URDFMalformedError,
47 | URDFUnsupportedError,
48 | filename_handler_null,
49 | filename_handler_ignore_directive,
50 | filename_handler_ignore_directive_package,
51 | filename_handler_add_prefix,
52 | filename_handler_absolute2relative,
53 | filename_handler_relative,
54 | filename_handler_relative_to_urdf_file,
55 | filename_handler_relative_to_urdf_file_recursive,
56 | filename_handler_meta,
57 | filename_handler_magic,
58 | )
59 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | # Tox configuration file
2 | # Read more under https://tox.readthedocs.io/
3 | # THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS!
4 |
5 | [tox]
6 | minversion = 3.15
7 | envlist = default
8 | isolated_build = True
9 |
10 |
11 | [testenv]
12 | description = Invoke pytest to run automated tests
13 | setenv =
14 | TOXINIDIR = {toxinidir}
15 | passenv =
16 | HOME
17 | extras =
18 | testing
19 | commands =
20 | pytest {posargs}
21 |
22 |
23 | [testenv:{build,clean}]
24 | description =
25 | build: Build the package in isolation according to PEP517, see https://github.com/pypa/build
26 | clean: Remove old distribution files and temporary build artifacts (./build and ./dist)
27 | # NOTE: build is still experimental, please refer to the links for updates/issues
28 | # https://setuptools.readthedocs.io/en/stable/build_meta.html#how-to-use-it
29 | # https://github.com/pypa/pep517/issues/91
30 | skip_install = True
31 | changedir = {toxinidir}
32 | deps =
33 | build: build[virtualenv]
34 | commands =
35 | clean: python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)'
36 | build: python -m build .
37 | # By default `build` produces wheels, you can also explicitly use the flags `--sdist` and `--wheel`
38 |
39 |
40 | [testenv:{docs,doctests,linkcheck}]
41 | description =
42 | docs: Invoke sphinx-build to build the docs
43 | doctests: Invoke sphinx-build to run doctests
44 | linkcheck: Check for broken links in the documentation
45 | setenv =
46 | DOCSDIR = {toxinidir}/docs
47 | BUILDDIR = {toxinidir}/docs/_build
48 | docs: BUILD = html
49 | doctests: BUILD = doctest
50 | linkcheck: BUILD = linkcheck
51 | deps =
52 | -r {toxinidir}/docs/requirements.txt
53 | # ^ requirements.txt shared with Read The Docs
54 | commands =
55 | sphinx-build --color -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs}
56 |
57 |
58 | [testenv:publish]
59 | description =
60 | Publish the package you have been developing to a package index server.
61 | By default, it uses testpypi. If you really want to publish your package
62 | to be publicly accessible in PyPI, use the `-- --repository pypi` option.
63 | skip_install = True
64 | changedir = {toxinidir}
65 | passenv =
66 | TWINE_USERNAME
67 | TWINE_PASSWORD
68 | TWINE_REPOSITORY
69 | deps = twine
70 | commands =
71 | python -m twine check dist/*
72 | python -m twine upload {posargs:--repository testpypi} dist/*
73 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from functools import partial
3 |
4 | from yourdfpy import urdf
5 |
6 | __author__ = "Clemens Eppner"
7 | __copyright__ = "Clemens Eppner"
8 | __license__ = "MIT"
9 |
10 |
def test_filename_handler_absolute2relative():
    """A path below ``dir`` becomes relative; one outside stays absolute."""
    assert (
        urdf.filename_handler_absolute2relative(fname="/a/b/c/d.urdf", dir="/a/b/")
        == "c/d.urdf"
    )
    assert (
        urdf.filename_handler_absolute2relative(fname="/a/b/c/d.urdf", dir="/c/d/")
        == "/a/b/c/d.urdf"
    )
23 |
24 |
def test_filename_handler_add_prefix():
    """The handler prepends the given prefix verbatim."""
    fname = "a/b/c/hoho.urdf"
    assert (
        urdf.filename_handler_add_prefix(fname=fname, prefix="package://")
        == "package://" + fname
    )
31 |
32 |
def test_filename_handler_ignore_directive():
    """Directives such as ``package://`` and ``file://`` are stripped."""
    cases = [
        ("/a/b/c/d.urdf", "/a/b/c/d.urdf"),
        ("package://a/b/c/d.urdf", "a/b/c/d.urdf"),
        ("file://a/b/c/d.urdf", "a/b/c/d.urdf"),
        ("file:///a/b/c/d.urdf", "/a/b/c/d.urdf"),
        ("file:\\\\a\\b\\c\\d.urdf", "a\\b\\c\\d.urdf"),
    ]
    for fname, expected in cases:
        assert urdf.filename_handler_ignore_directive(fname=fname) == expected
48 |
49 |
def test_filename_handler_ignore_directive_package():
    """Like ignore_directive, but ``package://`` also drops the package name."""
    cases = [
        ("/a/b/c/d.urdf", "/a/b/c/d.urdf"),
        ("package://a/b/c/d.urdf", "b/c/d.urdf"),
        ("file://a/b/c/d.urdf", "a/b/c/d.urdf"),
        ("file:///a/b/c/d.urdf", "/a/b/c/d.urdf"),
        ("file:\\\\a\\b\\c\\d.urdf", "a\\b\\c\\d.urdf"),
    ]
    for fname, expected in cases:
        assert urdf.filename_handler_ignore_directive_package(fname=fname) == expected
71 |
72 |
def test_filename_handler_magic():
    """An existing absolute path under ``dir`` is returned unchanged."""
    expected = "/a/b/c/d/e.urdf"
    assert urdf.filename_handler_magic(fname=expected, dir="/a/") == expected
76 |
77 |
def test_filename_handler_meta():
    """The meta handler chains the listed handlers over the filename."""
    handlers = [
        urdf.filename_handler_ignore_directive,
        partial(urdf.filename_handler_absolute2relative, dir="/a/"),
    ]
    result = urdf.filename_handler_meta(
        fname="/a/b/c/d/e.urdf", filename_handlers=handlers
    )
    assert result == "/a/b/c/d/e.urdf"
87 |
88 |
def test_filename_handler_null():
    """The null handler is the identity on filenames."""
    fname = "a/b/c/d/e.urdf"
    assert urdf.filename_handler_null(fname=fname) == fname
92 |
93 |
def test_filename_handler_relative():
    """A relative filename is joined onto ``dir``."""
    assert (
        urdf.filename_handler_relative(fname="d/e.urdf", dir="/a/b/c")
        == "/a/b/c/d/e.urdf"
    )
97 |
98 |
def test_filename_handler_relative_to_urdf_file():
    """The filename is resolved relative to the URDF file's directory."""
    assert (
        urdf.filename_handler_relative_to_urdf_file(
            fname="b/c/d.urdf", urdf_fname="/a/b.urdf"
        )
        == "/a/b/c/d.urdf"
    )
104 |
105 |
def test_filename_handler_relative_to_urdf_file_recursive():
    """With ``level=1`` the resolution starts one directory further up."""
    assert (
        urdf.filename_handler_relative_to_urdf_file_recursive(
            fname="b/c/d.urdf", urdf_fname="/a/b.urdf", level=1
        )
        == "/b/c/d.urdf"
    )
111 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | # This file is used to configure your project.
2 | # Read more about the various options under:
3 | # https://setuptools.readthedocs.io/en/stable/userguide/declarative_config.html
4 |
5 | [metadata]
6 | name = yourdfpy
7 | description = A simpler and easier-to-use library for loading, manipulating, saving, and visualizing URDF files.
8 | author = Clemens Eppner
9 | author_email = clemens.eppner@gmail.com
10 | license = MIT
11 | long_description = file: README.md
12 | long_description_content_type = text/markdown; charset=UTF-8; variant=GFM
13 | url = https://github.com/clemense/yourdfpy/
14 | # Add here related links, for example:
15 | project_urls =
16 | # Documentation = https://pyscaffold.org/
17 | Source = https://github.com/clemense/yourdfpy/
18 | # Changelog = https://pyscaffold.org/en/latest/changelog.html
19 | # Tracker = https://github.com/pyscaffold/pyscaffold/issues
20 | # Conda-Forge = https://anaconda.org/conda-forge/pyscaffold
21 | # Download = https://pypi.org/project/PyScaffold/#files
22 |
23 | # Change if running only on Windows, Mac or Linux (comma-separated)
24 | platforms = any
25 |
26 | # Add here all kinds of additional classifiers as defined under
27 | # https://pypi.org/classifiers/
28 | classifiers =
29 | Development Status :: 4 - Beta
30 | Programming Language :: Python
31 | Programming Language :: Python :: 3
32 | Programming Language :: Python :: 3.7
33 | Programming Language :: Python :: 3.8
34 | Intended Audience :: Developers
35 | License :: OSI Approved :: MIT License
36 | Operating System :: POSIX :: Linux
37 | Operating System :: Unix
38 | Operating System :: MacOS
39 | Operating System :: Microsoft :: Windows
40 |
41 |
42 | [options]
43 | zip_safe = False
44 | packages = find_namespace:
45 | include_package_data = True
46 | package_dir =
47 | =src
48 |
49 | # Require a min/specific Python version (comma-separated conditions)
50 | python_requires = >=3.7
51 |
52 | # Add here dependencies of your project (line-separated), e.g. requests>=2.2,<3.0.
53 | # Version specifiers like >=2.2,<3.0 avoid problems due to API changes in
54 | # new major versions. This works if the required packages follow Semantic Versioning.
55 | # For more information, check out https://semver.org/.
56 | install_requires =
57 | importlib-metadata; python_version<"3.8"
58 | lxml
59 | trimesh[easy]>=3.11.2
60 | numpy
61 | six
62 |
63 |
64 | [options.packages.find]
65 | where = src
66 | exclude =
67 | tests
68 |
69 | [options.extras_require]
70 | # Add here additional requirements for extra features, to install with:
71 | # `pip install yourdfpy[PDF]` like:
72 | # PDF = ReportLab; RXP
73 | full = pyglet<2
74 |
75 | # Add here test requirements (semicolon/line-separated)
76 | testing =
77 | setuptools
78 | pytest
79 | pytest-cov
80 |
81 | [options.entry_points]
82 | # Add here console scripts like:
83 | console_scripts =
84 | yourdfpy = yourdfpy.viz:run
85 | # And any other entry points, for example:
86 | # pyscaffold.cli =
87 | # awesome = pyscaffoldext.awesome.extension:AwesomeExtension
88 |
89 | [tool:pytest]
90 | # Specify command line options as you would do when invoking pytest directly.
91 | # e.g. --cov-report html (or xml) for html/xml output or --junitxml junit.xml
92 | # in order to write a coverage file that can be read by Jenkins.
93 | # CAUTION: --cov flags may prohibit setting breakpoints while debugging.
94 | # Comment those flags to avoid this pytest issue.
95 | addopts =
96 | --cov yourdfpy --cov-report term-missing
97 | --verbose
98 | norecursedirs =
99 | dist
100 | build
101 | .tox
102 | testpaths = tests
103 | # Use pytest markers to select/deselect specific tests
104 | # markers =
105 | # slow: mark tests as slow (deselect with '-m "not slow"')
106 | # system: mark end-to-end system tests
107 |
108 | [devpi:upload]
109 | # Options for the devpi: PyPI server and packaging tool
110 | # VCS export must be deactivated since we are using setuptools-scm
111 | no_vcs = 1
112 | formats = bdist_wheel
113 |
114 | [flake8]
115 | # Some sane defaults for the code style checker flake8
116 | max_line_length = 88
117 | extend_ignore = E203, W503
118 | # ^ Black-compatible
119 | # E203 and W503 have edge cases handled by black
120 | exclude =
121 | .tox
122 | build
123 | dist
124 | .eggs
125 | docs/conf.py
126 |
127 | [pyscaffold]
128 | # PyScaffold's parameters when the project was created.
129 | # This will be used when updating. Do not change!
130 | version = 4.1
131 | package = yourdfpy
132 | extensions =
133 | markdown
134 |
--------------------------------------------------------------------------------
/tests/test_urdf.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import os
3 | import io
4 |
5 | from yourdfpy import urdf
6 |
7 | __author__ = "Clemens Eppner"
8 | __copyright__ = "Clemens Eppner"
9 | __license__ = "MIT"
10 |
11 |
# Directory of this test file and the bundled URDF models next to it.
_THIS_FILE = os.path.abspath(os.path.expanduser(__file__))
DIR_CURRENT = os.path.dirname(_THIS_FILE)
DIR_MODELS = os.path.join(DIR_CURRENT, "models")
14 |
15 |
def _create_robot():
    """Return a fresh, empty Robot used as a fixture by the tests below."""
    return urdf.Robot(name="test_robot")
19 |
20 |
def test_robot():
    """A newly created robot keeps the name it was given."""
    robot = _create_robot()
    assert robot.name == "test_robot"
24 |
25 |
def test_validate():
    """An empty robot wrapped in a URDF model validates successfully."""
    model = urdf.URDF(robot=_create_robot())
    assert model.validate()
30 |
31 |
def test_mimic_joint():
    """Loading a model with mimic joints (Franka) must not raise."""
    urdf_fname = os.path.join(DIR_MODELS, "franka", "franka.urdf")
    urdf_model = urdf.URDF.load(urdf_fname)

    # Was a vacuous `assert True` with an unused variable; assert on the
    # loaded model instead so the test fails if loading returns nothing.
    assert urdf_model is not None
37 |
38 |
def test_equality():
    """Two independent loads of the same URDF file compare equal."""
    urdf_fname = os.path.join(DIR_MODELS, "franka", "franka.urdf")
    assert urdf.URDF.load(urdf_fname) == urdf.URDF.load(urdf_fname)
46 |
47 |
def test_equality_different_link_order():
    """Equality is invariant to the order in which links were added."""
    robot_a = _create_robot()
    for name in ("link_0", "link_1"):
        robot_a.links.append(urdf.Link(name=name))

    robot_b = _create_robot()
    for name in ("link_1", "link_0"):
        robot_b.links.append(urdf.Link(name=name))

    assert robot_a == robot_b
58 |
59 |
def test_single_link_urdf():
    """A URDF with a single link yields exactly one scene-graph edge."""
    # NOTE(review): the original XML literal was lost during file extraction
    # (the string was empty); reconstructed a minimal single-link model from
    # the assertion below — confirm against upstream history.
    urdf_str = """<robot name="test_robot">
        <link name="link_0"/>
    </robot>"""
    with io.StringIO(urdf_str) as f:
        urdf_model = urdf.URDF.load(f)

    assert len(urdf_model.scene.graph.to_edgelist()) == 1
76 |
77 |
def test_material_color():
    """An inline material on a visual keeps its name and RGBA color."""
    # NOTE(review): the original XML literal was lost during file extraction
    # (the string was empty); reconstructed from the assertions below —
    # confirm against upstream history.
    urdf_str = """<robot name="test_robot">
        <link name="link_0">
            <visual>
                <geometry>
                    <box size="1 2 3"/>
                </geometry>
                <material name="red_material">
                    <color rgba="1 0 0 1"/>
                </material>
            </visual>
        </link>
    </robot>"""
    with io.StringIO(urdf_str) as f:
        urdf_model = urdf.URDF.load(f)

    assert urdf_model.robot.links[0].visuals[0].material.name == "red_material"
    assert all(
        urdf_model.robot.links[0].visuals[0].material.color.rgba == [1, 0, 0, 1]
    )
100 |
101 |
def test_material_mapping():
    """A robot-level material referenced by name resolves via _material_map."""
    # NOTE(review): the original XML literal was lost during file extraction
    # (the string was empty); reconstructed from the assertions below —
    # confirm against upstream history.
    urdf_str = """<robot name="test_robot">
        <material name="red_material">
            <color rgba="1 0 0 1"/>
        </material>
        <link name="link_0">
            <visual>
                <geometry>
                    <box size="1 2 3"/>
                </geometry>
                <material name="red_material"/>
            </visual>
        </link>
    </robot>"""
    with io.StringIO(urdf_str) as f:
        urdf_model = urdf.URDF.load(f)

    assert urdf_model.robot.links[0].visuals[0].material.name == "red_material"
    assert all(urdf_model._material_map["red_material"].color.rgba == [1, 0, 0, 1])
123 |
124 |
def test_geometric_primitives():
    """Sphere, box, and cylinder parameters survive a parse round-trip."""
    # NOTE(review): the original XML literal was lost during file extraction
    # (the string was empty); reconstructed from the assertions below —
    # confirm against upstream history.
    urdf_str = """<robot name="test_robot">
        <link name="link_0">
            <visual>
                <geometry>
                    <sphere radius="11"/>
                </geometry>
            </visual>
            <visual>
                <geometry>
                    <box size="1 2 3"/>
                </geometry>
            </visual>
            <visual>
                <geometry>
                    <cylinder radius="11" length="4"/>
                </geometry>
            </visual>
        </link>
    </robot>"""
    with io.StringIO(urdf_str) as f:
        urdf_model = urdf.URDF.load(f)

    assert urdf_model.link_map["link_0"].visuals[0].geometry.sphere.radius == 11
    assert all(
        urdf_model.link_map["link_0"].visuals[1].geometry.box.size == [1, 2, 3]
    )
    assert urdf_model.link_map["link_0"].visuals[2].geometry.cylinder.radius == 11
    assert urdf_model.link_map["link_0"].visuals[2].geometry.cylinder.length == 4
159 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | ## Version 0.0.58
3 | - Fix typos
4 |
5 | ## Version 0.0.57
6 | - Add parameter EQUALITY_TOLERANCE for comparing different URDFs
7 |
8 | ## Version 0.0.56
9 | - Replace travis by Github Actions
10 |
11 | ## Version 0.0.55
12 | - Add `pyglet<2` install dependency to `[full]` option (https://github.com/clemense/yourdfpy/issues/47)
13 | - Enable 3D scaling (https://github.com/clemense/yourdfpy/issues/48, https://github.com/clemense/yourdfpy/issues/41)
14 | - Fix geometry naming bug
15 | - Allow comma as a separator handling for box and mesh scales
16 |
17 | ## Version 0.0.54
18 | - Cast down NumPy array to float by @stephane-caron in #50
19 |
20 | ## Version 0.0.53
21 | - Fix NumPy float issue #45: https://github.com/clemense/yourdfpy/issues/45
22 |
23 | ## Version 0.0.52
24 | - `parse_mass()` returns `float` instead of `str`
25 | - default parsed mass is 0.0 instead of 1.0 (see http://wiki.ros.org/urdf/XML/link)
26 | - Update `trimesh` dependency to `trimesh[easy]` (to support loading Collada meshes)
27 | - Won't crash when loading model with joints that mimic unactuated/fixed joints.
28 |
29 | ## Version 0.0.51
30 | - Fix path separator issues in Windows [Issue 27](https://github.com/clemense/yourdfpy/issues/27)
31 |
32 | ## Version 0.0.50
33 | - A nothingburger
34 |
35 | ## Version 0.0.49
36 | - Fix single link URDF trimesh scene bug [PR26](https://github.com/clemense/yourdfpy/pull/26)
37 | ## Version 0.0.48
38 | - Implement `-c`/`--configuration` argument for `yourdfpy`
39 | - Add `--animate` flag to `yourdfpy`
40 |
41 | ## Version 0.0.47
42 | - Bugfix: Parsing box dimensions
43 | - Change to `trimesh.primitives.*` for geometric primitives (instead of `trimesh.creation.*`)
44 |
45 | ## Version 0.0.46
46 | - Bugfix: Named material with color wouldn't be applied
47 |
48 | ## Version 0.0.45
49 | - Upgrade to trimesh version 3.11.2
50 | - Add `__eq__` operator to URDF based on equality of individual elements (order-invariant) [PR18](https://github.com/clemense/yourdfpy/pull/18)
51 | - Add material information [PR15](https://github.com/clemense/yourdfpy/pull/15)
52 | - Improve mesh filename search [PR14](https://github.com/clemense/yourdfpy/pull/14)
53 |
54 | ## Version 0.0.44
55 | - Parse and write `name` attribute of `material` element
56 | - Apply colors to mesh if available
57 | - Handle empty scene exception (in case of URDF without meshes)
58 |
59 | ## Version 0.0.43
60 | - Skip material loading for collision geometries
61 |
62 | ## Version 0.0.42
63 | - Fix bug when updating robots with mimic joints
64 |
65 | ## Version 0.0.41
66 | - yourdfpy.Sphere visible
67 |
68 | ## Version 0.0.40
69 | - Add 'file_path', 'file_name', and 'file_element' to trimesh geometry's metadata in case loaded OBJ file contains multiple parts
70 | - Load collision geometry for viz
71 |
72 | ## Version 0.0.39
73 | - Fix mimic joint issue
74 |
75 | ## Version 0.0.38
76 | - Change namespace of filename handlers
77 |
78 | ## Version 0.0.37
79 | - Use visual/collision name property as geometry name for scene graph
80 | - Write visual/collision name property to URDF
81 |
82 | ## Version 0.0.36
83 | - Fix validation of JointLimit
84 | - Add `Dynamics` to `__init__.py`
85 |
86 | ## Version 0.0.35
87 | - Add `force_single_geometry_per_link` feature: similar to `collision_mesh` in urdfpy; will concatenate all meshes in a single link and only create one node in the scene graph. This is the new default for loading the collision scene.
88 |
89 | ## Version 0.0.34
90 | - Fix missing `Collision` exposure in `__init__.py`
91 | - Add `force_collision_mesh`
92 |
93 | ## Version 0.0.33
94 | - Add `force_mesh` to constructor; allows loading mesh files as single meshes instead of turning them into graphs (since trimesh can't deal with meshes with multiple textures)
95 |
96 | ## Version 0.0.32
97 | - Fix `continuous` joint during forward kinematics
98 | - Introduce DOF indices for actuated joints (to handle planar and floating types)
99 |
100 | ## Version 0.0.31
101 | - Add `num_dofs`
102 | - Add `zero_cfg` property
103 | - Change function `get_default_cfg` to property `center_cfg`
104 | - Initial configuration is now the zero configuration not the center configuration (as previously)
105 |
106 | ## Version 0.0.30
107 | ## Version 0.0.29
108 | ## Version 0.0.28
109 | ## Version 0.0.27
110 | - Bugfix in travis deployment pipeline
111 |
112 | ## Version 0.0.26
113 | - Bugfix: rename `generate_scene_graph` parameter
114 | - Bugfix of bugfix of previous version, which introduced a new bug
115 | - Bugfix: root URDF result of `split_along_joints` (scene was not in sync with model)
116 | - Add params to `split_along_joints`
117 | - Bugfix: `parse_inertia` resulted in wrong matrix dtype
118 |
119 | ## Version 0.0.25
120 | - Bugfix: `get_default_cfg` returns flattened array
121 |
122 | ## Version 0.0.24
123 | - Added pytests
124 | - Separate visual and collision scene
125 | - Rename constructor's parameter `create_scene_graph` to `build_scene_graph`
126 | - Added ROS validation rules
127 | - Rename `update_trimesh_scene` to `update_cfg`, change arguments
128 | - Add `get_transform` function
129 | - Rename `get_default_configuration` to `get_default_cfg`
130 | - Proper handling of `mimic` joints
131 | - New members for `actuated_joints`
132 | - New `base_link` property
133 |
134 | ## Version 0.0.23
135 | - The Great YOURDFPY Steering Committee (G.Y.S.C.) decides to jump as many version numbers ahead as needed to pass urdfpy
136 |
137 | ## Version 0.0.14
138 | - The Great YOURDFPY Steering Committee (G.Y.S.C.) gives up on using only version numbers that are prime
139 |
140 | ## Version 0.0.13
141 | - Adding images. For the Github crowd.
142 |
143 | ## Version 0.0.11
144 | - These numbers are going up quickly.
145 |
146 | ## Version 0.0.7
147 | - Wow. This was quite the evening.
148 |
149 | ## Version 0.0.5
150 | - The Great YOURDFPY Steering Committee (G.Y.S.C.) decides to only use version numbers that are prime
151 |
152 | ## Version 0.0.3
153 | - A version few remember and many ignored
154 |
155 | ## Version 0.0.1
156 | - A version nobody remembers
157 |
--------------------------------------------------------------------------------
/src/yourdfpy/viz.py:
--------------------------------------------------------------------------------
1 | """
2 | Script for visualizing a robot from a URDF.
3 | """
4 |
5 | import sys
6 | import time
7 | import logging
8 | import argparse
9 | import numpy as np
10 | from functools import partial
11 |
12 | from yourdfpy import __version__
13 | from yourdfpy import URDF
14 |
15 | __author__ = "Clemens Eppner"
16 | __copyright__ = "Clemens Eppner"
17 | __license__ = "MIT"
18 |
19 | _logger = logging.getLogger(__name__)
20 |
21 |
def parse_args(args):
    """Parse command line parameters.

    Args:
        args (List[str]): command line parameters as list of strings
            (for example ``["--help"]``).

    Returns:
        :obj:`argparse.Namespace`: command line parameters namespace
    """
    parser = argparse.ArgumentParser(description="Visualize a URDF model.")

    # (flags, options) specs for every CLI argument; registered in order below.
    argument_specs = [
        (
            ("--version",),
            dict(action="version", version="yourdfpy {ver}".format(ver=__version__)),
        ),
        (("input",), dict(help="URDF file name.")),
        (
            ("-c", "--configuration"),
            dict(
                nargs="+",
                type=float,
                help="Configuration of the visualized URDF model.",
            ),
        ),
        (
            ("--collision",),
            dict(
                action="store_true",
                help="Use collision geometry for the visualized URDF model.",
            ),
        ),
        (
            ("--animate",),
            dict(
                action="store_true",
                help="Animate model by interpolating through all actuated joint limits.",
            ),
        ),
        # Two verbosity switches mapping onto standard logging levels.
        (
            ("-v", "--verbose"),
            dict(
                dest="loglevel",
                help="set loglevel to INFO",
                action="store_const",
                const=logging.INFO,
            ),
        ),
        (
            ("-vv", "--very-verbose"),
            dict(
                dest="loglevel",
                help="set loglevel to DEBUG",
                action="store_const",
                const=logging.DEBUG,
            ),
        ),
    ]
    for flags, options in argument_specs:
        parser.add_argument(*flags, **options)

    return parser.parse_args(args)
76 |
77 |
def setup_logging(loglevel):
    """Configure the root logger for timestamped console output on stdout.

    Args:
        loglevel (int): minimum loglevel for emitting messages
    """
    logging.basicConfig(
        level=loglevel,
        stream=sys.stdout,
        format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
88 |
89 |
def generate_joint_limit_trajectory(urdf_model, loop_time):
    """Generate a trajectory for all actuated joints that interpolates between joint limits.
    For continuous joints interpolate between [0, 2 * pi]; joints without a
    ``<limit>`` element (or with missing bounds) fall back to [-pi, +pi].

    Args:
        urdf_model (yourdfpy.URDF): URDF model whose actuated joints are swept.
        loop_time (float): Time in seconds to loop through the trajectory.

    Returns:
        dict: A dictionary over all actuated joints with list of configuration values.
    """
    trajectory_via_points = {}
    for joint_name in urdf_model.actuated_joint_names:
        joint = urdf_model.joint_map[joint_name]
        if joint.type.lower() == "continuous":
            # Continuous joints have no limits; sweep one full revolution.
            via_point_0 = 0.0
            via_point_2 = 2.0 * np.pi
            via_point_1 = (via_point_2 - via_point_0) / 2.0
        else:
            # Guard against joints without a <limit> element (limit is None);
            # the original code would raise AttributeError in that case.
            limit = joint.limit
            limit_lower = (
                limit.lower
                if limit is not None and limit.lower is not None
                else -np.pi
            )
            limit_upper = (
                limit.upper
                if limit is not None and limit.upper is not None
                else +np.pi
            )
            # Sweep lower -> upper -> lower so the loop closes on itself.
            via_point_0 = limit_lower
            via_point_1 = limit_upper
            via_point_2 = limit_lower

        trajectory_via_points[joint_name] = np.array(
            [
                via_point_0,
                via_point_1,
                via_point_2,
            ]
        )
    # 100 samples per second of loop time, uniformly covering [0, 1].
    times = np.linspace(0.0, 1.0, int(loop_time * 100.0))
    bins = np.arange(3) / 2.0  # via-point times: 0.0, 0.5, 1.0

    # Compute interpolation weights (alphas) for each sample time within its
    # enclosing via-point interval.
    inds = np.digitize(times, bins, right=True)
    inds[inds == 0] = 1  # clamp t=0 into the first interval
    alphas = (bins[inds] - times) / (bins[inds] - bins[inds - 1])

    # Piecewise-linear interpolation between consecutive via points.
    trajectory = {}
    for k in trajectory_via_points:
        trajectory[k] = (
            alphas * trajectory_via_points[k][inds - 1]
            + (1.0 - alphas) * trajectory_via_points[k][inds]
        )

    return trajectory
146 |
147 |
def viewer_callback(scene, urdf_model, trajectory, loop_time):
    """Viewer callback: set the model configuration from the looping trajectory.

    Args:
        scene: Trimesh scene passed in by the viewer (unused).
        urdf_model: URDF model whose configuration is updated in place.
        trajectory (dict): Joint name -> per-frame configuration values.
        loop_time (float): Duration of one loop in seconds.
    """
    # Map the current wall-clock time into a frame index (100 frames/second).
    frame = int(100.0 * (time.time() % loop_time))
    urdf_model.update_cfg(
        configuration={joint: values[frame] for joint, values in trajectory.items()}
    )
153 |
154 |
def main(args):
    """Wrapper allowing string arguments in a CLI fashion.

    Args:
        args (List[str]): command line parameters as list of strings
            (for example ``["--verbose", "42"]``).
    """
    args = parse_args(args)
    setup_logging(args.loglevel)

    # Collision visualization requires the collision scene graph and meshes.
    load_kwargs = (
        {"build_collision_scene_graph": True, "load_collision_meshes": True}
        if args.collision
        else {}
    )
    urdf_model = URDF.load(args.input, **load_kwargs)

    if args.configuration:
        urdf_model.update_cfg(args.configuration)

    callback = None
    if args.animate:
        loop_time = 6.0  # seconds for one full sweep through the joint limits
        joint_trajectory = generate_joint_limit_trajectory(
            urdf_model=urdf_model, loop_time=loop_time
        )
        callback = partial(
            viewer_callback,
            urdf_model=urdf_model,
            loop_time=loop_time,
            trajectory=joint_trajectory,
        )

    urdf_model.show(
        collision_geometry=args.collision,
        callback=callback,
    )
191 |
192 |
def run():
    """Calls :func:`main` passing the CLI arguments extracted from :obj:`sys.argv`.

    This function can be used as entry point to create console scripts with setuptools.
    """
    # Drop argv[0] (the program name); main() expects only the arguments.
    main(sys.argv[1:])


if __name__ == "__main__":
    # Allows running this module directly, e.g. `python viz.py robot.urdf`.
    run()
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://yourdfpy.readthedocs.io/en/latest/?badge=latest)
2 | [](https://coveralls.io/github/clemense/yourdfpy?branch=main)
3 | [](https://badge.fury.io/py/yourdfpy)
4 |
5 | # yourdfpy
6 |
7 | Yet anOther URDF parser for Python. Yup, it's another one. Deal with it.
8 |
9 | Yourdfpy is a simpler and easier-to-use library for loading, manipulating, validating, saving, and visualizing URDF files.
10 |
11 | ## Installation
12 |
13 | You can install yourdfpy directly from pip:
14 | ```
15 | pip install yourdfpy
16 | ```
17 |
18 | ## Visualization
19 |
20 | Once installed, you can visualize a URDF model from the command line:
21 | ```
22 | yourdfpy ./my_description/urdf/robot.urdf
23 | ```
24 |
25 | You can use the following keyboard shortcuts to inspect your model:
26 |
27 | - ``a``: Toggle rendered XYZ/RGB axis markers (off, world frame, every frame)
28 | - ``w``: Toggle wireframe mode (good for looking inside meshes, off by default)
29 | - ``c``: Toggle back face culling (on by default but in wireframe mode it is sometimes useful to see the back sides)
30 |
31 | ## But why another one?!?
32 | `Why are you wasting not only your but also our time?` you might ask. Fair point. There are already [urdfpy](https://github.com/mmatl/urdfpy) and [urdf_parser_py](https://github.com/ros/urdf_parser_py) that deal with URDFs. Unfortunately, none of these solutions allow customizable URDF parsing that is fully independent of validation and mesh loading. Dealing with filenames, outdated dependencies, open bug reports, and limited flexibility when it comes to serialization are other disadvantages. As shown in the table below, **yourdfpy** is the most robust one when it comes to loading URDFs in the wild.
33 |
34 | 
35 |
36 | | | [urdfpy](https://github.com/mmatl/urdfpy) | [urdf_parser_py](https://github.com/ros/urdf_parser_py) | **yourdfpy** |
37 | | ---------------------------------------------------------------------------------------------: | :---------------------------------------: | :-----------------------------------------------------: | :----------------: |
38 | | Decouple parsing from validation | | | :heavy_check_mark: |
39 | | Decouple parsing from loading meshes | | :heavy_check_mark: | :heavy_check_mark: |
40 | | Visualize URDF | :heavy_check_mark: | | :heavy_check_mark: |
41 | | Forward Kinematics | :heavy_check_mark: | | :heavy_check_mark: |
42 | | Robustness test: loading 12 URDF files from [here](https://github.com/ankurhanda/robot-assets) | 4/12 | 6/12 | 12/12 |
43 | | Avg. loading time per file (w/ mesh loading) | 480 ms | | 370 ms |
44 | | (w/o mesh loading) | | 3.2 ms | 6.2 ms |
45 | | Test on 4 URDF files on which `urdfpy` succeeds | 347.5 ms | | 203 ms |
46 | | Test on 6 URDF files on which `urdf_parser_py` succeeds | | 2.6 ms | 3.8 ms |
47 |
48 |
49 | Click to expand code listing that produces the above table entries.
50 |
51 | ```python
52 | robot_assets = ['robot-assets/urdfs/robots/barret_hand/bhand_model.URDF', 'robot-assets/urdfs/robots/robotiq_gripper/robotiq_arg85_description.URDF', 'robot-assets/urdfs/robots/anymal/anymal.urdf', 'robot-assets/urdfs/robots/franka_panda/panda.urdf', 'robot-assets/urdfs/robots/ginger_robot/gingerurdf.urdf', 'robot-assets/urdfs/robots/halodi/eve_r3.urdf', 'robot-assets/urdfs/robots/kinova/kinova.urdf', 'robot-assets/urdfs/robots/kuka_iiwa/model.urdf', 'robot-assets/urdfs/robots/pr2/pr2.urdf', 'robot-assets/urdfs/robots/ur10/ur10_robot.urdf', 'robot-assets/urdfs/robots/ur5/ur5_gripper.urdf', 'robot-assets/urdfs/robots/yumi/yumi.urdf']
53 |
54 | import urdfpy
55 | import urdf_parser_py
56 | import yourdfpy
57 |
58 | from functools import partial
59 |
60 | def load_urdfs(fnames, load_fn):
61 | results = {fname: None for fname in fnames}
62 | for fname in fnames:
63 | try:
64 | x = load_fn(fname)
65 | results[fname] = x
66 | except:
67 | print("Problems loading: ", fname)
68 | pass
69 | print(sum([1 for x, y in results.items() if y is not None]), "/", len(fnames))
70 | return results
71 |
72 | # parsing success rate
73 | load_urdfs(robot_assets, urdfpy.URDF.load)
74 | load_urdfs(robot_assets, urdf_parser_py.urdf.URDF.load)
75 | load_urdfs(robot_assets, yourdfpy.URDF.load)
76 |
77 | # parsing times
78 | %timeit load_urdfs(robot_assets, urdfpy.URDF.load)
79 | %timeit load_urdfs(robot_assets, urdf_parser_py.urdf.URDF.load)
80 | %timeit load_urdfs(robot_assets, yourdfpy.URDF.load)
81 | %timeit load_urdfs(robot_assets, partial(yourdfpy.URDF.load, load_meshes=False, build_scene_graph=False))
82 |
83 | # fairer comparison with yourdfpy
84 | urdfpy_fnames = [x for x, y in load_urdfs(robot_assets, urdfpy.URDF.load).items() if y is not None]
85 | %timeit load_urdfs(urdfpy_fnames, yourdfpy.URDF.load)
86 |
87 | # fairer comparison with urdf_parser_py
88 | urdfparser_fnames = [x for x, y in load_urdfs(robot_assets, urdf_parser_py.urdf.URDF.from_xml_file).items() if y is not None]
89 | %timeit load_urdfs(urdfparser_fnames, partial(yourdfpy.URDF.load, load_meshes=False, build_scene_graph=False))
90 | ```
91 |
92 |
93 |
94 |
114 |
115 |
136 |
--------------------------------------------------------------------------------
/tests/models/franka/franka.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # This file is execfile()d with the current directory set to its containing dir.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 | #
7 | # All configuration values have a default; values that are commented out
8 | # serve to show the default.
9 |
10 | import os
11 | import sys
12 | import shutil
13 |
14 | # -- Path setup --------------------------------------------------------------
15 |
16 | import sphinx_rtd_theme
17 |
18 | __location__ = os.path.dirname(__file__)
19 |
20 | # If extensions (or modules to document with autodoc) are in another directory,
21 | # add these directories to sys.path here. If the directory is relative to the
22 | # documentation root, use os.path.abspath to make it absolute, like shown here.
23 | sys.path.insert(0, os.path.join(__location__, "../src"))
24 |
25 | # -- Run sphinx-apidoc -------------------------------------------------------
26 | # This hack is necessary since RTD does not issue `sphinx-apidoc` before running
27 | # `sphinx-build -b html . _build/html`. See Issue:
28 | # https://github.com/readthedocs/readthedocs.org/issues/1139
29 | # DON'T FORGET: Check the box "Install your project inside a virtualenv using
30 | # setup.py install" in the RTD Advanced Settings.
31 | # Additionally it helps us to avoid running apidoc manually
32 |
try:  # for Sphinx >= 1.7
    from sphinx.ext import apidoc
except ImportError:
    from sphinx import apidoc

output_dir = os.path.join(__location__, "api")
module_dir = os.path.join(__location__, "../src/yourdfpy")
try:
    shutil.rmtree(output_dir)
except FileNotFoundError:
    pass

try:
    import sphinx

    # Build the argument list directly: splitting a command-line string on
    # spaces (as before) breaks when output_dir/module_dir contain spaces.
    args = [
        "sphinx-apidoc",
        "--implicit-namespaces",
        "-f",
        "-o",
        output_dir,
        module_dir,
    ]

    # Compare numeric version components. The previous string-tuple
    # comparison was lexicographic and e.g. classified "1.10" < "1.7".
    numeric_parts = []
    for part in sphinx.__version__.split("."):
        if not part.isdigit():
            break  # stop at pre-release suffixes like "0b1"
        numeric_parts.append(int(part))
    if tuple(numeric_parts) >= (1, 7):
        # Sphinx >= 1.7 expects the argument list without the program name.
        args = args[1:]

    apidoc.main(args)
except Exception as e:
    print("Running `sphinx-apidoc` failed!\n{}".format(e))
58 |
59 | # -- General configuration ---------------------------------------------------
60 |
61 | # If your documentation needs a minimal Sphinx version, state it here.
62 | # needs_sphinx = '1.0'
63 |
64 | # Add any Sphinx extension module names here, as strings. They can be extensions
65 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Sphinx extensions: autodoc + napoleon for docstring extraction, automodapi
# for per-module API pages, plus standard utility extensions.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.autosummary",
    "sphinx.ext.viewcode",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.ifconfig",
    "sphinx.ext.mathjax",
    "sphinx.ext.napoleon",
    "sphinx_automodapi.automodapi",
    "sphinx_automodapi.smart_resolver",
]
# Keep class members out of the generated toctrees/member listings.
numpydoc_class_members_toctree = False
numpydoc_show_class_members = False
# Directory (relative to docs/) where automodapi writes generated stub pages.
automodapi_toctreedirnm = "generated"

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
86 |
87 |
88 | # Configure AutoStructify
89 | # https://recommonmark.readthedocs.io/en/latest/auto_structify.html
def setup(app):
    """Sphinx setup hook: register recommonmark's AutoStructify transform."""
    from recommonmark.transform import AutoStructify

    autostructify_options = {
        "enable_auto_toc_tree": True,
        "auto_toc_tree_section": "Contents",
        "auto_toc_maxdepth": 2,
        "enable_eval_rst": True,
        "enable_math": True,
        "enable_inline_math": True,
    }
    app.add_config_value("recommonmark_config", autostructify_options, True)
    app.add_transform(AutoStructify)
103 |
104 |
105 | # Enable markdown
106 | extensions.append("recommonmark")
107 |
108 | # The suffix of source filenames.
109 | source_suffix = [".rst", ".md"]
110 |
111 | # The encoding of source files.
112 | # source_encoding = 'utf-8-sig'
113 |
114 | # The master toctree document.
115 | master_doc = "index"
116 |
117 | # General information about the project.
118 | project = "yourdfpy"
119 | copyright = "2021, Clemens Eppner"
120 |
121 | # The version info for the project you're documenting, acts as replacement for
122 | # |version| and |release|, also used in various other places throughout the
123 | # built documents.
124 | #
125 | # version: The short X.Y version.
126 | # release: The full version, including alpha/beta/rc tags.
127 | # If you don’t need the separation provided between version and release,
128 | # just set them both to the same value.
129 | try:
130 | from yourdfpy import __version__ as version
131 | except ImportError:
132 | version = ""
133 |
134 | if not version or version.lower() == "unknown":
135 | version = os.getenv("READTHEDOCS_VERSION", "unknown") # automatically set by RTD
136 |
137 | release = version
138 |
139 | # The language for content autogenerated by Sphinx. Refer to documentation
140 | # for a list of supported languages.
141 | # language = None
142 |
143 | # There are two options for replacing |today|: either, you set today to some
144 | # non-false value, then it is used:
145 | # today = ''
146 | # Else, today_fmt is used as the format for a strftime call.
147 | # today_fmt = '%B %d, %Y'
148 |
149 | # List of patterns, relative to source directory, that match files and
150 | # directories to ignore when looking for source files.
151 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"]
152 |
153 | # The reST default role (used for this markup: `text`) to use for all documents.
154 | # default_role = None
155 |
156 | # If true, '()' will be appended to :func: etc. cross-reference text.
157 | # add_function_parentheses = True
158 |
159 | # If true, the current module name will be prepended to all description
160 | # unit titles (such as .. function::).
161 | # add_module_names = True
162 |
163 | # If true, sectionauthor and moduleauthor directives will be shown in the
164 | # output. They are ignored by default.
165 | # show_authors = False
166 |
167 | # The name of the Pygments (syntax highlighting) style to use.
168 | pygments_style = "sphinx"
169 |
170 | # A list of ignored prefixes for module index sorting.
171 | # modindex_common_prefix = []
172 |
173 | # If true, keep warnings as "system message" paragraphs in the built documents.
174 | # keep_warnings = False
175 |
176 | # If this is True, todo emits a warning for each TODO entries. The default is False.
177 | todo_emit_warnings = True
178 |
179 |
180 | # -- Options for HTML output -------------------------------------------------
181 |
182 | # The theme to use for HTML and HTML Help pages. See the documentation for
183 | # a list of builtin themes.
184 | # html_theme = "alabaster"
185 | html_theme = "sphinx_rtd_theme"
186 |
187 | # Theme options are theme-specific and customize the look and feel of a theme
188 | # further. For a list of options available for each theme, see the
189 | # documentation.
190 | # html_theme_options = {"sidebar_width": "300px", "page_width": "1200px"}
191 |
192 | # Add any paths that contain custom themes here, relative to this directory.
193 | # html_theme_path = []
194 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
195 |
196 | # The name for this set of Sphinx documents. If None, it defaults to
197 | # " v documentation".
198 | # html_title = None
199 |
200 | # A shorter title for the navigation bar. Default is the same as html_title.
201 | # html_short_title = None
202 |
203 | # The name of an image file (relative to this directory) to place at the top
204 | # of the sidebar.
205 | # html_logo = ""
206 |
207 | # The name of an image file (within the static path) to use as favicon of the
208 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
209 | # pixels large.
210 | # html_favicon = None
211 |
212 | # Add any paths that contain custom static files (such as style sheets) here,
213 | # relative to this directory. They are copied after the builtin static files,
214 | # so a file named "default.css" will overwrite the builtin "default.css".
215 | html_static_path = ["_static"]
216 |
217 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
218 | # using the given strftime format.
219 | # html_last_updated_fmt = '%b %d, %Y'
220 |
221 | # If true, SmartyPants will be used to convert quotes and dashes to
222 | # typographically correct entities.
223 | # html_use_smartypants = True
224 |
225 | # Custom sidebar templates, maps document names to template names.
226 | # html_sidebars = {}
227 |
228 | # Additional templates that should be rendered to pages, maps page names to
229 | # template names.
230 | # html_additional_pages = {}
231 |
232 | # If false, no module index is generated.
233 | # html_domain_indices = True
234 |
235 | # If false, no index is generated.
236 | # html_use_index = True
237 |
238 | # If true, the index is split into individual pages for each letter.
239 | # html_split_index = False
240 |
241 | # If true, links to the reST sources are added to the pages.
242 | # html_show_sourcelink = True
243 |
244 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
245 | # html_show_sphinx = True
246 |
247 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
248 | # html_show_copyright = True
249 |
250 | # If true, an OpenSearch description file will be output, and all pages will
251 | # contain a tag referring to it. The value of this option must be the
252 | # base URL from which the finished HTML is served.
253 | # html_use_opensearch = ''
254 |
255 | # This is the file name suffix for HTML files (e.g. ".xhtml").
256 | # html_file_suffix = None
257 |
258 | # Output file base name for HTML help builder.
259 | htmlhelp_basename = "yourdfpy-doc"
260 |
261 |
262 | # -- Options for LaTeX output ------------------------------------------------
263 |
264 | latex_elements = {
265 | # The paper size ("letterpaper" or "a4paper").
266 | # "papersize": "letterpaper",
267 | # The font size ("10pt", "11pt" or "12pt").
268 | # "pointsize": "10pt",
269 | # Additional stuff for the LaTeX preamble.
270 | # "preamble": "",
271 | }
272 |
273 | # Grouping the document tree into LaTeX files. List of tuples
274 | # (source start file, target name, title, author, documentclass [howto/manual]).
275 | latex_documents = [
276 | ("index", "user_guide.tex", "yourdfpy Documentation", "Clemens Eppner", "manual")
277 | ]
278 |
279 | # The name of an image file (relative to this directory) to place at the top of
280 | # the title page.
281 | # latex_logo = ""
282 |
283 | # For "manual" documents, if this is true, then toplevel headings are parts,
284 | # not chapters.
285 | # latex_use_parts = False
286 |
287 | # If true, show page references after internal links.
288 | # latex_show_pagerefs = False
289 |
290 | # If true, show URL addresses after external links.
291 | # latex_show_urls = False
292 |
293 | # Documents to append as an appendix to all manuals.
294 | # latex_appendices = []
295 |
296 | # If false, no module index is generated.
297 | # latex_domain_indices = True
298 |
# -- External mapping --------------------------------------------------------
# major.minor of the running interpreter, e.g. "3.11"
python_version = ".".join(map(str, sys.version_info[0:2]))
# Targets for sphinx.ext.intersphinx cross-references to external projects.
intersphinx_mapping = {
    "sphinx": ("https://www.sphinx-doc.org/en/master", None),
    "python": ("https://docs.python.org/" + python_version, None),
    "matplotlib": ("https://matplotlib.org", None),
    "numpy": ("https://numpy.org/doc/stable", None),
    "sklearn": ("https://scikit-learn.org/stable", None),
    "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
    "setuptools": ("https://setuptools.readthedocs.io/en/stable/", None),
    "pyscaffold": ("https://pyscaffold.org/en/stable", None),
}

# Emitted on stderr during the Sphinx build as a progress/debug aid.
print(f"loading configurations for {project} {version} ...", file=sys.stderr)
314 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | .. todo:: THIS IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS!
2 |
3 | The document assumes you are using a source repository service that promotes a
4 | contribution model similar to `GitHub's fork and pull request workflow`_.
5 | While this is true for the majority of services (like GitHub, GitLab,
6 | BitBucket), it might not be the case for private repositories (e.g., when
7 | using Gerrit).
8 |
9 | Also notice that the code examples might refer to GitHub URLs or the text
10 | might use GitHub specific terminology (e.g., *Pull Request* instead of *Merge
11 | Request*).
12 |
13 | Please make sure to check the document having these assumptions in mind
14 | and update things accordingly.
15 |
16 | .. todo:: Provide the correct links/replacements at the bottom of the document.
17 |
18 | .. todo:: You might want to have a look on `PyScaffold's contributor's guide`_,
19 |
20 | especially if your project is open source. The text should be very similar to
21 | this template, but there are a few extra contents that you might decide to
22 | also include, like mentioning labels of your issue tracker or automated
23 | releases.
24 |
25 |
26 | ============
27 | Contributing
28 | ============
29 |
30 | Welcome to ``yourdfpy`` contributor's guide.
31 |
32 | This document focuses on getting any potential contributor familiarized
33 | with the development processes, but `other kinds of contributions`_ are also
34 | appreciated.
35 |
36 | If you are new to using git_ or have never collaborated in a project previously,
37 | please have a look at `contribution-guide.org`_. Other resources are also
38 | listed in the excellent `guide created by FreeCodeCamp`_ [#contrib1]_.
39 |
40 | Please notice, all users and contributors are expected to be **open,
41 | considerate, reasonable, and respectful**. When in doubt, `Python Software
42 | Foundation's Code of Conduct`_ is a good reference in terms of behavior
43 | guidelines.
44 |
45 |
46 | Issue Reports
47 | =============
48 |
49 | If you experience bugs or general issues with ``yourdfpy``, please have a look
50 | at the `issue tracker`_. If you don't see anything useful there, please feel
51 | free to fire an issue report.
52 |
53 | .. tip::
54 | Please don't forget to include the closed issues in your search.
55 | Sometimes a solution was already reported, and the problem is considered
56 | **solved**.
57 |
58 | New issue reports should include information about your programming environment
59 | (e.g., operating system, Python version) and steps to reproduce the problem.
60 | Please try also to simplify the reproduction steps to a very minimal example
61 | that still illustrates the problem you are facing. By removing other factors,
62 | you help us to identify the root cause of the issue.
63 |
64 |
65 | Documentation Improvements
66 | ==========================
67 |
68 | You can help improve ``yourdfpy`` docs by making them more readable and coherent, or
69 | by adding missing information and correcting mistakes.
70 |
71 | ``yourdfpy`` documentation uses Sphinx_ as its main documentation compiler.
72 | This means that the docs are kept in the same repository as the project code, and
73 | that any documentation update is done in the same way as a code contribution.
74 |
75 | .. todo:: Don't forget to mention which markup language you are using.
76 |
77 | e.g., reStructuredText_ or CommonMark_ with MyST_ extensions.
78 |
79 | .. todo:: If your project is hosted on GitHub, you can also mention the following tip:
80 |
81 | .. tip::
   Please notice that the `GitHub web interface`_ provides a quick way of
   proposing changes in ``yourdfpy``'s files. While this mechanism can
84 | be tricky for normal code contributions, it works perfectly fine for
85 | contributing to the docs, and can be quite handy.
86 |
87 | If you are interested in trying this method out, please navigate to
   the ``docs`` folder in the source repository_, find the file you
   would like to propose changes to and click on the little pencil icon at the
90 | top, to open `GitHub's code editor`_. Once you finish editing the file,
91 | please write a message in the form at the bottom of the page describing
   which changes you have made and what the motivations behind them are, and
93 | submit your proposal.
94 |
95 | When working on documentation changes in your local machine, you can
96 | compile them using |tox|_::
97 |
98 | tox -e docs
99 |
100 | and use Python's built-in web server for a preview in your web browser
101 | (``http://localhost:8000``)::
102 |
103 | python3 -m http.server --directory 'docs/_build/html'
104 |
105 |
106 | Code Contributions
107 | ==================
108 |
109 | .. todo:: Please include a reference or explanation about the internals of the project.
110 |
111 | An architecture description, design principles or at least a summary of the
112 | main concepts will make it easy for potential contributors to get started
113 | quickly.
114 |
115 | Submit an issue
116 | ---------------
117 |
118 | Before you work on any non-trivial code contribution it's best to first create
119 | a report in the `issue tracker`_ to start a discussion on the subject.
120 | This often provides additional considerations and avoids unnecessary work.
121 |
122 | Create an environment
123 | ---------------------
124 |
125 | Before you start coding, we recommend creating an isolated `virtual
126 | environment`_ to avoid any problems with your installed Python packages.
127 | This can easily be done via either |virtualenv|_::
128 |
   virtualenv <PATH TO VENV>
   source <PATH TO VENV>/bin/activate
131 |
132 | or Miniconda_::
133 |
134 | conda create -n pyscaffold python=3 six virtualenv pytest pytest-cov
135 | conda activate pyscaffold
136 |
137 | Clone the repository
138 | --------------------
139 |
140 | #. Create an user account on |the repository service| if you do not already have one.
141 | #. Fork the project repository_: click on the *Fork* button near the top of the
142 | page. This creates a copy of the code under your account on |the repository service|.
143 | #. Clone this copy to your local disk::
144 |
145 | git clone git@github.com:YourLogin/yourdfpy.git
146 | cd yourdfpy
147 |
148 | #. You should run::
149 |
150 | pip install -U pip setuptools -e .
151 |
   to be able to run ``putup --help``.
153 |
154 | .. todo:: if you are not using pre-commit, please remove the following item:
155 |
156 | #. Install |pre-commit|_::
157 |
158 | pip install pre-commit
159 | pre-commit install
160 |
161 | ``yourdfpy`` comes with a lot of hooks configured to automatically help the
162 | developer to check the code being written.
163 |
164 | Implement your changes
165 | ----------------------
166 |
167 | #. Create a branch to hold your changes::
168 |
169 | git checkout -b my-feature
170 |
171 | and start making changes. Never work on the master branch!
172 |
173 | #. Start your work on this branch. Don't forget to add docstrings_ to new
174 | functions, modules and classes, especially if they are part of public APIs.
175 |
176 | #. Add yourself to the list of contributors in ``AUTHORS.rst``.
177 |
178 | #. When you’re done editing, do::
179 |
      git add <MODIFIED FILES>
181 | git commit
182 |
183 | to record your changes in git_.
184 |
185 | .. todo:: if you are not using pre-commit, please remove the following item:
186 |
187 | Please make sure to see the validation messages from |pre-commit|_ and fix
188 | any eventual issues.
189 | This should automatically use flake8_/black_ to check/fix the code style
190 | in a way that is compatible with the project.
191 |
192 | .. important:: Don't forget to add unit tests and documentation in case your
193 | contribution adds an additional feature and is not just a bugfix.
194 |
195 | Moreover, writing a `descriptive commit message`_ is highly recommended.
196 | In case of doubt, you can check the commit history with::
197 |
198 | git log --graph --decorate --pretty=oneline --abbrev-commit --all
199 |
200 | to look for recurring communication patterns.
201 |
202 | #. Please check that your changes don't break any unit tests with::
203 |
204 | tox
205 |
206 | (after having installed |tox|_ with ``pip install tox`` or ``pipx``).
207 |
208 | You can also use |tox|_ to run several other pre-configured tasks in the
209 | repository. Try ``tox -av`` to see a list of the available checks.
210 |
211 | Submit your contribution
212 | ------------------------
213 |
214 | #. If everything works fine, push your local branch to |the repository service| with::
215 |
216 | git push -u origin my-feature
217 |
218 | #. Go to the web page of your fork and click |contribute button|
219 | to send your changes for review.
220 |
221 | .. todo:: if you are using GitHub, you can uncomment the following paragraph
222 |
223 | Find more detailed information `creating a PR`_. You might also want to open
224 | the PR as a draft first and mark it as ready for review after the feedbacks
225 | from the continuous integration (CI) system or any required fixes.
226 |
227 |
228 | Troubleshooting
229 | ---------------
230 |
231 | The following tips can be used when facing problems to build or test the
232 | package:
233 |
234 | #. Make sure to fetch all the tags from the upstream repository_.
235 | The command ``git describe --abbrev=0 --tags`` should return the version you
236 | are expecting. If you are trying to run CI scripts in a fork repository,
237 | make sure to push all the tags.
238 | You can also try to remove all the egg files or the complete egg folder, i.e.,
239 | ``.eggs``, as well as the ``*.egg-info`` folders in the ``src`` folder or
240 | potentially in the root of your project.
241 |
242 | #. Sometimes |tox|_ misses out when new dependencies are added, especially to
243 | ``setup.cfg`` and ``docs/requirements.txt``. If you find any problems with
244 | missing dependencies when running a command with |tox|_, try to recreate the
245 | ``tox`` environment using the ``-r`` flag. For example, instead of::
246 |
247 | tox -e docs
248 |
249 | Try running::
250 |
251 | tox -r -e docs
252 |
253 | #. Make sure to have a reliable |tox|_ installation that uses the correct
254 | Python version (e.g., 3.7+). When in doubt you can run::
255 |
256 | tox --version
257 | # OR
258 | which tox
259 |
260 | If you have trouble and are seeing weird errors upon running |tox|_, you can
261 | also try to create a dedicated `virtual environment`_ with a |tox|_ binary
262 | freshly installed. For example::
263 |
264 | virtualenv .venv
265 | source .venv/bin/activate
266 | .venv/bin/pip install tox
267 | .venv/bin/tox -e all
268 |
269 | #. `Pytest can drop you`_ in an interactive session in the case an error occurs.
270 | In order to do that you need to pass a ``--pdb`` option (for example by
   running ``tox -- -k <NAME OF THE FAILING TEST> --pdb``).
272 | You can also setup breakpoints manually instead of using the ``--pdb`` option.
273 |
274 |
275 | Maintainer tasks
276 | ================
277 |
278 | Releases
279 | --------
280 |
281 | .. todo:: This section assumes you are using PyPI to publicly release your package.
282 |
283 | If instead you are using a different/private package index, please update
284 | the instructions accordingly.
285 |
286 | If you are part of the group of maintainers and have correct user permissions
287 | on PyPI_, the following steps can be used to release a new version for
288 | ``yourdfpy``:
289 |
290 | #. Make sure all unit tests are successful.
291 | #. Tag the current commit on the main branch with a release tag, e.g., ``v1.2.3``.
292 | #. Push the new tag to the upstream repository_, e.g., ``git push upstream v1.2.3``
293 | #. Clean up the ``dist`` and ``build`` folders with ``tox -e clean``
294 | (or ``rm -rf dist build``)
295 | to avoid confusion with old builds and Sphinx docs.
296 | #. Run ``tox -e build`` and check that the files in ``dist`` have
297 | the correct version (no ``.dirty`` or git_ hash) according to the git_ tag.
298 | Also check the sizes of the distributions, if they are too big (e.g., >
299 | 500KB), unwanted clutter may have been accidentally included.
300 | #. Run ``tox -e publish -- --repository pypi`` and check that everything was
301 | uploaded to PyPI_ correctly.
302 |
303 |
304 |
305 | .. [#contrib1] Even though, these resources focus on open source projects and
306 | communities, the general ideas behind collaborating with other developers
307 | to collectively create software are general and can be applied to all sorts
308 | of environments, including private companies and proprietary code bases.
309 |
310 |
.. <-- start -->
312 | .. todo:: Please review and change the following definitions:
313 |
314 | .. |the repository service| replace:: GitHub
315 | .. |contribute button| replace:: "Create pull request"
316 |
.. _repository: https://github.com/clemense/yourdfpy
.. _issue tracker: https://github.com/clemense/yourdfpy/issues
319 | .. <-- end -->
320 |
321 |
322 | .. |virtualenv| replace:: ``virtualenv``
323 | .. |pre-commit| replace:: ``pre-commit``
324 | .. |tox| replace:: ``tox``
325 |
326 |
327 | .. _black: https://pypi.org/project/black/
328 | .. _CommonMark: https://commonmark.org/
329 | .. _contribution-guide.org: http://www.contribution-guide.org/
330 | .. _creating a PR: https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request
331 | .. _descriptive commit message: https://chris.beams.io/posts/git-commit
332 | .. _docstrings: https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html
333 | .. _first-contributions tutorial: https://github.com/firstcontributions/first-contributions
334 | .. _flake8: https://flake8.pycqa.org/en/stable/
335 | .. _git: https://git-scm.com
336 | .. _GitHub's fork and pull request workflow: https://guides.github.com/activities/forking/
337 | .. _guide created by FreeCodeCamp: https://github.com/FreeCodeCamp/how-to-contribute-to-open-source
338 | .. _Miniconda: https://docs.conda.io/en/latest/miniconda.html
339 | .. _MyST: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html
340 | .. _other kinds of contributions: https://opensource.guide/how-to-contribute
341 | .. _pre-commit: https://pre-commit.com/
342 | .. _PyPI: https://pypi.org/
343 | .. _PyScaffold's contributor's guide: https://pyscaffold.org/en/stable/contributing.html
344 | .. _Pytest can drop you: https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-at-the-start-of-a-test
345 | .. _Python Software Foundation's Code of Conduct: https://www.python.org/psf/conduct/
346 | .. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/
347 | .. _Sphinx: https://www.sphinx-doc.org/en/master/
348 | .. _tox: https://tox.readthedocs.io/en/stable/
349 | .. _virtual environment: https://realpython.com/python-virtual-environments-a-primer/
350 | .. _virtualenv: https://virtualenv.pypa.io/en/stable/
351 |
352 | .. _GitHub web interface: https://docs.github.com/en/github/managing-files-in-a-repository/managing-files-on-github/editing-files-in-your-repository
353 | .. _GitHub's code editor: https://docs.github.com/en/github/managing-files-in-a-repository/managing-files-on-github/editing-files-in-your-repository
354 |
--------------------------------------------------------------------------------
/src/yourdfpy/urdf.py:
--------------------------------------------------------------------------------
1 | import os
2 | import six
3 | import copy
4 | import logging
5 | import numpy as np
6 | from dataclasses import dataclass, field, is_dataclass
7 | from typing import Dict, List, Optional, Union
8 | from functools import partial
9 |
10 | import trimesh
11 | import trimesh.transformations as tra
12 |
13 | from lxml import etree
14 |
15 | _logger = logging.getLogger(__name__)
16 |
17 | # threshold for comparison
18 | EQUALITY_TOLERANCE = 1e-8
19 |
20 | def _array_eq(arr1, arr2):
21 | if arr1 is None and arr2 is None:
22 | return True
23 | return (
24 | isinstance(arr1, np.ndarray)
25 | and isinstance(arr2, np.ndarray)
26 | and arr1.shape == arr2.shape
27 | and np.allclose(arr1, arr2, atol=EQUALITY_TOLERANCE)
28 | )
29 |
30 | def _scalar_eq(scalar1, scalar2):
31 | return np.isclose(scalar1, scalar2, atol=EQUALITY_TOLERANCE)
32 |
@dataclass(eq=False)
class TransmissionJoint:
    # Name of the joint this transmission element refers to.
    name: str
    # Supported hardware interfaces; order is irrelevant for equality.
    hardware_interfaces: List[str] = field(default_factory=list)

    def __eq__(self, other):
        """Order-insensitive comparison of name and hardware interfaces."""
        if not isinstance(other, TransmissionJoint):
            return NotImplemented
        if self.name != other.name:
            return False
        ours, theirs = self.hardware_interfaces, other.hardware_interfaces
        # Mutual containment == equality of the two lists as multiset-free sets.
        return all(hi in theirs for hi in ours) and all(hi in ours for hi in theirs)
52 |
53 |
@dataclass(eq=False)
class Actuator:
    name: str
    mechanical_reduction: Optional[float] = None
    # Hardware interfaces are only valid for ROS Indigo and prior versions.
    hardware_interfaces: List[str] = field(default_factory=list)

    def __eq__(self, other):
        """Order-insensitive, tolerance-aware comparison of actuator attributes."""
        if not isinstance(other, Actuator):
            return NotImplemented
        if self.name != other.name:
            return False
        if not _scalar_eq(self.mechanical_reduction, other.mechanical_reduction):
            return False
        ours, theirs = self.hardware_interfaces, other.hardware_interfaces
        # Mutual containment ignores the ordering of the interface lists.
        return all(hi in theirs for hi in ours) and all(hi in ours for hi in theirs)
76 |
77 |
@dataclass(eq=False)
class Transmission:
    name: str
    type: Optional[str] = None
    joints: List[TransmissionJoint] = field(default_factory=list)
    actuators: List[Actuator] = field(default_factory=list)

    def __eq__(self, other):
        """Order-insensitive comparison of name, type, joints, and actuators."""
        if not isinstance(other, Transmission):
            return NotImplemented

        def _same_members(a, b):
            # Mutual containment emulates set equality for unhashable items.
            return all(x in b for x in a) and all(x in a for x in b)

        return (
            self.name == other.name
            and self.type == other.type
            and _same_members(self.joints, other.joints)
            and _same_members(self.actuators, other.actuators)
        )
100 |
101 |
@dataclass
class Calibration:
    """Reference positions used to calibrate the absolute position of a joint."""

    rising: Optional[float] = None
    falling: Optional[float] = None


@dataclass
class Mimic:
    """Makes a joint track the value of another joint."""

    # Name of the joint being mimicked.
    joint: str
    multiplier: Optional[float] = None
    offset: Optional[float] = None


@dataclass
class SafetyController:
    """Soft limits and controller gains of a joint's safety controller."""

    soft_lower_limit: Optional[float] = None
    soft_upper_limit: Optional[float] = None
    k_position: Optional[float] = None
    k_velocity: Optional[float] = None


@dataclass
class Sphere:
    """A sphere geometry primitive."""

    radius: float


@dataclass
class Cylinder:
    """A cylinder geometry primitive."""

    radius: float
    length: float
132 |
133 |
@dataclass(eq=False)
class Box:
    size: np.ndarray

    def __eq__(self, other):
        """Boxes are equal iff their size vectors match within tolerance."""
        if isinstance(other, Box):
            return _array_eq(self.size, other.size)
        return NotImplemented
142 |
143 |
@dataclass(eq=False)
class Mesh:
    filename: str
    scale: Optional[Union[float, np.ndarray]] = None

    def __eq__(self, other):
        """Compare filename and scale; scale may be a float, an array, or None."""
        if not isinstance(other, Mesh):
            return NotImplemented
        if self.filename != other.filename:
            return False
        both_floats = isinstance(self.scale, float) and isinstance(other.scale, float)
        if both_floats:
            return _scalar_eq(self.scale, other.scale)
        # Covers the array-vs-array and None-vs-None cases (and mixed -> False).
        return _array_eq(self.scale, other.scale)
160 |
161 |
@dataclass
class Geometry:
    """Container for a geometry; typically only one field is set (not enforced here)."""

    box: Optional[Box] = None
    cylinder: Optional[Cylinder] = None
    sphere: Optional[Sphere] = None
    mesh: Optional[Mesh] = None
168 |
169 |
@dataclass(eq=False)
class Color:
    rgba: np.ndarray

    def __eq__(self, other):
        """Colors are equal iff their RGBA vectors match within tolerance."""
        if isinstance(other, Color):
            return _array_eq(self.rgba, other.rgba)
        return NotImplemented
178 |
179 |
@dataclass
class Texture:
    """A texture referenced by its image file name."""

    filename: str


@dataclass
class Material:
    """A material: a name, a color, and/or a texture (all optional)."""

    name: Optional[str] = None
    color: Optional[Color] = None
    texture: Optional[Texture] = None
190 |
191 |
@dataclass(eq=False)
class Visual:
    name: Optional[str] = None
    origin: Optional[np.ndarray] = None
    geometry: Optional[Geometry] = None  # That's not really optional according to ROS
    material: Optional[Material] = None

    def __eq__(self, other):
        """Tolerance-aware comparison of all visual attributes."""
        if not isinstance(other, Visual):
            return NotImplemented
        if self.name != other.name:
            return False
        if not _array_eq(self.origin, other.origin):
            return False
        return self.geometry == other.geometry and self.material == other.material
208 |
209 |
@dataclass(eq=False)
class Collision:
    """Collision element of a link: a named geometry with an optional origin."""

    # Name of this collision element.
    name: str
    # Pose of the geometry; stored as a numpy array, compared with tolerance.
    origin: Optional[np.ndarray] = None
    # Annotated Optional to match the None default.
    geometry: Optional[Geometry] = None

    def __eq__(self, other):
        """Tolerance-aware comparison of name, origin, and geometry."""
        if not isinstance(other, Collision):
            return NotImplemented
        return (
            self.name == other.name
            and _array_eq(self.origin, other.origin)
            and self.geometry == other.geometry
        )
224 |
225 |
@dataclass(eq=False)
class Inertial:
    origin: Optional[np.ndarray] = None
    mass: Optional[float] = None
    inertia: Optional[np.ndarray] = None

    def __eq__(self, other):
        """Tolerance-aware comparison of origin, mass, and inertia.

        Comparisons are short-circuited in declaration order.
        """
        if not isinstance(other, Inertial):
            return NotImplemented
        if not _array_eq(self.origin, other.origin):
            return False
        if not _scalar_eq(self.mass, other.mass):
            return False
        return _array_eq(self.inertia, other.inertia)
240 |
241 |
@dataclass(eq=False)
class Link:
    name: str
    inertial: Optional[Inertial] = None
    visuals: List[Visual] = field(default_factory=list)
    collisions: List[Collision] = field(default_factory=list)

    def __eq__(self, other):
        """Order-insensitive comparison of all link attributes."""
        if not isinstance(other, Link):
            return NotImplemented

        def _same_members(a, b):
            # Mutual containment emulates set equality for unhashable items.
            return all(x in b for x in a) and all(x in a for x in b)

        return (
            self.name == other.name
            and self.inertial == other.inertial
            and _same_members(self.visuals, other.visuals)
            and _same_members(self.collisions, other.collisions)
        )
265 |
266 |
@dataclass
class Dynamics:
    """Damping and friction properties of a joint."""

    damping: Optional[float] = None
    friction: Optional[float] = None


@dataclass
class Limit:
    """Joint limits: maximum effort/velocity and lower/upper position bounds."""

    effort: Optional[float] = None
    velocity: Optional[float] = None
    lower: Optional[float] = None
    upper: Optional[float] = None
279 |
280 |
@dataclass(eq=False)
class Joint:
    """A joint connecting a parent link to a child link."""

    # Unique name of the joint.
    name: str
    # Joint type string as read from the URDF (not validated here).
    type: Optional[str] = None
    # Name of the parent link.
    parent: Optional[str] = None
    # Name of the child link.
    child: Optional[str] = None
    # Pose of the joint frame; compared element-wise with tolerance.
    origin: Optional[np.ndarray] = None
    # Axis of motion; compared element-wise with tolerance.
    axis: Optional[np.ndarray] = None
    dynamics: Optional[Dynamics] = None
    limit: Optional[Limit] = None
    mimic: Optional[Mimic] = None
    calibration: Optional[Calibration] = None
    safety_controller: Optional[SafetyController] = None

    def __eq__(self, other):
        """Tolerance-aware comparison of all joint attributes."""
        if not isinstance(other, Joint):
            return NotImplemented
        return (
            self.name == other.name
            and self.type == other.type
            and self.parent == other.parent
            and self.child == other.child
            and _array_eq(self.origin, other.origin)
            and _array_eq(self.axis, other.axis)
            and self.dynamics == other.dynamics
            and self.limit == other.limit
            and self.mimic == other.mimic
            and self.calibration == other.calibration
            and self.safety_controller == other.safety_controller
        )
311 |
312 |
@dataclass(eq=False)
class Robot:
    name: str
    links: List[Link] = field(default_factory=list)
    joints: List[Joint] = field(default_factory=list)
    materials: List[Material] = field(default_factory=list)
    transmission: List[str] = field(default_factory=list)
    gazebo: List[str] = field(default_factory=list)

    def __eq__(self, other):
        """Order-insensitive comparison of all robot components."""
        if not isinstance(other, Robot):
            return NotImplemented
        if self.name != other.name:
            return False

        def _same_members(a, b):
            # Mutual containment emulates set equality for unhashable items.
            return all(x in b for x in a) and all(x in a for x in b)

        pairs = (
            (self.links, other.links),
            (self.joints, other.joints),
            (self.materials, other.materials),
            (self.transmission, other.transmission),
            (self.gazebo, other.gazebo),
        )
        return all(_same_members(a, b) for a, b in pairs)
348 |
349 |
class URDFError(Exception):
    """General URDF exception; base class for the more specific errors below."""

    def __init__(self, msg):
        super().__init__()
        # Keep the message on the instance so __str__/__repr__ can use it.
        self.msg = msg

    def __str__(self):
        return f"{type(self).__name__}: {self.msg}"

    def __repr__(self):
        return f'{type(self).__name__}("{self.msg}")'
362 |
363 |
class URDFIncompleteError(URDFError):
    """Raised when data needed for an object is missing."""

    pass


class URDFAttributeValueError(URDFError):
    """Raised when an attribute value is not contained in the set of allowed values."""

    pass


class URDFBrokenRefError(URDFError):
    """Raised when a referenced object is not found in the scope."""

    pass


class URDFMalformedError(URDFError):
    """Raised when data is found to be corrupted in some way."""

    pass


class URDFUnsupportedError(URDFError):
    """Raised when some unexpectedly unsupported feature is found."""

    pass


class URDFSaveValidationError(URDFError):
    """Raised when XML validation fails while saving."""

    pass
398 |
399 |
400 | def _str2float(s):
401 | """Cast string to float if it is not None. Otherwise return None.
402 |
403 | Args:
404 | s (str): String to convert or None.
405 |
406 | Returns:
407 | str or NoneType: The converted string or None.
408 | """
409 | return float(s) if s is not None else None
410 |
411 |
def apply_visual_color(
    geom: trimesh.Trimesh,
    visual: Visual,
    material_map: Dict[str, Material],
) -> None:
    """Apply the color of the visual material to the mesh.

    Args:
        geom: Trimesh to color.
        visual: Visual description from XML.
        material_map: Dictionary mapping material names to their definitions.
    """
    material = visual.material
    if material is None:
        return

    # Prefer the locally defined color; otherwise fall back to the color of
    # the named material from the map.
    color = material.color
    if color is None:
        if material.name is None or material.name not in material_map:
            return
        color = material_map[material.name].color
    if color is None:
        return

    # Only ColorVisuals can be recolored in place; other visuals are left alone.
    if isinstance(geom.visual, trimesh.visual.ColorVisuals):
        geom.visual.face_colors[:] = [int(255 * channel) for channel in color.rgba]
438 |
439 |
def filename_handler_null(fname):
    """The identity filename handler: returns its input unchanged.

    Args:
        fname (str): A file name.

    Returns:
        str: The unmodified file name.
    """
    return fname
450 |
451 |
def filename_handler_ignore_directive(fname):
    """A filename handler that removes anything before (and including) '://'.

    Args:
        fname (str): A file name.

    Returns:
        str: The file name without the prefix.
    """
    has_directive = "://" in fname or ":\\\\" in fname
    if not has_directive:
        return fname
    # Keep everything after the first colon, then strip the two slashes.
    _, _, remainder = fname.partition(":")
    return remainder[2:]


def filename_handler_ignore_directive_package(fname):
    """A filename handler that removes the 'package://' directive and the package it refers to.
    It subsequently calls filename_handler_ignore_directive, i.e., it removes any other directive.

    Args:
        fname (str): A file name.

    Returns:
        str: The file name without 'package://' and the package name.
    """
    prefix = "package://"
    if not fname.startswith(prefix):
        return filename_handler_ignore_directive(fname)
    # Drop the directive, then drop the first path component (the package name).
    components = os.path.normpath(fname[len(prefix):]).split(os.path.sep)
    return os.path.join(*components[1:])
482 |
483 |
def filename_handler_add_prefix(fname, prefix):
    """A filename handler that prepends a prefix to a file name.

    Args:
        fname (str): A file name.
        prefix (str): A prefix.

    Returns:
        str: Prefix plus file name.
    """
    return f"{prefix}{fname}"
495 |
496 |
def filename_handler_absolute2relative(fname, dir):
    """A filename handler that turns an absolute file name into a relative one.

    Args:
        fname (str): A file name.
        dir (str): A directory.

    Returns:
        str: The file name relative to the directory.
    """
    # TODO: that's not right
    # Simple prefix stripping; does not normalize paths or insert separators.
    return fname[len(dir):] if fname.startswith(dir) else fname
511 |
512 |
def filename_handler_relative(fname, dir):
    """A filename handler that joins a file name with a directory.

    Args:
        fname (str): A file name.
        dir (str): A directory.

    Returns:
        str: The directory joined with the (directive-stripped) file name.
    """
    stripped = filename_handler_ignore_directive_package(fname)
    return os.path.join(dir, stripped)
524 |
525 |
def filename_handler_relative_to_urdf_file(fname, urdf_fname):
    """Resolve fname relative to the directory containing the URDF file.

    Args:
        fname (str): A file name.
        urdf_fname (str): Path of the URDF file.

    Returns:
        str: The file name joined with the URDF file's directory.
    """
    return filename_handler_relative(fname, os.path.dirname(urdf_fname))


def filename_handler_relative_to_urdf_file_recursive(fname, urdf_fname, level=0):
    """Resolve fname relative to an ancestor directory of the URDF file.

    Args:
        fname (str): A file name.
        urdf_fname (str): Path of the URDF file.
        level (int, optional): Number of directory levels to strip from
            urdf_fname before resolving. Defaults to 0.

    Returns:
        str: The file name joined with the selected ancestor directory.
    """
    if level == 0:
        return filename_handler_relative_to_urdf_file(fname, urdf_fname)
    return filename_handler_relative_to_urdf_file_recursive(
        fname, os.path.split(urdf_fname)[0], level=level - 1
    )
536 |
537 |
def _create_filename_handlers_to_urdf_file_recursive(urdf_fname):
    """Create one filename handler per path component of the URDF file's path.

    Each handler resolves file names relative to a successively higher
    ancestor directory of ``urdf_fname``.

    Args:
        urdf_fname (str): Path of the URDF file.

    Returns:
        list: List of partial functions usable as filename handlers.
    """
    return [
        partial(
            filename_handler_relative_to_urdf_file_recursive,
            urdf_fname=urdf_fname,
            level=i,
        )
        for i in range(len(os.path.normpath(urdf_fname).split(os.path.sep)))
    ]
547 |
548 |
def filename_handler_meta(fname, filename_handlers):
    """A filename handler that calls other filename handlers until the resulting file name points to an existing file.

    Args:
        fname (str): A file name.
        filename_handlers (list(fn)): A list of function pointers to filename handlers.

    Returns:
        str: The resolved file name that points to an existing file or the input if none of the files exists.
    """
    for handler in filename_handlers:
        candidate_fname = handler(fname=fname)
        _logger.debug(f"Checking filename: {candidate_fname}")
        if os.path.isfile(candidate_fname):
            return candidate_fname
    # Nothing resolved to an existing file; fall back to the original name.
    _logger.warning(f"Unable to resolve filename: {fname}")
    return fname
566 |
567 |
def filename_handler_magic(fname, dir):
    """A magic filename handler.

    Args:
        fname (str): A file name.
        dir (str): A directory.

    Returns:
        str: The file name that exists or the input if nothing is found.
    """
    # Try resolution relative to dir, then plain directive stripping, then
    # resolution against every ancestor directory of dir.
    handlers = [
        partial(filename_handler_relative, dir=dir),
        filename_handler_ignore_directive,
    ]
    handlers += _create_filename_handlers_to_urdf_file_recursive(urdf_fname=dir)
    return filename_handler_meta(fname=fname, filename_handlers=handlers)
586 |
587 |
def validation_handler_strict(errors):
    """A validation handler that does not allow any errors.

    Args:
        errors (list[yourdfpy.URDFError]): List of errors.

    Returns:
        bool: True iff no errors were found.
    """
    return not errors
598 |
599 |
600 | class URDF:
    def __init__(
        self,
        robot: Optional[Robot] = None,
        build_scene_graph: bool = True,
        build_collision_scene_graph: bool = False,
        load_meshes: bool = True,
        load_collision_meshes: bool = False,
        filename_handler=None,
        mesh_dir: str = "",
        force_mesh: bool = False,
        force_collision_mesh: bool = True,
    ):
        """A URDF model.

        Args:
            robot (Robot): The robot model. Defaults to None.
            build_scene_graph (bool, optional): Whether to build a scene graph to enable transformation queries and forward kinematics. Defaults to True.
            build_collision_scene_graph (bool, optional): Whether to build a scene graph for collision elements. Defaults to False.
            load_meshes (bool, optional): Whether to load the meshes referenced in the visual elements. Defaults to True.
            load_collision_meshes (bool, optional): Whether to load the meshes referenced in the collision elements. Defaults to False.
            filename_handler ([type], optional): Any function f(in: str) -> str, that maps filenames in the URDF to actual resources. Can be used to customize treatment of `package://` directives or relative/absolute filenames. Defaults to None.
            mesh_dir (str, optional): A root directory used for loading meshes. Defaults to "".
            force_mesh (bool, optional): Each loaded geometry will be concatenated into a single one (instead of being turned into a graph; in case the underlying file contains multiple geometries). This might lose texture information but the resulting scene graph will be smaller. Defaults to False.
            force_collision_mesh (bool, optional): Same as force_mesh, but for the collision scene. Defaults to True.
        """
        # Default: the "magic" handler that tries several resolution
        # strategies rooted at mesh_dir.
        if filename_handler is None:
            self._filename_handler = partial(filename_handler_magic, dir=mesh_dir)
        else:
            self._filename_handler = filename_handler

        self.robot = robot
        # Build the name->object maps and the actuated-joint caches before
        # any configuration is queried below.
        self._create_maps()
        self._update_actuated_joints()

        # Start at the zero configuration.
        self._cfg = self.zero_cfg

        # The base link is only needed when a scene graph is constructed.
        if build_scene_graph or build_collision_scene_graph:
            self._base_link = self._determine_base_link()
        else:
            self._base_link = None

        # Collects errors encountered while building the scenes.
        self._errors = []

        if build_scene_graph:
            self._scene = self._create_scene(
                use_collision_geometry=False,
                load_geometry=load_meshes,
                force_mesh=force_mesh,
                force_single_geometry_per_link=force_mesh,
            )
        else:
            self._scene = None

        if build_collision_scene_graph:
            self._scene_collision = self._create_scene(
                use_collision_geometry=True,
                load_geometry=load_collision_meshes,
                force_mesh=force_collision_mesh,
                force_single_geometry_per_link=force_collision_mesh,
            )
        else:
            self._scene_collision = None
663 |
    @property
    def scene(self) -> trimesh.Scene:
        """A scene object representing the URDF model.

        Returns:
            trimesh.Scene: A trimesh scene object, or None if no scene graph was built.
        """
        return self._scene

    @property
    def collision_scene(self) -> trimesh.Scene:
        """A scene object representing the collision elements of the URDF model.

        Returns:
            trimesh.Scene: A trimesh scene object, or None if no collision scene graph was built.
        """
        return self._scene_collision

    @property
    def link_map(self) -> dict:
        """A dictionary mapping link names to link objects.

        Returns:
            dict: Mapping from link name (str) to Link.
        """
        return self._link_map

    @property
    def joint_map(self) -> dict:
        """A dictionary mapping joint names to joint objects.

        Returns:
            dict: Mapping from joint name (str) to Joint.
        """
        return self._joint_map

    @property
    def joint_names(self) -> List[str]:
        """List of joint names.

        Returns:
            list[str]: List of joint names of the URDF model.
        """
        return [j.name for j in self.robot.joints]

    @property
    def actuated_joints(self) -> List[Joint]:
        """List of actuated joints. This excludes mimic and fixed joints.

        Returns:
            list[Joint]: List of actuated joints of the URDF model.
        """
        return self._actuated_joints

    @property
    def actuated_dof_indices(self) -> List[List[int]]:
        """List of DOF indices per actuated joint. Can be used to reference configuration.

        Returns:
            list[list[int]]: List of DOF indices per actuated joint.
        """
        return self._actuated_dof_indices

    @property
    def actuated_joint_indices(self) -> List[int]:
        """List of indices of all joints that are actuated, i.e., not of type mimic or fixed.

        Returns:
            list[int]: List of indices of actuated joints.
        """
        return self._actuated_joint_indices
735 |
736 | @property
737 | def actuated_joint_names(self):
738 | """List of names of actuated joints. This excludes mimic and fixed joints.
739 |
740 | Returns:
741 | list[str]: List of names of actuated joints of the URDF model.
742 | """
743 | return [j.name for j in self._actuated_joints]
744 |
745 | @property
746 | def num_actuated_joints(self):
747 | """Number of actuated joints.
748 |
749 | Returns:
750 | int: Number of actuated joints.
751 | """
752 | return len(self.actuated_joints)
753 |
754 | @property
755 | def num_dofs(self):
756 | """Number of degrees of freedom of actuated joints. Depending on the type of the joint, the number of DOFs might vary.
757 |
758 | Returns:
759 | int: Degrees of freedom.
760 | """
761 | total_num_dofs = 0
762 | for j in self._actuated_joints:
763 | if j.type in ["revolute", "prismatic", "continuous"]:
764 | total_num_dofs += 1
765 | elif j.type == "floating":
766 | total_num_dofs += 6
767 | elif j.type == "planar":
768 | total_num_dofs += 2
769 | return total_num_dofs
770 |
771 | @property
772 | def zero_cfg(self):
773 | """Return the zero configuration.
774 |
775 | Returns:
776 | np.ndarray: The zero configuration.
777 | """
778 | return np.zeros(self.num_dofs)
779 |
780 | @property
781 | def center_cfg(self):
782 | """Return center configuration of URDF model by using the average of each joint's limits if present, otherwise zero.
783 |
784 | Returns:
785 | (n), float: Default configuration of URDF model.
786 | """
787 | config = []
788 | config_names = []
789 | for j in self._actuated_joints:
790 | if j.type == "revolute" or j.type == "prismatic":
791 | if j.limit is not None:
792 | cfg = [j.limit.lower + 0.5 * (j.limit.upper - j.limit.lower)]
793 | else:
794 | cfg = [0.0]
795 | elif j.type == "continuous":
796 | cfg = [0.0]
797 | elif j.type == "floating":
798 | cfg = [0.0] * 6
799 | elif j.type == "planar":
800 | cfg = [0.0] * 2
801 |
802 | config.append(cfg)
803 | config_names.append(j.name)
804 |
805 | for i, j in enumerate(self.robot.joints):
806 | if j.mimic is not None:
807 | index = config_names.index(j.mimic.joint)
808 | config[i][0] = config[index][0] * j.mimic.multiplier + j.mimic.offset
809 |
810 | if len(config) == 0:
811 | return np.array([], dtype=np.float64)
812 | return np.concatenate(config)
813 |
814 | @property
815 | def cfg(self):
816 | """Current configuration.
817 |
818 | Returns:
819 | np.ndarray: Current configuration of URDF model.
820 | """
821 | return self._cfg
822 |
823 | @property
824 | def base_link(self):
825 | """Name of URDF base/root link.
826 |
827 | Returns:
828 | str: Name of base link of URDF model.
829 | """
830 | return self._base_link
831 |
832 | @property
833 | def errors(self) -> list:
834 | """A list with validation errors.
835 |
836 | Returns:
837 | list: A list of validation errors.
838 | """
839 | return self._errors
840 |
841 | def clear_errors(self):
842 | """Clear the validation error log."""
843 | self._errors = []
844 |
    def show(self, collision_geometry=False, callback=None):
        """Open a simple viewer displaying the URDF model.

        Args:
            collision_geometry (bool, optional): Whether to display the <collision> elements instead of the <visual> elements. Defaults to False.
            callback (function, optional): Callback passed on to trimesh's Scene.show. Defaults to None.

        Raises:
            ValueError: If the requested scene graph was not built during loading, or if the visual scene contains no geometry.
        """
        if collision_geometry:
            if self._scene_collision is None:
                raise ValueError(
                    "No collision scene available. Use build_collision_scene_graph=True and load_collision_meshes=True during loading."
                )
            else:
                self._scene_collision.show(callback=callback)
        else:
            if self._scene is None:
                raise ValueError(
                    "No scene available. Use build_scene_graph=True and load_meshes=True during loading."
                )
            elif len(self._scene.bounds_corners) < 1:
                # An empty scene has no bounds; opening a viewer would show nothing.
                raise ValueError(
                    "Scene is empty, maybe meshes failed to load? Use build_scene_graph=True and load_meshes=True during loading."
                )
            else:
                self._scene.show(callback=callback)
869 |
870 | def validate(self, validation_fn=None) -> bool:
871 | """Validate URDF model.
872 |
873 | Args:
874 | validation_fn (function, optional): A function f(list[yourdfpy.URDFError]) -> bool. None uses the strict handler (any error leads to False). Defaults to None.
875 |
876 | Returns:
877 | bool: Whether the model is valid.
878 | """
879 | self._errors = []
880 | self._validate_robot(self.robot)
881 |
882 | if validation_fn is None:
883 | validation_fn = validation_handler_strict
884 |
885 | return validation_fn(self._errors)
886 |
887 | def _create_maps(self):
888 | self._material_map = {}
889 | for m in self.robot.materials:
890 | self._material_map[m.name] = m
891 |
892 | self._joint_map = {}
893 | for j in self.robot.joints:
894 | self._joint_map[j.name] = j
895 |
896 | self._link_map = {}
897 | for l in self.robot.links:
898 | self._link_map[l.name] = l
899 |
900 | def _update_actuated_joints(self):
901 | self._actuated_joints = []
902 | self._actuated_joint_indices = []
903 | self._actuated_dof_indices = []
904 |
905 | dof_indices_cnt = 0
906 | for i, j in enumerate(self.robot.joints):
907 | if j.mimic is None and j.type != "fixed":
908 | self._actuated_joints.append(j)
909 | self._actuated_joint_indices.append(i)
910 |
911 | if j.type in ["prismatic", "revolute", "continuous"]:
912 | self._actuated_dof_indices.append([dof_indices_cnt])
913 | dof_indices_cnt += 1
914 | elif j.type == "floating":
915 | self._actuated_dof_indices.append(
916 | [dof_indices_cnt, dof_indices_cnt + 1, dof_indices_cnt + 2]
917 | )
918 | dof_indices_cnt += 3
919 | elif j.type == "planar":
920 | self._actuated_dof_indices.append(
921 | [dof_indices_cnt, dof_indices_cnt + 1]
922 | )
923 | dof_indices_cnt += 2
924 |
925 | def _validate_required_attribute(self, attribute, error_msg, allowed_values=None):
926 | if attribute is None:
927 | self._errors.append(URDFIncompleteError(error_msg))
928 | elif isinstance(attribute, str) and len(attribute) == 0:
929 | self._errors.append(URDFIncompleteError(error_msg))
930 |
931 | if allowed_values is not None and attribute is not None:
932 | if attribute not in allowed_values:
933 | self._errors.append(URDFAttributeValueError(error_msg))
934 |
935 | @staticmethod
936 | def load(fname_or_file, **kwargs):
937 | """Load URDF file from filename or file object.
938 |
939 | Args:
940 | fname_or_file (str or file object): A filename or file object, file-like object, stream representing the URDF file.
941 | **build_scene_graph (bool, optional): Whether to build a scene graph to enable transformation queries and forward kinematics. Defaults to True.
942 | **build_collision_scene_graph (bool, optional): Whether to build a scene graph for elements. Defaults to False.
943 | **load_meshes (bool, optional): Whether to load the meshes referenced in the elements. Defaults to True.
944 | **load_collision_meshes (bool, optional): Whether to load the collision meshes referenced in the elements. Defaults to False.
945 | **filename_handler ([type], optional): Any function f(in: str) -> str, that maps filenames in the URDF to actual resources. Can be used to customize treatment of `package://` directives or relative/absolute filenames. Defaults to None.
946 | **mesh_dir (str, optional): A root directory used for loading meshes. Defaults to "".
947 | **force_mesh (bool, optional): Each loaded geometry will be concatenated into a single one (instead of being turned into a graph; in case the underlying file contains multiple geometries). This might loose texture information but the resulting scene graph will be smaller. Defaults to False.
948 | **force_collision_mesh (bool, optional): Same as force_mesh, but for collision scene. Defaults to True.
949 |
950 | Raises:
951 | ValueError: If filename does not exist.
952 |
953 | Returns:
954 | yourdfpy.URDF: URDF model.
955 | """
956 | if isinstance(fname_or_file, six.string_types):
957 | if not os.path.isfile(fname_or_file):
958 | raise ValueError("{} is not a file".format(fname_or_file))
959 |
960 | if not "mesh_dir" in kwargs:
961 | kwargs["mesh_dir"] = os.path.dirname(fname_or_file)
962 |
963 | try:
964 | parser = etree.XMLParser(remove_blank_text=True)
965 | tree = etree.parse(fname_or_file, parser=parser)
966 | xml_root = tree.getroot()
967 | except Exception as e:
968 | _logger.error(e)
969 | _logger.error("Using different parsing approach.")
970 |
971 | events = ("start", "end", "start-ns", "end-ns")
972 | xml = etree.iterparse(fname_or_file, recover=True, events=events)
973 |
974 | # Iterate through all XML elements
975 | for action, elem in xml:
976 | # Skip comments and processing instructions,
977 | # because they do not have names
978 | if not (
979 | isinstance(elem, etree._Comment)
980 | or isinstance(elem, etree._ProcessingInstruction)
981 | ):
982 | # Remove a namespace URI in the element's name
983 | # elem.tag = etree.QName(elem).localname
984 | if action == "end" and ":" in elem.tag:
985 | elem.getparent().remove(elem)
986 |
987 | xml_root = xml.root
988 |
989 | # Remove comments
990 | etree.strip_tags(xml_root, etree.Comment)
991 | etree.cleanup_namespaces(xml_root)
992 |
993 | return URDF(robot=URDF._parse_robot(xml_element=xml_root), **kwargs)
994 |
995 | def contains(self, key, value, element=None) -> bool:
996 | """Checks recursively whether the URDF tree contains the provided key-value pair.
997 |
998 | Args:
999 | key (str): A key.
1000 | value (str): A value.
1001 | element (etree.Element, optional): The XML element from which to start the recursive search. None means URDF root. Defaults to None.
1002 |
1003 | Returns:
1004 | bool: Whether the key-value pair was found.
1005 | """
1006 | if element is None:
1007 | element = self.robot
1008 |
1009 | result = False
1010 | for field in element.__dataclass_fields__:
1011 | field_value = getattr(element, field)
1012 | if is_dataclass(field_value):
1013 | result = result or self.contains(
1014 | key=key, value=value, element=field_value
1015 | )
1016 | elif (
1017 | isinstance(field_value, list)
1018 | and len(field_value) > 0
1019 | and is_dataclass(field_value[0])
1020 | ):
1021 | for field_value_element in field_value:
1022 | result = result or self.contains(
1023 | key=key, value=value, element=field_value_element
1024 | )
1025 | else:
1026 | if key == field and value == field_value:
1027 | result = True
1028 | return result
1029 |
1030 | def _determine_base_link(self):
1031 | """Get the base link of the URDF tree by extracting all links without parents.
1032 | In case multiple links could be root, choose the first.
1033 |
1034 | Returns:
1035 | str: Name of the base link.
1036 | """
1037 | link_names = [l.name for l in self.robot.links]
1038 |
1039 | for j in self.robot.joints:
1040 | link_names.remove(j.child)
1041 |
1042 | if len(link_names) == 0:
1043 | # raise Error?
1044 | return None
1045 |
1046 | return link_names[0]
1047 |
    def _forward_kinematics_joint(self, joint, q=None):
        """Compute the child-w.r.t.-parent transform for a single joint.

        Args:
            joint (Joint): Joint to evaluate.
            q (float, optional): Configuration value. Ignored for mimic joints
                (their value is derived from the mimicked joint). None means:
                use the internal configuration vector. Defaults to None.

        Returns:
            tuple: ((4, 4) float, q) — homogeneous transform from parent to
                child frame, and the configuration value actually used.
        """
        # A missing <origin> element is equivalent to the identity transform.
        origin = np.eye(4) if joint.origin is None else joint.origin

        # Mimic joints derive their value from the mimicked joint:
        # q = q_mimicked * multiplier + offset.
        if joint.mimic is not None:
            if joint.mimic.joint in self.actuated_joint_names:
                mimic_joint_index = self.actuated_joint_names.index(joint.mimic.joint)
                # NOTE(review): indexes _cfg with the joint index, not the
                # joint's DOF indices — only equivalent for 1-DOF joints.
                q = (
                    self._cfg[mimic_joint_index] * joint.mimic.multiplier
                    + joint.mimic.offset
                )
            else:
                _logger.warning(
                    f"Joint '{joint.name}' is supposed to mimic '{joint.mimic.joint}'. But this joint is not actuated - will assume (0.0 + offset)."
                )
                q = 0.0 + joint.mimic.offset

        if joint.type in ["revolute", "prismatic", "continuous"]:
            if q is None:
                # Use internal cfg vector for forward kinematics
                q = self.cfg[
                    self.actuated_dof_indices[
                        self.actuated_joint_names.index(joint.name)
                    ]
                ]

            if joint.type == "prismatic":
                # Translation along the joint axis.
                matrix = origin @ tra.translation_matrix(q * joint.axis)
            else:
                # Rotation about the joint axis (revolute and continuous).
                matrix = origin @ tra.rotation_matrix(float(q), joint.axis)
        else:
            # this includes: floating, planar, fixed
            matrix = origin

        return matrix, q
1082 |
    def update_cfg(self, configuration):
        """Update joint configuration of URDF; does forward kinematics.

        Args:
            configuration (dict, list[float], tuple[float] or np.ndarray): A mapping from joints or joint names to configuration values, or a list containing a value for each actuated joint.

        Raises:
            ValueError: Raised if dimensionality of configuration does not match number of actuated joints of URDF model.
            TypeError: Raised if configuration is neither a dict, list, tuple or np.ndarray.
        """
        # List of (Joint, value) pairs to apply.
        joint_cfg = []

        if isinstance(configuration, dict):
            for joint in configuration:
                if isinstance(joint, six.string_types):
                    joint_cfg.append((self._joint_map[joint], configuration[joint]))
                elif isinstance(joint, Joint):
                    # TODO: Joint is not hashable; so this branch will not succeed
                    joint_cfg.append((joint, configuration[joint]))
        elif isinstance(configuration, (list, tuple, np.ndarray)):
            # A sequence is interpreted either as one value per joint (all
            # joints, declaration order) or one value per actuated joint.
            if len(configuration) == len(self.robot.joints):
                for joint, value in zip(self.robot.joints, configuration):
                    joint_cfg.append((joint, value))
            elif len(configuration) == self.num_actuated_joints:
                for joint, value in zip(self._actuated_joints, configuration):
                    joint_cfg.append((joint, value))
            else:
                raise ValueError(
                    f"Dimensionality of configuration ({len(configuration)}) doesn't match number of all ({len(self.robot.joints)}) or actuated joints ({self.num_actuated_joints})."
                )
        else:
            raise TypeError("Invalid type for configuration")

        # append all mimic joints in the update
        # (the 0.0 is a placeholder; _forward_kinematics_joint recomputes
        # the actual value of a mimic joint from the mimicked joint)
        for j, q in joint_cfg + [
            (j, 0.0) for j in self.robot.joints if j.mimic is not None
        ]:
            matrix, joint_q = self._forward_kinematics_joint(j, q=q)

            # update internal configuration vector - only consider actuated joints
            if j.name in self.actuated_joint_names:
                self._cfg[
                    self.actuated_dof_indices[self.actuated_joint_names.index(j.name)]
                ] = joint_q

            # Propagate the new child-w.r.t.-parent transform into the scene
            # graph(s) so subsequent transform queries are up to date.
            if self._scene is not None:
                self._scene.graph.update(
                    frame_from=j.parent, frame_to=j.child, matrix=matrix
                )
            if self._scene_collision is not None:
                self._scene_collision.graph.update(
                    frame_from=j.parent, frame_to=j.child, matrix=matrix
                )
1136 |
1137 | def get_transform(self, frame_to, frame_from=None, collision_geometry=False):
1138 | """Get the transform from one frame to another.
1139 |
1140 | Args:
1141 | frame_to (str): Node name.
1142 | frame_from (str, optional): Node name. If None it will be set to self.base_frame. Defaults to None.
1143 | collision_geometry (bool, optional): Whether to use the collision geometry scene graph (instead of the visual geometry). Defaults to False.
1144 |
1145 | Raises:
1146 | ValueError: Raised if scene graph wasn't constructed during intialization.
1147 |
1148 | Returns:
1149 | (4, 4) float: Homogeneous transformation matrix
1150 | """
1151 | if collision_geometry:
1152 | if self._scene_collision is None:
1153 | raise ValueError(
1154 | "No collision scene available. Use build_collision_scene_graph=True during loading."
1155 | )
1156 | else:
1157 | return self._scene_collision.graph.get(
1158 | frame_to=frame_to, frame_from=frame_from
1159 | )[0]
1160 | else:
1161 | if self._scene is None:
1162 | raise ValueError(
1163 | "No scene available. Use build_scene_graph=True during loading."
1164 | )
1165 | else:
1166 | return self._scene.graph.get(frame_to=frame_to, frame_from=frame_from)[
1167 | 0
1168 | ]
1169 |
    def _link_mesh(self, link, collision_geometry=True):
        """Concatenate all geometries of a link into a single mesh.

        Args:
            link (Link): Link whose geometries are combined.
            collision_geometry (bool, optional): Whether to use the link's collision geometries instead of its visual ones. Defaults to True.

        Returns:
            trimesh.Trimesh: Concatenation of all meshes of the link, each transformed by its geometry's origin (with mesh scale baked in), or None if there are no geometries/meshes.
        """
        geometries = link.collisions if collision_geometry else link.visuals

        if len(geometries) == 0:
            return None

        meshes = []
        for g in geometries:
            for m in g.geometry.meshes:
                # Copy before transforming so the cached mesh stays untouched.
                m = m.copy()
                pose = g.origin
                if g.geometry.mesh is not None:
                    if g.geometry.mesh.scale is not None:
                        # Bake the mesh scale into the pose as a diagonal scaling.
                        S = np.eye(4)
                        S[:3, :3] = np.diag(g.geometry.mesh.scale)
                        pose = pose.dot(S)
                m.apply_transform(pose)
                meshes.append(m)
        if len(meshes) == 0:
            return None
        # Concatenate via trimesh's `+` operator.
        # NOTE(review): result is cached on self._collision_mesh even when
        # collision_geometry=False — confirm this is intended.
        self._collision_mesh = meshes[0] + meshes[1:]
        return self._collision_mesh
1192 |
    def _geometry2trimeshscene(self, geometry, load_file, force_mesh, skip_materials):
        """Turn a URDF geometry element into a trimesh scene.

        Args:
            geometry (Geometry): Geometry element (box, sphere, cylinder, or mesh).
            load_file (bool): Whether to load mesh files from disk (only relevant for mesh geometries).
            force_mesh (bool): Load a mesh file as a single concatenated mesh instead of a scene graph.
            skip_materials (bool): Do not load material/texture information.

        Returns:
            trimesh.Scene: Scene containing the geometry, or None if nothing could be created (e.g. missing file).
        """
        new_s = None
        if geometry.box is not None:
            new_s = trimesh.primitives.Box(extents=geometry.box.size).scene()
        elif geometry.sphere is not None:
            new_s = trimesh.primitives.Sphere(radius=geometry.sphere.radius).scene()
        elif geometry.cylinder is not None:
            new_s = trimesh.primitives.Cylinder(
                radius=geometry.cylinder.radius, height=geometry.cylinder.length
            ).scene()
        elif geometry.mesh is not None and load_file:
            # Resolve the URDF filename (e.g. package:// URI) to a real path.
            new_filename = self._filename_handler(fname=geometry.mesh.filename)

            if os.path.isfile(new_filename):
                _logger.debug(f"Loading {geometry.mesh.filename} as {new_filename}")

                if force_mesh:
                    # Load as one concatenated mesh, then wrap in a scene.
                    new_g = trimesh.load(
                        new_filename,
                        ignore_broken=True,
                        force="mesh",
                        skip_materials=skip_materials,
                    )

                    # add original filename
                    if "file_path" not in new_g.metadata:
                        new_g.metadata["file_path"] = os.path.abspath(new_filename)
                        new_g.metadata["file_name"] = os.path.basename(new_filename)

                    new_s = trimesh.Scene()
                    new_s.add_geometry(new_g)
                else:
                    new_s = trimesh.load(
                        new_filename,
                        ignore_broken=True,
                        force="scene",
                        skip_materials=skip_materials,
                    )

                    # Propagate file provenance to each geometry in the scene.
                    if "file_path" in new_s.metadata:
                        for i, (_, geom) in enumerate(new_s.geometry.items()):
                            if "file_path" not in geom.metadata:
                                geom.metadata["file_path"] = new_s.metadata["file_path"]
                                geom.metadata["file_name"] = new_s.metadata["file_name"]
                                geom.metadata["file_element"] = i

                # scale mesh appropriately
                if geometry.mesh.scale is not None:
                    if isinstance(geometry.mesh.scale, float):
                        new_s = new_s.scaled(geometry.mesh.scale)
                    elif isinstance(geometry.mesh.scale, np.ndarray):
                        new_s = new_s.scaled(geometry.mesh.scale)
                    else:
                        _logger.warning(
                            f"Warning: Can't interpret scale '{geometry.mesh.scale}'"
                        )
            else:
                _logger.warning(f"Can't find {new_filename}")
        return new_s
1252 |
    def _add_geometries_to_scene(
        self,
        s,
        geometries,
        link_name,
        load_geometry,
        force_mesh,
        force_single_geometry,
        skip_materials,
    ):
        """Add the visual or collision geometries of one link to a scene.

        Args:
            s (trimesh.Scene): Scene to which geometries are added.
            geometries (list): Visual or Collision elements of the link.
            link_name (str): Name of the link; used as parent node name.
            load_geometry (bool): Whether to load mesh files from disk.
            force_mesh (bool): Load each mesh file as a single mesh.
            force_single_geometry (bool): Concatenate all geometries of the link into a single scene geometry.
            skip_materials (bool): Do not load material/texture information.
        """
        if force_single_geometry:
            # Collect everything in a temporary scene first; it is dumped
            # into one concatenated geometry at the end.
            tmp_scene = trimesh.Scene(base_frame=link_name)

        # Name of the first geometry; reused as the name of the concatenated
        # geometry in the force_single_geometry case.
        first_geom_name = None

        for v in geometries:
            if v.geometry is not None:
                if first_geom_name is None:
                    first_geom_name = v.name

                new_s = self._geometry2trimeshscene(
                    geometry=v.geometry,
                    load_file=load_geometry,
                    force_mesh=force_mesh,
                    skip_materials=skip_materials,
                )
                if new_s is not None:
                    # A missing <origin> is the identity transform.
                    origin = v.origin if v.origin is not None else np.eye(4)

                    if force_single_geometry:
                        for name in new_s.graph.nodes_geometry:
                            T, geom_name = new_s.graph.get(name)
                            geom = new_s.geometry[geom_name]

                            if isinstance(v, Visual):
                                apply_visual_color(geom, v, self._material_map)
                            tmp_scene.add_geometry(
                                geometry=geom,
                                geom_name=v.name,
                                parent_node_name=link_name,
                                transform=origin @ T,
                            )
                    else:
                        for name in new_s.graph.nodes_geometry:
                            T, geom_name = new_s.graph.get(name)
                            geom = new_s.geometry[geom_name]

                            if isinstance(v, Visual):
                                apply_visual_color(geom, v, self._material_map)
                            s.add_geometry(
                                geometry=geom,
                                geom_name=v.name,
                                parent_node_name=link_name,
                                transform=origin @ T,
                            )

        if force_single_geometry and len(tmp_scene.geometry) > 0:
            # Concatenate the temporary scene into a single geometry
            # attached directly to the link frame.
            s.add_geometry(
                geometry=tmp_scene.dump(concatenate=True),
                geom_name=first_geom_name,
                parent_node_name=link_name,
                transform=np.eye(4),
            )
1316 |
    def _create_scene(
        self,
        use_collision_geometry=False,
        load_geometry=True,
        force_mesh=False,
        force_single_geometry_per_link=False,
    ):
        """Build a trimesh scene graph from the URDF kinematic structure.

        Args:
            use_collision_geometry (bool, optional): Use <collision> instead of <visual> elements. Defaults to False.
            load_geometry (bool, optional): Whether to load referenced mesh files. Defaults to True.
            force_mesh (bool, optional): Load each mesh file as a single mesh. Defaults to False.
            force_single_geometry_per_link (bool, optional): Concatenate all geometries of a link into one. Defaults to False.

        Returns:
            trimesh.Scene: Scene with one frame per link, connected by joint transforms.
        """
        s = trimesh.scene.Scene(base_frame=self._base_link)

        # Wire up the kinematic tree using the current joint configuration.
        for j in self.robot.joints:
            matrix, _ = self._forward_kinematics_joint(j)

            s.graph.update(frame_from=j.parent, frame_to=j.child, matrix=matrix)

        for l in self.robot.links:
            # Attach orphan links to the base frame so every link has a node.
            if l.name not in s.graph.nodes and l.name != s.graph.base_frame:
                _logger.warning(
                    f"{l.name} not connected via joints. Will add link to base frame."
                )
                s.graph.update(frame_from=s.graph.base_frame, frame_to=l.name)

            meshes = l.collisions if use_collision_geometry else l.visuals
            self._add_geometries_to_scene(
                s,
                geometries=meshes,
                link_name=l.name,
                load_geometry=load_geometry,
                force_mesh=force_mesh,
                force_single_geometry=force_single_geometry_per_link,
                skip_materials=use_collision_geometry,
            )

        return s
1350 |
1351 | def _successors(self, node):
1352 | """
1353 | Get all nodes of the scene that succeed a specified node.
1354 |
1355 | Parameters
1356 | ------------
1357 | node : any
1358 | Hashable key in `scene.graph`
1359 |
1360 | Returns
1361 | -----------
1362 | subnodes : set[str]
1363 | Set of nodes.
1364 | """
1365 | # get every node that is a successor to specified node
1366 | # this includes `node`
1367 | return self._scene.graph.transforms.successors(node)
1368 |
1369 | def _create_subrobot(self, robot_name, root_link_name):
1370 | subrobot = Robot(name=robot_name)
1371 | subnodes = self._successors(node=root_link_name)
1372 |
1373 | if len(subnodes) > 0:
1374 | for node in subnodes:
1375 | if node in self.link_map:
1376 | subrobot.links.append(copy.deepcopy(self.link_map[node]))
1377 | for joint_name, joint in self.joint_map.items():
1378 | if joint.parent in subnodes and joint.child in subnodes:
1379 | subrobot.joints.append(copy.deepcopy(self.joint_map[joint_name]))
1380 |
1381 | return subrobot
1382 |
    def split_along_joints(self, joint_type="floating", **kwargs):
        """Split URDF model along a particular joint type.
        The result is a set of URDF models which together compose the original URDF.

        Args:
            joint_type (str, or list[str], optional): Type of joint to use for splitting. Defaults to "floating".
            **kwargs: Arguments delegated to URDF constructor of new URDF models.

        Returns:
            list[(np.ndarray, yourdfpy.URDF)]: A list of tuples (np.ndarray, yourdfpy.URDF) where each homogeneous 4x4 matrix describes the root transformation of the respective URDF model w.r.t. the original URDF.
        """
        # Working copy that will end up as the remainder of the original
        # model after all subtrees have been removed.
        root_urdf = URDF(
            robot=copy.deepcopy(self.robot), build_scene_graph=False, load_meshes=False
        )
        result = []

        joint_types = joint_type if isinstance(joint_type, list) else [joint_type]

        # find all relevant joints
        joint_names = [j.name for j in self.robot.joints if j.type in joint_types]
        for joint_name in joint_names:
            # The subtree rooted at this joint's child becomes its own URDF.
            root_link = self.link_map[self.joint_map[joint_name].child]
            new_robot = self._create_subrobot(
                robot_name=root_link.name,
                root_link_name=root_link.name,
            )

            result.append(
                (
                    # Pose of the subtree root w.r.t. the original model.
                    self._scene.graph.get(root_link.name)[0],
                    URDF(robot=new_robot, **kwargs),
                )
            )

            # remove links and joints from root robot
            for j in new_robot.joints:
                root_urdf.robot.joints.remove(root_urdf.joint_map[j.name])
            for l in new_robot.links:
                root_urdf.robot.links.remove(root_urdf.link_map[l.name])

            # remove joint that connects root urdf to root_link
            if root_link.name in [j.child for j in root_urdf.robot.joints]:
                root_urdf.robot.joints.remove(
                    root_urdf.robot.joints[
                        [j.child for j in root_urdf.robot.joints].index(root_link.name)
                    ]
                )

        # The remainder of the original model goes first, at identity.
        result.insert(0, (np.eye(4), URDF(robot=root_urdf.robot, **kwargs)))

        return result
1434 |
1435 | def validate_filenames(self):
1436 | for l in self.robot.links:
1437 | meshes = [
1438 | m.geometry.mesh
1439 | for m in l.collisions + l.visuals
1440 | if m.geometry.mesh is not None
1441 | ]
1442 | for m in meshes:
1443 | _logger.debug(m.filename, "-->", self._filename_handler(m.filename))
1444 | if not os.path.isfile(self._filename_handler(m.filename)):
1445 | return False
1446 | return True
1447 |
1448 | def write_xml(self):
1449 | """Write URDF model to an XML element hierarchy.
1450 |
1451 | Returns:
1452 | etree.ElementTree: XML data.
1453 | """
1454 | xml_element = self._write_robot(self.robot)
1455 | return etree.ElementTree(xml_element)
1456 |
1457 | def write_xml_string(self, **kwargs):
1458 | """Write URDF model to a string.
1459 |
1460 | Returns:
1461 | str: String of the xml representation of the URDF model.
1462 | """
1463 | xml_element = self.write_xml()
1464 | return etree.tostring(xml_element, xml_declaration=True, *kwargs)
1465 |
1466 | def write_xml_file(self, fname):
1467 | """Write URDF model to an xml file.
1468 |
1469 | Args:
1470 | fname (str): Filename of the file to be written. Usually ends in `.urdf`.
1471 | """
1472 | xml_element = self.write_xml()
1473 | xml_element.write(fname, xml_declaration=True, pretty_print=True)
1474 |
1475 | def _parse_mimic(xml_element):
1476 | if xml_element is None:
1477 | return None
1478 |
1479 | return Mimic(
1480 | joint=xml_element.get("joint"),
1481 | multiplier=_str2float(xml_element.get("multiplier", 1.0)),
1482 | offset=_str2float(xml_element.get("offset", 0.0)),
1483 | )
1484 |
1485 | def _write_mimic(self, xml_parent, mimic):
1486 | etree.SubElement(
1487 | xml_parent,
1488 | "mimic",
1489 | attrib={
1490 | "joint": mimic.joint,
1491 | "multiplier": str(mimic.multiplier),
1492 | "offset": str(mimic.offset),
1493 | },
1494 | )
1495 |
1496 | def _parse_safety_controller(xml_element):
1497 | if xml_element is None:
1498 | return None
1499 |
1500 | return SafetyController(
1501 | soft_lower_limit=_str2float(xml_element.get("soft_lower_limit")),
1502 | soft_upper_limit=_str2float(xml_element.get("soft_upper_limit")),
1503 | k_position=_str2float(xml_element.get("k_position")),
1504 | k_velocity=_str2float(xml_element.get("k_velocity")),
1505 | )
1506 |
1507 | def _write_safety_controller(self, xml_parent, safety_controller):
1508 | etree.SubElement(
1509 | xml_parent,
1510 | "safety_controller",
1511 | attrib={
1512 | "soft_lower_limit": str(safety_controller.soft_lower_limit),
1513 | "soft_upper_limit": str(safety_controller.soft_upper_limit),
1514 | "k_position": str(safety_controller.k_position),
1515 | "k_velocity": str(safety_controller.k_velocity),
1516 | },
1517 | )
1518 |
1519 | def _parse_transmission_joint(xml_element):
1520 | if xml_element is None:
1521 | return None
1522 |
1523 | transmission_joint = TransmissionJoint(name=xml_element.get("name"))
1524 |
1525 | for h in xml_element.findall("hardware_interface"):
1526 | transmission_joint.hardware_interfaces.append(h.text)
1527 |
1528 | return transmission_joint
1529 |
1530 | def _write_transmission_joint(self, xml_parent, transmission_joint):
1531 | xml_element = etree.SubElement(
1532 | xml_parent,
1533 | "joint",
1534 | attrib={
1535 | "name": str(transmission_joint.name),
1536 | },
1537 | )
1538 | for h in transmission_joint.hardware_interfaces:
1539 | tmp = etree.SubElement(
1540 | xml_element,
1541 | "hardwareInterface",
1542 | )
1543 | tmp.text = h
1544 |
1545 | def _parse_actuator(xml_element):
1546 | if xml_element is None:
1547 | return None
1548 |
1549 | actuator = Actuator(name=xml_element.get("name"))
1550 | if xml_element.find("mechanicalReduction"):
1551 | actuator.mechanical_reduction = float(
1552 | xml_element.find("mechanicalReduction").text
1553 | )
1554 |
1555 | for h in xml_element.findall("hardwareInterface"):
1556 | actuator.hardware_interfaces.append(h.text)
1557 |
1558 | return actuator
1559 |
1560 | def _write_actuator(self, xml_parent, actuator):
1561 | xml_element = etree.SubElement(
1562 | xml_parent,
1563 | "actuator",
1564 | attrib={
1565 | "name": str(actuator.name),
1566 | },
1567 | )
1568 | if actuator.mechanical_reduction is not None:
1569 | tmp = etree.SubElement("mechanicalReduction")
1570 | tmp.text = str(actuator.mechanical_reduction)
1571 |
1572 | for h in actuator.hardware_interfaces:
1573 | tmp = etree.SubElement(
1574 | xml_element,
1575 | "hardwareInterface",
1576 | )
1577 | tmp.text = h
1578 |
1579 | def _parse_transmission(xml_element):
1580 | if xml_element is None:
1581 | return None
1582 |
1583 | transmission = Transmission(name=xml_element.get("name"))
1584 |
1585 | for j in xml_element.findall("joint"):
1586 | transmission.joints.append(URDF._parse_transmission_joint(j))
1587 | for a in xml_element.findall("actuator"):
1588 | transmission.actuators.append(URDF._parse_actuator(a))
1589 |
1590 | return transmission
1591 |
1592 | def _write_transmission(self, xml_parent, transmission):
1593 | xml_element = etree.SubElement(
1594 | xml_parent,
1595 | "transmission",
1596 | attrib={
1597 | "name": str(transmission.name),
1598 | },
1599 | )
1600 |
1601 | for j in transmission.joints:
1602 | self._write_transmission_joint(xml_element, j)
1603 |
1604 | for a in transmission.actuators:
1605 | self._write_actuator(xml_element, a)
1606 |
1607 | def _parse_calibration(xml_element):
1608 | if xml_element is None:
1609 | return None
1610 |
1611 | return Calibration(
1612 | rising=_str2float(xml_element.get("rising")),
1613 | falling=_str2float(xml_element.get("falling")),
1614 | )
1615 |
1616 | def _write_calibration(self, xml_parent, calibration):
1617 | etree.SubElement(
1618 | xml_parent,
1619 | "calibration",
1620 | attrib={
1621 | "rising": str(calibration.rising),
1622 | "falling": str(calibration.falling),
1623 | },
1624 | )
1625 |
def _parse_box(xml_element):
    """Deserialize a <box> element; commas are accepted as size separators."""
    values = xml_element.attrib["size"].replace(",", " ").split()
    return Box(size=np.array(values, dtype=np.float64))
1630 |
1631 | def _write_box(self, xml_parent, box):
1632 | etree.SubElement(
1633 | xml_parent, "box", attrib={"size": " ".join(map(str, box.size))}
1634 | )
1635 |
def _parse_cylinder(xml_element):
    """Deserialize a <cylinder> element into a Cylinder object."""
    radius = float(xml_element.attrib["radius"])
    length = float(xml_element.attrib["length"])
    return Cylinder(radius=radius, length=length)
1641 |
1642 | def _write_cylinder(self, xml_parent, cylinder):
1643 | etree.SubElement(
1644 | xml_parent,
1645 | "cylinder",
1646 | attrib={"radius": str(cylinder.radius), "length": str(cylinder.length)},
1647 | )
1648 |
def _parse_sphere(xml_element):
    """Deserialize a <sphere> element into a Sphere object."""
    radius = float(xml_element.attrib["radius"])
    return Sphere(radius=radius)
1651 |
1652 | def _write_sphere(self, xml_parent, sphere):
1653 | etree.SubElement(xml_parent, "sphere", attrib={"radius": str(sphere.radius)})
1654 |
1655 | def _parse_scale(xml_element):
1656 | if "scale" in xml_element.attrib:
1657 | # In case the element uses comma as a separator
1658 | s = xml_element.get("scale").replace(',', ' ').split()
1659 | if len(s) == 0:
1660 | return None
1661 | elif len(s) == 1:
1662 | return float(s[0])
1663 | else:
1664 | return np.array(list(map(float, s)))
1665 | return None
1666 |
1667 | def _write_scale(self, xml_parent, scale):
1668 | if scale is not None:
1669 | if isinstance(scale, float) or isinstance(scale, int):
1670 | xml_parent.set("scale", " ".join([str(scale)] * 3))
1671 | else:
1672 | xml_parent.set("scale", " ".join(map(str, scale)))
1673 |
def _parse_mesh(xml_element):
    """Deserialize a <mesh> element (filename plus optional scale)."""
    scale = URDF._parse_scale(xml_element)
    return Mesh(filename=xml_element.get("filename"), scale=scale)
1678 |
1679 | def _write_mesh(self, xml_parent, mesh):
1680 | # TODO: turn into different filename handler
1681 | xml_element = etree.SubElement(
1682 | xml_parent,
1683 | "mesh",
1684 | attrib={"filename": self._filename_handler(mesh.filename)},
1685 | )
1686 |
1687 | self._write_scale(xml_element, mesh.scale)
1688 |
def _parse_geometry(xml_element):
    """Deserialize a <geometry> element by dispatching on its first child's tag.

    Raises ValueError for an unrecognized shape tag.
    """
    child = xml_element[0]
    geometry = Geometry()
    if child.tag == "box":
        geometry.box = URDF._parse_box(child)
    elif child.tag == "cylinder":
        geometry.cylinder = URDF._parse_cylinder(child)
    elif child.tag == "sphere":
        geometry.sphere = URDF._parse_sphere(child)
    elif child.tag == "mesh":
        geometry.mesh = URDF._parse_mesh(child)
    else:
        raise ValueError(f"Unknown tag: {child.tag}")

    return geometry
1703 |
def _validate_geometry(self, geometry):
    """Record validation errors for a geometry; exactly one shape must be set."""
    if geometry is None:
        self._errors.append(URDFIncompleteError(" is missing."))
        # BUG FIX: return early — the attribute checks below would raise
        # AttributeError when geometry is None.
        return

    # Count how many of the mutually exclusive shape children are set.
    num_shapes = sum(
        x is not None
        for x in (geometry.box, geometry.cylinder, geometry.sphere, geometry.mesh)
    )
    if num_shapes < 1:
        self._errors.append(
            URDFIncompleteError(
                "One of , , , needs to be defined as a child of ."
            )
        )
    elif num_shapes > 1:
        self._errors.append(
            URDFError(
                "Too many of , , , defined as a child of . Only one allowed."
            )
        )
1731 |
1732 | def _write_geometry(self, xml_parent, geometry):
1733 | if geometry is None:
1734 | return
1735 |
1736 | xml_element = etree.SubElement(xml_parent, "geometry")
1737 | if geometry.box is not None:
1738 | self._write_box(xml_element, geometry.box)
1739 | elif geometry.cylinder is not None:
1740 | self._write_cylinder(xml_element, geometry.cylinder)
1741 | elif geometry.sphere is not None:
1742 | self._write_sphere(xml_element, geometry.sphere)
1743 | elif geometry.mesh is not None:
1744 | self._write_mesh(xml_element, geometry.mesh)
1745 |
def _parse_origin(xml_element):
    """Deserialize an <origin> element into a 4x4 homogeneous matrix.

    Returns None if the element is absent; xyz/rpy default to zeros.
    """
    if xml_element is None:
        return None

    translation = [float(v) for v in xml_element.get("xyz", default="0 0 0").split()]
    rotation = [float(v) for v in xml_element.get("rpy", default="0 0 0").split()]
    return tra.compose_matrix(
        translate=np.array(translation),
        angles=np.array(rotation),
    )
1757 |
1758 | def _write_origin(self, xml_parent, origin):
1759 | if origin is None:
1760 | return
1761 |
1762 | etree.SubElement(
1763 | xml_parent,
1764 | "origin",
1765 | attrib={
1766 | "xyz": " ".join(map(str, tra.translation_from_matrix(origin))),
1767 | "rpy": " ".join(map(str, tra.euler_from_matrix(origin))),
1768 | },
1769 | )
1770 |
def _parse_color(xml_element):
    """Deserialize a <color> element; rgba defaults to opaque white."""
    if xml_element is None:
        return None

    rgba = [float(v) for v in xml_element.get("rgba", default="1 1 1 1").split()]
    return Color(rgba=np.array(rgba))
1778 |
1779 | def _write_color(self, xml_parent, color):
1780 | if color is None:
1781 | return
1782 |
1783 | etree.SubElement(
1784 | xml_parent, "color", attrib={"rgba": " ".join(map(str, color.rgba))}
1785 | )
1786 |
def _parse_texture(xml_element):
    """Deserialize a <texture> element; returns None if absent."""
    if xml_element is None:
        return None

    # TODO: use texture filename handler
    filename = xml_element.get("filename", default=None)
    return Texture(filename=filename)
1793 |
1794 | def _write_texture(self, xml_parent, texture):
1795 | if texture is None:
1796 | return
1797 |
1798 | # TODO: use texture filename handler
1799 | etree.SubElement(xml_parent, "texture", attrib={"filename": texture.filename})
1800 |
def _parse_material(xml_element):
    """Deserialize a <material> element (name plus optional color/texture)."""
    if xml_element is None:
        return None

    result = Material(name=xml_element.get("name"))
    result.color = URDF._parse_color(xml_element.find("color"))
    result.texture = URDF._parse_texture(xml_element.find("texture"))
    return result
1810 |
1811 | def _write_material(self, xml_parent, material):
1812 | if material is None:
1813 | return
1814 |
1815 | attrib = {"name": material.name} if material.name is not None else {}
1816 | xml_element = etree.SubElement(
1817 | xml_parent,
1818 | "material",
1819 | attrib=attrib,
1820 | )
1821 |
1822 | self._write_color(xml_element, material.color)
1823 | self._write_texture(xml_element, material.texture)
1824 |
def _parse_visual(xml_element):
    """Deserialize a <visual> element (geometry, origin, material)."""
    result = Visual(name=xml_element.get("name"))
    result.geometry = URDF._parse_geometry(xml_element.find("geometry"))
    result.origin = URDF._parse_origin(xml_element.find("origin"))
    result.material = URDF._parse_material(xml_element.find("material"))
    return result
1833 |
def _validate_visual(self, visual):
    """Record validation errors for a <visual> element's geometry."""
    self._validate_geometry(visual.geometry)
1836 |
1837 | def _write_visual(self, xml_parent, visual):
1838 | attrib = {"name": visual.name} if visual.name is not None else {}
1839 | xml_element = etree.SubElement(
1840 | xml_parent,
1841 | "visual",
1842 | attrib=attrib,
1843 | )
1844 |
1845 | self._write_geometry(xml_element, visual.geometry)
1846 | self._write_origin(xml_element, visual.origin)
1847 | self._write_material(xml_element, visual.material)
1848 |
def _parse_collision(xml_element):
    """Deserialize a <collision> element (geometry and origin)."""
    result = Collision(name=xml_element.get("name"))
    result.geometry = URDF._parse_geometry(xml_element.find("geometry"))
    result.origin = URDF._parse_origin(xml_element.find("origin"))
    return result
1856 |
def _validate_collision(self, collision):
    """Record validation errors for a <collision> element's geometry."""
    self._validate_geometry(collision.geometry)
1859 |
1860 | def _write_collision(self, xml_parent, collision):
1861 | attrib = {"name": collision.name} if collision.name is not None else {}
1862 | xml_element = etree.SubElement(
1863 | xml_parent,
1864 | "collision",
1865 | attrib=attrib,
1866 | )
1867 |
1868 | self._write_geometry(xml_element, collision.geometry)
1869 | self._write_origin(xml_element, collision.origin)
1870 |
1871 | def _parse_inertia(xml_element):
1872 | if xml_element is None:
1873 | return None
1874 |
1875 | x = xml_element
1876 |
1877 | return np.array(
1878 | [
1879 | [
1880 | x.get("ixx", default=1.0),
1881 | x.get("ixy", default=0.0),
1882 | x.get("ixz", default=0.0),
1883 | ],
1884 | [
1885 | x.get("ixy", default=0.0),
1886 | x.get("iyy", default=1.0),
1887 | x.get("iyz", default=0.0),
1888 | ],
1889 | [
1890 | x.get("ixz", default=0.0),
1891 | x.get("iyz", default=0.0),
1892 | x.get("izz", default=1.0),
1893 | ],
1894 | ],
1895 | dtype=np.float64,
1896 | )
1897 |
1898 | def _write_inertia(self, xml_parent, inertia):
1899 | if inertia is None:
1900 | return None
1901 |
1902 | etree.SubElement(
1903 | xml_parent,
1904 | "inertia",
1905 | attrib={
1906 | "ixx": str(inertia[0, 0]),
1907 | "ixy": str(inertia[0, 1]),
1908 | "ixz": str(inertia[0, 2]),
1909 | "iyy": str(inertia[1, 1]),
1910 | "iyz": str(inertia[1, 2]),
1911 | "izz": str(inertia[2, 2]),
1912 | },
1913 | )
1914 |
def _parse_mass(xml_element):
    """Deserialize a <mass> element's 'value' attribute; None if element absent."""
    if xml_element is None:
        return None

    raw = xml_element.get("value", default=0.0)
    return _str2float(raw)
1920 |
1921 | def _write_mass(self, xml_parent, mass):
1922 | if mass is None:
1923 | return
1924 |
1925 | etree.SubElement(
1926 | xml_parent,
1927 | "mass",
1928 | attrib={
1929 | "value": str(mass),
1930 | },
1931 | )
1932 |
def _parse_inertial(xml_element):
    """Deserialize an <inertial> element (origin, inertia, mass)."""
    if xml_element is None:
        return None

    result = Inertial()
    result.origin = URDF._parse_origin(xml_element.find("origin"))
    result.inertia = URDF._parse_inertia(xml_element.find("inertia"))
    result.mass = URDF._parse_mass(xml_element.find("mass"))
    return result
1943 |
1944 | def _write_inertial(self, xml_parent, inertial):
1945 | if inertial is None:
1946 | return
1947 |
1948 | xml_element = etree.SubElement(xml_parent, "inertial")
1949 |
1950 | self._write_origin(xml_element, inertial.origin)
1951 | self._write_mass(xml_element, inertial.mass)
1952 | self._write_inertia(xml_element, inertial.inertia)
1953 |
def _parse_link(xml_element):
    """Deserialize a <link> element (inertial, visuals, collisions)."""
    link = Link(name=xml_element.attrib["name"])
    link.inertial = URDF._parse_inertial(xml_element.find("inertial"))

    link.visuals.extend(
        URDF._parse_visual(v) for v in xml_element.findall("visual")
    )
    link.collisions.extend(
        URDF._parse_collision(c) for c in xml_element.findall("collision")
    )
    return link
1966 |
def _validate_link(self, link):
    """Record validation errors for a link: name plus all visuals/collisions."""
    self._validate_required_attribute(
        attribute=link.name, error_msg="The tag misses a 'name' attribute."
    )

    for visual in link.visuals:
        self._validate_visual(visual)
    for collision in link.collisions:
        self._validate_collision(collision)
1977 |
1978 | def _write_link(self, xml_parent, link):
1979 | xml_element = etree.SubElement(
1980 | xml_parent,
1981 | "link",
1982 | attrib={
1983 | "name": link.name,
1984 | },
1985 | )
1986 |
1987 | self._write_inertial(xml_element, link.inertial)
1988 | for visual in link.visuals:
1989 | self._write_visual(xml_element, visual)
1990 | for collision in link.collisions:
1991 | self._write_collision(xml_element, collision)
1992 |
1993 | def _parse_axis(xml_element):
1994 | if xml_element is None:
1995 | return np.array([1.0, 0, 0])
1996 |
1997 | xyz = xml_element.get("xyz", "1 0 0")
1998 | return np.array(list(map(float, xyz.split())))
1999 |
2000 | def _write_axis(self, xml_parent, axis):
2001 | if axis is None:
2002 | return
2003 |
2004 | etree.SubElement(xml_parent, "axis", attrib={"xyz": " ".join(map(str, axis))})
2005 |
def _parse_limit(xml_element):
    """Deserialize a <limit> element; returns None if absent."""
    if xml_element is None:
        return None

    attrs = {
        key: _str2float(xml_element.get(key, default=None))
        for key in ("effort", "velocity", "lower", "upper")
    }
    return Limit(**attrs)
2016 |
def _validate_limit(self, limit, type):
    """Record validation errors for a joint <limit>.

    Prismatic and revolute joints require a limit carrying 'upper' and
    'lower'; any present limit must carry 'effort' and 'velocity'.
    """
    if type in ("revolute", "prismatic"):
        self._validate_required_attribute(
            limit,
            error_msg="The of a (prismatic, revolute) joint is missing.",
        )
        if limit is not None:
            self._validate_required_attribute(
                limit.upper,
                error_msg="Tag of joint is missing attribute 'upper'.",
            )
            self._validate_required_attribute(
                limit.lower,
                error_msg="Tag of joint is missing attribute 'lower'.",
            )

    if limit is not None:
        self._validate_required_attribute(
            limit.effort,
            error_msg="Tag of joint is missing attribute 'effort'.",
        )
        self._validate_required_attribute(
            limit.velocity,
            error_msg="Tag of joint is missing attribute 'velocity'.",
        )
2044 |
2045 | def _write_limit(self, xml_parent, limit):
2046 | if limit is None:
2047 | return
2048 |
2049 | attrib = {}
2050 | if limit.effort is not None:
2051 | attrib["effort"] = str(limit.effort)
2052 | if limit.velocity is not None:
2053 | attrib["velocity"] = str(limit.velocity)
2054 | if limit.lower is not None:
2055 | attrib["lower"] = str(limit.lower)
2056 | if limit.upper is not None:
2057 | attrib["upper"] = str(limit.upper)
2058 |
2059 | etree.SubElement(
2060 | xml_parent,
2061 | "limit",
2062 | attrib=attrib,
2063 | )
2064 |
def _parse_dynamics(xml_element):
    """Deserialize a <dynamics> element; returns None if absent.

    FIX: convert 'damping' and 'friction' to floats via _str2float —
    previously they were kept as raw strings, unlike every other numeric
    attribute parser in this class (e.g. _parse_limit).
    """
    if xml_element is None:
        return None

    dynamics = Dynamics()
    dynamics.damping = _str2float(xml_element.get("damping", default=None))
    dynamics.friction = _str2float(xml_element.get("friction", default=None))

    return dynamics
2074 |
2075 | def _write_dynamics(self, xml_parent, dynamics):
2076 | if dynamics is None:
2077 | return
2078 |
2079 | attrib = {}
2080 | if dynamics.damping is not None:
2081 | attrib["damping"] = str(dynamics.damping)
2082 | if dynamics.friction is not None:
2083 | attrib["friction"] = str(dynamics.friction)
2084 |
2085 | etree.SubElement(
2086 | xml_parent,
2087 | "dynamics",
2088 | attrib=attrib,
2089 | )
2090 |
def _parse_joint(xml_element):
    """Deserialize a <joint> element and all of its known child elements.

    Missing <parent>/<child> tags yield None so that _validate_joint can
    report them, instead of crashing during parsing.
    """
    joint = Joint(name=xml_element.attrib["name"])

    joint.type = xml_element.get("type", default=None)
    # BUG FIX: a missing <parent> or <child> element used to raise
    # AttributeError (None.get) before validation could flag it.
    parent = xml_element.find("parent")
    joint.parent = parent.get("link") if parent is not None else None
    child = xml_element.find("child")
    joint.child = child.get("link") if child is not None else None
    joint.origin = URDF._parse_origin(xml_element.find("origin"))
    joint.axis = URDF._parse_axis(xml_element.find("axis"))
    joint.limit = URDF._parse_limit(xml_element.find("limit"))
    joint.dynamics = URDF._parse_dynamics(xml_element.find("dynamics"))
    joint.mimic = URDF._parse_mimic(xml_element.find("mimic"))
    joint.calibration = URDF._parse_calibration(xml_element.find("calibration"))
    joint.safety_controller = URDF._parse_safety_controller(
        xml_element.find("safety_controller")
    )

    return joint
2108 |
def _validate_joint(self, joint):
    """Record validation errors for a joint: name, type, parent, child, limit."""
    self._validate_required_attribute(
        attribute=joint.name,
        error_msg="The tag misses a 'name' attribute.",
    )

    allowed_types = [
        "revolute",
        "continuous",
        "prismatic",
        "fixed",
        "floating",
        "planar",
    ]
    self._validate_required_attribute(
        attribute=joint.type,
        error_msg=f"The tag misses a 'type' attribute or value is not part of allowed values [{', '.join(allowed_types)}].",
        allowed_values=allowed_types,
    )

    # FIX: these messages contain no placeholders — the f-prefixes were
    # superfluous (lint F541); the resulting strings are unchanged.
    self._validate_required_attribute(
        joint.parent,
        error_msg="The of a is missing.",
    )

    self._validate_required_attribute(
        joint.child,
        error_msg="The of a is missing.",
    )

    self._validate_limit(joint.limit, type=joint.type)
2140 |
2141 | def _write_joint(self, xml_parent, joint):
2142 | xml_element = etree.SubElement(
2143 | xml_parent,
2144 | "joint",
2145 | attrib={
2146 | "name": joint.name,
2147 | "type": joint.type,
2148 | },
2149 | )
2150 |
2151 | etree.SubElement(xml_element, "parent", attrib={"link": joint.parent})
2152 | etree.SubElement(xml_element, "child", attrib={"link": joint.child})
2153 | self._write_origin(xml_element, joint.origin)
2154 | self._write_axis(xml_element, joint.axis)
2155 | self._write_limit(xml_element, joint.limit)
2156 | self._write_dynamics(xml_element, joint.dynamics)
2157 |
@staticmethod
def _parse_robot(xml_element):
    """Deserialize a <robot> element: links, joints and materials."""
    robot = Robot(name=xml_element.attrib["name"])

    robot.links.extend(URDF._parse_link(l) for l in xml_element.findall("link"))
    robot.joints.extend(URDF._parse_joint(j) for j in xml_element.findall("joint"))
    robot.materials.extend(
        URDF._parse_material(m) for m in xml_element.findall("material")
    )
    return robot
2169 |
def _validate_robot(self, robot):
    """Record validation errors for the whole robot (name, links, joints)."""
    if robot is None:
        return

    self._validate_required_attribute(
        attribute=robot.name,
        error_msg="The tag misses a 'name' attribute.",
    )
    for link in robot.links:
        self._validate_link(link)
    for joint in robot.joints:
        self._validate_joint(joint)
2182 |
2183 | def _write_robot(self, robot):
2184 | xml_element = etree.Element("robot", attrib={"name": robot.name})
2185 | for link in robot.links:
2186 | self._write_link(xml_element, link)
2187 | for joint in robot.joints:
2188 | self._write_joint(xml_element, joint)
2189 | for material in robot.materials:
2190 | self._write_material(xml_element, material)
2191 |
2192 | return xml_element
2193 |
2194 | def __eq__(self, other):
2195 | if not isinstance(other, URDF):
2196 | raise NotImplemented
2197 | return self.robot == other.robot
2198 |
--------------------------------------------------------------------------------