├── .coveragerc
├── .flake8
├── .github
│   └── workflows
│       └── ci_checks.yml
├── .gitignore
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── docs
│   └── assets
│       ├── lisdf_logo.png
│       └── lisdf_logo.svg
├── lisdf
│   ├── __init__.py
│   ├── components
│   │   ├── __init__.py
│   │   ├── base.py
│   │   ├── control.py
│   │   ├── gui.py
│   │   ├── material.py
│   │   ├── model.py
│   │   ├── model_mjcf.py
│   │   ├── model_sdf.py
│   │   ├── model_urdf.py
│   │   ├── pddl.py
│   │   ├── scene.py
│   │   ├── sensor.py
│   │   ├── shape.py
│   │   ├── srdf.py
│   │   └── state.py
│   ├── parsing
│   │   ├── __init__.py
│   │   ├── all.py
│   │   ├── lispddl-builtins.pddl
│   │   ├── lispddl-v1.0.grammar
│   │   ├── mjcf.py
│   │   ├── parse_sdf.py
│   │   ├── pddl_j.py
│   │   ├── sdf.py
│   │   ├── sdf_j.py
│   │   ├── srdf.py
│   │   ├── string_utils.py
│   │   ├── urdf.py
│   │   ├── urdf_j.py
│   │   ├── xml_j
│   │   │   ├── __init__.py
│   │   │   ├── visitor.py
│   │   │   └── xml.py
│   │   └── xml_reflection
│   │       ├── __init__.py
│   │       ├── basics.py
│   │       └── core.py
│   ├── plan_executor
│   │   ├── __init__.py
│   │   ├── executor.py
│   │   ├── gripper_executor.py
│   │   ├── interpolator.py
│   │   ├── joint_space_path_executor.py
│   │   ├── lisdf_executor.py
│   │   └── robots
│   │       ├── __init__.py
│   │       ├── common.py
│   │       └── panda.py
│   ├── planner_output
│   │   ├── __init__.py
│   │   ├── command.py
│   │   ├── common.py
│   │   ├── config.py
│   │   └── plan.py
│   ├── py.typed
│   └── utils
│       ├── __init__.py
│       ├── printing.py
│       ├── transformations.py
│       ├── transformations_more.py
│       └── typing.py
├── mypy.ini
├── pyproject.toml
├── scripts
│   ├── mjcf_coverage.py
│   ├── mjcf_expand.py
│   ├── planner_output_demo.py
│   ├── run_checks.sh
│   └── sdf_coverage.py
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── test_parsing
    │   ├── __init__.py
    │   ├── test_parse_mjcf.py
    │   ├── test_parse_sdf.py
    │   ├── test_parse_sdf_j.py
    │   ├── test_parse_urdf_j.py
    │   ├── test_urdf.py
    │   ├── test_urdf_error.py
    │   └── xml_matching.py
    ├── test_plan_executor
    │   ├── __init__.py
    │   ├── conftest.py
    │   ├── test_executor.py
    │   ├── test_gripper_executor.py
    │   ├── test_interpolator.py
    │   ├── test_joint_space_path_executor.py
    │   └── test_lisdf_executor.py
    ├── test_planner_output
    │   ├── __init__.py
    │   ├── conftest.py
    │   ├── test_command.py
    │   ├── test_common.py
    │   └── test_plan.py
    └── test_utils
        └── test_transformation_more.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 |
3 | [report]
4 | # Regexes for lines to exclude from consideration
5 | exclude_lines =
6 | # Have to re-enable the standard pragma
7 | # per https://coverage.readthedocs.io/en/latest/config.html#syntax
8 | pragma: no cover
9 |
10 | # Don't complain about abstract methods, they aren't run
11 | @abstractmethod
12 | @abc.abstractmethod
13 |
14 | omit =
15 | # The GREAT transformations.py. We don't need to test it here.
16 | # It has been tested everywhere.
17 | lisdf/utils/transformations.py
18 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 88
3 | extend-ignore = E203
4 | exclude = (?x)(
5 | .env/
6 | | env/
7 | | .venv/
8 | | venv/
9 | )
--------------------------------------------------------------------------------
/.github/workflows/ci_checks.yml:
--------------------------------------------------------------------------------
1 | name: ci_checks
2 |
3 | on: [push]
4 |
5 | jobs:
6 | unit-tests:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python-version: [3.8]
11 | steps:
12 | - name: Checkout repo and submodules
13 | uses: actions/checkout@v2
14 | with:
15 | submodules: recursive
16 | - name: Set up Python ${{ matrix.python-version }}
17 | uses: actions/setup-python@v2
18 | with:
19 | python-version: ${{ matrix.python-version }}
20 | - run: | # Unit tests require lisdf-models
21 | pip install -e .[develop]
22 | pip install lisdf_models@git+https://github.com/Learning-and-Intelligent-Systems/lisdf-models.git
23 | - name: Pytest
24 | run: |
25 | # Note tests/ directory is not included in coverage
26 | pytest -s tests/ --cov-config=.coveragerc --cov=lisdf/ --cov-fail-under=75 --cov-report=term-missing:skip-covered
27 | static-type-checking:
28 | runs-on: ubuntu-latest
29 | strategy:
30 | matrix:
31 | python-version: [3.8]
32 | steps:
33 | - uses: actions/checkout@v2
34 | - name: Set up Python ${{ matrix.python-version }}
35 | uses: actions/setup-python@v2
36 | with:
37 | python-version: ${{ matrix.python-version }}
38 | - name: Install dependencies
39 | run: | # mypy needs lisdf-models for lisdf/parsing/parse_sdf.py
40 | pip install -e .[develop]
41 | pip install lisdf_models@git+https://github.com/Learning-and-Intelligent-Systems/lisdf-models.git
42 | - name: Mypy
43 | run: |
44 | mypy . --config-file mypy.ini
45 | lint:
46 | runs-on: ubuntu-latest
47 | strategy:
48 | matrix:
49 | python-version: [3.8]
50 | steps:
51 | - uses: actions/checkout@v2
52 | - name: Set up Python ${{ matrix.python-version }}
53 | uses: actions/setup-python@v2
54 | with:
55 | python-version: ${{ matrix.python-version }}
56 | - name: Install dependencies
57 | run: |
58 | pip install -e .[develop]
59 | - name: Flake8
60 | run: |
61 | flake8
62 | autoformat:
63 | runs-on: ubuntu-latest
64 | strategy:
65 | matrix:
66 | python-version: [3.8]
67 | steps:
68 | - uses: actions/checkout@v2
69 | - name: Set up Python ${{ matrix.python-version }}
70 | uses: actions/setup-python@v2
71 | with:
72 | python-version: ${{ matrix.python-version }}
73 | - name: Run isort to organize imports
74 | uses: isort/isort-action@master
75 | - name: Run black to check code formatting
76 | uses: psf/black@stable
77 |
78 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Vim related
2 |
3 | /.vim-template*
4 |
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 |
10 | # C extensions
11 | *.so
12 |
13 | # Distribution / packaging
14 | .Python
15 | build/
16 | develop-eggs/
17 | dist/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | pip-wheel-metadata/
28 | share/python-wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 | MANIFEST
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 | db.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | target/
80 |
81 | # Jupyter Notebook
82 | .ipynb_checkpoints
83 |
84 | # IPython
85 | profile_default/
86 | ipython_config.py
87 |
88 | # pyenv
89 | .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 |
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 |
105 | # SageMath parsed files
106 | *.sage.py
107 |
108 | # Environments
109 | .env
110 | .venv
111 | env/
112 | venv/
113 | ENV/
114 | env.bak/
115 | venv.bak/
116 |
117 | # Spyder project settings
118 | .spyderproject
119 | .spyproject
120 |
121 | # Rope project settings
122 | .ropeproject
123 |
124 | # mkdocs documentation
125 | /site
126 |
127 | # mypy
128 | .mypy_cache/
129 | .dmypy.json
130 | dmypy.json
131 |
132 | # Pyre type checker
133 | .pyre/
134 |
135 | # IDEA IDEs
136 | .idea/
137 |
138 | # DS store is annoying
139 | **/.DS_Store
140 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to LISdf
2 | Follow the instructions in the [README](README.md) if you haven't already set up your development environment.
3 |
4 | ### Pushing your Changes
5 | You can't directly push to the `main` branch. All contributions will require review to ensure code is understandable
6 | and can be built upon.
7 |
8 | 1. To contribute new code, make a new Git branch in this repository (don't use a fork, since that will not properly trigger
9 | the checks when you make a PR). When your code is ready for review, [make a PR](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request)
10 | and request reviews from the appropriate people (if you're not sure who to request a review from, default to Nishanth
11 | and/or Will).
12 | 2. To merge a PR, you need at least one approval, and you have to pass the 4 checks defined in
13 | `.github/workflows/ci_checks.yml` (these will be run automatically on GitHub whenever you push or open a PR).
14 | You can run these checks locally in one line via `./scripts/run_checks.sh`. For more details, refer to the next section.
15 |
16 | ### Formatting and Checks
17 | We use `isort` and `black` for autoformatting, `flake8` and `mypy` for linting and type checking, and `pytest`
18 | for unit and integration testing. You can run all these commands in one simple script via `./scripts/run_checks.sh`.
19 |
20 | If you wish to run these commands individually, you can run these commands from the repo directory:
21 | 1. `isort . && black .`
22 | 2. `flake8 .`
23 | 3. `mypy . --config-file mypy.ini`
24 | 4. `pytest -s tests/ --cov-config=.coveragerc --cov=lisdf/ --cov-fail-under=75 --cov-report=term-missing:skip-covered --durations=10`
25 |
26 | * The 1st command is the autoformatting check, which runs `isort` and `black`.
27 | * The 2nd command is the linter check, which runs `flake8`.
28 | * The 3rd command is the static typing check, which uses `mypy` to verify type annotations.
29 | * The 4th command is the unit testing check, which verifies that unit tests pass and that code is adequately covered.
30 | * The "75" means that 75% of all lines in every file in `lisdf/` must be covered (excludes `tests/`).
31 |
32 | ### Publishing to PyPI
33 | Right now, the plan is to manually publish to PyPI when required. We can automate the release process later if needed.
34 |
35 | We use [Hatch](https://hatch.pypa.io/latest/) as our build system because it is lightweight, easy to use yet powerful
36 | in comparison to other tools like `poetry`.
37 |
38 | 1. Increment `__version__` in `lisdf/__init__.py` to the new version number.
39 | - We try to follow the [semantic versioning](https://semver.org/) convention.
40 | 2. Install `hatch` via `pip install hatch` if you haven't already.
41 | 3. Build the package via `hatch build`
42 | 4. Inspect the build artifacts in `dist/` to ensure that everything looks good.
43 | - Note: the `.whl` file is just a zip file, so you can unzip it to inspect the contents.
44 | 5. Publish to PyPI via `hatch publish`
45 | - Pre-requisite: you will need to have a PyPI account and be added as a collaborator of the package. Ask Will to
46 | add you if you don't have access yet.
47 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Learning and Intelligent Systems
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LISdf
2 |
3 | <!-- Logo image (docs/assets/lisdf_logo.png) and badge images were stripped during extraction. -->
4 | <!-- The badges linked to https://pypi.org/project/lisdf and https://pypistats.org/packages/lisdf. -->
12 |
13 |
14 | A repository for a universal I/O spec for Task and Motion Planning (TAMP), along with scripts to convert from
15 | popular specs to our spec. Includes:
16 |
17 | - LISdf specification for specifying scenes for TAMP.
18 | - A sophisticated parser for reading `.lisdf` scene files.
19 | - The LISdf Plan Output format, with helpers to read, write, validate and run plans.
20 |
21 | **Note:** this repository is under active development and is not intended for general use.
22 | Please contact `willshen at mit.edu` and `jiayuanm at mit.edu` if you are interested in using this package.
23 |
24 | -----
25 |
26 | **Table of Contents**
27 |
28 |
29 | - [Installation](#installation)
30 | - [Documentation](#documentation)
31 | - [Contributing](#contributing)
32 | - [License](#license)
33 | - [Authors](#authors)
34 | - [Change Log](#change-log)
35 |
36 | ## Installation
37 |
38 | **Dependencies:** this package requires Python 3.8 or higher. Although the dependencies within `lisdf` are minimal,
39 | we recommend you use a [conda env](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html)
40 | or [virtual env](https://docs.python.org/3.8/library/venv.html) with an appropriate Python version.
41 |
42 | ### Installing with `pip`
43 |
44 | ```
45 | pip install lisdf
46 | ```
47 |
48 | ### Installing from Source
49 |
50 | Clone the repository and install the dependencies with `pip`:
51 |
52 | ```console
53 | > git clone git@github.com:Learning-and-Intelligent-Systems/lisdf.git
54 | > pip install -e .
55 | ```
56 |
57 | ## Documentation
58 | - LISdf Input Spec: coming soon...
59 | - [LISdf Plan Output Spec (Google Doc)](https://docs.google.com/document/d/15V7K-ljLGx-4hJJaihaDM4-MXGuTXvXhEdNQgRum75A/edit#heading=h.2m2ax6udwea4)
60 |
61 | ## Contributing
62 |
63 | ### Dev Dependencies
64 | Follow the instructions below:
65 |
66 | 1. Clone the repository using `git clone`.
67 | - If you are creating a virtual environment within the project directory, then you might want to call it one of
68 | `.env, env, .venv, venv` as the code checks have been configured to exclude those directories.
69 | 2. Run `pip install -e '.[develop]'` to install all dependencies for development/contribution.
70 | 3. (Optional, required for unit tests) Install the `lisdf-models` model files by running
71 | ```
72 | pip install lisdf_models@git+https://github.com/Learning-and-Intelligent-Systems/lisdf-models.git
73 | ```
74 | **WARNING:** the [`lisdf-models` repository](https://github.com/Learning-and-Intelligent-Systems/lisdf-models)
75 | is ~700MB in size as of 10th September 2022.
76 | 4. Check [CONTRIBUTING.md](CONTRIBUTING.md) for more information on how to contribute to this repository, including
77 | how to run the tests, code checks, and publishing to PyPI.
78 |
79 |
80 | ## License
81 | This repository is licensed under the MIT License. See [LICENSE](LICENSE) for more details.
82 |
83 |
84 | ## Authors
85 | LISdf is an initiative within the [Learning and Intelligent Systems (LIS) group](https://lis.csail.mit.edu) at
86 | [MIT CSAIL](https://www.csail.mit.edu/).
87 |
88 | Contributors and Programmers (alphabetical order):
89 | - Aidan Curtis
90 | - Jiayuan Mao
91 | - Nishanth Kumar
92 | - Sahit Chintalapudi
93 | - Tom Silver
94 | - William Shen
95 | - Zhutian Yang
96 |
97 | Other contributors who helped with general discussions, design, feedback (alphabetical order):
98 | - Leslie Kaelbling
99 | - Michael Noseworthy
100 | - Nicholas Roy
101 | - Rachel Holladay
102 | - Tomás Lozano-Pérez
103 | - Yilun Du
104 |
105 |
106 | ## Change Log
107 |
108 | ### 0.1.0
109 | Initial release to PyPI.
110 |
111 | -----
112 |
113 | LISdf = Learning and Intelligent Systems Description Format
114 |
--------------------------------------------------------------------------------
/docs/assets/lisdf_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/docs/assets/lisdf_logo.png
--------------------------------------------------------------------------------
/docs/assets/lisdf_logo.svg:
--------------------------------------------------------------------------------
[65-line SVG source stripped during extraction; vector version of the LISdf logo]
--------------------------------------------------------------------------------
/lisdf/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.1.1"
2 |
--------------------------------------------------------------------------------
/lisdf/components/__init__.py:
--------------------------------------------------------------------------------
1 | from lisdf.components.base import ( # noqa: F401
2 | Pose,
3 | StringConfigurable,
4 | set_name_scope_sep,
5 | )
6 | from lisdf.components.control import ( # noqa: F401
7 | ContinuousJointInfo,
8 | FixedJointInfo,
9 | JointCalibration,
10 | JointControlInfo,
11 | JointDynamics,
12 | JointInfo,
13 | JointLimit,
14 | JointMimic,
15 | PrismaticJointInfo,
16 | RevoluteJointInfo,
17 | )
18 | from lisdf.components.model import ( # noqa: F401
19 | Collision,
20 | Inertia,
21 | Inertial,
22 | Joint,
23 | Link,
24 | Model,
25 | SDFInclude,
26 | SurfaceContact,
27 | SurfaceFriction,
28 | SurfaceInfo,
29 | URDFInclude,
30 | Visual,
31 | )
32 |
33 | from .gui import GUI, GUICamera # noqa: F401
34 | from .material import RGBA, Material, MJCFMaterial, PhongMaterial, Texture # noqa: F401
35 | from .model_mjcf import MJCFCollision, MJCFModel, MJCFVisual # noqa: F401
36 | from .model_sdf import ( # noqa: F401
37 | SDFLink,
38 | SDFSurfaceContact,
39 | SDFSurfaceFriction,
40 | SDFVisual,
41 | )
42 | from .model_urdf import URDFModel # noqa: F401
43 | from .pddl import ( # noqa: F401
44 | PDDL_AND,
45 | PDDL_EQUALS,
46 | PDDL_EXISTS,
47 | PDDL_FORALL,
48 | PDDL_GREATER,
49 | PDDL_GREATER_EQUALS,
50 | PDDL_IMPLIES,
51 | PDDL_LESS,
52 | PDDL_LESS_EQUALS,
53 | PDDL_NOT,
54 | PDDL_NOT_EQUALS,
55 | PDDL_OR,
56 | PDDLDomain,
57 | PDDLFunctionCall,
58 | PDDLLiteral,
59 | PDDLNamedValue,
60 | PDDLObject,
61 | PDDLOperator,
62 | PDDLPredicate,
63 | PDDLProblem,
64 | PDDLProposition,
65 | PDDLSDFObject,
66 | PDDLStringConfigurable,
67 | PDDLType,
68 | PDDLValue,
69 | PDDLVariable,
70 | PDDLVectorValue,
71 | )
72 | from .scene import LISDF, World # noqa: F401
73 | from .sensor import CameraSensor, Sensor # noqa: F401
74 | from .shape import ( # noqa: F401
75 | BoxShapeInfo,
76 | CapsuleShapeInfo,
77 | CylinderShapeInfo,
78 | MeshShapeInfo,
79 | PlaneShapeInfo,
80 | ShapeInfo,
81 | SphereShapeInfo,
82 | )
83 | from .srdf import ( # noqa: F401
84 | ChainIdentifier,
85 | DisableCollisions,
86 | Group,
87 | GroupIdentifier,
88 | JointIdentifier,
89 | LinkIdentifier,
90 | )
91 | from .state import ( # noqa: F401
92 | JointAxisState,
93 | JointState,
94 | LinkState,
95 | ModelState,
96 | WorldState,
97 | )
98 |
--------------------------------------------------------------------------------
/lisdf/components/base.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from collections import defaultdict
3 | from dataclasses import dataclass
4 | from functools import cached_property
5 | from typing import (
6 | Any,
7 | Callable,
8 | ClassVar,
9 | DefaultDict,
10 | Dict,
11 | List,
12 | Optional,
13 | Tuple,
14 | Type,
15 | TypeVar,
16 | )
17 |
18 | import numpy as np
19 |
20 | from lisdf.utils.transformations import (
21 | euler_from_quaternion,
22 | quaternion_from_euler,
23 | quaternion_multiply,
24 | )
25 | from lisdf.utils.transformations_more import lookat_rpy
26 | from lisdf.utils.typing import Vector3f, Vector4f, Vector6f
27 |
28 | T = TypeVar("T")
29 |
30 | NAME_SCOPE_SEP: Optional[str] = "::"
31 |
32 |
33 | def set_name_scope_sep(sep: Optional[str]) -> None:
34 | """Set the name scope seperator to None to disable name scoping."""
35 | global NAME_SCOPE_SEP
36 | NAME_SCOPE_SEP = sep
37 |
38 |
39 | class StringifyContext(object):
40 | def __init__(self, **kwargs) -> None:
41 | self.stacks: DefaultDict[str, List[Any]] = defaultdict(list)
42 | self.warnings: List[Tuple[StringConfigurable, str]] = list()
43 | self.options = kwargs
44 |
45 | def warning(self, obj: "StringConfigurable", message: str) -> None:
46 | self.warnings.append((obj, message))
47 |
48 | def st_push(self, name: str, value: Any) -> None:
49 | self.stacks[name].append(value)
50 |
51 | def st_pop(self, name: str, default: Any = None) -> Any:
52 | return self.stacks[name].pop() if len(self.stacks[name]) > 0 else default
53 |
54 | def st_top(self, name: str, default: Any = None) -> Any:
55 | return self.stacks[name][-1] if len(self.stacks[name]) > 0 else default
56 |
57 | def get_scoped_name(self, name: str) -> str:
58 | """
59 | Prepend the current scope to the name.
60 |
61 | Args:
62 | name: the input name.
63 |
64 | Returns:
65 | When the use_scoped_name option is True, the name is prepended with
66 | the current scope. For example, when we are stringifying a model,
67 |             the to_sdf method will do something like this:
68 | ```
69 | ctx.push_scoped_name(self.model_name)
70 | for link in self.links:
71 | link.to_sdf()
72 | ctx.pop_scoped_name()
73 | ```
74 |
75 | Then, inside the link.to_sdf method, users can call the function
76 | get_scoped_name to get the name of the link. It will return:
77 | `model_name + "::" + link.name`.
78 | """
79 | if not self.options["use_scoped_name"]:
80 | return name
81 |
82 | parent_name = self.st_top("model_name", None)
83 | if parent_name is None:
84 | return name
85 | if NAME_SCOPE_SEP is None:
86 | return name
87 |
88 | return f"{parent_name}{NAME_SCOPE_SEP}{name}"
89 |
90 | def push_scoped_name(self, name: str) -> None:
91 | self.st_push("model_name", self.get_scoped_name(name))
92 |
93 | def pop_scoped_name(self) -> None:
94 | self.st_pop("model_name")
95 |
96 | def get_scoped_pose(self, pose: "Pose") -> "Pose":
97 | """
98 | In SDF files we constantly allow nested definition of poses.
99 | For example, we can specify the pose of a model then all links under
100 | this model will be transformed. This helper function maintains a stack
101 | of poses from the root node. The usage is similar to the `get_scoped_name`.
102 | """
103 | parent_pose = self.st_top("pose", None)
104 | if parent_pose is None:
105 | return pose
106 | return parent_pose * pose
107 |
108 | def push_scoped_pose(self, pose: "Pose") -> None:
109 | self.st_push("pose", self.get_scoped_pose(pose))
110 |
111 | def pop_scoped_pose(self) -> None:
112 | self.st_pop("pose")
113 |
114 |
115 | class StringConfigurable(ABC):
116 | # TODO(Jiayuan Mao @ 03/24): implement these methods for the child classes.
117 |
118 | DEFAULT_LISDF_STRINGIFY_OPTIONS: ClassVar[Dict[str, Any]] = {}
119 | DEFAULT_SDF_STRINGIFY_OPTIONS: ClassVar[Dict[str, Any]] = {}
120 | DEFAULT_URDF_STRINGIFY_OPTIONS: ClassVar[Dict[str, Any]] = {
121 | # The URDF standard supports defining the material for a visual element
122 | # inside the visual element itself. However, this is not supported by
123 |     # some URDF parsers. Set this option to False to force all material
124 |     # definitions to appear at the root level.
125 | "allow_embedded_material": False,
126 |     # When exporting the URDF model, whether to use scoped names (i.e., names
127 |     # composed of the parent name and the child name).
128 | "use_scoped_name": False,
129 | }
130 |
131 | def to_lisdf(self, ctx: Optional[StringifyContext] = None, **kwargs) -> str:
132 | if ctx is None:
133 | for k, v in type(self).DEFAULT_LISDF_STRINGIFY_OPTIONS.items():
134 | kwargs.setdefault(k, v)
135 | ctx = StringifyContext(**kwargs)
136 | return self._to_lisdf(ctx)
137 |
138 | def to_sdf(self, ctx: Optional[StringifyContext] = None, **kwargs) -> str:
139 | if ctx is None:
140 | for k, v in type(self).DEFAULT_SDF_STRINGIFY_OPTIONS.items():
141 | kwargs.setdefault(k, v)
142 | ctx = StringifyContext(**kwargs)
143 | return self._to_sdf(ctx)
144 |
145 | def to_urdf(self, ctx: Optional[StringifyContext] = None, **kwargs) -> str:
146 | if ctx is None:
147 | for k, v in type(self).DEFAULT_URDF_STRINGIFY_OPTIONS.items():
148 | kwargs.setdefault(k, v)
149 | ctx = StringifyContext(**kwargs)
150 | return self._to_urdf(ctx)
151 |
152 | def _to_lisdf(self, ctx: StringifyContext) -> str:
153 | return self._to_sdf(ctx)
154 |
155 | def _to_sdf(self, ctx: StringifyContext) -> str:
156 | raise NotImplementedError()
157 |
158 | def _to_urdf(self, ctx: StringifyContext) -> str:
159 | raise NotImplementedError()
160 |
161 |
162 | def unsupported_stringify(
163 | *, disable_sdf: bool = False, disable_urdf: bool = False
164 | ) -> Callable[[Type[T]], Type[T]]:
165 | # TODO (Jiayuan Mao @ 04/03): find a more checker/IDE-friendly way to inject.
166 | def decorator(cls: Type[T]) -> Type[T]:
167 | if not disable_sdf:
168 |
169 | def _to_sdf(self: StringConfigurable, ctx: StringifyContext) -> str:
170 | ctx.warning(
171 | self, "Unsupported SDF stringification for {}".format(cls.__name__)
172 | )
173 | return ""
174 |
175 | setattr(cls, "to_sdf", _to_sdf)
176 |
177 | if not disable_urdf:
178 |
179 | def _to_urdf(self: StringConfigurable, ctx: StringifyContext) -> str:
180 | ctx.warning(
181 | self,
182 | "Unsupported URDF stringification for {}.".format(cls.__name__),
183 | )
184 | return ""
185 |
186 | setattr(cls, "to_urdf", _to_urdf)
187 | return cls
188 |
189 | return decorator
190 |
191 |
192 | @dataclass
193 | class Pose(StringConfigurable):
194 | pos: Vector3f
195 | quat_wxyz: Vector4f
196 |
197 | @classmethod
198 | def from_rpy_6d(cls, a: Vector6f) -> "Pose":
199 | """Construct a Pose object from a 6-dimensional vector: x, y, z, r, p, y."""
200 | return cls.from_rpy(a[:3], a[3:])
201 |
202 | @classmethod
203 | def from_rpy(cls, pos: Vector3f, rpy: Vector3f) -> "Pose":
204 | """Construct a Pose object two 3-dimensional vector."""
205 | return cls.from_quat_xyzw(pos, quaternion_from_euler(*rpy)) # type: ignore
206 |
207 | @classmethod
208 | def from_quat_xyzw(cls, pos: Vector3f, xyzw: Vector4f) -> "Pose":
209 | """Construct a Pose object from a 3d position vector and a
210 | 4d quaternion vector, expressed in xyzw."""
211 | return cls(pos, np.array([xyzw[3], xyzw[0], xyzw[1], xyzw[2]]))
212 |
213 | @classmethod
214 | def from_lookat(cls, xyz: Vector3f, point_to: Vector3f) -> "Pose":
215 | """Construct the roll, pitch, yaw angles of a camera looking at a target.
216 | This function assumes that the camera is pointing to the z-axis ([0, 0, 1]),
217 | in the camera frame.
218 |
219 | Args:
220 |             xyz: the position of the camera.
221 |             point_to: the target position.
222 |
223 | Returns:
224 | a Pose object.
225 | """
226 | return cls.from_rpy(xyz, lookat_rpy(xyz, point_to))
227 |
228 | @classmethod
229 | def identity(cls) -> "Pose":
230 | """Construct the identity pose (x=y=z=r=p=y=0)."""
231 | return cls(
232 | pos=np.zeros(3, dtype="float32"),
233 | quat_wxyz=np.array([1, 0, 0, 0], dtype="float32"),
234 | )
235 |
236 | @cached_property
237 | def quat_xyzw(self) -> Vector4f:
238 | return np.array(
239 | [self.quat_wxyz[1], self.quat_wxyz[2], self.quat_wxyz[3], self.quat_wxyz[0]]
240 | )
241 |
242 | @cached_property
243 | def rpy(self) -> Vector3f:
244 | return euler_from_quaternion(self.quat_xyzw) # type: ignore
245 |
246 | def __mul__(self, other: "Pose") -> "Pose":
247 | quat = quaternion_multiply(self.quat_xyzw, other.quat_xyzw) # type: ignore
248 | return Pose(self.pos + other.pos, quat)
249 |
250 | def _to_sdf(self, ctx: StringifyContext) -> str:
251 | return (
252 | f"{self.pos[0]} {self.pos[1]} {self.pos[2]} "
253 | f"{self.rpy[0]} {self.rpy[1]} {self.rpy[2]}"
254 | )
255 |
256 | def _to_urdf(self, ctx: StringifyContext) -> str:
257 | urdf_pose = ctx.get_scoped_pose(self)
258 | return (
259 |             f'<origin xyz="{urdf_pose.pos[0]} {urdf_pose.pos[1]} {urdf_pose.pos[2]}" '
260 |             f'rpy="{urdf_pose.rpy[0]} {urdf_pose.rpy[1]} {urdf_pose.rpy[2]}"/>'
261 |         )
262 |
--------------------------------------------------------------------------------
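
A minimal usage sketch of the `Pose` helpers defined above; the position and orientation values are illustrative, not taken from the repo:

```python
# Sketch of the Pose API in lisdf/components/base.py (illustrative values).
import numpy as np

from lisdf.components.base import Pose

pose = Pose.from_rpy(
    np.array([1.0, 0.0, 0.5], dtype="float32"),  # x, y, z
    np.array([0.0, 0.0, np.pi / 2], dtype="float32"),  # roll, pitch, yaw
)

# SDF renders a pose as the six numbers "x y z r p y"; the enclosing
# <pose> element is emitted by the parent element's stringifier.
print(pose.to_sdf())
# URDF renders it as an <origin xyz="..." rpy="..."/> element.
print(pose.to_urdf())
```
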
/lisdf/components/control.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from dataclasses import dataclass
3 | from typing import ClassVar, Dict, Optional, Type
4 |
5 | from lisdf.components.base import (
6 | StringConfigurable,
7 | StringifyContext,
8 | unsupported_stringify,
9 | )
10 | from lisdf.utils.printing import indent_text
11 | from lisdf.utils.typing import Vector3f
12 |
13 | """A series of data structures for joints."""
14 |
15 |
16 | @dataclass
17 | class JointDynamics(StringConfigurable):
18 | damping: float = 0
19 | friction: float = 0
20 | armature: float = 0 # used by MJCF only.
21 |
22 | def _to_sdf(self, ctx: StringifyContext) -> str:
23 | if self.armature != 0:
24 | ctx.warning(self, "armature is not supported in SDF.")
25 | return f"""
26 | {self.damping}
27 | {self.friction}
28 | """
29 |
30 | def _to_urdf(self, ctx: StringifyContext) -> str:
31 | if self.armature != 0:
32 | ctx.warning(self, "armature is not supported in SDF.")
33 |         return f'<dynamics damping="{self.damping}" friction="{self.friction}"/>'
34 |
35 |
36 | @dataclass
37 | class JointLimit(StringConfigurable):
38 | lower: Optional[float] = None
39 | upper: Optional[float] = None
40 | effort: Optional[float] = None
41 | velocity: Optional[float] = None
42 |
43 | def _to_sdf(self, ctx: StringifyContext) -> str:
44 | fmt = "\n"
45 | if self.lower is not None:
46 | fmt += f" {self.lower}\n"
47 | if self.upper is not None:
48 | fmt += f" {self.upper}\n"
49 | if self.effort is not None:
50 | fmt += f" {self.effort}\n"
51 | if self.velocity is not None:
52 | fmt += f" {self.velocity}\n"
53 | return fmt + ""
54 |
55 | def _to_urdf(self, ctx: StringifyContext) -> str:
56 | fmt = ""
66 |
67 |
68 | @dataclass
69 | @unsupported_stringify(disable_sdf=True)
70 | class JointCalibration(StringConfigurable):
71 | falling: float = 0
72 | rising: float = 0
73 |
74 | def _to_urdf(self, ctx: StringifyContext) -> str:
75 |         return f'<calibration falling="{self.falling}" rising="{self.rising}"/>'
76 |
77 |
78 | @dataclass
79 | @unsupported_stringify(disable_sdf=True)
80 | class JointMimic(StringConfigurable):
81 | joint: str
82 | multiplier: float = 1
83 | offset: float = 0
84 |
85 | # flake8: noqa: E501
86 | def _to_urdf(self, ctx: StringifyContext) -> str:
87 |         return f'<mimic joint="{self.joint}" multiplier="{self.multiplier}" offset="{self.offset}"/>'
88 |
89 |
90 | @dataclass
91 | @unsupported_stringify(disable_sdf=True)
92 | class JointControlInfo(StringConfigurable):
93 | lower: Optional[float] = None
94 | upper: Optional[float] = None
95 | velocity: Optional[float] = None
96 | position: Optional[float] = None
97 |
98 | def _to_urdf(self, ctx: StringifyContext) -> str:
99 | fmt = ""
109 |
110 |
111 | @dataclass
112 | class JointInfo(StringConfigurable, ABC):
113 | """
114 |     When inheriting from this class, child classes should pass type="XXX"
115 | as a keyword argument. This will register a new JointInfo type
116 | in this class. Then,
117 |
118 | >>> JointInfo.from_type('hinge', axis=np.array([0, 0, 1], dtype='float32'))
119 |
120 | will be equivalent to C.HingeJointInfo(axis=np.array([0, 0, 1], dtype='float32'))
121 | """
122 |
123 | type: ClassVar[str] = "JointInfo"
124 | type_mapping: ClassVar[Dict[str, Type["JointInfo"]]] = dict()
125 |
126 | def __init_subclass__(cls, type: str, **kwargs):
127 | super().__init_subclass__(**kwargs)
128 | setattr(cls, "type", type)
129 | JointInfo.type_mapping[type] = cls
130 |
131 | @staticmethod
132 | def from_type(type, **kwargs) -> "JointInfo":
133 | return JointInfo.type_mapping[type](**kwargs)
134 |
135 |
136 | @dataclass
137 | class FixedJointInfo(JointInfo, type="fixed"):
138 | """A fixed joint does not have any degrees of freedom."""
139 |
140 | def _to_sdf(self, ctx: StringifyContext) -> str:
141 | return ""
142 |
143 | def _to_urdf(self, ctx: StringifyContext) -> str:
144 | return ""
145 |
146 |
147 | @dataclass
148 | class SingleAxisJointInfo(JointInfo, type="controllable"):
149 | axis: Vector3f
150 | limit: Optional[JointLimit] = None
151 | dynamics: Optional[JointDynamics] = None
152 | calibration: Optional[JointCalibration] = None
153 | mimic: Optional[JointMimic] = None
154 |
155 | def _to_sdf(self, ctx: StringifyContext) -> str:
156 | return f"""
157 | {self.axis[0]} {self.axis[1]} {self.axis[2]}
158 | {indent_text(self.limit.to_sdf(ctx)) if self.limit is not None else ""}
159 | {indent_text(self.dynamics.to_sdf(ctx)) if self.dynamics is not None else ""}
160 | {indent_text(self.calibration.to_sdf(ctx))
161 | if self.calibration is not None else ""}
162 | {indent_text(self.mimic.to_sdf(ctx)) if self.mimic is not None else ""}
163 | """
164 |
165 | def _to_urdf(self, ctx: StringifyContext) -> str:
166 | return f"""
167 | {indent_text(self.limit.to_urdf(ctx)) if self.limit is not None else ""}
168 | {indent_text(self.dynamics.to_urdf(ctx))
169 | if self.dynamics is not None else ""}
170 | {indent_text(self.calibration.to_urdf(ctx))
171 | if self.calibration is not None else ""}
172 | {indent_text(self.mimic.to_urdf(ctx)) if self.mimic is not None else ""}"""
173 |
174 |
175 | @dataclass
176 | class ContinuousJointInfo(SingleAxisJointInfo, type="continuous"):
177 | pass
178 |
179 |
180 | @dataclass
181 | class RevoluteJointInfo(SingleAxisJointInfo, type="revolute"):
182 | pass
183 |
184 |
185 | @dataclass
186 | class PrismaticJointInfo(SingleAxisJointInfo, type="prismatic"):
187 | pass
188 |
--------------------------------------------------------------------------------
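
The `__init_subclass__` registry above lets new joint types self-register by name, so callers can construct joint metadata from a type string. A short sketch of dispatching through `JointInfo.from_type` (the axis and limit values are illustrative):

```python
# Sketch of the JointInfo registry in lisdf/components/control.py.
import numpy as np

from lisdf.components.control import JointInfo, JointLimit, RevoluteJointInfo

# "revolute" was registered by `class RevoluteJointInfo(..., type="revolute")`,
# so from_type dispatches to that dataclass.
joint_info = JointInfo.from_type(
    "revolute",
    axis=np.array([0.0, 0.0, 1.0], dtype="float32"),
    limit=JointLimit(lower=-1.57, upper=1.57, effort=10.0, velocity=1.0),
)
assert isinstance(joint_info, RevoluteJointInfo)
print(joint_info.to_sdf())  # <axis> block embedding the <limit> block
```
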
/lisdf/components/gui.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Optional
3 |
4 | from lisdf.components.base import (
5 | Pose,
6 | StringConfigurable,
7 | StringifyContext,
8 | unsupported_stringify,
9 | )
10 | from lisdf.utils.printing import indent_text
11 | from lisdf.utils.typing import Vector3f
12 |
13 |
14 | @dataclass
15 | @unsupported_stringify(disable_urdf=True)
16 | class GUICamera(StringConfigurable):
17 | name: Optional[str]
18 | pose: Pose
19 | projection_type: str = "perspective"
20 |
21 | # TODO(Jiayuan Mao @ 03/26): Add track_visual.
22 |
23 | @classmethod
24 | def from_lookat(
25 | cls,
26 | xyz: Vector3f,
27 | point_to: Vector3f,
28 | name: str = "camera",
29 | projection_type: str = "perspective",
30 | ) -> "GUICamera":
31 | pose = Pose.from_lookat(xyz, point_to)
32 | return cls(name, pose, projection_type)
33 |
34 | def __post_init__(self) -> None:
35 | assert self.projection_type in ["perspective", "orthographic"]
36 |
37 | def _to_sdf(self, ctx: StringifyContext) -> str:
38 | name_str = f' name="{self.name}"' if self.name else ""
39 | return f"""
40 | {self.pose.to_sdf(ctx)}
41 | {self.projection_type}
42 | """
43 |
44 |
45 | @dataclass
46 | @unsupported_stringify(disable_urdf=True)
47 | class GUI(StringConfigurable):
48 | camera: GUICamera
49 |
50 | def _to_sdf(self, ctx: StringifyContext) -> str:
51 | return f"""
52 | {indent_text(self.camera.to_sdf(ctx))}
53 | """
54 |
--------------------------------------------------------------------------------
/lisdf/components/material.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from dataclasses import dataclass, field
3 |
4 | import numpy as np
5 |
6 | from lisdf.components.base import (
7 | StringConfigurable,
8 | StringifyContext,
9 | unsupported_stringify,
10 | )
11 | from lisdf.utils.typing import Vector4f
12 |
13 |
14 | @dataclass(frozen=True)
15 | class Material(StringConfigurable, ABC):
16 | pass
17 |
18 |
19 | @dataclass(frozen=True)
20 | class RGBA(Material):
21 | r: float
22 | g: float
23 | b: float
24 | a: float
25 |
26 | @classmethod
27 | def from_numpy(cls, a: Vector4f) -> "RGBA":
28 | if a.shape == (3,):
29 | return cls(a[0], a[1], a[2], 1)
30 | assert a.shape == (4,)
31 | return cls(a[0], a[1], a[2], a[3])
32 |
33 | def _to_sdf(self, ctx: StringifyContext) -> str:
34 | return f"""
35 | {self.r:.3f} {self.g:.3f} {self.b:.3f} {self.a:.3f}
36 | {self.r:.3f} {self.g:.3f} {self.b:.3f} {self.a:.3f}
37 | """
38 |
39 | def _to_urdf(self, ctx: StringifyContext) -> str:
40 |         return f'<color rgba="{self.r:.3f} {self.g:.3f} {self.b:.3f} {self.a:.3f}"/>'
41 |
42 |
43 | @dataclass(frozen=True)
44 | class PhongMaterial(Material):
45 | ambient: Vector4f = field(
46 | default_factory=lambda: np.array([1, 1, 1, 1], dtype="float32")
47 | )
48 | diffuse: Vector4f = field(
49 | default_factory=lambda: np.array([1, 1, 1, 1], dtype="float32")
50 | )
51 | specular: Vector4f = field(
52 | default_factory=lambda: np.array([0, 0, 0, 1], dtype="float32")
53 | )
54 | emissive: Vector4f = field(
55 | default_factory=lambda: np.array([0, 0, 0, 1], dtype="float32")
56 | )
57 |
58 | # flake8: noqa: E501
59 | def _to_sdf(self, ctx: StringifyContext) -> str:
60 | return f"""
61 | {self.ambient[0]} {self.ambient[1]} {self.ambient[2]} {self.ambient[3]}
62 | {self.diffuse[0]} {self.diffuse[1]} {self.diffuse[2]} {self.diffuse[3]}
63 | {self.specular[0]} {self.specular[1]} {self.specular[2]} {self.specular[3]}
64 | {self.emissive[0]} {self.emissive[1]} {self.emissive[2]} {self.emissive[3]}
65 | """
66 |
67 | # flake8: noqa: E501
68 | def _to_urdf(self, ctx: StringifyContext) -> str:
69 | ctx.warning(self, "PhongMaterial is not supported in URDF.")
70 |         return f'<color rgba="{self.diffuse[0]} {self.diffuse[1]} {self.diffuse[2]} {self.diffuse[3]}"/>'
71 |
72 |
73 | @dataclass(frozen=True)
74 | @unsupported_stringify(disable_sdf=True)
75 | class Texture(Material):
76 | filename: str
77 |
78 | def _to_urdf(self, ctx: StringifyContext) -> str:
79 |         return f'<texture filename="{self.filename}"/>'
80 |
81 |
82 | @dataclass(frozen=True)
83 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
84 | class MJCFMaterial(Material):
85 | identifier: str # TODO
86 |
87 | def _to_sdf(self, ctx: StringifyContext) -> str:
88 | ctx.warning(self, "MJCF material is not supported in SDF.")
89 | return ""
90 |
91 | def _to_urdf(self, ctx: StringifyContext) -> str:
92 | ctx.warning(self, "MJCF material is not supported in URDF.")
93 | return ""
94 |
--------------------------------------------------------------------------------
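
A quick sketch of the `RGBA` helper above, showing the implicit alpha that `from_numpy` fills in for 3-vectors (the color value is illustrative):

```python
# Sketch of RGBA from lisdf/components/material.py (illustrative color).
import numpy as np

from lisdf.components.material import RGBA

red = RGBA.from_numpy(np.array([1.0, 0.0, 0.0]))  # 3-vector -> alpha = 1
assert red.a == 1

print(red.to_sdf())   # SDF <material> block (ambient + diffuse)
print(red.to_urdf())  # URDF-style <color rgba="..."/> line
```
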
/lisdf/components/model_mjcf.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | from lisdf.components.base import StringifyContext
4 | from lisdf.components.model import Collision, Model, Visual
5 |
6 |
7 | @dataclass
8 | class MJCFCollision(Collision):
9 | inertial_group: int = 3
10 | contact_type: int = 0
11 | contact_affinity: int = 0
12 | contact_dim: int = 3
13 |
14 | def _to_sdf(self, ctx: StringifyContext) -> str:
15 | ctx.warning(
16 | self,
17 | "Contact and inertial features in MJCFCollision is not supported in SDF.",
18 | )
19 | return super()._to_sdf(ctx)
20 |
21 | def _to_urdf(self, ctx: StringifyContext) -> str:
22 | ctx.warning(
23 | self,
24 | "Contact and inertial features in MJCFCollision is not supported in URDF.",
25 | )
26 | return super()._to_urdf(ctx)
27 |
28 |
29 | @dataclass
30 | class MJCFVisual(Visual):
31 | pass
32 |
33 |
34 | @dataclass
35 | class MJCFModel(Model):
36 | pass
37 |
--------------------------------------------------------------------------------
/lisdf/components/model_sdf.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | from lisdf.components.base import StringifyContext, unsupported_stringify
4 | from lisdf.components.model import Link, SurfaceContact, SurfaceFriction, Visual
5 |
6 |
7 | @dataclass
8 | @unsupported_stringify(disable_urdf=True)
9 | class SDFSurfaceContact(SurfaceContact):
10 | collide_bitmask: int = 0xFFFF
11 | collide_without_contact: bool = False
12 |
13 | def _to_sdf(self, ctx: StringifyContext) -> str:
14 | return f"""
15 | {self.collide_bitmask}
16 | {self.collide_without_contact}
17 | """
18 |
19 |
20 | @dataclass
21 | @unsupported_stringify(disable_urdf=True)
22 | class SDFSurfaceFriction(SurfaceFriction):
23 | ode_mu: float = 0.0
24 | ode_mu2: float = 0.0
25 |
26 | def _to_sdf(self, ctx: StringifyContext) -> str:
27 | return f"""
28 |
29 | {self.ode_mu}
30 | {self.ode_mu2}
31 |
32 | """
33 |
34 |
35 | @dataclass
36 | class SDFVisual(Visual):
37 | cast_shadows: bool = True
38 |
39 | def _to_sdf(self, ctx: StringifyContext) -> str:
40 | fmt = super()._to_sdf(ctx).split("\n")
41 | fmt.insert(-1, f" {self.cast_shadows}")
42 | return "\n".join(fmt)
43 |
44 | def _to_urdf(self, ctx: StringifyContext) -> str:
45 | if not self.cast_shadows:
46 | ctx.warning(self, "Visual::cast_shadows is not supported in URDF")
47 | return super()._to_urdf(ctx)
48 |
49 |
50 | @dataclass
51 | class SDFLink(Link):
52 | self_collide: bool = True
53 |
54 | def _to_sdf(self, ctx: StringifyContext) -> str:
55 | fmt = super()._to_sdf(ctx).split("\n")
56 | fmt.insert(-1, f" {self.self_collide}")
57 | return "\n".join(fmt)
58 |
59 | def _to_urdf(self, ctx: StringifyContext) -> str:
60 | if not self.self_collide:
61 | ctx.warning(self, "Visual::self_collide is not supported in URDF.")
62 | return super()._to_urdf(ctx)
63 |
--------------------------------------------------------------------------------
/lisdf/components/model_urdf.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | from lisdf.components.model import Model
4 |
5 |
6 | @dataclass
7 | class URDFModel(Model):
8 | pass
9 |
--------------------------------------------------------------------------------
/lisdf/components/pddl.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from dataclasses import dataclass, field
3 | from typing import Any, ClassVar, Dict, List, Optional, Union
4 |
5 | import numpy as np
6 |
7 | from lisdf.components.base import NAME_SCOPE_SEP, StringifyContext
8 | from lisdf.utils.printing import indent_text
9 |
10 | # Not used for now. In the future, we want to implement more complex expressions.
11 | PDDL_NOT = "not"
12 | PDDL_AND = "and"
13 | PDDL_OR = "or"
14 | PDDL_IMPLIES = "=>"
15 | PDDL_FORALL = "forall"
16 | PDDL_EXISTS = "exists"
17 | PDDL_EQUALS = "="
18 | PDDL_LESS = "<"
19 | PDDL_GREATER = ">"
20 | PDDL_LESS_EQUALS = "<="
21 | PDDL_GREATER_EQUALS = ">="
22 | PDDL_NOT_EQUALS = "!="
23 |
24 | PDDL_SCOPE_SEP = "::"
25 |
26 |
27 | def set_pddl_scope_sep(sep: str) -> None:
28 | global PDDL_SCOPE_SEP
29 | PDDL_SCOPE_SEP = sep
30 |
31 |
32 | class PDDLStringConfigurable(ABC):
33 | DEFAULT_PDDL_STRINGIFY_OPTIONS: ClassVar[Dict[str, Any]] = {
34 | "use_types": True,
35 | }
36 |
37 | def to_pddl(self, ctx: Optional[StringifyContext] = None, **kwargs) -> str:
38 | if ctx is None:
39 | for k, v in PDDLStringConfigurable.DEFAULT_PDDL_STRINGIFY_OPTIONS.items():
40 | kwargs.setdefault(k, v)
41 | ctx = StringifyContext(**kwargs)
42 | return self._to_pddl(ctx)
43 |
44 | def _to_pddl(self, ctx: StringifyContext) -> str:
45 | raise NotImplementedError()
46 |
47 |
48 | @dataclass
49 | class PDDLType(PDDLStringConfigurable):
50 | identifier: str
51 | parent: Optional["PDDLType"] = None
52 | scope: Optional[str] = None
53 |
54 | def __init__(self, identifier: str, parent: Optional["PDDLType"] = None) -> None:
55 | if PDDL_SCOPE_SEP in identifier:
56 | self.scope, self.identifier = identifier.split(PDDL_SCOPE_SEP)
57 | else:
58 | self.identifier = identifier
59 | self.scope = None
60 | self.parent = parent
61 |
62 | @property
63 | def pddl_name(self) -> str:
64 | return (
65 | self.scope + PDDL_SCOPE_SEP + self.identifier
66 | if self.scope
67 | else self.identifier
68 | )
69 |
70 | def _to_pddl(self, ctx: StringifyContext) -> str:
71 | if self.parent is None:
72 | return self.pddl_name
73 | else:
74 | return f"{self.pddl_name} - {self.parent.to_pddl(ctx)}"
75 |
76 |
77 | @dataclass
78 | class PDDLVariable(PDDLStringConfigurable):
79 | name: str
80 | type: Optional[PDDLType] = None
81 |
82 | @property
83 | def pddl_name(self) -> str:
84 | return self.name
85 |
86 | def _to_pddl(self, ctx: StringifyContext) -> str:
87 | if ctx.options["use_types"] and self.type is not None:
88 | return f"({self.name} - {self.type.to_pddl(ctx)})"
89 | else:
90 | return self.name
91 |
92 |
93 | @dataclass
94 | class PDDLPredicate(PDDLStringConfigurable):
95 | name: str
96 | arguments: List[PDDLVariable] = field(default_factory=list)
97 | return_type: PDDLType = PDDLType("bool")
98 | scope_name: Optional[str] = None
99 |
100 | def __init__(
101 | self,
102 | name: str,
103 | arguments: Optional[List[PDDLVariable]] = None,
104 | return_type: Optional[PDDLType] = None,
105 | ):
106 | if PDDL_SCOPE_SEP in name:
107 | self.scope_name, self.name = name.split(PDDL_SCOPE_SEP)
108 | else:
109 | self.name = name
110 | self.scope_name = None
111 |
112 | self.arguments = arguments if arguments is not None else []
113 | self.return_type = return_type if return_type is not None else PDDLType("bool")
114 |
115 | @property
116 | def pddl_name(self) -> str:
117 | return (
118 | self.scope_name + PDDL_SCOPE_SEP + self.name
119 | if self.scope_name
120 | else self.name
121 | )
122 |
123 | def _to_pddl(self, ctx: StringifyContext) -> str:
124 | # TODO (Jiayuan Mao@04/04): warning about non-boolean return type or
125 | # automatically convert to boolean.
126 |
127 | arguments_str = " ".join([v.to_pddl(ctx) for v in self.arguments])
128 | return f"({self.pddl_name} {arguments_str})"
129 |
130 |
131 | @dataclass
132 | class PDDLOperator(PDDLStringConfigurable):
133 | name: str
134 | arguments: List[PDDLVariable] = field(default_factory=list)
135 | preconditions: List[PDDLPredicate] = field(default_factory=list)
136 | add_effects: List[PDDLPredicate] = field(default_factory=list)
137 | del_effects: List[PDDLPredicate] = field(default_factory=list)
138 |
139 | def _to_pddl(self, ctx: StringifyContext) -> str:
140 | raise NotImplementedError()
141 |
142 |
143 | # TODO (Jiayuan Mao@04/04): add support for advanced PDDL operators.
144 | # a.k.a. PDSketch :)
145 |
146 |
147 | @dataclass
148 | class PDDLObject(PDDLStringConfigurable):
149 | name: str
150 | type: Optional[PDDLType] = None
151 | sdf_object: Optional["PDDLSDFObject"] = None
152 |
153 | @property
154 | def pddl_name(self) -> str:
155 | return self.name
156 |
157 | def _to_pddl(self, ctx: StringifyContext) -> str:
158 | if ctx.options["use_types"] and self.type is not None:
159 | return f"{self.name} - {self.type.to_pddl(ctx)}"
160 | else:
161 | return self.name
162 |
163 |
164 | @dataclass(frozen=True)
165 | class PDDLSDFObject(PDDLStringConfigurable):
166 | model_name: str
167 | name: Optional[str]
168 | sdf_type: PDDLType
169 |
170 | @property
171 | def pddl_name(self) -> str:
172 | if self.name is None:
173 | return self.model_name
174 | else:
175 | if NAME_SCOPE_SEP is None:
176 | return self.name
177 | return self.model_name + NAME_SCOPE_SEP + self.name
178 |
179 | def _to_pddl(self, ctx: StringifyContext) -> str:
180 | return self.pddl_name
181 |
182 | def to_pddl_object(self) -> PDDLObject:
183 | return PDDLObject(self.pddl_name, self.sdf_type, sdf_object=self)
184 |
185 |
186 | @dataclass
187 | class PDDLValue(PDDLStringConfigurable, ABC):
188 | pass
189 |
190 |
191 | @dataclass
192 | class PDDLLiteral(PDDLValue):
193 | value: Union[bool, int, float, str]
194 |
195 | def _to_pddl(self, ctx: StringifyContext) -> str:
196 | return str(self.value)
197 |
198 |
199 | @dataclass
200 | class PDDLVectorValue(PDDLValue):
201 | value: np.ndarray
202 |
203 | def _to_pddl(self, ctx: StringifyContext) -> str:
204 | return str(self.value.tolist())
205 |
206 |
207 | @dataclass
208 | class PDDLNamedValue(PDDLStringConfigurable):
209 | name: str
210 | value: PDDLValue
211 |
212 | def _to_pddl(self, ctx: StringifyContext) -> str:
213 | return f"{self.name}={self.value.to_pddl(ctx)}"
214 |
215 |
216 | @dataclass
217 | class PDDLProposition(PDDLStringConfigurable):
218 | predicate: PDDLPredicate
219 | arguments: List[PDDLObject] = field(default_factory=list)
220 |
221 | def _to_pddl(self, ctx: StringifyContext) -> str:
222 | arguments_str = " ".join(
223 | a.pddl_name if isinstance(a, PDDLObject) else a.to_pddl(ctx)
224 | for a in self.arguments
225 | )
226 | return f"({self.predicate.pddl_name} {arguments_str})"
227 |
228 |
229 | @dataclass
230 | class PDDLDomain(PDDLStringConfigurable):
231 | name: str
232 | types: Dict[str, PDDLType] = field(default_factory=dict)
233 | constants: Dict[str, PDDLObject] = field(default_factory=dict)
234 | predicates: Dict[str, PDDLPredicate] = field(default_factory=dict)
235 | operators: Dict[str, PDDLOperator] = field(default_factory=dict)
236 |
237 | def _to_pddl(self, ctx: StringifyContext) -> str:
238 | fmt = f"(define (domain {self.name})\n"
239 | fmt += " (:requirements :strips :typing)\n"
240 | fmt += " (:types\n"
241 | fmt += (
242 | " "
243 | + indent_text("\n".join([t.to_pddl(ctx) for t in self.types.values()]), 2)
244 | + "\n"
245 | )
246 | fmt += " )\n"
247 | fmt += " (:constants\n"
248 | fmt += (
249 | " "
250 | + indent_text(
251 | "\n".join([c.to_pddl(ctx) for c in self.constants.values()]), 2
252 | )
253 | + "\n"
254 | )
255 | fmt += " )\n"
256 | fmt += " (:predicates\n"
257 | fmt += (
258 | " "
259 | + indent_text(
260 | "\n".join([p.to_pddl(ctx) for p in self.predicates.values()]), 2
261 | )
262 | + "\n"
263 | )
264 | fmt += " )\n"
265 | fmt += ")\n"
266 | return fmt
267 |
268 |
269 | @dataclass
270 | class PDDLProblem(PDDLStringConfigurable):
271 | name: str
272 | domain: PDDLDomain
273 | objects: Dict[str, PDDLObject] = field(default_factory=dict)
274 | init: List[PDDLProposition] = field(default_factory=list)
275 | conjunctive_goal: List[PDDLPredicate] = field(default_factory=list)
276 |
277 | def _to_pddl(self, ctx: StringifyContext) -> str:
278 | fmt = f"(define (problem {self.name})\n"
279 | fmt += f" (:domain {self.domain.name})\n" if self.domain is not None else ""
280 | fmt += " (:objects\n"
281 | fmt += (
282 | " "
283 | + indent_text("\n".join([o.to_pddl(ctx) for o in self.objects.values()]), 2)
284 | + "\n"
285 | )
286 | fmt += " )\n"
287 | fmt += " (:init\n"
288 | fmt += (
289 | " "
290 | + indent_text("\n".join([p.to_pddl(ctx) for p in self.init]), 2)
291 | + "\n"
292 | )
293 | fmt += " )\n"
294 | fmt += " (:goal (and\n"
295 | fmt += (
296 | " "
297 | + indent_text("\n".join([p.to_pddl(ctx) for p in self.conjunctive_goal]), 2)
298 | + "\n"
299 | )
300 | fmt += " ))\n"
301 | fmt += ")\n"
302 | return fmt
303 |
304 |
305 | @dataclass
306 | class PDDLFunctionCall(PDDLStringConfigurable):
307 | """This is currently only used as a temporary data structure for
308 | function applications in PDDL. Since we currently only support the most
309 |     basic STRIPS functionality, the op_name can only be `and` or `not`. But in the
310 | future we might want to support more complex function applications. Then we
311 | should split this class to a base class and multiple derived classes.
312 | """
313 |
314 | op_name: str
315 | arguments: List[Union["PDDLFunctionCall", PDDLVariable, PDDLValue]] = field(
316 | default_factory=list
317 | )
318 |
319 | def _to_pddl(self, ctx: StringifyContext) -> str:
320 | arguments_str = " ".join(a.to_pddl(ctx) for a in self.arguments)
321 | return f"({self.op_name} {arguments_str})"
322 |
--------------------------------------------------------------------------------
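
A sketch assembling a toy STRIPS domain and problem from the dataclasses above and stringifying them; every name used here (`blocks`, `on-table`, `b1`) is illustrative:

```python
# Sketch of the PDDL components in lisdf/components/pddl.py.
from lisdf.components.pddl import (
    PDDLDomain,
    PDDLObject,
    PDDLPredicate,
    PDDLProblem,
    PDDLProposition,
    PDDLType,
    PDDLVariable,
)

block = PDDLType("block")
on_table = PDDLPredicate("on-table", [PDDLVariable("?b", block)])

domain = PDDLDomain(
    name="blocks",
    types={"block": block},
    predicates={"on-table": on_table},
)

b1 = PDDLObject("b1", block)
problem = PDDLProblem(
    name="blocks-1",
    domain=domain,
    objects={"b1": b1},
    init=[PDDLProposition(on_table, [b1])],
)

print(domain.to_pddl())   # (define (domain blocks) ...)
print(problem.to_pddl())  # (define (problem blocks-1) ...)
```
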
/lisdf/components/scene.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from typing import Dict, List, Optional, Union
3 |
4 | from lisdf.components.base import NAME_SCOPE_SEP, StringConfigurable, StringifyContext
5 | from lisdf.components.gui import GUI
6 | from lisdf.components.model import Joint, Link, Model, SDFInclude, URDFInclude
7 | from lisdf.components.state import WorldState
8 | from lisdf.utils.printing import indent_text
9 |
10 |
11 | @dataclass
12 | class World(StringConfigurable):
13 | name: Optional[str] = None
14 | static: bool = False
15 | models: List[Union[Model, SDFInclude, URDFInclude]] = field(default_factory=list)
16 | states: List[WorldState] = field(default_factory=list)
17 | gui: Optional[GUI] = None
18 |
19 | def _to_sdf(self, ctx: StringifyContext) -> str:
20 | name_str = f' name="{self.name}"' if self.name is not None else ""
21 | fmt = ""
22 | fmt += f"\n"
23 | fmt += f" {self.static}\n"
24 | for model in self.models:
25 | fmt += f" {indent_text(model.to_sdf(ctx))}\n"
26 | for state in self.states:
27 | fmt += f" {indent_text(state.to_sdf(ctx))}\n"
28 | if self.gui is not None:
29 | fmt += f" {indent_text(self.gui.to_sdf(ctx))}\n"
30 | fmt += "\n"
31 | return fmt
32 |
33 | def _to_urdf(self, ctx: StringifyContext) -> str:
34 | assert len(self.models) == 1, "URDF only supports one model in a world."
35 | return self.models[0].to_urdf(ctx)
36 |
37 |
38 | class LISDF(StringConfigurable):
39 | SUPPORTED_VERSIONS = {"1.5", "1.6", "1.7", "1.8", "1.9"}
40 |
41 | def __init__(self, sdf_version: str = "1.9"):
42 | self.sdf_version = ""
43 | self.set_sdf_version(sdf_version)
44 |
45 | self.model: Optional[Model] = None
46 | self.worlds: List[World] = list()
47 |
48 | self.link_dict: Dict[str, Link] = dict()
49 | self.joint_dict: Dict[str, Joint] = dict()
50 | self.model_dict: Dict[str, Model] = dict()
51 |
52 | def set_sdf_version(self, version: str) -> None:
53 | split = version.split(".")
54 |
55 | if len(split) != 2:
56 | raise ValueError("The version attribute should be in the form 'x.y'")
57 |
58 | if split[0] == "" or split[1] == "":
59 | raise ValueError("Empty major or minor number is not allowed")
60 |
61 | if int(split[0]) < 0 or int(split[1]) < 0:
62 | raise ValueError("Version number must be positive")
63 |
64 | if version not in self.SUPPORTED_VERSIONS:
65 | raise ValueError(
66 | "Invalid version; only %s is supported"
67 | % (",".join(self.SUPPORTED_VERSIONS))
68 | )
69 | self.sdf_version = version
70 |
71 | def build_lookup_tables(self):
72 | assert len(self.worlds) <= 1, "Only one world is supported."
73 |
74 | def add_model(model: Model, model_name: Optional[str] = None):
75 | if model_name is None:
76 | model_name = model.name
77 |
78 | assert (
79 | model_name not in self.model_dict
80 | ), f"Model name already exists: {model_name}."
81 | self.model_dict[model_name] = model
82 |
83 | for link in model.links:
84 | link_name = (
85 | model_name + NAME_SCOPE_SEP + link.name
86 | if NAME_SCOPE_SEP is not None
87 | else link.name
88 | )
89 | assert (
90 | link_name not in self.link_dict
91 | ), f"Link name already exists: {link_name}."
92 | self.link_dict[link_name] = link
93 | for joint in model.joints:
94 | joint_name = (
95 | model_name + NAME_SCOPE_SEP + joint.name
96 | if NAME_SCOPE_SEP is not None
97 | else joint.name
98 | )
99 | assert (
100 | joint_name not in self.joint_dict
101 | ), f"Joint name already exists: {joint_name}."
102 | self.joint_dict[joint_name] = joint
103 |
104 | if self.model is not None:
105 | add_model(self.model)
106 | else:
107 | for world in self.worlds:
108 |             # NB(Jiayuan Mao): Currently we only support at most one world
109 | # in a LISDF file. Thus, we don't need to prepend the world name
110 | # to the model name.
111 | for model in world.models:
112 | if isinstance(model, URDFInclude):
113 | add_model(model.content, model.name)
114 | elif isinstance(model, SDFInclude):
115 | if model.content.model is not None:
116 | add_model(model.content.model, model.name)
117 | else:
118 | for model in model.content.worlds[0].models:
119 | add_model(model, model.name)
120 | elif isinstance(model, Model):
121 | add_model(model)
122 | else:
123 | raise TypeError(f"Unsupported model type: {type(model)}.")
124 |
125 | def _to_sdf(self, ctx: StringifyContext) -> str:
126 |         fmt = '<?xml version="1.0" ?>\n'
127 |         fmt += f'<sdf version="{self.sdf_version}">\n'
128 | if self.model is not None:
129 | fmt += f" {indent_text(self.model.to_sdf(ctx))}\n"
130 | for world in self.worlds:
131 | fmt += f" {indent_text(world.to_sdf(ctx))}\n"
132 | fmt += "\n"
133 | return fmt
134 |
135 | def _to_urdf(self, ctx: StringifyContext) -> str:
136 | assert (
137 | len(self.worlds) == 0 and self.model is not None
138 | ), "URDF only supports one model in a definition file."
139 | return self.model.to_urdf(ctx)
140 |
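
Usage note (illustrative, not part of the source file): the version checks in set_sdf_version can be exercised directly. A minimal sketch, using the `lisdf.components` re-export that the parsers below rely on (`import lisdf.components as C`):

import lisdf.components as C

lisdf = C.LISDF(sdf_version="1.9")
try:
    lisdf.set_sdf_version("1.4")   # well-formed but not in SUPPORTED_VERSIONS
except ValueError as err:
    print(err)
for bad in ("1", "1.", "x.y"):     # malformed strings fail the structural checks
    try:
        lisdf.set_sdf_version(bad)
    except ValueError as err:
        print(bad, "->", err)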
--------------------------------------------------------------------------------
/lisdf/components/sensor.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from dataclasses import dataclass
3 | from typing import ClassVar, Dict, Type
4 |
5 | from lisdf.components.base import (
6 | StringConfigurable,
7 | StringifyContext,
8 | unsupported_stringify,
9 | )
10 |
11 |
12 | @dataclass
13 | class Sensor(StringConfigurable, ABC):
14 | name: str
15 |
16 | type: ClassVar[str] = "Sensor"
17 | type_mapping: ClassVar[Dict[str, Type["Sensor"]]] = dict()
18 |
19 | def __init_subclass__(cls, type: str, **kwargs):
20 | super().__init_subclass__(**kwargs)
21 | setattr(cls, "type", type)
22 | Sensor.type_mapping[type] = cls
23 |
24 | @staticmethod
25 | def from_type(type, **kwargs) -> "Sensor":
26 | return Sensor.type_mapping[type](**kwargs)
27 |
28 |
29 | @dataclass
30 | @unsupported_stringify(disable_urdf=True)
31 | class CameraSensor(Sensor, type="camera"):
32 | def _to_sdf(self, ctx: StringifyContext) -> str:
33 | return f'<sensor name="{self.name}" type="{self.type}"/>'
34 |
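
The __init_subclass__ hook above registers every Sensor subclass in Sensor.type_mapping under its type tag, so sensors can be constructed by name. A minimal sketch (DepthCameraSensor is a hypothetical subclass, shown only to illustrate registration):

cam = Sensor.from_type("camera", name="head_camera")
assert isinstance(cam, CameraSensor) and cam.type == "camera"

@dataclass
class DepthCameraSensor(Sensor, type="depth_camera"):  # hypothetical subclass
    pass

assert isinstance(Sensor.from_type("depth_camera", name="d0"), DepthCameraSensor)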
--------------------------------------------------------------------------------
/lisdf/components/shape.py:
--------------------------------------------------------------------------------
1 | """
2 | This file defines the basic structures for shapes, including built-in shapes and meshes.
3 |
4 | TODO(Jiayuan Mao @ 03/23): consider object and material mapping?
5 | """
6 |
7 | from abc import ABC
8 | from dataclasses import dataclass
9 | from typing import ClassVar, Dict, Optional, Type
10 |
11 | from lisdf.components.base import StringConfigurable, StringifyContext
12 | from lisdf.utils.typing import Vector3f
13 |
14 |
15 | @dataclass
16 | class ShapeInfo(StringConfigurable, ABC):
17 | type: ClassVar[str] = "ShapeInfo"
18 | type_mapping: ClassVar[Dict[str, Type["ShapeInfo"]]] = dict()
19 |
20 | def __init_subclass__(cls, type: str, **kwargs):
21 | super().__init_subclass__(**kwargs)
22 | setattr(cls, "type", type)
23 | ShapeInfo.type_mapping[type] = cls
24 |
25 | @staticmethod
26 | def from_type(type, **kwargs) -> "ShapeInfo":
27 | return ShapeInfo.type_mapping[type](**kwargs)
28 |
29 |
30 | @dataclass
31 | class BoxShapeInfo(ShapeInfo, type="box"):
32 | size: Vector3f
33 |
34 | def _to_sdf(self, ctx: StringifyContext) -> str:
35 | return f"""
36 | {self.size[0]} {self.size[1]} {self.size[2]}
37 |
38 | """
39 |
40 | def _to_urdf(self, ctx: StringifyContext) -> str:
41 | return f'<box size="{self.size[0]} {self.size[1]} {self.size[2]}"/>'
42 |
43 |
44 | @dataclass
45 | class SphereShapeInfo(ShapeInfo, type="sphere"):
46 | radius: float
47 |
48 | def _to_sdf(self, ctx: StringifyContext) -> str:
49 | return f"""
50 | {self.radius}
51 |
52 | """
53 |
54 | def _to_urdf(self, ctx: StringifyContext) -> str:
55 | return f'<sphere radius="{self.radius}"/>'
56 |
57 |
58 | @dataclass
59 | class CylinderShapeInfo(ShapeInfo, type="cylinder"):
60 | radius: float
61 | half_height: float # follows the mujoco standard.
62 |
63 | @property
64 | def length(self) -> float:
65 | return self.half_height * 2
66 |
67 | def _to_sdf(self, ctx: StringifyContext) -> str:
68 | return f"""
69 | {self.radius}
70 | {self.length}
71 |
72 | """
73 |
74 | def _to_urdf(self, ctx: StringifyContext) -> str:
75 | return f'<cylinder radius="{self.radius}" length="{self.length}"/>'
76 |
77 |
78 | @dataclass
79 | class CapsuleShapeInfo(ShapeInfo, type="capsule"):
80 | radius: float
81 | half_height: float # follows the mujoco standard.
82 |
83 | @property
84 | def length(self) -> float:
85 | return self.half_height * 2
86 |
87 | def _to_sdf(self, ctx: StringifyContext) -> str:
88 | return f"""
89 | {self.radius}
90 | {self.length}
91 |
92 | """
93 |
94 | def _to_urdf(self, ctx: StringifyContext) -> str:
95 | return f'<capsule radius="{self.radius}" length="{self.length}"/>'
96 |
97 |
98 | @dataclass
99 | class MeshShapeInfo(ShapeInfo, type="mesh"):
100 | filename: str
101 | size: Vector3f
102 |
103 | def _to_sdf(self, ctx: StringifyContext) -> str:
104 | return f"""
105 | {self.filename}
106 | {self.size[0]} {self.size[1]} {self.size[2]}
107 |
108 | """
109 |
110 | def _to_urdf(self, ctx: StringifyContext) -> str:
111 | return (
112 | f'<mesh filename="{self.filename}" '
113 | f'scale="{self.size[0]} '
114 | f'{self.size[1]} '
115 | f'{self.size[2]}"/>'
116 | )
117 |
118 |
119 | @dataclass
120 | class PlaneShapeInfo(ShapeInfo, type="plane"):
121 | half_width: float # follows the mujoco standard.
122 | half_height: float
123 | normal: Optional[Vector3f] = None
124 |
125 | @property
126 | def width(self) -> float:
127 | return self.half_width * 2
128 |
129 | @property
130 | def height(self) -> float:
131 | return self.half_height * 2
132 |
133 | def _to_sdf(self, ctx: StringifyContext) -> str:
134 | return f"""
135 | {self.width} {self.height}
136 |
137 | """
138 |
139 | def _to_urdf(self, ctx: StringifyContext) -> str:
140 | return f'<plane size="{self.width} {self.height}"/>'
141 |
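
Cylinders, capsules, and planes above store half extents (the MuJoCo convention), while the emitted SDF/URDF uses full lengths; the `length`, `width`, and `height` properties do the doubling. A small sketch (sizes passed as plain tuples for brevity):

cylinder = CylinderShapeInfo(radius=0.05, half_height=0.1)
assert cylinder.length == 0.2  # full length, as written into <length>

# ShapeInfo.from_type dispatches on the tag registered via __init_subclass__.
box = ShapeInfo.from_type("box", size=(0.1, 0.2, 0.3))
assert isinstance(box, BoxShapeInfo)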
--------------------------------------------------------------------------------
/lisdf/components/srdf.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from typing import List, Optional
3 |
4 | from lisdf.components.base import (
5 | StringConfigurable,
6 | StringifyContext,
7 | unsupported_stringify,
8 | )
9 | from lisdf.utils.printing import indent_text
10 |
11 |
12 | @dataclass
13 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
14 | class JointIdentifier(StringConfigurable):
15 | name: str
16 |
17 | def _to_lisdf(self, ctx: StringifyContext) -> str:
18 | return f'<joint name="{self.name}" />'
19 |
20 |
21 | @dataclass
22 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
23 | class LinkIdentifier(StringConfigurable):
24 | name: str
25 |
26 | def _to_lisdf(self, ctx: StringifyContext) -> str:
27 | return f'<link name="{self.name}" />'
28 |
29 |
30 | @dataclass
31 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
32 | class ChainIdentifier(StringConfigurable):
33 | base_link_name: str
34 | tip_link_name: str
35 |
36 | def _to_lisdf(self, ctx: StringifyContext) -> str:
37 | return f'<chain base_link="{self.base_link_name}" tip_link="{self.tip_link_name}" />'  # noqa: E501
38 |
39 |
40 | @dataclass
41 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
42 | class GroupIdentifier(StringConfigurable):
43 | name: str
44 |
45 | def _to_lisdf(self, ctx: StringifyContext) -> str:
46 | return f'<group name="{self.name}" />'
47 |
48 |
49 | @dataclass
50 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
51 | class Group(StringConfigurable):
52 | name: str
53 | joints: List[JointIdentifier] = field(default_factory=list)
54 | links: List[LinkIdentifier] = field(default_factory=list)
55 | chains: List[ChainIdentifier] = field(default_factory=list)
56 | sub_groups: List[GroupIdentifier] = field(default_factory=list)
57 |
58 | def _to_lisdf(self, ctx: StringifyContext) -> str:
59 | fmt = f'<group name="{self.name}">\n'
60 | for joint in self.joints:
61 | fmt += indent_text(joint.to_lisdf(ctx), strip=False)
62 | for link in self.links:
63 | fmt += indent_text(link.to_lisdf(ctx), strip=False)
64 | for chain in self.chains:
65 | fmt += indent_text(chain.to_lisdf(ctx), strip=False)
66 | for sub_group in self.sub_groups:
67 | fmt += indent_text(sub_group.to_lisdf(ctx), strip=False)
68 | fmt += ""
69 | return fmt
70 |
71 |
72 | @dataclass
73 | @unsupported_stringify(disable_sdf=True, disable_urdf=True)
74 | class DisableCollisions(StringConfigurable):
75 | link1_name: str
76 | link2_name: str
77 | reason: Optional[str] = None
78 |
79 | def _to_lisdf(self, ctx: StringifyContext) -> str:
80 | reason_str = f' reason="{self.reason}"' if self.reason else ""
81 | return f'<disable_collisions link1="{self.link1_name}" link2="{self.link2_name}"{reason_str} />'  # noqa: E501
82 |
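
A sketch of composing these SRDF components; the rendered XML in the comment follows the _to_lisdf implementations above:

group = Group(name="arm")
group.joints.append(JointIdentifier("joint_1"))
group.chains.append(ChainIdentifier("base_link", "tool_link"))

dc = DisableCollisions("link_a", "link_b", reason="Adjacent")
# dc.to_lisdf(ctx) renders roughly:
# <disable_collisions link1="link_a" link2="link_b" reason="Adjacent" />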
--------------------------------------------------------------------------------
/lisdf/components/state.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from typing import List, Optional
3 |
4 | from lisdf.components.base import (
5 | Pose,
6 | StringConfigurable,
7 | StringifyContext,
8 | unsupported_stringify,
9 | )
10 | from lisdf.utils.printing import indent_text
11 |
12 |
13 | @dataclass
14 | @unsupported_stringify(disable_urdf=True)
15 | class JointAxisState(StringConfigurable):
16 | axis: int
17 | value: float
18 |
19 | def _to_sdf(self, ctx: StringifyContext) -> str:
20 | return f'<angle axis="{self.axis}">{self.value}</angle>'
21 |
22 |
23 | @dataclass
24 | @unsupported_stringify(disable_urdf=True)
25 | class JointState(StringConfigurable):
26 | name: str
27 | axis_states: List[JointAxisState] = field(default_factory=list)
28 |
29 | def _to_sdf(self, ctx: StringifyContext) -> str:
30 | fmt = f'<joint name="{self.name}">\n'
31 | for axis_state in self.axis_states:
32 | fmt += indent_text(axis_state.to_sdf(ctx)) + "\n"
33 | fmt += ""
34 | return fmt
35 |
36 |
37 | @dataclass
38 | @unsupported_stringify(disable_urdf=True)
39 | class LinkState(StringConfigurable):
40 | name: str
41 | pose: Optional[Pose] = None
42 |
43 | def _to_sdf(self, ctx: StringifyContext) -> str:
44 | fmt = f'<link name="{self.name}">\n'
45 | if self.pose is not None:
46 | fmt += indent_text(self.pose.to_sdf(ctx)) + "\n"
47 | fmt += ""
48 | return fmt
49 |
50 |
51 | @dataclass
52 | @unsupported_stringify(disable_urdf=True)
53 | class ModelState(StringConfigurable):
54 | name: str
55 | parent: Optional[str] = None
56 | pose: Optional[Pose] = None
57 | joint_states: List[JointState] = field(default_factory=list)
58 | link_states: List[LinkState] = field(default_factory=list)
59 |
60 | def _to_sdf(self, ctx: StringifyContext) -> str:
61 | fmt = f'<model name="{self.name}">\n'
62 | if self.pose is not None:
63 | fmt += indent_text(self.pose.to_sdf(ctx)) + "\n"
64 | for joint_state in self.joint_states:
65 | fmt += indent_text(joint_state.to_sdf(ctx)) + "\n"
66 | for link_state in self.link_states:
67 | fmt += indent_text(link_state.to_sdf(ctx)) + "\n"
68 | fmt += ""
69 | return fmt
70 |
71 |
72 | @dataclass
73 | @unsupported_stringify(disable_urdf=True)
74 | class WorldState(StringConfigurable):
75 | name: str
76 | model_states: List[ModelState] = field(default_factory=list)
77 |
78 | def _to_sdf(self, ctx: StringifyContext) -> str:
79 | fmt = f'<state world_name="{self.name}">\n'
80 | for model_state in self.model_states:
81 | fmt += indent_text(model_state.to_sdf(ctx)) + "\n"
82 | fmt += ""
83 | return fmt
84 |
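
These state classes nest the same way the emitted SDF nests. A minimal sketch (poses omitted, since Pose lives in lisdf.components.base):

world_state = WorldState(name="world_1")
model_state = ModelState(name="mug")
model_state.joint_states.append(
    JointState(name="lid_hinge", axis_states=[JointAxisState(axis=0, value=0.5)])
)
world_state.model_states.append(model_state)
# world_state.to_sdf(ctx) renders roughly:
# <state world_name="world_1">
#   <model name="mug">
#     <joint name="lid_hinge"><angle axis="0">0.5</angle></joint>
#   </model>
# </state>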
--------------------------------------------------------------------------------
/lisdf/parsing/__init__.py:
--------------------------------------------------------------------------------
1 | # A large chunk of the code in the `lisdf.parsing` modules and submodules was
2 | # modified from https://github.com/ros/urdf_parser_py to support recursion.
3 |
4 |
5 | from .all import load_all # noqa: F401
6 | from .mjcf import load_mjcf # noqa: F401
7 | from .pddl_j import load_pddl, load_pddl_string # noqa: F401
8 | from .sdf_j import load_sdf, load_sdf_string # noqa: F401
9 | from .urdf_j import load_urdf, load_urdf_string # noqa: F401
10 |
--------------------------------------------------------------------------------
/lisdf/parsing/all.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple
2 |
3 | import lisdf.components as C
4 | from lisdf.parsing.pddl_j import load_pddl
5 | from lisdf.parsing.sdf_j import load_sdf
6 |
7 |
8 | def load_all(
9 | lisdf_file: str, domain_file: str, problem_file: str, *, verbose: bool = False
10 | ) -> Tuple[C.LISDF, C.PDDLDomain, C.PDDLProblem]:
11 | lisdf = load_sdf(lisdf_file, verbose=verbose)
12 | domain, problem = load_pddl(domain_file, problem_file, lisdf=lisdf)
13 | return lisdf, domain, problem
14 |
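
Typical usage, with hypothetical file names:

lisdf, domain, problem = load_all(
    "scene.lisdf",    # hypothetical LISDF scene file
    "domain.pddl",    # hypothetical PDDL domain
    "problem.pddl",   # hypothetical PDDL problem
    verbose=True,
)
print(domain.name, problem.name, len(lisdf.worlds))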
--------------------------------------------------------------------------------
/lisdf/parsing/lispddl-builtins.pddl:
--------------------------------------------------------------------------------
1 | (define
2 | (domain lispddl-builtins)
3 | (:types
4 | sdf::object
5 | sdf::value
6 | sdf::link - sdf::object
7 | sdf::joint - sdf::object
8 | sdf::group - sdf::object
9 | sdf::model - sdf::object
10 | sdf::link-pose - sdf::value
11 | sdf::joint-pose - sdf::value
12 | sdf::group-pose - sdf::value
13 | sdf::model-pose - sdf::value
14 | )
15 | (:predicates
16 | ; predicates for downstream parsers that do not support typing.
17 | (sdf::is-link ?link)
18 | (sdf::is-joint ?joint)
19 | (sdf::is-group ?group)
20 | (sdf::is-model ?model)
21 | (sdf::is-link-pose ?pose)
22 | (sdf::is-joint-pose ?pose)
23 | (sdf::is-group-pose ?pose)
24 | (sdf::is-model-pose ?pose)
25 |
26 | ; specify poses for link, joint, group, and model.
27 | (sdf::at-link-pose ?link - sdf::link ?pose - sdf::link-pose)
28 | (sdf::at-joint-pose ?joint - sdf::joint ?pose - sdf::joint-pose)
29 | (sdf::at-group-pose ?group - sdf::group ?pose - sdf::group-pose)
30 | (sdf::at-model-pose ?model - sdf::model ?pose - sdf::model-pose)
31 | )
32 | )
33 |
--------------------------------------------------------------------------------
/lisdf/parsing/lispddl-v1.0.grammar:
--------------------------------------------------------------------------------
1 | start: definition | expr
2 |
3 | definition: "(" "define" definition_decl supp_definitions ")"
4 |
5 | definition_decl: "(" definition_type definition_name ")"
6 | ?definition_type: VARNAME
7 | ?definition_name: VARNAME
8 |
9 | supp_definitions: supp_definition* supp_definition
10 | ?supp_definition: domain_definition
11 | | requirements_definition
12 | | types_definition
13 | | constants_definition
14 | | predicates_definition
15 | | objects_definition
16 | | init_definition
17 | | goal_definition
18 |
19 | // domain
20 | domain_definition: "(" ":" "domain" domain_name ")"
21 | domain_name: VARNAME
22 |
23 | // requirements
24 | requirements_definition: "(" ":" "requirements" requirement_name* ")"
25 | requirement_name: ":" VARNAME
26 |
27 | // types
28 | types_definition: "(" ":" "types" type_definition* ")"
29 | type_definition: object_type_name+
30 | | object_type_name+ "-" parent_type_name
31 | parent_type_name: object_type_name
32 |
33 | // constants
34 | constants_definition: "(" ":" "constants" constant_definition* ")"
35 | constant_definition: allconstant
36 |
37 | // predicates
38 | predicates_definition: "(" ":" "predicates" predicate_definition* ")"
39 | predicate_definition: "(" predicate_name allvariable* ")"
40 | ?predicate_name: function_name | method_name
41 |
42 | // objects
43 | objects_definition: "(" ":" "objects" object_definition* ")"
44 | object_definition: allconstant
45 |
46 | // init
47 | init_definition: "(" ":" "init" init_definition_item* ")"
48 | init_definition_item: function_call
49 |
50 | // goal
51 | goal_definition: "(" ":" "goal" function_call ")"
52 |
53 | // variable name
54 | %import common.LETTER
55 | %import common.DIGIT
56 | VARNAME: LETTER ("_"|"-"|LETTER|DIGIT)*
57 | CONSTNAME: (LETTER|"@"|"_") ("_"|"-"|":"|"#"|LETTER|DIGIT)*
58 |
59 | // variables and constants
60 | VARIABLE: "?" VARNAME
61 | variable: VARIABLE
62 | CONSTANT: CONSTNAME
63 | constant: CONSTANT
64 | typedvariable: variable "-" object_type_name
65 | typedconstant: constant "-" object_type_name
66 |
67 | ?allconstant: constant | typedconstant
68 | ?allvariable: variable | typedvariable
69 |
70 | // type name
71 | type_name: CONSTANT
72 | object_type_name: type_name
73 |
74 | // literal types
75 | ?literal: bool | number | string | list
76 | TRUE: "true"
77 | FALSE: "false"
78 | bool: TRUE | FALSE
79 |
80 | // numbers
81 | INT: DIGIT+
82 | SIGNED_INT: ["+"|"-"] INT
83 | // DECIMAL: INT "." INT? | "." INT
84 | DECIMAL: INT "." INT?
85 |
86 | // float = /-?\d+(\.\d+)?([eE][+-]?\d+)?/
87 | _EXP: ("e"|"E") SIGNED_INT
88 | FLOAT: INT _EXP | DECIMAL _EXP?
89 | SIGNED_FLOAT: ["+"|"-"] FLOAT
90 |
91 | NUMBER: FLOAT | INT
92 | SIGNED_NUMBER: ["+"|"-"] NUMBER
93 |
94 | int: INT | SIGNED_INT
95 | float: NUMBER | SIGNED_NUMBER
96 | ?number: int | float
97 |
98 | %import common.ESCAPED_STRING
99 | string: ESCAPED_STRING
100 | list: "[" "]"
101 | | "[" literal ("," literal)* "]"
102 | | "(" literal ("," literal)* ")"
103 |
104 | named_literal: CONSTNAME "=" literal
105 |
106 | // expression
107 | ?expr_function_call: function_call | quantified_function_call
108 | ?expr: variable | constant | literal | named_literal | expr_function_call
109 |
110 | function_call: "(" (function_name|method_name) expr* ")"
111 | function_name: VARNAME
112 | method_name: VARNAME SCOPE_SEP VARNAME
113 | SCOPE_SEP: "::"
114 |
115 | EXISTS: "exists"
116 | FORALL: "forall"
117 | IOTA: "iota"
118 | ?quantifier: EXISTS | FORALL | IOTA
119 | quantified_function_call: "(" quantifier "(" typedvariable ")" expr ")"
120 |
121 | %import common.WS
122 | %ignore WS
123 |
124 | %import common.NEWLINE
125 | COMMENT: ";" /(.)+/ NEWLINE
126 | | "#" /(.)+/ NEWLINE
127 | %ignore COMMENT
128 |
--------------------------------------------------------------------------------
/lisdf/parsing/parse_sdf.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Union
3 |
4 | from lisdf.parsing.sdf import SDF, Collision, Link, Mesh, Visual
5 | from lisdf.parsing.urdf import Robot as URDF
6 |
7 |
8 | def _handle_component(component, model_path: str) -> None:
9 | """
10 | Handle component and inject URI into link component geometry
11 | """
12 | if isinstance(component, Link):
13 | for link_component in component.aggregate_order:
14 | # TODO: are there other types we need to consider?
15 | if isinstance(link_component, (Collision, Visual)) and isinstance(
16 | link_component.geometry, Mesh
17 | ):
18 | link_component.geometry.uri = os.path.join(
19 | model_path, link_component.geometry.uri
20 | )
21 |
22 |
23 | def inject_absolute_path(sdf: SDF, model_path: str) -> SDF:
24 | """
25 | This function replaces relative paths to object and material
26 | files with absolute paths so the sdf object is self-contained.
27 | """
28 | for world in sdf.aggregate_order:
29 | for model in world.models:
30 | for component in model.aggregate_order:
31 | _handle_component(component, model_path)
32 | return sdf
33 |
34 |
35 | def load_urdf(model_name: str, models_dir: str) -> URDF:
36 | urdf_path = os.path.join(models_dir, model_name)
37 | with open(urdf_path) as f:
38 | xml_str = f.read()
39 | urdf = URDF.from_xml_string(xml_str)
40 | return urdf
41 |
42 |
43 | def load_sdf(model_name: str, models_dir: str) -> SDF:
44 | sdf_path = os.path.join(models_dir, model_name)
45 | with open(sdf_path) as f:
46 | xml_str = f.read()
47 | sdf = SDF.from_xml_string(xml_str)
48 | for world in sdf.aggregate_order:
49 | # Load all the includes in the world
50 | for include in world.includes:
51 | include_model: Union[SDF, URDF]
52 | if include.uri.endswith("urdf"):
53 | include_model = load_urdf(include.uri, models_dir=models_dir)
54 | else:
55 | include_model = load_sdf(include.model_name, models_dir=models_dir)
56 |
57 | world.models.append(include_model)
58 | model_path = os.path.join(models_dir, os.path.dirname(model_name))
59 | return inject_absolute_path(sdf, model_path)
60 |
61 |
62 | if __name__ == "__main__": # pragma: no cover
63 | import lisdf_models
64 |
65 | models_dir = os.path.dirname(lisdf_models.__file__)
66 | sdf_test = "mud_test/model.sdf"
67 | sdf_results = load_sdf(sdf_test, models_dir=models_dir)
68 | # print(sdf_results)
69 |
--------------------------------------------------------------------------------
/lisdf/parsing/pddl_j.py:
--------------------------------------------------------------------------------
1 | import os.path as osp
2 | from typing import Optional, Tuple
3 |
4 | import numpy as np
5 | from lark import Lark, Transformer, Tree, v_args
6 |
7 | import lisdf.components as C
8 | from lisdf.components.base import NAME_SCOPE_SEP
9 |
10 | # lark.v_args
11 | inline_args = v_args(inline=True)
12 | DEBUG_LOG_COMPOSE = False
13 |
14 |
15 | class LISPDDLParser(object):
16 | grammar_file = osp.join(osp.dirname(__file__), "lispddl-v1.0.grammar")
17 | builtins_file = osp.join(osp.dirname(__file__), "lispddl-builtins.pddl")
18 |
19 | def __init__(self):
20 | with open(type(self).grammar_file) as f:
21 | self.lark = Lark(f, propagate_positions=True)
22 |
23 | def load(self, file):
24 | with open(file) as f:
25 | return self.lark.parse(f.read())
26 |
27 | def loads(self, string):
28 | return self.lark.parse(string)
29 |
30 | def transform(
31 | self, lisdf: C.LISDF, domain_tree: Tree, problem_tree
32 | ) -> Tuple[C.PDDLDomain, C.PDDLProblem]:
33 | domain_tree = domain_tree.children[0]
34 | problem_tree = problem_tree.children[0]
35 |
36 | domain = C.PDDLDomain("")
37 | transformer = PDDLVisitor(lisdf, domain, C.PDDLProblem("", domain))
38 | transformer.set_mode("extend")
39 | builtins = self.load(type(self).builtins_file).children[0]
40 | transformer.transform(builtins)
41 | transformer.set_mode("domain")
42 | transformer.transform(domain_tree)
43 | transformer.set_mode("problem")
44 | transformer.transform(problem_tree)
45 | return transformer.domain, transformer.problem
46 |
47 |
48 | class PDDLVisitor(Transformer):
49 | def __init__(
50 | self, lisdf: Optional[C.LISDF], domain: C.PDDLDomain, problem: C.PDDLProblem
51 | ) -> None:
52 | super().__init__()
53 |
54 | self.lisdf = lisdf
55 | self.domain = domain
56 | self.problem = problem
57 | self.mode = "domain"
58 |
59 | def set_mode(self, mode: str) -> None:
60 | assert mode in ("domain", "problem", "extend")
61 | self.mode = mode
62 |
63 | @inline_args
64 | def definition_decl(self, definition_type, definition_name):
65 | if self.mode == "domain":
66 | assert definition_type.value == "domain"
67 | self.domain.name = definition_name.value
68 | elif self.mode == "problem":
69 | assert definition_type.value == "problem"
70 | self.problem.name = definition_name.value
71 | elif self.mode == "extend":
72 | assert definition_type.value == "domain"
73 |
74 | def type_definition(self, args):
75 | # Very ugly hack to handle multi-line definitions in PDDL.
76 | # In PDDL, type definitions can be separated by newlines.
77 | # This kind of breaks the parsing strategy of ignoring all whitespace.
78 | # More specifically, consider the following two definitions:
79 | # ```
80 | # (:types
81 | # a
82 | # b - a
83 | # )
84 | # ```
85 | # and
86 | # ```
87 | # (:types
88 | # a b - a
89 | # )
90 | if args[-1].data == "parent_type_name":
91 | parent_line, parent_name = args[-1].children[0].children[0]
92 | args = args[:-1]
93 | else:
94 | parent_line, parent_name = -1, None
95 | for arg in args:
96 | arg_line, arg_name = arg.children[0]
97 | if arg_line == parent_line:
98 | parent_type = self.domain.types[parent_name]
99 | self.domain.types[arg_name] = C.PDDLType(arg_name, parent_type)
100 | else:
101 | self.domain.types[arg_name] = C.PDDLType(arg_name, None)
102 |
103 | @inline_args
104 | def constant_definition(self, constant):
105 | self.domain.constants[constant.name] = constant
106 |
107 | @inline_args
108 | def predicate_definition(self, name, *args):
109 | self.domain.predicates[name] = C.PDDLPredicate(name, args)
110 |
111 | @inline_args
112 | def object_definition(self, constant):
113 | self.problem.objects[constant.name] = constant
114 |
115 | @inline_args
116 | def init_definition_item(self, proposition):
117 | assert isinstance(proposition, C.PDDLProposition)
118 | self.problem.init.append(proposition)
119 | for arg in proposition.arguments:
120 | if isinstance(arg, C.PDDLObject):
121 | if (
122 | arg.name not in self.problem.objects
123 | and arg.name not in self.domain.constants
124 | and arg.sdf_object is None
125 | ):
126 | raise NameError(
127 | "Unknown object: {} in {}.".format(
128 | arg.name, proposition.to_pddl()
129 | )
130 | )
131 |
132 | @inline_args
133 | def goal_definition(self, goal):
134 | assert goal.op_name == "and"
135 | for g in goal.arguments:
136 | self.problem.conjunctive_goal.append(g)
137 |
138 | @inline_args
139 | def variable(self, name):
140 | return C.PDDLVariable(name.value)
141 |
142 | @inline_args
143 | def typedvariable(self, name, typename):
144 | # name is of type `PDDLVariable`.
145 | return C.PDDLVariable(name.name, self.domain.types[typename.children[0][1]])
146 |
147 | @inline_args
148 | def constant(self, name):
149 | name = name.value
150 | sdf_object = None
151 | if self.lisdf is not None:
152 | if name in self.lisdf.model_dict:
153 | sdf_object = C.PDDLSDFObject(
154 | name, None, self.domain.types["sdf::model"]
155 | )
156 | elif name in self.lisdf.link_dict:
157 | model_name, lname = (
158 | ("", name) if NAME_SCOPE_SEP is None else name.split(NAME_SCOPE_SEP)
159 | )
160 | sdf_object = C.PDDLSDFObject(
161 | model_name, lname, self.domain.types["sdf::link"]
162 | )
163 | elif name in self.lisdf.joint_dict:
164 | model_name, jname = (
165 | ("", name) if NAME_SCOPE_SEP is None else name.split(NAME_SCOPE_SEP)
166 | )
167 | sdf_object = C.PDDLSDFObject(
168 | model_name, jname, self.domain.types["sdf::joint"]
169 | )
170 | else:
171 | # The name refers to a standard PDDL object, not linked
172 | # to any SDF object.
173 | pass
174 |
175 | type = None
176 | if sdf_object is not None:
177 | type = sdf_object.sdf_type
178 | if name in self.problem.objects:
179 | type = self.problem.objects[name].type
180 |
181 | return C.PDDLObject(name, type, sdf_object=sdf_object)
182 |
183 | @inline_args
184 | def typedconstant(self, name, typename):
185 | # name is of type `PDDLObject`.
186 | return C.PDDLObject(
187 | name.name,
188 | self.domain.types[typename.children[0][1]],
189 | sdf_object=name.sdf_object,
190 | )
191 |
192 | @inline_args
193 | def int(self, v):
194 | return C.PDDLLiteral(int(v.value))
195 |
196 | @inline_args
197 | def float(self, v):
198 | return C.PDDLLiteral(float(v.value))
199 |
200 | @inline_args
201 | def string(self, v):
202 | return C.PDDLLiteral(v.value[1:-1])
203 |
204 | @inline_args
205 | def list(self, *args):
206 | return C.PDDLVectorValue(np.array([arg.value for arg in args]))
207 |
208 | @inline_args
209 | def named_literal(self, name, value):
210 | return C.PDDLNamedValue(name.value, value)
211 |
212 | @inline_args
213 | def type_name(self, name):
214 | # propagate the "lineno" of the type definition up.
215 | return name.line, name.value
216 |
217 | @inline_args
218 | def function_name(self, name):
219 | return name.value
220 |
221 | @inline_args
222 | def method_name(self, feature_name, _, method_name):
223 | return feature_name.value + "::" + method_name.value
224 |
225 | @inline_args
226 | def function_call(self, name, *args):
227 | if name == "and":
228 | return C.PDDLFunctionCall(C.PDDL_AND, args)
229 | elif name == "or":
230 | return C.PDDLFunctionCall(C.PDDL_OR, args)
231 | elif name == "not":
232 | return C.PDDLFunctionCall(C.PDDL_NOT, args)
233 | elif name == "exists":
234 | return C.PDDLFunctionCall(C.PDDL_EXISTS, args)
235 | elif name == "forall":
236 | return C.PDDLFunctionCall(C.PDDL_FORALL, args)
237 | else:
238 | assert name in self.domain.predicates, 'Unknown predicate "{}"'.format(name)
239 | return C.PDDLProposition(self.domain.predicates[name], args)
240 |
241 |
242 | default_pddl_parser = LISPDDLParser()
243 |
244 |
245 | def load_pddl(
246 | domain_file: str, problem_file: str, lisdf: Optional[C.LISDF] = None
247 | ) -> Tuple[C.PDDLDomain, C.PDDLProblem]:
248 | if lisdf is None:
249 | lisdf = C.LISDF()
250 |
251 | return default_pddl_parser.transform(
252 | lisdf,
253 | default_pddl_parser.load(domain_file),
254 | default_pddl_parser.load(problem_file),
255 | )
256 |
257 |
258 | def load_pddl_string(
259 | domain_string: str, problem_string: str, lisdf: Optional[C.LISDF] = None
260 | ) -> Tuple[C.PDDLDomain, C.PDDLProblem]:
261 | if lisdf is None:
262 | lisdf = C.LISDF()
263 |
264 | return default_pddl_parser.transform(
265 | lisdf,
266 | default_pddl_parser.loads(domain_string),
267 | default_pddl_parser.loads(problem_string),
268 | )
269 |
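
A minimal end-to-end sketch of the parser: a domain/problem pair that stays inside the grammar in lispddl-v1.0.grammar. Note the goal must be a conjunction, since goal_definition asserts the top-level operator is "and". (Grammar acceptance of this exact snippet is untested.)

domain_s = """
(define (domain blocks)
  (:types block)
  (:predicates
    (on ?x - block ?y - block)
    (clear ?x - block)
  )
)
"""
problem_s = """
(define (problem blocks-1)
  (:domain blocks)
  (:objects a - block b - block)
  (:init (clear a) (on a b))
  (:goal (and (on b a)))
)
"""
domain, problem = load_pddl_string(domain_s, problem_s)
print(domain.name, list(problem.objects))  # blocks ['a', 'b']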
--------------------------------------------------------------------------------
/lisdf/parsing/srdf.py:
--------------------------------------------------------------------------------
1 | import lisdf.components as C
2 |
3 | from .xml_j.visitor import XMLVisitorInterface, check_done_decorator
4 | from .xml_j.xml import XMLNode
5 |
6 |
7 | class SRDFParserMixin(XMLVisitorInterface):
8 | """
9 | A configurable parameter for controlling the syntax of the SRDF file.
10 |
11 | When USE_ATTRIBUTES is true, arguments are specified as attributes of the
12 | XML node. For example,
13 | <disable_collisions link1="link_a" link2="link_b" reason="Adjacent" />
14 |
15 | When USE_ATTRIBUTES is false, arguments are specified as child nodes of the
16 | XML node. For example,
17 | <disable_collisions>
18 |   <link1 name="link_a" />
19 |   <link2 name="link_b" />
20 | </disable_collisions>
21 | """
22 |
23 | USE_ATTRIBUTES = True
24 |
25 | def group_init(self, node: XMLNode):
26 | if self.get_scope() != "group": # not already in a group.
27 | self.enter_scope("group")
28 | node.attributes["is_group_def"] = True # store a temporary attribute.
29 |
30 | @check_done_decorator
31 | def group(self, node: XMLNode):
32 | is_group_def = node.attributes.pop("is_group_def", False)
33 |
34 | if is_group_def:
35 | self.exit_scope("group")
36 | group = C.Group(node.attributes.pop("name"))
37 | for c in node.pop_all_children():
38 | if c.tag == "group":
39 | group.sub_groups.append(c.data)
40 | elif c.tag == "link":
41 | group.links.append(c.data)
42 | elif c.tag == "joint":
43 | group.joints.append(c.data)
44 | elif c.tag == "chain":
45 | group.joints.append(c.data)
46 | else:
47 | raise ValueError(f"Unknown tag {c.tag}.")
48 | return node.set_data(group)
49 |
50 | return node.set_data(C.GroupIdentifier(node.attributes.pop("name")))
51 |
52 | @check_done_decorator
53 | def group_link(self, node: XMLNode):
54 | return node.set_data(C.LinkIdentifier(node.attributes.pop("name")))
55 |
56 | @check_done_decorator
57 | def group_joint(self, node: XMLNode):
58 | return node.set_data(C.JointIdentifier(node.attributes.pop("name")))
59 |
60 | @check_done_decorator
61 | def group_chain(self, node: XMLNode):
62 | return node.set_data(
63 | C.ChainIdentifier(
64 | node.attributes.pop("base_link"), node.attributes.pop("tip_link")
65 | )
66 | )
67 |
68 | @check_done_decorator
69 | def disable_collisions(self, node: XMLNode):
70 | if not self.USE_ATTRIBUTES:
71 | return node.set_data(
72 | C.DisableCollisions(
73 | node.pop("link1", return_type="node", required=True).attributes.pop(
74 | "name"
75 | ),
76 | node.pop("link2", return_type="node", required=True).attributes.pop(
77 | "name"
78 | ),
79 | node.pop("reason", return_type="text", default=None),
80 | )
81 | )
82 | else:
83 | return node.set_data(
84 | C.DisableCollisions(
85 | node.attributes.pop("link1"),
86 | node.attributes.pop("link2"),
87 | node.attributes.pop("reason", None),
88 | )
89 | )
90 |
--------------------------------------------------------------------------------
/lisdf/parsing/string_utils.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, Tuple
2 |
3 | import numpy as np
4 |
5 | from lisdf.utils.transformations import quaternion_from_euler as _quaternion_from_euler
6 | from lisdf.utils.typing import Vector2f, Vector3f, Vector4f, Vector6f
7 |
8 |
9 | def safe_float(string: Optional[str]) -> Optional[float]:
10 | if string is None:
11 | return None
12 | return float(string)
13 |
14 |
15 | def vector2f(string: str) -> Vector2f:
16 | rv = np.fromstring(string, sep=" ", dtype="float32")
17 | assert rv.shape == (2,)
18 | return rv
19 |
20 |
21 | def vector3f(string: str) -> Vector3f:
22 | rv = np.fromstring(string, sep=" ", dtype="float32")
23 | if rv.shape == (1,):
24 | return np.repeat(rv, 3)
25 | assert rv.shape == (3,)
26 | return rv
27 |
28 |
29 | def vector3f_or_float(string: str) -> Tuple[Optional[float], Vector3f]:
30 | if string.count(" ") == 0:
31 | return float(string), vector3f(string)
32 | return None, vector3f(string)
33 |
34 |
35 | def wxyz_from_euler(euler: str) -> Vector4f:
36 | eulerf = vector3f(euler)
37 | quat = _quaternion_from_euler(*eulerf) # type: ignore
38 | return np.array([quat[3], quat[0], quat[1], quat[2]], dtype="float32")
39 |
40 |
41 | def vector4f(string: str) -> Vector4f:
42 | rv = np.fromstring(string, sep=" ", dtype="float32")
43 | assert rv.shape == (4,)
44 | return rv
45 |
46 |
47 | def vector6f(string: str) -> Vector6f:
48 | rv = np.fromstring(string, sep=" ", dtype="float32")
49 | assert rv.shape == (6,)
50 | return rv
51 |
52 |
53 | def bool_string(string: str) -> bool:
54 | string = string.lower()
55 | assert string in ("true", "false", "0", "1")
56 | return string == "true" or string == "1"
57 |
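
Illustrative calls (expected values shown as comments):

vector3f("1 2 3")         # -> array([1., 2., 3.], dtype=float32)
vector3f("0.5")           # scalar broadcast -> array([0.5, 0.5, 0.5], dtype=float32)
vector3f_or_float("0.5")  # -> (0.5, array([0.5, 0.5, 0.5], dtype=float32))
wxyz_from_euler("0 0 0")  # identity rotation -> array([1., 0., 0., 0.], dtype=float32)
bool_string("1")          # -> True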
--------------------------------------------------------------------------------
/lisdf/parsing/xml_j/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/lisdf/parsing/xml_j/__init__.py
--------------------------------------------------------------------------------
/lisdf/parsing/xml_j/visitor.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import os.path as osp
3 | from abc import ABC, abstractmethod
4 | from collections import defaultdict
5 | from typing import Any, Callable, Dict, List, Optional
6 |
7 | from lisdf.parsing.xml_j.xml import XMLNode, load_file, load_string
8 | from lisdf.utils.printing import indent_text
9 |
10 |
11 | class XMLVisitorInterface(ABC):
12 | @abstractmethod
13 | def get_scope(self) -> Optional[str]:
14 | raise NotImplementedError()
15 |
16 | @abstractmethod
17 | def enter_scope(self, scope: str) -> None:
18 | raise NotImplementedError()
19 |
20 | @abstractmethod
21 | def exit_scope(self, scope: str) -> None:
22 | raise NotImplementedError()
23 |
24 |
25 | class XMLVisitor(XMLVisitorInterface):
26 | """
27 | A Top-Down Visitor for the XML tree.
28 |
29 | The basic idea is to have a list of callbacks for each XML node type.
30 | There are a few stacks that keep track of path from the root to the
31 | current node.
32 |
33 |
34 | - The filename stack keeps track of the current file being processed.
35 | For example, when you handle an <include> tag, you may call the
36 | load_file method to load the included file. In this case, the filename
37 | will be pushed into the stack.
38 | - The tag stack keeps track of the current tag being processed.
39 | - The scope stack is a special stack maintained by the programmer.
40 | It is used to decide which function to be called given the tag.
41 | Specifically, when the scope stack is empty, upon seeing a new node with
42 | tag <model>, the visitor will call the function named model(node).
43 | If the scope stack is not empty, say, the top element is "state", then upon
44 | seeing a new node with tag <model>, the visitor will call the function
45 | named state_model(node).
46 | The programmer can use enter_scope and exit_scope to push and pop.
47 |
48 | See the docstring for the visit method for detailed explanation.
49 | """
50 |
51 | def __init__(self) -> None:
52 | self.filename_stack: List[str] = list()
53 | self.node_stack: List[XMLNode] = list()
54 | self.scope_stack: List[str] = list()
55 |
56 | # The SDF parser doesn't rely on additional stacks.
57 | # These are primarily for the MJCF parser.
58 |
59 | # optional stacks.
60 | self._st: Dict[str, List[Any]] = defaultdict(list)
61 | # optional data.
62 | self._data: Dict[str, Dict[str, Any]] = defaultdict(dict)
63 | self._indent: int = 0
64 | self._verbose = False
65 |
66 | def set_verbose(self, flag: bool = True) -> None:
67 | self._verbose = flag
68 |
69 | def _get_processor(self, tag: str) -> Optional[Callable[[XMLNode], Any]]:
70 | if len(self.scope_stack) == 0:
71 | tag = tag.replace("-", "_")
72 | return getattr(self, tag, None)
73 | funcname = self.scope_stack[-1] + "_" + tag.replace("-", "_")
74 | if hasattr(self, funcname):
75 | return getattr(self, funcname)
76 | funcname = tag.replace("-", "_")
77 | return getattr(self, funcname, None)
78 |
79 | def get_scope(self) -> Optional[str]:
80 | return self.scope_stack[-1] if len(self.scope_stack) > 0 else None
81 |
82 | def enter_scope(self, scope: str) -> None:
83 | self.scope_stack.append(scope)
84 |
85 | def exit_scope(self, scope: str) -> None:
86 | el = self.scope_stack.pop()
87 | if el != scope:
88 | raise NameError(
89 | 'Exiting scope "{}" but current scope is "{}".'.format(scope, el)
90 | )
91 |
92 | def load_file(self, filename: str) -> Any:
93 | node = load_file(filename)
94 | return self.visit(filename, node)
95 |
96 | def load_string(self, string: str) -> Any:
97 | node = load_string(string)
98 | return self.visit("string_file", node)
99 |
100 | def _resolve_path(self, path: str) -> str:
101 | return osp.normpath(osp.join(osp.dirname(self.filename_stack[-1]), path))
102 |
103 | def visit(self, filename: str, root: XMLNode) -> Any:
104 | """
105 | The visit function will traverse the XML tree and call the functions
106 | based on the tag. Specifically, when entering a new node with tag <model>,
107 | the visitor will call the function named model_init(node).
108 | Then, the visitor will recursively call the visit function for each
109 | child, and finally call the function named model(node).
110 |
111 | The strategy that I prefer is to use the check_done helper function.
112 | Specifically, when processing a node, one should always use
113 | node.pop() for selecting a child node, and node.attributes.pop() for
114 | selecting an attribute. After done processing the node, one should
115 | call check_done(node). If the node is not empty, the helper function
116 | will raise an Error.
117 |
118 | Args:
119 | filename: the filename of the corresponding XML file.
120 | root: the root node of the XML tree.
121 |
122 | Returns:
123 | The return value of user's function applied to the root node.
124 | """
125 | self.filename_stack.append(filename)
126 | if self._verbose:
127 | print(indent_text(filename, self._indent))
128 |
129 | def _inner(node: XMLNode):
130 | if self._verbose:
131 | print(indent_text(node.open_tag(), self._indent))
132 | if node.text:
133 | print(indent_text(node.text, self._indent + 1))
134 | self._indent += 1
135 | try:
136 | proc = self._get_processor(node.tag + "_init")
137 | if proc is not None:
138 | rv = proc(node)
139 | if rv == "skip":
140 | return None
141 |
142 | self.node_stack.append(node)
143 | for i, c in enumerate(node.children):
144 | node.children[i] = _inner(c)
145 | node.children = [c for c in node.children if c is not None]
146 | self.node_stack.pop()
147 | proc = self._get_processor(node.tag)
148 |
149 | if proc is not None:
150 | if self._verbose:
151 | print(indent_text(node.close_tag(), self._indent - 1))
152 | return proc(node)
153 | if self._verbose:
154 | print(indent_text(node.close_tag(), self._indent - 1))
155 | return node
156 | finally:
157 | self._indent -= 1
158 |
159 | # NB(Jiayuan Mao @ 03/25): Essentially, `defer self.filename_stack.pop()`.
160 | try:
161 | root = root.clone()
162 | return _inner(root)
163 | finally:
164 | self.filename_stack.pop()
165 |
166 |
167 | def check_done(node: XMLNode, attr: bool = True, children: bool = True) -> None:
168 | """A helper function to check whether all attributes and children of a
169 | node have been processed."""
170 | if attr:
171 | if len(node.attributes) != 0:
172 | print("Unprocessed attributes.")
173 | print(node)
174 | print("-" * 120)
175 | raise ValueError()
176 | if children:
177 | if len(node.children) != 0:
178 | print("Unprocessed children.")
179 | print(node)
180 | print("-" * 120)
181 | raise ValueError()
182 | return None
183 |
184 |
185 | def check_done_decorator(func, attr=True, children=True):
186 | @functools.wraps(func)
187 | def wrapped(*args, **kwargs):
188 | rv = func(*args, **kwargs)
189 | if rv is not None:
190 | check_done(rv, attr, children)
191 | return rv
192 |
193 | return wrapped
194 |
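
To make the scope-based dispatch concrete, here is a minimal self-contained sketch (the tags and handler names are invented for illustration):

class DemoVisitor(XMLVisitor):
    def state_init(self, node):
        self.enter_scope("state")

    def state(self, node):
        self.exit_scope("state")
        return node

    def model(self, node):        # <model> seen with an empty scope stack
        print("model:", node.attributes["name"])
        return node

    def state_model(self, node):  # <model> seen while inside a <state> scope
        print("state/model:", node.attributes["name"])
        return node


DemoVisitor().load_string(
    '<sdf><model name="a"/><state><model name="b"/></state></sdf>'
)
# prints: model: a
#         state/model: b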
--------------------------------------------------------------------------------
/lisdf/parsing/xml_j/xml.py:
--------------------------------------------------------------------------------
1 | import re
2 | import xml.etree.ElementTree as et
3 | from typing import Any, List, Optional
4 |
5 | from lisdf.utils.printing import indent_text
6 |
7 |
8 | class _G(dict):
9 | def __getattr__(self, attr):
10 | return self[attr]
11 |
12 |
13 | class XMLNode(object):
14 | tag: str
15 | attributes: _G
16 | children: List["XMLNode"]
17 | parent: Optional["XMLNode"] = None
18 | text: Optional[str]
19 | data: Any
20 |
21 | def __init__(self, tag: str):
22 | self.tag = tag
23 | self.attributes = _G()
24 | self.children = list()
25 | self.parent = None
26 | self.text = ""
27 | self.data = None
28 |
29 | def add(self, node: "XMLNode"):
30 | self.children.append(node)
31 | node.set_parent(self)
32 |
33 | def set_text(self, text: str):
34 | self.text = text.strip()
35 |
36 | def set_parent(self, parent: "XMLNode"):
37 | self.parent = parent
38 |
39 | def set_data(self, data: Any):
40 | self.data = data
41 | return self
42 |
43 | def pop(self, tag: str, required=False, return_type="text", default=None) -> Any:
44 | """Pop a child node from the children list based on the tag name.
45 | This function also checks the uniqueness of such child node."""
46 | assert return_type in ("node", "text", "data")
47 |
48 | rv = list()
49 | for i, c in enumerate(self.children):
50 | if c.tag == tag:
51 | rv.append(i)
52 | assert len(rv) in (0, 1)
53 | if len(rv) == 0:
54 | assert not required
55 | return default
56 | else:
57 | obj = self.children[rv[0]]
58 | self.children = self.children[: rv[0]] + self.children[rv[0] + 1 :]
59 | if return_type == "node":
60 | return obj
61 | elif return_type == "text":
62 | return obj.text
63 | elif return_type == "data":
64 | return obj.data
65 | else:
66 | raise ValueError("Unknown return type: {}.".format(return_type))
67 |
68 | def pop_all_children(self) -> List["XMLNode"]:
69 | try:
70 | return self.children
71 | finally:
72 | self.children = list()
73 |
74 | def clone(self) -> "XMLNode":
75 | node = XMLNode(self.tag)
76 | node.attributes.update(self.attributes.copy())
77 | node.children = [c.clone() for c in self.children]
78 | for c in node.children:
79 | c.parent = node
80 | node.text = self.text
81 | return node
82 |
83 | def open_tag(self) -> str:
84 | if len(self.attributes):
85 | attributes = " " + (
86 | " ".join([f'{key}="{value}"' for key, value in self.attributes.items()])
87 | )
88 | else:
89 | attributes = ""
90 | return f"<{self.tag}{attributes}>"
91 |
92 | def close_tag(self) -> str:
93 | return f"{self.tag}>"
94 |
95 | def __str__(self):
96 | fmt = ""
97 | if len(self.attributes):
98 | attributes = " " + (
99 | " ".join([f'{key}="{value}"' for key, value in self.attributes.items()])
100 | )
101 | else:
102 | attributes = ""
103 | fmt += f"<{self.tag}{attributes}"
104 | if self.text or len(self.children):
105 | fmt += ">\n"
106 | if self.text:
107 | fmt += indent_text(self.text) + "\n"
108 | for c in self.children:
109 | fmt += indent_text(str(c)).rstrip() + "\n"
110 | fmt += f"{self.tag}>\n"
111 | else:
112 | fmt += " />\n"
113 | return fmt
114 |
115 | __repr__ = __str__
116 |
117 |
118 | def _xml2obj(element) -> XMLNode:
119 | if element.tag is None and len(element.attrib) == 0 and len(element) == 0:
120 | return element.text
121 |
122 | node = XMLNode(element.tag)
123 | node.attributes.update(element.attrib)
124 | if element.text is not None:
125 | node.set_text(element.text)
126 | for c in element:
127 | c = _xml2obj(c)
128 | if isinstance(c, str):
129 | node.set_text(c)
130 | else:
131 | node.add(c)
132 | return node
133 |
134 |
135 | def load_string(value: str) -> XMLNode:
136 | # TODO: temporary fix for some drake files.
137 | value = re.sub(' xmlns="[^"]+"', "", value, count=1)
138 | return _xml2obj(et.fromstring(value))
139 |
140 |
141 | def load_file(filename: str) -> XMLNode:
142 | with open(filename) as f:
143 | return load_string(f.read())
144 |
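
A sketch of the pop-until-empty idiom the visitors rely on:

node = load_string("<geometry><radius>0.5</radius><length>1.0</length></geometry>")
radius = node.pop("radius", required=True, return_type="text")  # -> "0.5"
length = node.pop("length", required=True, return_type="text")  # -> "1.0"
assert len(node.children) == 0  # check_done(node) would now pass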
--------------------------------------------------------------------------------
/lisdf/parsing/xml_reflection/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import * # noqa: F401,F403
2 |
--------------------------------------------------------------------------------
/lisdf/parsing/xml_reflection/basics.py:
--------------------------------------------------------------------------------
1 | import collections
2 | from xml.dom import minidom
3 | from xml.etree import ElementTree as ET
4 |
5 | import yaml
6 |
7 |
8 | def xml_string(rootXml, addHeader=True):
9 | # From: https://stackoverflow.com/a/1206856/170413
10 | # TODO(eacousineau): This does not preserve attribute order. Fix it.
11 | dom = minidom.parseString(ET.tostring(rootXml))
12 | lines = dom.toprettyxml(indent=" ").split("\n")
13 | if lines and lines[0].startswith(" float:
26 | raise NotImplementedError
27 |
28 | @property
29 | def end_time(self) -> float:
30 | return self.start_time + self.duration
31 |
32 | def finished(self, current_time: float) -> bool:
33 | """Whether this executor has finished executing"""
34 | return current_time >= self.end_time
35 |
36 | @abstractmethod
37 | def execute(self, current_time: float) -> None:
38 | """
39 | Execute the command at the given time to update the robot configurations
40 | """
41 | raise NotImplementedError
42 |
--------------------------------------------------------------------------------
/lisdf/plan_executor/gripper_executor.py:
--------------------------------------------------------------------------------
1 | from typing import ClassVar
2 |
3 | from lisdf.plan_executor.executor import CommandExecutor
4 | from lisdf.plan_executor.robots.common import RobotWithGripper
5 | from lisdf.planner_output.command import ActuateGripper, GripperPosition
6 |
7 |
8 | class ActuateGripperExecutor(CommandExecutor):
9 | robot: RobotWithGripper
10 | command: ActuateGripper
11 |
12 | DEFAULT_DURATION: ClassVar[float] = 1.0
13 |
14 | @property
15 | def duration(self) -> float:
16 | # Gripper command takes 1 second to execute in our simulated world for now
17 | return self.DEFAULT_DURATION
18 |
19 | def execute(self, current_time: float) -> None:
20 | # TODO: check gripper name matches link name?
21 | gripper_positions = set(self.command.configurations.values())
22 | if len(gripper_positions) > 1:
23 | raise ValueError("We only support one gripper at the moment")
24 |
25 | gripper_position: GripperPosition = next(iter(gripper_positions))
26 | gripper_configuration = self.robot.gripper_configuration_for_position(
27 | gripper_position
28 | )
29 |
30 | # Update robot configuration
31 | self.robot.set_gripper_configuration(gripper_configuration)
32 |
--------------------------------------------------------------------------------
/lisdf/plan_executor/interpolator.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 | import numpy as np
4 |
5 |
6 | class PathInterpolator(ABC):
7 | """Used so we can abstract simulator's interpolator out of the executor."""
8 |
9 | def __init__(self, t_all: np.ndarray, confs: np.ndarray):
10 | """
11 | Let n be the number of configurations and q be the number of joints
12 | in the configuration.
13 |
14 | Parameters
15 | ----------
16 | t_all: 1-d array of shape (n, ) with the times for each configuration point.
17 | This array must be sorted.
18 | confs: 2-d array of shape (n, q) where each row represents a robot conf.
19 | Subsequently, there is a conf for each time in t_all.
20 | """
21 | if t_all.shape[0] != confs.shape[0]:
22 | raise ValueError("t_all and confs must be the same length.")
23 | elif not np.allclose(np.sort(t_all), t_all):
24 | raise ValueError("t_all must be sorted")
25 |
26 | self.t_all = t_all
27 | self.confs = confs
28 |
29 | @abstractmethod
30 | def value(self, t: float) -> np.ndarray:
31 | """
32 | Get the interpolated joint values for the given time t.
33 | Returns a 1-d array shape (q, ) where q is the number of joints.
34 | """
35 | raise NotImplementedError
36 |
37 |
38 | class NearestTimeInterpolator(PathInterpolator):
39 | def value(self, t: float) -> np.ndarray:
40 | # Find the index of the value with closest time to t.
41 | # Could improve to O(log n) but less readable.
42 | shifted_t_all = np.abs(self.t_all - t)
43 | closest_t_idx = np.argmin(shifted_t_all)
44 | return self.confs[closest_t_idx]
45 |
46 |
47 | class LinearInterpolator(PathInterpolator):
48 | """
49 | Simple implementation of linear interpolation.
50 | https://en.wikipedia.org/wiki/Linear_interpolation
51 |
52 | If the time provided is:
53 | - before the first time, we return the first configuration.
54 | - after last time, we return the last configuration.
55 | - otherwise, we use linear interpolation
56 | """
57 |
58 | def __init__(self, t_all: np.ndarray, confs: np.ndarray):
59 | super().__init__(t_all, confs)
60 | intervals = zip(t_all, t_all[1:], confs, confs[1:])
61 | self.slopes = [(y2 - y1) / (x2 - x1) for x1, x2, y1, y2 in intervals]
62 | assert len(self.slopes) == len(t_all) - 1
63 |
64 | def value(self, t: float) -> np.ndarray:
65 | if t < self.t_all[0]:
66 | # Return first configuration if time earlier
67 | return self.confs[0]
68 | elif t >= self.t_all[-1]:
69 | # Return last configuration if time later
70 | return self.confs[-1]
71 |
72 | # Find index of t_x, where t_x <= t < t_y
73 | # i.e. the configuration and slope we should use for interpolating
74 | # https://numpy.org/doc/stable/reference/generated/numpy.searchsorted.html
75 | idx = np.searchsorted(self.t_all, t, side="right") - 1
76 |
77 | # Use the standard linear interpolation formula
78 | conf = self.confs[idx] + self.slopes[idx] * (t - self.t_all[idx])
79 | return conf
80 |
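
A worked sketch of LinearInterpolator with two joints and waypoints at t = 0, 1, 3:

import numpy as np

t_all = np.array([0.0, 1.0, 3.0])
confs = np.array([[0.0, 0.0],
                  [1.0, 2.0],
                  [3.0, 2.0]])
interp = LinearInterpolator(t_all, confs)

interp.value(-1.0)  # before the first time  -> array([0., 0.])
interp.value(2.0)   # halfway through [1, 3] -> array([2., 2.])
interp.value(9.0)   # after the last time    -> array([3., 2.])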
--------------------------------------------------------------------------------
/lisdf/plan_executor/joint_space_path_executor.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from typing import ClassVar, Type
3 |
4 | import numpy as np
5 |
6 | from lisdf.plan_executor.executor import CommandExecutor
7 | from lisdf.plan_executor.interpolator import PathInterpolator
8 | from lisdf.plan_executor.robots.common import Robot
9 | from lisdf.planner_output.command import JointSpacePath
10 |
11 |
12 | class JointSpacePathExecutor(CommandExecutor):
13 | _DEFAULT_JSP_DURATION: ClassVar[float] = 5.0
14 |
15 | def __init__(
16 | self,
17 | robot: Robot,
18 | path: JointSpacePath,
19 | start_time: float,
20 | interpolator_cls: Type[PathInterpolator],
21 | ):
22 | super().__init__(robot, path, start_time)
23 | # Use the duration from the path if specified, else fall back to the default
24 | self._duration = (
25 | path.duration
26 | if path.duration
27 | else JointSpacePathExecutor._DEFAULT_JSP_DURATION
28 | )
29 |
30 | # Create interpolator
31 | confs = path.waypoints_as_np_array(joint_name_ordering=robot.joint_ordering)
32 | t_all = np.linspace(
33 | start_time, start_time + self.duration, num=path.num_waypoints
34 | )
35 | self._interpolator = interpolator_cls(t_all, confs)
36 |
37 | # For warning when provided time is outside duration
38 | self._warned_outside_time_limits = False
39 |
40 | @property
41 | def duration(self) -> float:
42 | return self._duration
43 |
44 | def execute(self, current_time: float) -> None:
45 | # Check if current time is within the bounds for duration. If it is outside,
46 | # we rely on interpolator to provide the best value.
47 | if (
48 | not (self.start_time <= current_time < self.end_time)
49 | and not self._warned_outside_time_limits
50 | ):
51 | warnings.warn(
52 | f"Current time {current_time} is not within the "
53 | "JointSpacePath time range"
54 | )
55 | self._warned_outside_time_limits = True
56 |
57 | # Use interpolator to get joint values
58 | q_joint = self._interpolator.value(current_time)
59 |
60 | # Update the joint configuration of the robot (not the gripper)
61 | self.robot.set_joint_configuration(q_joint)
62 |
--------------------------------------------------------------------------------
/lisdf/plan_executor/lisdf_executor.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from typing import Dict, Type, cast
3 |
4 | from lisdf.plan_executor.executor import CommandExecutor
5 | from lisdf.plan_executor.gripper_executor import ActuateGripperExecutor
6 | from lisdf.plan_executor.interpolator import PathInterpolator
7 | from lisdf.plan_executor.joint_space_path_executor import JointSpacePathExecutor
8 | from lisdf.plan_executor.robots.common import Robot
9 | from lisdf.planner_output.command import ActuateGripper, Command, JointSpacePath
10 | from lisdf.planner_output.plan import LISDFPlan
11 |
12 |
13 | class NoExecutorFoundError(ValueError):
14 | pass
15 |
16 |
17 | class _EmptyCommand(Command, type="_EmptyCommand"):
18 | """To keep the typing happy."""
19 |
20 | @classmethod
21 | def _from_json_dict(cls, json_dict: Dict) -> "Command":
22 | raise NotImplementedError
23 |
24 | def validate(self):
25 | raise NotImplementedError
26 |
27 |
28 | class LISDFPlanExecutor(CommandExecutor, ABC):
29 | """
30 | This class provides the functionality to execute a LISDF Plan by creating and
31 | getting the executor for given time steps and executing them.
32 |
33 | Consumers of this class need to connect it up to a simulator such as PyBullet
34 | or Drake.
35 | """
36 |
37 | def __init__(
38 | self,
39 | robot: Robot,
40 | plan: LISDFPlan,
41 | path_interpolator_cls: Type[PathInterpolator],
42 | start_time: float = 0.0,
43 | ):
44 | """
45 | Parameters
46 | ----------
47 | robot: the Robot to execute the plan on
48 | plan: the LISDF plan to execute
49 | path_interpolator_cls: the class to use for interpolating a joint space path
50 | start_time: the time to start executing the plan
51 | """
52 | super().__init__(robot=robot, command=_EmptyCommand(), start_time=start_time)
53 | self.plan = plan
54 | self._path_interpolator_cls = path_interpolator_cls
55 |
56 | # Note: we assume that the execute method is called with increasing time
57 | self._last_execute_time = self.start_time
58 |
59 | # Create all the executors with their respective start times based on
60 | # a command's duration.
61 | current_time = start_time
62 | self._executors = []
63 | for command in plan.commands:
64 | self._executors.append(self._create_executor(command, current_time))
65 | # Increase start time by duration of the executor
66 | current_time += self._executors[-1].duration
67 |
68 | self._current_executor_idx = 0
69 | self._run_sanity_checks(start_time)
70 |
71 | def _run_sanity_checks(self, start_time: float) -> None:
72 | # Sanity checks that none of the executor start and end times overlap
73 | prev_end_time = start_time
74 | for executor in self._executors:
75 | assert executor.end_time > executor.start_time
76 | assert executor.end_time == executor.start_time + executor.duration
77 | assert executor.start_time >= prev_end_time
78 | prev_end_time = executor.end_time
79 | assert prev_end_time == self.end_time == start_time + self.duration
80 |
81 | @property
82 | def duration(self) -> float:
83 | return sum(executor.duration for executor in self._executors)
84 |
85 | def _create_executor(self, command: Command, start_time: float) -> CommandExecutor:
86 | """Create command executor for the given command"""
87 | if command.type == JointSpacePath.type:
88 | return JointSpacePathExecutor(
89 | self.robot,
90 | cast(JointSpacePath, command), # cast the type as mypy can't infer it
91 | start_time,
92 | interpolator_cls=self._path_interpolator_cls,
93 | )
94 | elif command.type == ActuateGripper.type:
95 | return ActuateGripperExecutor(self.robot, command, start_time)
96 | else:
97 | # You should add support for new command types here
98 | raise ValueError(f"Unsupported command type: {command.type}")
99 |
100 | def _get_executor_at_time(self, time: float) -> CommandExecutor:
101 | """
102 | Get the executor that should be executed at the given time.
103 | We assume this method is called with time increasing.
104 | """
105 | if self._current_executor_idx >= len(self._executors):
106 | raise NoExecutorFoundError(f"Time {time} is after the end of the plan")
107 | elif time == self.end_time:
108 | # Edge case where we are at the end of the plan exactly (but not beyond it)
109 | return self._executors[self._current_executor_idx]
110 |
111 | current_executor = self._executors[self._current_executor_idx]
112 | if time >= current_executor.end_time:
113 | self._current_executor_idx += 1
114 | # This won't cause infinite recursion because `time` stays fixed
115 | return self._get_executor_at_time(time)
116 |
117 | return current_executor
118 |
119 | def execute(self, current_time: float) -> None:
120 | """
121 | Execute the plan at the given time. This will grab the relevant executor and
122 | execute it to update the robot state.
123 | """
124 | if current_time < self._last_execute_time:
125 | raise RuntimeError(
126 | "Execute time must be increasing - i.e., time only progresses"
127 | )
128 |
129 | executor = self._get_executor_at_time(current_time)
130 | executor.execute(current_time)
131 | self._last_execute_time = current_time
132 |
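A minimal sketch of driving this plan executor from a simulation loop. It assumes the executor class defined in this file is named `LISDFPlanExecutor` (the class name is not visible in this excerpt, so treat it as an assumption); everything else comes from the APIs shown in this repository.

```python
import numpy as np

from lisdf.plan_executor.interpolator import LinearInterpolator
from lisdf.plan_executor.lisdf_executor import LISDFPlanExecutor  # assumed name
from lisdf.plan_executor.robots.panda import Panda
from lisdf.planner_output.command import JointSpacePath
from lisdf.planner_output.plan import LISDFPlan

# A one-command plan that moves every arm joint from 0.0 to 0.1 rad.
path = JointSpacePath(
    waypoints={f"panda_joint{i}": [0.0, 0.1] for i in range(1, 8)},
    duration=2.0,
    label="small_move",
)
plan = LISDFPlan(lisdf_problem=".", commands=[path])

panda = Panda(configuration=Panda.INITIAL_CONFIGURATION)
executor = LISDFPlanExecutor(
    robot=panda,
    plan=plan,
    path_interpolator_cls=LinearInterpolator,
    start_time=0.0,
)

# Times must be non-decreasing; each call updates the robot's configuration
# via whichever command executor is active at that time.
for t in np.arange(0.0, executor.duration, 0.1):
    executor.execute(t)
print(panda.joint_configuration)  # close to 0.1 for every arm joint
```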
--------------------------------------------------------------------------------
/lisdf/plan_executor/robots/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/lisdf/plan_executor/robots/__init__.py
--------------------------------------------------------------------------------
/lisdf/plan_executor/robots/common.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import List
3 |
4 | import numpy as np
5 |
6 | from lisdf.planner_output.command import GripperPosition
7 |
8 |
9 | class Robot(ABC):
10 | """
11 | A Robot model for the plan execution.
12 | """
13 |
14 | def __init__(self, configuration: np.ndarray):
15 | self.configuration = configuration
16 |
17 | @property
18 | @abstractmethod
19 | def joint_configuration(self) -> np.ndarray:
20 | """
21 | Joint configuration, which may be a subset of the full robot configuration.
22 | """
23 | raise NotImplementedError
24 |
25 | @abstractmethod
26 | def set_joint_configuration(self, joint_configuration: np.ndarray) -> None:
27 | """
28 | Set the joint configuration, which may be a subset of the full robot
29 | configuration.
30 | """
31 | raise NotImplementedError
32 |
33 | @property
34 | @abstractmethod
35 | def dimensionality(self) -> int:
36 | raise NotImplementedError
37 |
38 | @property
39 | @abstractmethod
40 | def joint_ordering(self) -> List[str]:
41 | raise NotImplementedError
42 |
43 | @property
44 | def joint_names(self) -> List[str]:
45 | return self.joint_ordering
46 |
47 |
48 | class RobotWithGripper(Robot, ABC):
49 | @abstractmethod
50 | def gripper_configuration_for_position(
51 | self, position: GripperPosition
52 | ) -> np.ndarray:
53 | raise NotImplementedError
54 |
55 | @property
56 | @abstractmethod
57 | def gripper_configuration(self) -> np.ndarray:
58 | """
59 | Gripper configuration, which may be a subset of the full robot configuration.
60 | """
61 | raise NotImplementedError
62 |
63 | @abstractmethod
64 | def set_gripper_configuration(self, gripper_configuration: np.ndarray) -> None:
65 | """
66 | Set the Gripper configuration, which may be a subset of the full robot
67 | configuration.
68 | """
69 | raise NotImplementedError
70 |
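`Robot` only fixes the configuration-handling contract; concrete robots decide how the full configuration splits into joint (and, for `RobotWithGripper`, gripper) values. A minimal sketch of a toy 1-DOF implementation, purely to illustrate the interface (`PointBot` is not part of the library):

```python
from typing import List

import numpy as np

from lisdf.plan_executor.robots.common import Robot


class PointBot(Robot):  # hypothetical toy robot, for illustration only
    """A 1-DOF robot whose full configuration is just its single joint."""

    @property
    def joint_configuration(self) -> np.ndarray:
        return self.configuration

    def set_joint_configuration(self, joint_configuration: np.ndarray) -> None:
        self.configuration = joint_configuration

    @property
    def dimensionality(self) -> int:
        return 1

    @property
    def joint_ordering(self) -> List[str]:
        return ["joint_0"]


bot = PointBot(configuration=np.zeros(1))
bot.set_joint_configuration(np.array([0.5]))
print(bot.joint_names)  # ["joint_0"], via the inherited joint_names property
```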
--------------------------------------------------------------------------------
/lisdf/plan_executor/robots/panda.py:
--------------------------------------------------------------------------------
1 | from typing import ClassVar, List
2 |
3 | import numpy as np
4 |
5 | from lisdf.plan_executor.robots.common import RobotWithGripper
6 | from lisdf.planner_output.command import GripperPosition
7 |
8 |
9 | class Panda(RobotWithGripper):
10 | """Franka Emika Panda robot."""
11 |
12 | INITIAL_CONFIGURATION: ClassVar[np.ndarray] = np.array(
13 | [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04, 0.04]
14 | )
15 |
16 | @property
17 | def joint_configuration(self) -> np.ndarray:
18 | return self.configuration[:7]
19 |
20 | def set_joint_configuration(self, joint_configuration: np.ndarray) -> None:
21 | self.configuration = np.concatenate(
22 | [joint_configuration, self.gripper_configuration]
23 | )
24 |
25 | @property
26 | def gripper_configuration(self) -> np.ndarray:
27 | return self.configuration[7:]
28 |
29 | def set_gripper_configuration(self, gripper_configuration: np.ndarray) -> None:
30 | self.configuration = np.concatenate(
31 | [self.joint_configuration, gripper_configuration]
32 | )
33 |
34 | def gripper_configuration_for_position(
35 | self, position: GripperPosition
36 | ) -> np.ndarray:
37 | if position == GripperPosition.open:
38 | return np.array([0.04, 0.04])
39 | elif position == GripperPosition.close:
40 | return np.array([0.0, 0.0])
41 | else:
42 | raise ValueError(f"Unknown gripper position: {position}")
43 |
44 | @property
45 | def dimensionality(self) -> int:
46 | # 7 arm joints + 2 gripper joints
47 | return 9
48 |
49 | @property
50 | def joint_ordering(self) -> List[str]:
51 | return [
52 | "panda_joint1",
53 | "panda_joint2",
54 | "panda_joint3",
55 | "panda_joint4",
56 | "panda_joint5",
57 | "panda_joint6",
58 | "panda_joint7",
59 | ]
60 |
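A short usage sketch of the `Panda` model above; every call is defined in this file or in `RobotWithGripper`:

```python
import numpy as np

from lisdf.plan_executor.robots.panda import Panda
from lisdf.planner_output.command import GripperPosition

panda = Panda(configuration=Panda.INITIAL_CONFIGURATION)

# Indices 0-6 are the arm joints; indices 7-8 are the two gripper fingers.
panda.set_joint_configuration(np.array([0.0, -0.5, 0.0, -1.5, 0.0, 1.0, 0.7]))
print(panda.joint_configuration)    # the 7 arm joint values just set
print(panda.gripper_configuration)  # [0.04 0.04] - the gripper is still open

# Map a symbolic position to finger values, then apply it to the robot.
closed = panda.gripper_configuration_for_position(GripperPosition.close)
panda.set_gripper_configuration(closed)  # configuration[7:] becomes [0. 0.]
```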
--------------------------------------------------------------------------------
/lisdf/planner_output/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/lisdf/planner_output/__init__.py
--------------------------------------------------------------------------------
/lisdf/planner_output/command.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from abc import ABC, abstractmethod
4 | from dataclasses import dataclass
5 | from enum import Enum
6 | from typing import ClassVar, Dict, List, Optional, Type
7 |
8 | import numpy as np
9 |
10 | from lisdf.planner_output.common import OutputElement
11 |
12 |
13 | class Command(OutputElement, ABC):
14 | """
15 | Hack based on Jiayuan's code for enforcing subclasses to define a type which is
16 | then used for serialization.
17 |
18 |     When inheriting from this class, child classes should pass type="XXX"
19 | as a keyword argument. This will register a new Command type
20 | in this class which we use when deserializing from a JSON dict.
21 | """
22 |
23 | # TODO: figure out a clean way to move the 'label' field here
24 |
25 | # The type of the command
26 | type: ClassVar[str]
27 |
28 | type_mapping: ClassVar[Dict[str, Type["Command"]]] = dict()
29 |
30 | def __init_subclass__(cls, type: str, **kwargs):
31 | super().__init_subclass__(**kwargs)
32 | setattr(cls, "type", type)
33 | Command.type_mapping[type] = cls
34 |
35 | @classmethod
36 | @abstractmethod
37 | def _from_json_dict(cls, json_dict: Dict) -> "Command":
38 | # Children of this class should implement this method
39 | raise NotImplementedError
40 |
41 | @classmethod
42 | def from_json_dict(cls, json_dict: Dict) -> "Command":
43 | """
44 | Children of this class SHOULD NOT override this method and should
45 | implement _from_json_dict instead. This is so we can use the right
46 | implementation when loading in the main LISDFPlan class.
47 | """
48 | # Check type is in the mapping
49 | if json_dict["type"] not in Command.type_mapping:
50 | raise ValueError(f"Command type {json_dict['type']} not supported")
51 |
52 | # Delete type as that is a classvar
53 | type_cls = Command.type_mapping[json_dict["type"]]
54 | del json_dict["type"]
55 |
56 | return type_cls._from_json_dict(json_dict)
57 |
58 | def to_dict(self) -> Dict:
59 | # Add the type to the output dict
60 | output = {"type": self.type}
61 | output = {**output, **super().to_dict()}
62 | return output
63 |
64 |
65 | JointName = str
66 |
67 |
68 | @dataclass(frozen=True)
69 | class JointSpacePath(Command, type="JointSpacePath"):
70 | # Mapping of joint name to a list of joint positions.
71 | # The length of the list of joint positions must be the same for each joint,
72 | # as each element indicates a single waypoint.
73 | waypoints: Dict[JointName, List[float]]
74 |
75 | # Duration of the path in seconds, if specified must be > 0
76 | duration: Optional[float] = None
77 |
78 | # Label to denote this path by
79 | label: Optional[str] = None
80 |
81 | @property
82 | def joint_names(self) -> List[JointName]:
83 | return list(self.waypoints.keys())
84 |
85 | @property
86 | def dimensionality(self) -> int:
87 | """Dimensionality is number of joints"""
88 | return len(self.waypoints)
89 |
90 | @property
91 | def num_waypoints(self) -> int:
92 | """
93 |         Number of waypoints, i.e., the length of each joint's list of positions
94 | """
95 | return len(next(iter(self.waypoints.values())))
96 |
97 | def waypoints_for_joint(self, joint_name: str) -> List[float]:
98 | """
99 | Get all the waypoints for a given joint
100 | """
101 | if joint_name not in self.waypoints:
102 | raise ValueError(
103 | f"Joint {joint_name} not found in waypoints for {self.type}"
104 | )
105 | return self.waypoints[joint_name]
106 |
107 | def waypoint(self, waypoint_index: int) -> Dict[str, float]:
108 | """
109 | Get the joint positions at a given waypoint index
110 | """
111 | if not -self.num_waypoints <= waypoint_index < self.num_waypoints:
112 | raise ValueError(
113 | f"Waypoint index {waypoint_index} out of range in {self.type}"
114 | )
115 |
116 | return {
117 | joint_name: joint_positions[waypoint_index]
118 | for joint_name, joint_positions in self.waypoints.items()
119 | }
120 |
121 | def _check_joint_name_ordering(self, joint_name_ordering: List[str]) -> None:
122 | """
123 | Check that the joint name ordering is valid
124 | """
125 | if set(joint_name_ordering) != set(self.waypoints.keys()):
126 | raise ValueError(
127 |                 f"Joint names for ordering {joint_name_ordering} do not match "
128 | f"waypoint joint names {list(self.waypoints.keys())}"
129 | )
130 |
131 | def waypoint_as_np_array(
132 | self, waypoint_index: int, joint_name_ordering: List[str]
133 | ) -> np.ndarray:
134 | self._check_joint_name_ordering(joint_name_ordering)
135 |
136 | # Get the joint positions at a given waypoint
137 | joint_positions_at_waypoint = self.waypoint(waypoint_index)
138 | joint_positions_array = np.array(
139 | [
140 | joint_positions_at_waypoint[joint_name]
141 | for joint_name in joint_name_ordering
142 | ]
143 | )
144 | assert joint_positions_array.shape == (len(joint_name_ordering),)
145 | return joint_positions_array
146 |
147 | def waypoints_as_np_array(self, joint_name_ordering: List[str]) -> np.ndarray:
148 | """
149 | Return the joint positions as a numpy array. The shape of this array is
150 | (num_waypoints, num_joints).
151 | """
152 | self._check_joint_name_ordering(joint_name_ordering)
153 |
154 | joint_name_to_waypoints = {
155 | joint_name: np.array(positions)
156 | for joint_name, positions in self.waypoints.items()
157 | }
158 |
159 | # Get a list of np.arrays in the order specified by the joint name ordering
160 | joint_positions = [
161 | joint_name_to_waypoints[joint_name] for joint_name in joint_name_ordering
162 | ]
163 | joint_positions_array = np.array(joint_positions)
164 |
165 | # Take transpose so we get array with shape (num_waypoints, num_joints)
166 | joint_positions_array = joint_positions_array.T
167 | assert joint_positions_array.shape == (
168 | self.num_waypoints,
169 | len(joint_name_ordering),
170 | )
171 |
172 | return joint_positions_array
173 |
174 | @classmethod
175 | def from_waypoints_np_array(
176 | cls,
177 | joint_positions_array: np.ndarray,
178 | joint_names: List[str],
179 | duration: Optional[float] = None,
180 | label: Optional[str] = None,
181 | ) -> JointSpacePath:
182 | """Create a JointSpacePath from an array of shape
183 | (num_waypoints, num_joints) and a list of joint names of length
184 | num_joints whose order corresponds to the array."""
185 | _, num_joints = joint_positions_array.shape
186 | assert len(joint_names) == num_joints
187 | waypoints = {
188 | name: joint_positions_array[:, i].tolist()
189 | for i, name in enumerate(joint_names)
190 | }
191 | return JointSpacePath(waypoints, duration, label)
192 |
193 | def get_reversed_path(self) -> "JointSpacePath":
194 | """Get the reversed joint space path."""
195 | return JointSpacePath(
196 | waypoints={
197 | joint_name: list(reversed(positions))
198 | for joint_name, positions in self.waypoints.items()
199 | },
200 | duration=self.duration,
201 | label=f"{self.label}_reversed",
202 | )
203 |
204 | def validate(self):
205 | # Check waypoints dict is not None
206 | if not self.waypoints:
207 | raise ValueError(f"{self.type} must have at least one joint waypoint")
208 |
209 | # Check waypoints are list of floats
210 | for joint_positions in self.waypoints.values():
211 | if isinstance(joint_positions, list):
212 | # Check all the elements are numbers
213 | if not all(isinstance(pos, (int, float)) for pos in joint_positions):
214 | raise ValueError(
215 |                         f"{self.type} waypoints must be a list of ints and/or floats"
216 | )
217 | else:
218 | raise ValueError(f"{self.type} waypoints must be a list of floats")
219 |
220 | # Check joints have same number of waypoints
221 | num_waypoints = set(len(waypoints) for waypoints in self.waypoints.values())
222 | if len(num_waypoints) != 1:
223 | raise ValueError(
224 | f"{self.type} must have the same number of waypoints for all joints"
225 | )
226 |
227 | # Check there are at least two waypoints
228 | num_waypoints = num_waypoints.pop()
229 | if num_waypoints < 2:
230 | raise ValueError(
231 | f"There must be at least two waypoints in {self.type}. "
232 | "The first waypoint should indicate the initial configuration."
233 | )
234 |
235 | # Check duration is valid
236 | if self.duration is not None and not self.duration > 0:
237 | raise ValueError(f"Duration must be positive in {self.type}")
238 |
239 | @classmethod
240 | def _from_json_dict(cls, json_dict: Dict) -> "JointSpacePath":
241 | return cls(**json_dict)
242 |
243 |
244 | class GripperPosition(Enum):
245 | open = "open"
246 | close = "close"
247 |
248 |
249 | @dataclass(frozen=True)
250 | class ActuateGripper(Command, type="ActuateGripper"):
251 | # Mapping of Gripper Joint Name to GripperPosition (i.e., open or close)
252 | configurations: Dict[JointName, GripperPosition]
253 |
254 | # Label to denote this gripper actuation by
255 | label: Optional[str] = None
256 |
257 | @property
258 | def joint_names(self) -> List[JointName]:
259 | return list(self.configurations.keys())
260 |
261 | def position_for_gripper_joint(self, gripper_joint: str) -> GripperPosition:
262 | if gripper_joint not in self.configurations:
263 | raise ValueError(f"Gripper joint {gripper_joint} not in configuration")
264 |
265 | return self.configurations[gripper_joint]
266 |
267 | def validate(self):
268 | if not self.configurations:
269 | raise ValueError(f"Empty configurations in {self.type}")
270 |
271 | for gripper_joint, gripper_position in self.configurations.items():
272 | if gripper_position not in [GripperPosition.open, GripperPosition.close]:
273 | raise ValueError(
274 | f"Invalid gripper position {gripper_position} for "
275 | f"gripper joint {gripper_joint}"
276 | )
277 |
278 | @classmethod
279 | def _from_json_dict(cls, json_dict: Dict) -> "ActuateGripper":
280 | # Overwrite the GripperPosition to their enum representations
281 | new_configurations = {
282 | joint_name: GripperPosition(gripper_position)
283 | for joint_name, gripper_position in json_dict["configurations"].items()
284 | }
285 | json_dict["configurations"] = new_configurations
286 | return cls(**json_dict)
287 |
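A small sketch of how the `type` registration above plays out in practice: constructing the two command types and round-tripping one through its dict form.

```python
from lisdf.planner_output.command import (
    ActuateGripper,
    Command,
    GripperPosition,
    JointSpacePath,
)

# Construct the two supported command types; validate() runs in __post_init__.
path = JointSpacePath(
    waypoints={"joint_1": [0.0, 0.5], "joint_2": [0.0, -0.5]},
    duration=1.0,
    label="reach",
)
gripper = ActuateGripper(configurations={"gripper_1": GripperPosition.close})

# to_dict() injects the class-level "type" tag used for deserialization.
path_dict = path.to_dict()
assert path_dict["type"] == "JointSpacePath"

# Command.from_json_dict dispatches on "type" (and deletes it), so pass a copy.
assert Command.from_json_dict(dict(path_dict)) == path
assert gripper.position_for_gripper_joint("gripper_1") == GripperPosition.close
```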
--------------------------------------------------------------------------------
/lisdf/planner_output/common.py:
--------------------------------------------------------------------------------
1 | import json
2 | import warnings
3 | from abc import ABC, abstractmethod
4 | from enum import Enum
5 | from typing import Dict
6 |
7 | import yaml
8 |
9 | from lisdf.planner_output.config import DEFAULT_JSON_INDENT
10 |
11 |
12 | class _CustomJSONEncoder(json.JSONEncoder):
13 | """CustomEncoder that will call to_dict for our OutputElements"""
14 |
15 | def default(self, o):
16 | if isinstance(o, OutputElement):
17 | return o.to_dict()
18 |
19 | if isinstance(o, Enum):
20 | return o.value
21 |
22 | return super().default(o)
23 |
24 |
25 | class OutputElement(ABC):
26 | def __post_init__(self):
27 | # IMPORTANT! Validate the object after it has been initialized
28 | self.validate()
29 |
30 | @abstractmethod
31 | def validate(self):
32 | """Validates the objects in this output element"""
33 | raise NotImplementedError
34 |
35 | def to_dict(self) -> Dict:
36 | """
37 | WARNING! DO NOT use dataclasses.asdict, because that will serialize all
38 | native python objects for us, which we may not want to do as subclasses
39 | may override to_dict()
40 |
41 | Our CustomJSONEncoder will handle recursively serializing all the
42 | OutputElements using their respective to_dict() methods.
43 | """
44 | return self.__dict__
45 |
46 | @classmethod
47 | @abstractmethod
48 | def from_json_dict(cls, json_dict: Dict) -> "OutputElement":
49 | """Convert a JSON dict with plain Python objects to an OutputElement"""
50 | raise NotImplementedError
51 |
52 | def to_json(self, indent=DEFAULT_JSON_INDENT, **json_kwargs) -> str:
53 | """Dump the current object as a JSON string"""
54 | return json.dumps(
55 | self.to_dict(), cls=_CustomJSONEncoder, indent=indent, **json_kwargs
56 | )
57 |
58 | def write_json(
59 | self, json_fname: str, indent=DEFAULT_JSON_INDENT, **json_kwargs
60 | ) -> None:
61 | """Write the current object to a file as a JSON"""
62 | if not json_fname.endswith(".json"):
63 | warnings.warn(f"Warning! {json_fname} does not end with .json")
64 |
65 | with open(json_fname, "w") as f:
66 | f.write(self.to_json(indent, **json_kwargs))
67 |
68 | @classmethod
69 | def from_json(cls, json_str: str) -> "OutputElement":
70 | """
71 | Load an OutputElement from a JSON string.
72 | There isn't really a need for subclasses to override this method.
73 | """
74 | return cls.from_json_dict(json.loads(json_str))
75 |
76 | def to_yaml(self, **yaml_kwargs) -> str:
77 | """Dump the current object as a YAML string"""
78 | # Convert JSON to YAML directly so we don't have to write another encoder
79 | json_as_dict = json.loads(self.to_json())
80 | return yaml.safe_dump(json_as_dict, **yaml_kwargs)
81 |
82 | def write_yaml(self, yaml_fname: str, **yaml_kwargs) -> None:
83 | """Write the current object to a file as a YAML"""
84 | if not yaml_fname.endswith(".yaml"):
85 | warnings.warn(f"Warning! {yaml_fname} does not end with .yaml")
86 |
87 | with open(yaml_fname, "w") as f:
88 | f.write(self.to_yaml(**yaml_kwargs))
89 |
90 | @classmethod
91 | def from_yaml(cls, yaml_str: str) -> "OutputElement":
92 | """
93 | Load an OutputElement from a YAML string.
94 | There isn't really a need for subclasses to override this method.
95 | """
96 | # YAML loads a plain old Python object dict, so we can use from_json_dict
97 | return cls.from_json_dict(yaml.safe_load(yaml_str))
98 |
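Since every `OutputElement` funnels through `to_dict` and `_CustomJSONEncoder`, the JSON and YAML round-trips come for free. A quick sketch using `JointSpacePath` (an `OutputElement` subclass) as the element:

```python
from lisdf.planner_output.command import JointSpacePath

path = JointSpacePath(waypoints={"j1": [0.0, 1.0]}, duration=2.0, label="demo")

# JSON round-trip: _CustomJSONEncoder serializes nested OutputElements/Enums.
assert JointSpacePath.from_json(path.to_json()) == path

# YAML is produced by re-serializing the JSON dict, so it round-trips too.
assert JointSpacePath.from_yaml(path.to_yaml()) == path
```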
--------------------------------------------------------------------------------
/lisdf/planner_output/config.py:
--------------------------------------------------------------------------------
1 | from typing import Set
2 |
3 | SUPPORTED_PLAN_OUTPUT_VERSIONS: Set[str] = {"0.1"}
4 | CURRENT_LISDF_PLAN_VERSION: str = "0.1"
5 | assert CURRENT_LISDF_PLAN_VERSION in SUPPORTED_PLAN_OUTPUT_VERSIONS
6 |
7 | ENABLE_LISDF_PATH_CHECKING: bool = False
8 |
9 | # Whether the validation checks should enforce dimensionalities
10 | # between waypoints in a JointSpacePath and all the commands in
11 | # a LISDFPlan. Keeping this as a flag for now.
12 | ENFORCE_JOINT_DIMENSIONALITIES: bool = True
13 |
14 | DEFAULT_JSON_INDENT: int = 2
15 |
--------------------------------------------------------------------------------
/lisdf/planner_output/plan.py:
--------------------------------------------------------------------------------
1 | import os
2 | from dataclasses import dataclass
3 | from typing import Dict, List, Tuple, Type
4 |
5 | import numpy as np
6 |
7 | from lisdf.planner_output.command import ActuateGripper, Command, JointSpacePath
8 | from lisdf.planner_output.common import OutputElement
9 | from lisdf.planner_output.config import (
10 | CURRENT_LISDF_PLAN_VERSION,
11 | ENABLE_LISDF_PATH_CHECKING,
12 | ENFORCE_JOINT_DIMENSIONALITIES,
13 | SUPPORTED_PLAN_OUTPUT_VERSIONS,
14 | )
15 |
16 | _SUPPORTED_COMMAND_TYPES: Tuple[Type[Command], ...] = (ActuateGripper, JointSpacePath)
17 |
18 |
19 | @dataclass(frozen=True)
20 | class LISDFPlan(OutputElement):
21 | """
22 | Overarching dataclass for a LISDF plan. Check
23 | `tests/test_planner_output/test_plan.py` for examples on usage.
24 |
25 | We automatically validate the parameters passed to __init__ for
26 | a LISDFPlan and all the elements within it. This is to ensure they
27 | are not malformed and match our specification.
28 |
29 | You can also see `scripts/planner_output_demo.py` for a demo of how
30 | to use the entire model structure.
31 | """
32 |
33 | # Path of the LISDF folder where the world and model files are located
34 | lisdf_problem: str
35 |
36 | # List of Commands that need to be executed by the simulator
37 | # We run multiple validation checks on these commands:
38 | # 1. All JointSpacePath commands have the same dimensionality
39 | # 2. All ActuateGripper commands have the same dimensionality
40 | # 3. The last waypoint of a JointSpacePath command is the same as the first
41 |     #    waypoint of the next JointSpacePath command
42 | # These checks allow us to ensure we create valid robot commands.
43 | commands: List[Command]
44 |
45 | # Version of the LISDF plan output specification
46 | version: str = CURRENT_LISDF_PLAN_VERSION
47 |
48 | def _commands_for_type(self, command_type: str) -> List[Command]:
49 | return [command for command in self.commands if command.type == command_type]
50 |
51 | def _validate_joint_space_paths(self):
52 |         # The checks below enforce that paths have the same dims and do error checking
53 | if not ENFORCE_JOINT_DIMENSIONALITIES:
54 | return
55 |
56 | # noinspection PyTypeChecker
57 | joint_space_paths: List[JointSpacePath] = self._commands_for_type(
58 | JointSpacePath.type
59 | )
60 | if not joint_space_paths:
61 |             # No need to check if no JointSpacePath commands exist
62 | return
63 |
64 | # Check that the joint names are the same for each path hence enforcing
65 | # dimensionalities
66 | joint_names = set(frozenset(path.joint_names) for path in joint_space_paths)
67 | if len(joint_names) != 1:
68 | raise ValueError("Joint names are different across joint space paths")
69 |
70 | joint_ordering = list(joint_names.pop())
71 |
72 | # Check that the initial joint positions for each JointSpacePath are consistent
73 | # i.e., the first joint position in a JointSpacePath is the same as the last
74 | # joint position in the previous JointSpacePath
75 | # TODO(willshen): load initial joint positions from robot model file
76 | # Use joint position in last waypoint in first path as initial joint position
77 | current_joint_positions = joint_space_paths[0].waypoint_as_np_array(
78 | -1, joint_ordering
79 | )
80 | for path in joint_space_paths[1:]:
81 | # Check that the initial joint position in path is same as current
82 |             # Use np.isclose since exact float equality is unreliable
83 | if not np.isclose(
84 | current_joint_positions,
85 | path.waypoint_as_np_array(0, joint_ordering),
86 | ).all():
87 | raise ValueError(
88 | "Joint positions between JointSpacePaths are inconsistent"
89 | )
90 | # Set current joint position to last waypoint
91 | current_joint_positions = path.waypoint_as_np_array(-1, joint_ordering)
92 |
93 | def _validate_actuate_grippers(self):
94 | # Checks below enforce gripper commands have the same dims
95 | if not ENFORCE_JOINT_DIMENSIONALITIES:
96 | return
97 |
98 | # noinspection PyTypeChecker
99 | actuate_grippers: List[ActuateGripper] = self._commands_for_type(
100 | ActuateGripper.type
101 | )
102 | if not actuate_grippers:
103 |             # No need to check if no ActuateGripper commands exist
104 | return
105 |
106 | # Check that the joint names are the same for each ActuateGripper command
107 | joint_names = set(
108 | frozenset(command.joint_names) for command in actuate_grippers
109 | )
110 | if len(joint_names) != 1:
111 | raise ValueError(
112 | "Joint names are different across actuate gripper commands"
113 | )
114 |
115 | def _validate_commands(self):
116 | if not self.commands:
117 | raise ValueError("Commands cannot be empty in LISDF plan")
118 |
119 |         # Check that commands conform to the Command type
120 | for command in self.commands:
121 | if not isinstance(command, Command):
122 | raise ValueError(f"Invalid command type: {type(command)}")
123 | elif not isinstance(command, _SUPPORTED_COMMAND_TYPES):
124 | # If a new Command type is added, there should be a new validation
125 | # check for it.
126 | raise ValueError(f"Unsupported command type: {command.type}")
127 |
128 | # Validate the individual commands
129 | self._validate_joint_space_paths()
130 | self._validate_actuate_grippers()
131 |
132 | def validate(self):
133 | # Check path of LISDF exists
134 | if ENABLE_LISDF_PATH_CHECKING and not os.path.exists(self.lisdf_problem):
135 | # TODO(willshen): validate models/lisdf/sdf exists within path?
136 | raise ValueError(f"LISDF path does not exist: {self.lisdf_problem}")
137 |
138 | # Check version number is valid and supported
139 | major, minor = self.version.split(".")
140 | if not major.isdigit() or not minor.isdigit():
141 | raise ValueError(f"Invalid version number: {self.version}")
142 |
143 | if self.version not in SUPPORTED_PLAN_OUTPUT_VERSIONS:
144 | raise ValueError(f"Unsupported version number: {self.version}")
145 |
146 | self._validate_commands()
147 |
148 | @classmethod
149 | def from_json_dict(cls, json_dict: Dict) -> "LISDFPlan":
150 | # Convert the commands individually as they have their own types
151 | commands = [
152 | Command.from_json_dict(command_dict)
153 | for command_dict in json_dict["commands"]
154 | ]
155 | json_dict["commands"] = commands
156 |
157 | return cls(**json_dict)
158 |
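A minimal sketch of the consistency rule enforced by `_validate_joint_space_paths`: consecutive paths must agree at their boundary waypoints, and a valid plan round-trips through JSON.

```python
from lisdf.planner_output.command import JointSpacePath
from lisdf.planner_output.plan import LISDFPlan

# The second path starts exactly where the first one ends; otherwise
# validation raises "Joint positions between JointSpacePaths are inconsistent".
path_1 = JointSpacePath(waypoints={"j1": [0.0, 0.5], "j2": [0.0, 0.1]})
path_2 = JointSpacePath(waypoints={"j1": [0.5, 1.0], "j2": [0.1, 0.2]})

plan = LISDFPlan(lisdf_problem=".", commands=[path_1, path_2])
print(plan.version)  # "0.1"

# Round-trip: from_json_dict rebuilds each Command via its registered type.
assert LISDFPlan.from_json(plan.to_json()) == plan
```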
--------------------------------------------------------------------------------
/lisdf/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/lisdf/py.typed
--------------------------------------------------------------------------------
/lisdf/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/lisdf/utils/__init__.py
--------------------------------------------------------------------------------
/lisdf/utils/printing.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | DEFAULT_TABSIZE = 2
4 |
5 |
6 | def indent_text(
7 | text: str,
8 | level: int = 1,
9 | indent_format: Optional[str] = None,
10 | tabsize: Optional[int] = None,
11 | strip: bool = True,
12 | ):
13 | if indent_format is not None:
14 | assert tabsize is None, "Cannot provide both indent format and tabsize."
15 | if tabsize is not None:
16 | assert indent_format is None, "Cannot provide both indent format and tabsize."
17 | indent_format = " " * tabsize
18 | if indent_format is None and tabsize is None:
19 | indent_format = " " * DEFAULT_TABSIZE
20 | assert isinstance(indent_format, str)
21 | indent_format = indent_format * level
22 |
23 | rv = indent_format + text.replace("\n", "\n" + indent_format)
24 | return rv if not strip else rv.strip()
25 |
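A quick sketch of `indent_text`: with the defaults, continuation lines are indented and the indent that would prefix the first line is stripped.

```python
from lisdf.utils.printing import indent_text

block = "line1\nline2"
# Defaults: two spaces per level; strip=True removes the indent that would
# otherwise prefix the first line, so only continuation lines are indented.
print(indent_text(block, level=2))
# line1
#     line2
```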
--------------------------------------------------------------------------------
/lisdf/utils/transformations_more.py:
--------------------------------------------------------------------------------
1 | import math
2 |
3 | import numpy as np
4 |
5 | from lisdf.utils.typing import Vector3f
6 |
7 |
8 | def lookat_rpy(camera_pos: Vector3f, target_pos: Vector3f) -> np.ndarray:
9 | """Construct the roll, pitch, yaw angles of a camera looking at a target.
10 |     This function assumes that the camera points along the z-axis ([0, 0, 1])
11 |     of the camera frame.
12 |
13 | Args:
14 | camera_pos: the position of the camera.
15 | target_pos: the target position.
16 |
17 | Returns:
18 | a numpy array of the roll, pitch, yaw angles.
19 | """
20 | delta = target_pos - camera_pos
21 | pitch = math.atan2(-np.linalg.norm(delta[:2]), delta[2])
22 | yaw = math.atan2(-delta[1], -delta[0])
23 | return np.array([0, pitch, yaw], dtype="float32")
24 |
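A usage sketch of `lookat_rpy`: roll is always zero, and pitch/yaw are chosen so the camera's z-axis points at the target.

```python
import numpy as np

from lisdf.utils.transformations_more import lookat_rpy

# Camera at (1, 0, 1) looking at the origin: tilted down and facing -x.
rpy = lookat_rpy(np.array([1.0, 0.0, 1.0]), np.array([0.0, 0.0, 0.0]))
print(rpy.shape)  # (3,), as [roll, pitch, yaw] with roll fixed to 0
```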
--------------------------------------------------------------------------------
/lisdf/utils/typing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | """
4 | TODO: There are a few more options to do fine-grained numpy typing.
5 |
6 | 1. The official numpy.typing package. Cons: does not support shape typing.
7 | 2. https://github.com/ramonhagenaars/nptyping. Cons: needs an additional package.
8 | 3. A simple version from https://stackoverflow.com/a/64032593
9 | """
10 |
11 | Vector2f = np.ndarray
12 | Vector3f = np.ndarray
13 | Vector4f = np.ndarray
14 | Vector6f = np.ndarray
15 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | strict_equality = True
3 | disallow_untyped_calls = True
4 | warn_unreachable = True
5 | exclude = (?x)(
6 | .env/
7 | | env/
8 | | .venv/
9 | | venv/
10 | )
11 |
12 | # Allow untyped in the SDF parsing we modified from urdf_parser_py
13 | [mypy-lisdf.parsing.*]
14 | disallow_untyped_defs = False
15 | disallow_untyped_calls = False
16 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 |
5 | [project]
6 | name = "lisdf"
7 | description = "LISdf - a universal I/O spec for Task and Motion Planning (TAMP)."
8 | readme = "README.md"
9 | requires-python = ">=3.8"
10 | license = "MIT"
11 | authors = [
12 | { name = "Learning and Intelligent Systems (MIT CSAIL)", email = "willshen@mit.edu" }
13 | ]
14 | classifiers = [
15 | "Programming Language :: Python",
16 | "Programming Language :: Python :: 3.8",
17 | "License :: OSI Approved :: MIT License",
18 | ]
19 | keywords = ["robotics", "task and motion planning", "urdf", "sdf"]
20 | dependencies = [
21 | "pyyaml",
22 | "numpy",
23 | "lark",
24 | ]
25 | dynamic = ["version"]
26 |
27 | [project.urls]
28 | Homepage = "https://github.com/Learning-and-Intelligent-Systems/lisdf/"
29 |
30 | [project.optional-dependencies]
31 | develop = [
32 | # Formatting
33 | "black",
34 | "isort",
35 | # Linting and type checking
36 | "flake8",
37 | "mypy",
38 | # Typing stubs for mypy
39 | "types-PyYAML",
40 | "types-mock",
41 | # Testing
42 | "pytest",
43 | "pytest-cov",
44 | "mock",
45 |     # LISdf models - commented out for now as PyPI doesn't support git dependencies
46 | # "lisdf_models@git+https://github.com/Learning-and-Intelligent-Systems/lisdf-models.git"
47 | ]
48 |
49 | [tool.hatch.version]
50 | path = "lisdf/__init__.py"
51 |
52 | [tool.hatch.build.targets.sdist]
53 | exclude = [
54 | "/.github",
55 | ]
56 |
57 | [tool.hatch.build.targets.wheel]
58 | packages = ["lisdf"]
59 |
60 | [tool.hatch.metadata]
61 | allow-direct-references = true
62 |
63 | [tool.isort]
64 | profile = "black"
65 | skip = [".env", "env", ".venv", "venv"]
66 |
67 | [tool.black]
68 | target-version = ['py38']
69 |
--------------------------------------------------------------------------------
/scripts/mjcf_coverage.py:
--------------------------------------------------------------------------------
1 | """
2 | This file tests the coverage of the MJCF Parser.
3 |
4 | There are two types of checks for MJCF coverage:
5 |
6 | 1. In the MJCF parser, most visitor methods have an assertion
7 | that ensures that all the attributes and children have been
8 | successfully processed.
9 |
10 | 2. At the end of processing, it outputs an XML node that contains
11 | all attributes that haven't been processed. So you can take a look
12 | at that to see if there are any important missing features.
13 | (Most of them should be rendering-related, such as lighting.)
14 |
15 | If an error occurs during parsing, the `visitor.set_verbose()` method
16 | enables detailed logging of the current node being processed.
17 | Thus, you can use this file to test whether all attributes in your MJCF
18 | file can be parsed by our parser.
19 | """
20 |
21 | import argparse
22 |
23 | from lisdf.parsing.mjcf import MJCFVisitor
24 |
25 | parser = argparse.ArgumentParser(description=__doc__)
26 | parser.add_argument("file")
27 | args = parser.parse_args()
28 |
29 |
30 | def main() -> None:
31 | visitor = MJCFVisitor()
32 | visitor.set_verbose()
33 | node = visitor.load_file(args.file)
34 | print("-" * 80)
35 | print(node)
36 |
37 |
38 | if __name__ == "__main__":
39 | main()
40 |
--------------------------------------------------------------------------------
/scripts/mjcf_expand.py:
--------------------------------------------------------------------------------
1 | """
2 | This file expands all MuJoCo include nodes (i.e., it flattens the file).
3 |
4 | This can be a useful utility for debugging.
5 | """
6 |
7 | import argparse
8 |
9 | from lisdf.parsing.mjcf import MJCFVisitorFlatten
10 |
11 | parser = argparse.ArgumentParser(description=__doc__)
12 | parser.add_argument("file")
13 | args = parser.parse_args()
14 |
15 |
16 | def main() -> None:
17 | visitor = MJCFVisitorFlatten()
18 | node = visitor.load_file(args.file)
19 | print(node)
20 |
21 |
22 | if __name__ == "__main__":
23 | main()
24 |
--------------------------------------------------------------------------------
/scripts/planner_output_demo.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from lisdf.planner_output.command import ActuateGripper, JointSpacePath
3 | from lisdf.planner_output.plan import LISDFPlan
4 | from tests.test_planner_output.conftest import generate_complex_commands
5 |
6 |
7 | def main() -> None:
8 | # Check the generate_complex_commands() method for examples on how to
9 | # generate commands
10 | lisdf_plan = LISDFPlan(lisdf_problem=".", commands=generate_complex_commands())
11 | # We can convert the plan into json, the indent just makes it human-readable
12 | print("=== JSON ===")
13 | print(lisdf_plan.to_json())
14 |
15 | # We can also convert the plan into YAML (which is a superset of JSON)
16 | print("\n=== YAML ===")
17 | print(lisdf_plan.to_yaml())
18 |
19 | # We can load a plan from JSON or YAML
20 | print("\n=== Loading plans from JSON and YAML ===")
21 | print(
22 | "Deserialize from JSON is identical?",
23 | LISDFPlan.from_json(lisdf_plan.to_json()) == lisdf_plan,
24 | )
25 | print(
26 | "Deserialize from YAML is identical?",
27 | LISDFPlan.from_yaml(lisdf_plan.to_yaml()) == lisdf_plan,
28 | )
29 |
30 | # We can do cool things with a JointSpacePath
31 | joint_space_path: JointSpacePath = lisdf_plan.commands[0] # type: ignore
32 | print("\n=== JointSpacePath ===")
33 | print("Dict Representation:", joint_space_path.to_dict())
34 | print("Joint Names:", joint_space_path.joint_names)
35 | print("Dimensionality:", joint_space_path.dimensionality)
36 | print("Number of Waypoints:", joint_space_path.num_waypoints)
37 |
38 | print("Waypoints for joint_2:", joint_space_path.waypoints_for_joint("joint_2"))
39 | print("Last waypoint as a Dict[str, float]:", joint_space_path.waypoint(-1))
40 |
41 |     # We can choose the joint ordering used to build the np.array. Let's just use
42 | # the default expected order for now
43 | joint_name_ordering = ["joint_1", "joint_2", "joint_3"]
44 | print(
45 | "Last waypoint as a np.array:",
46 | joint_space_path.waypoint_as_np_array(-1, joint_name_ordering),
47 | )
48 | print(
49 | "Waypoints as np.array:\n",
50 | joint_space_path.waypoints_as_np_array(joint_name_ordering),
51 | )
52 |
53 |     # We can do equally cool things with an ActuateGripper
54 | print("\n=== ActuateGripper ===")
55 | actuate_gripper: ActuateGripper = lisdf_plan.commands[1] # type: ignore
56 | print("Dict Representation:", actuate_gripper.to_dict())
57 | print("Joint Names:", actuate_gripper.joint_names)
58 | print(
59 | "gripper_1 position:", actuate_gripper.position_for_gripper_joint("gripper_1")
60 | )
61 |
62 |
63 | if __name__ == "__main__":
64 | main()
65 |
--------------------------------------------------------------------------------
/scripts/run_checks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "Running autoformatting."
4 | isort . && black .
5 | echo "Autoformatting complete."
6 |
7 | echo "Running linting."
8 | flake8
9 | if [ $? -eq 0 ]; then
10 | echo "Linting passed."
11 | else
12 | echo "Linting failed! Terminating check script early."
13 |     exit 1
14 | fi
15 |
16 | echo "Running type checking."
17 | mypy . --config-file mypy.ini
18 | if [ $? -eq 0 ]; then
19 | echo "Type checking passed."
20 | else
21 | echo "Type checking failed! Terminating check script early."
22 |     exit 1
23 | fi
24 |
25 | echo "Running unit tests."
26 | # Note: tests/ directory is not included in coverage
27 | pytest -s tests/ --cov-config=.coveragerc --cov=lisdf/ --cov-fail-under=75 --cov-report=term-missing:skip-covered --durations=10
28 | if [ $? -eq 0 ]; then
29 | echo "Unit tests passed."
30 | else
31 | echo "Unit tests failed!"
32 |     exit 1
33 | fi
34 |
35 | echo "All checks passed!"
36 |
--------------------------------------------------------------------------------
/scripts/sdf_coverage.py:
--------------------------------------------------------------------------------
1 | """
2 | This file tests the coverage of the SDF parser.
3 |
4 | In the SDF parser, each visitor method has an assertion
5 | that ensures that all the attributes and children have been
6 | successfully processed.
7 |
8 | The `visitor.set_verbose()` method enables detailed logging
9 | of the current node being processed. Thus, you can use this file
10 | to test whether all attributes in your SDF file can be parsed by
11 | our parser.
12 | """
13 |
14 | import argparse
15 |
16 | from lisdf.parsing.sdf_j import SDFVisitor
17 | from lisdf.parsing.xml_j.xml import load_file
18 |
19 | parser = argparse.ArgumentParser(description=__doc__)
20 | parser.add_argument("file")
21 | args = parser.parse_args()
22 |
23 |
24 | def main() -> None:
25 | node = load_file(args.file)
26 | print(node)
27 | print("-" * 120)
28 | visitor = SDFVisitor()
29 | visitor.set_verbose()
30 | visitor.load_file(args.file)
31 | print(
32 |         "Coverage test passed. "
33 |         "(If you see this message, it means the parser has covered all the "
34 | "attributes/nodes in your file.)"
35 | )
36 |
37 |
38 | if __name__ == "__main__":
39 | main()
40 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import lisdf_models
4 | import pytest
5 |
6 |
7 | @pytest.fixture(scope="session")
8 | def models_dir() -> str:
9 | """Determine models directory for the lisdf-models"""
10 |     # e.g. /home/user/lisdf/lisdf-models/models
11 | lisdf_models_dir = os.path.dirname(lisdf_models.__file__)
12 |
13 | # Check if lisdf-models actually exists
14 | if not os.path.exists(lisdf_models_dir):
15 | raise FileNotFoundError(
16 | f"Could not find lisdf-models submodule at {lisdf_models_dir}. "
17 | "Check you pulled the submodule!"
18 | )
19 | return lisdf_models_dir
20 |
--------------------------------------------------------------------------------
/tests/test_parsing/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/tests/test_parsing/__init__.py
--------------------------------------------------------------------------------
/tests/test_parsing/test_parse_mjcf.py:
--------------------------------------------------------------------------------
1 | import os.path as osp
2 |
3 | from lisdf.parsing.mjcf import load_mjcf
4 |
5 |
6 | def test_mjcf_parsing(models_dir):
7 | # TODO(Jiayuan Mao @ 03/24): add more assertions.
8 | filename = osp.join(
9 | models_dir,
10 | "mjcf",
11 | "sawyer_assembly_peg.xml",
12 | )
13 | _ = load_mjcf(filename)
14 |
--------------------------------------------------------------------------------
/tests/test_parsing/test_parse_sdf.py:
--------------------------------------------------------------------------------
1 | import os
2 | from collections import Counter
3 | from typing import List
4 |
5 | from lisdf.parsing.parse_sdf import load_sdf
6 | from lisdf.parsing.sdf import Model
7 | from lisdf.parsing.urdf import Robot
8 |
9 |
10 | def _assert_in_any_order(l_1: List, l_2: List):
11 | """Assert two lists contain the same elements in any order"""
12 |     # We can't sort the lists directly, as the elements may not be orderable.
13 | assert Counter(l_1) == Counter(l_2)
14 |
15 |
16 | def test_recursive_sdf_parsing(models_dir):
17 | parsed_sdf = load_sdf("mud_test/model.sdf", models_dir)
18 | world = parsed_sdf.aggregate_order[0]
19 | assert len(world.includes) == 5
20 | assert len(world.models) == 7
21 |
22 |
23 | def test_m0m(models_dir):
24 | parsed_sdf = load_sdf("m0m/model_no_pr2.sdf", models_dir)
25 | world = parsed_sdf.aggregate_order[0]
26 | assert world.models[4].links[0].collisions[0].geometry.uri == os.path.join(
27 | models_dir, "m0m/../ycb/011_banana/textured.obj"
28 | )
29 | assert world.models[4].links[0].visuals[0].geometry.uri == os.path.join(
30 | models_dir, "m0m/../ycb/011_banana/textured.obj"
31 | )
32 | assert len(world.includes) == 0
33 | assert len(world.models) == 6
34 | _assert_in_any_order([type(m) for m in world.models], [Model for _ in range(6)])
35 |
36 |
37 | def test_partnet_urdf(models_dir):
38 | parsed_sdf = load_sdf("partnet_mobility_test/model.sdf", models_dir)
39 | world = parsed_sdf.aggregate_order[0]
40 |
41 | assert len(world.includes) == 1
42 | assert len(world.models) == 3
43 | _assert_in_any_order(
44 | [type(m) for m in world.models],
45 | [
46 | Model,
47 | Model,
48 | Robot,
49 | ],
50 | )
51 |
--------------------------------------------------------------------------------
/tests/test_parsing/test_parse_sdf_j.py:
--------------------------------------------------------------------------------
1 | """
2 | The SDF Parser already checks whether all attributes and nodes have been parsed.
3 | So we only need to make sure it runs.
4 | """
5 |
6 | import os.path as osp
7 |
8 | import pytest
9 |
10 | from lisdf.parsing.sdf_j import load_sdf, load_sdf_string
11 | from lisdf.parsing.urdf_j import load_urdf_string
12 |
13 |
14 | @pytest.mark.parametrize(
15 | "test_dir",
16 | [
17 | "basic_test",
18 | "collision_test",
19 | "geometry_test",
20 | "joint_test",
21 | "link_test",
22 | "visual_test",
23 | "m0m_0",
24 | "m0m_0_test",
25 | ],
26 | )
27 | def test_sdf_parsing_j(models_dir, test_dir: str):
28 | # TODO(Jiayuan Mao @ 03/24): add more assertions.
29 | filename = osp.join(models_dir, test_dir, "model.sdf")
30 | node = load_sdf(
31 | filename
32 | ) # the parser already asserts all attributes and nodes are parsed.
33 |
34 | string = node.to_sdf()
35 | _ = load_sdf_string(string)
36 |
37 |     if "m0m" not in test_dir:  # m0m test contains more than one object.
38 | string = node.to_urdf()
39 | # print(string)
40 | _ = load_urdf_string(string)
41 |
--------------------------------------------------------------------------------
/tests/test_parsing/test_parse_urdf_j.py:
--------------------------------------------------------------------------------
1 | import os.path as osp
2 |
3 | import pytest
4 |
5 | from lisdf.parsing.sdf_j import load_sdf_string
6 | from lisdf.parsing.urdf_j import load_urdf, load_urdf_string
7 |
8 |
9 | @pytest.mark.parametrize(
10 | "test_dir",
11 | [
12 | "179",
13 | "46236",
14 | "47578",
15 | "48721",
16 | ],
17 | )
18 | def test_urdf_parsing_j(models_dir, test_dir: str):
19 | # TODO(Jiayuan Mao @ 03/31): add more assertions.
20 | filename = osp.join(models_dir, "partnet_mobility", test_dir, "mobility.urdf")
21 | node = load_urdf(
22 | filename
23 | ) # the parser already asserts all attributes and nodes are parsed.
24 |
25 | string = node.to_sdf()
26 | # print(string)
27 | _ = load_sdf_string(string)
28 |
29 | string = node.to_urdf()
30 | # print(string)
31 | _ = load_urdf_string(string)
32 |
--------------------------------------------------------------------------------
/tests/test_parsing/test_urdf_error.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import lisdf.parsing.xml_reflection as xmlr
4 | from lisdf.parsing import urdf
5 |
6 | ParseError = xmlr.core.ParseError
7 |
8 |
9 | class TestURDFParserError(unittest.TestCase):
10 | def setUp(self):
11 | # Manually patch "on_error" to capture errors
12 | self.errors = []
13 |
14 | def add_error(message):
15 | self.errors.append(message)
16 |
17 | xmlr.core.on_error = add_error
18 |
19 | def tearDown(self):
20 | xmlr.core.on_error = xmlr.core.on_error_stderr
21 |
22 | def assertLoggedErrors(self, errors, func, *args, **kwds):
23 | func(*args, **kwds)
24 | self.assertEqual(self.errors, errors)
25 |
26 | def assertParseErrorPath(self, path, func, *args, **kwds):
27 | with self.assertRaises(ParseError) as cm:
28 | func(*args, **kwds)
29 | e = cm.exception
30 | self.assertEqual(str(e.path), str(path))
31 |
32 | def getParseFuncs(self, cls, xml_string):
33 | """
34 | Check XML parsing of a given string, using both "from_xml_string" and "parse"
35 | """
36 |
37 | # Static method for parsing an object
38 | # TODO: Use parameterized tests to decide which method to use
39 | def preclean(func):
40 | def op():
41 | # Dirty hack to clean external state
42 | self.tearDown()
43 | self.setUp()
44 | func()
45 |
46 | return op
47 |
48 | use_static = lambda: cls.from_xml_string(xml_string) # noqa: E731
49 | # Bound method for parsing an object (backwards compatibility)
50 | use_bound = lambda: cls().parse(xml_string) # noqa: E731
51 | return [use_static, preclean(use_bound)]
52 |
53 | def test_unknown_tag(self):
54 | xml_string = """
55 |         <link name="b">
56 |             <unknown_element/></link>
57 | """
58 | funcs = self.getParseFuncs(urdf.Link, xml_string)
59 | errors_expected = ["Unknown tag \"unknown_element\" in /link[@name='b']"]
60 | for func in funcs:
61 | self.assertLoggedErrors(errors_expected, func)
62 |
63 | def test_unknown_attribute(self):
64 |         xml_string = """<link name="b" unknown_attribute="0"/>
65 | """
66 | funcs = self.getParseFuncs(urdf.Link, xml_string)
67 | errors_expected = [
68 | "Unknown attribute \"unknown_attribute\" in /link[@name='b']"
69 | ]
70 | for func in funcs:
71 | self.assertLoggedErrors(errors_expected, func)
72 |
73 | def test_unset_required_name_link(self):
74 |         xml_string = """<link/>
75 | """
76 | funcs = self.getParseFuncs(urdf.Link, xml_string)
77 | for func in funcs:
78 | self.assertParseErrorPath("/link", func)
79 |
80 | def test_invalid_joint_type(self):
81 | xml_string = """
82 |         <joint name="bad_joint" type="badtype">
83 |             <parent link="parent"/>
84 |             <child link="child"/></joint>
85 | """
86 | funcs = self.getParseFuncs(urdf.Joint, xml_string)
87 | for func in funcs:
88 | self.assertParseErrorPath("/joint[@name='bad_joint']", func)
89 |
90 | def test_invalid_joint_type_in_robot(self):
91 | xml_string = """
92 |         <robot name="test">
93 |             <joint name="bad_joint" type="badtype">
94 |                 <parent link="parent"/>
95 |                 <child link="child"/>
96 |             </joint></robot>
97 | """
98 | funcs = self.getParseFuncs(urdf.Robot, xml_string)
99 | for func in funcs:
100 | self.assertParseErrorPath(
101 | "/robot[@name='test']/joint[@name='bad_joint']", func
102 | )
103 |
104 | def test_unset_required_name_aggregate_in_robot(self):
105 | """Show that an aggregate with an unset name still has its index specified"""
106 | xml_string = """
107 |         <robot name="test">
108 |             <link name="a"/>
109 |             <link/></robot>
110 | """
111 | funcs = self.getParseFuncs(urdf.Robot, xml_string)
112 | for func in funcs:
113 | self.assertParseErrorPath("/robot[@name='test']/link[2]", func)
114 |
115 | def test_unset_required_name_aggregate_ducktype(self):
116 | """
117 | If an aggregate duck-typed element does not have a required attribute,
118 | ensure it is reported with the index
119 | """
120 | xml_string = """
121 |         <robot name="test">
122 |             <transmission type="SimpleTransmission"/></robot>
123 | """
124 | funcs = self.getParseFuncs(urdf.Robot, xml_string)
125 | for func in funcs:
126 | self.assertParseErrorPath("/robot[@name='test']/transmission[1]", func)
127 |
128 | def test_bad_inertial_origin_xyz(self):
129 | xml_string = """
130 |         <robot name="test">
131 |             <link name="b">
132 |                 <inertial>
133 |                     <mass value="1"/>
134 |                     <origin xyz="1 2 bad" rpy="0 0 0"/>
135 |                 </inertial></link></robot>
136 | """
137 | funcs = self.getParseFuncs(urdf.Robot, xml_string)
138 | for func in funcs:
139 | self.assertParseErrorPath(
140 | "/robot[@name='test']/link[@name='b']/inertial/origin[@xyz]", func
141 | )
142 |
143 | def test_bad_ducktype(self):
144 | xml_string = """
145 |         <robot name="test">
146 |             <transmission name="simple_trans">
147 |                 <type>transmission_interface/SimpleTransmission</type>
148 |                 <joint name="foo_joint">
149 |                     <hardwareInterface>EffortJointInterface</hardwareInterface>
150 |                 </joint>
151 |             </transmission>
152 |             <transmission name="simple_trans_bad">
153 |                 <joint name="foo_joint">
154 |                     <type>transmission_interface/SimpleTransmission</type>
155 |                 </joint>
156 |                 <joint name="bar_joint">
157 |                     <hardwareInterface>EffortJointInterface</hardwareInterface>
158 |                 </joint>
159 |             </transmission></robot>
160 | """
161 | funcs = self.getParseFuncs(urdf.Robot, xml_string)
162 | for func in funcs:
163 | self.assertParseErrorPath(
164 | "/robot[@name='test']/transmission[@name='simple_trans_bad']", func
165 | )
166 |
167 |
168 | if __name__ == "__main__":
169 | unittest.main()
170 |
--------------------------------------------------------------------------------
/tests/test_parsing/xml_matching.py:
--------------------------------------------------------------------------------
1 | import re
2 | import sys
3 | import xml.dom.minidom
4 |
5 | # regex to match whitespace
6 | whitespace = re.compile(r"\s+")
7 |
8 |
9 | def all_attributes_match(a, b):
10 | if len(a.attributes) != len(b.attributes):
11 | print("Different number of attributes")
12 | return False
13 | a_atts = [
14 | (a.attributes.item(i).name, a.attributes.item(i).value)
15 | for i in range(len(a.attributes))
16 | ] # noqa
17 | b_atts = [
18 | (b.attributes.item(i).name, b.attributes.item(i).value)
19 | for i in range(len(b.attributes))
20 | ] # noqa
21 | a_atts.sort()
22 | b_atts.sort()
23 |
24 | for i in range(len(a_atts)):
25 | if a_atts[i][0] != b_atts[i][0]:
26 | print(
27 | "Different attribute names: %s and %s" % (a_atts[i][0], b_atts[i][0])
28 | ) # noqa
29 | return False
30 | try:
31 | if abs(float(a_atts[i][1]) - float(b_atts[i][1])) > 1.0e-9:
32 | print(
33 | "Different attribute values: %s and %s"
34 | % (a_atts[i][1], b_atts[i][1])
35 | ) # noqa
36 | return False
37 | except ValueError: # Attribute values aren't numeric
38 | if a_atts[i][1] != b_atts[i][1]:
39 | print(
40 | "Different attribute values: %s and %s"
41 | % (a_atts[i][1], b_atts[i][1])
42 | ) # noqa
43 | return False
44 |
45 | return True
46 |
47 |
48 | def text_matches(a, b):
49 | a_norm = whitespace.sub(" ", a)
50 | b_norm = whitespace.sub(" ", b)
51 | if a_norm.strip() == b_norm.strip():
52 | return True
53 | print("Different text values: '%s' and '%s'" % (a, b))
54 | return False
55 |
56 |
57 | def nodes_match(a, b, ignore_nodes):
58 | if not a and not b:
59 | return True
60 | if not a or not b:
61 | return False
62 |
63 | if a.nodeType != b.nodeType:
64 | print("Different node types: %s and %s" % (a, b))
65 | return False
66 |
67 | # compare text-valued nodes
68 | if a.nodeType in [
69 | xml.dom.Node.TEXT_NODE,
70 | xml.dom.Node.CDATA_SECTION_NODE,
71 | xml.dom.Node.COMMENT_NODE,
72 | ]:
73 | return text_matches(a.data, b.data)
74 |
75 | # ignore all other nodes except ELEMENTs
76 | if a.nodeType != xml.dom.Node.ELEMENT_NODE:
77 | return True
78 |
79 | # compare ELEMENT nodes
80 | if a.nodeName != b.nodeName:
81 | print("Different element names: %s and %s" % (a.nodeName, b.nodeName))
82 | return False
83 |
84 | if not all_attributes_match(a, b):
85 | return False
86 |
87 | a = a.firstChild
88 | b = b.firstChild
89 | while a or b:
90 | # ignore whitespace-only text nodes
91 | # we could have several text nodes in a row, due to replacements
92 | while a and (
93 | (a.nodeType in ignore_nodes)
94 | or (
95 | a.nodeType == xml.dom.Node.TEXT_NODE
96 | and whitespace.sub("", a.data) == ""
97 | )
98 | ): # noqa
99 | a = a.nextSibling
100 | while b and (
101 | (b.nodeType in ignore_nodes)
102 | or (
103 | b.nodeType == xml.dom.Node.TEXT_NODE
104 | and whitespace.sub("", b.data) == ""
105 | )
106 | ): # noqa
107 | b = b.nextSibling
108 |
109 | if not nodes_match(a, b, ignore_nodes):
110 | return False
111 |
112 | if a:
113 | a = a.nextSibling
114 | if b:
115 | b = b.nextSibling
116 |
117 | return True
118 |
119 |
120 | def xml_matches(a, b, ignore_nodes=[]):
121 | if isinstance(a, str):
122 | return xml_matches(
123 | xml.dom.minidom.parseString(a).documentElement, b, ignore_nodes
124 | )
125 | if isinstance(b, str):
126 | return xml_matches(
127 | a, xml.dom.minidom.parseString(b).documentElement, ignore_nodes
128 | )
129 | if a.nodeType == xml.dom.Node.DOCUMENT_NODE:
130 | return xml_matches(a.documentElement, b, ignore_nodes)
131 | if b.nodeType == xml.dom.Node.DOCUMENT_NODE:
132 | return xml_matches(a, b.documentElement, ignore_nodes)
133 |
134 | if not nodes_match(a, b, ignore_nodes):
135 | print("Match failed:")
136 | a.writexml(sys.stdout)
137 | print()
138 | print("=" * 78)
139 | b.writexml(sys.stdout)
140 | print()
141 | return False
142 | return True
143 |
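A quick usage sketch of `xml_matches`, the entry point above: attribute order is ignored, and numeric attribute values are compared with an absolute tolerance of 1e-9.

```python
from tests.test_parsing.xml_matching import xml_matches

# Attribute order is irrelevant, and 1.0 vs 1.0 + 1e-12 is within tolerance,
# so these two documents are considered equivalent.
assert xml_matches(
    '<box name="b" size="1.0"/>',
    '<box size="1.000000000001" name="b"/>',
)

# A genuine difference prints a diagnostic and returns False.
assert not xml_matches('<box name="b"/>', '<box name="c"/>')
```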
--------------------------------------------------------------------------------
/tests/test_plan_executor/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/tests/test_plan_executor/__init__.py
--------------------------------------------------------------------------------
/tests/test_plan_executor/conftest.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List
2 |
3 | import pytest
4 |
5 | from lisdf.plan_executor.robots.panda import Panda
6 |
7 |
8 | @pytest.fixture
9 | def panda() -> Panda:
10 | # Robot at zero with gripper initially open
11 | return Panda(configuration=Panda.INITIAL_CONFIGURATION)
12 |
13 |
14 | @pytest.fixture
15 | def panda_waypoints() -> Dict[str, List[float]]:
16 | return {
17 | "panda_joint1": [0.0, 0.05, 0.10, 0.15, 0.20, 0.25, 0.30],
18 | "panda_joint2": [0.0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06],
19 | "panda_joint3": [0.0, -0.1, -0.2, -0.3, -0.4, -0.5, -0.6],
20 | "panda_joint4": [0.0, 0.15, 0.25, 0.35, 0.45, 0.55, 0.65],
21 | "panda_joint5": [0.0, -0.2, -0.4, -0.6, -0.8, -1.0, -1.2],
22 | "panda_joint6": [0.0, 0.22, 0.44, 0.66, 0.88, 1.10, 1.32],
23 | "panda_joint7": [0.0, 0.11, 0.22, 0.33, 0.44, 0.55, 0.66],
24 | }
25 |
--------------------------------------------------------------------------------
/tests/test_plan_executor/test_executor.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from mock.mock import Mock
3 |
4 | from lisdf.plan_executor.executor import CommandExecutor
5 |
6 |
7 | class _ConcreteExecutor(CommandExecutor):
8 | @property
9 | def duration(self) -> float:
10 | return 1.0
11 |
12 | def execute(self, current_time: float) -> None:
13 | pass
14 |
15 |
16 | @pytest.fixture
17 | def executor() -> _ConcreteExecutor:
18 | return _ConcreteExecutor(Mock(), Mock(), 0.0)
19 |
20 |
21 | def test_command_executor_end_time(executor):
22 | assert executor.end_time == 1.0
23 |
24 |
25 | def test_command_executor_finished(executor):
26 | assert not executor.finished(0.0)
27 | assert not executor.finished(0.5)
28 | assert executor.finished(1.0)
29 |
--------------------------------------------------------------------------------
/tests/test_plan_executor/test_gripper_executor.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pytest
3 |
4 | from lisdf.plan_executor.gripper_executor import ActuateGripperExecutor
5 | from lisdf.planner_output.command import ActuateGripper, GripperPosition
6 |
7 |
8 | @pytest.mark.parametrize(
9 | "command, expected_configuration",
10 | [
11 | (
12 | ActuateGripper(configurations={"gripper_1": GripperPosition.close}),
13 | np.zeros(9),
14 | ),
15 | (
16 | ActuateGripper(configurations={"gripper_1": GripperPosition.open}),
17 | np.array([0, 0, 0, 0, 0, 0, 0, 0.04, 0.04]),
18 | ),
19 | ],
20 | )
21 | def test_actuate_gripper_executor(command, expected_configuration, panda):
22 | executor = ActuateGripperExecutor(panda, command, 2.0)
23 | executor.execute(2.5)
24 | assert np.allclose(panda.configuration, expected_configuration)
25 |
26 |
27 | def test_actuate_gripper_executor_raises_error(panda):
28 | # Two gripper actions which we don't support
29 | command = ActuateGripper(
30 | configurations={
31 | "gripper_1": GripperPosition.open,
32 | "gripper_2": GripperPosition.close,
33 | }
34 | )
35 | executor = ActuateGripperExecutor(panda, command, 1.0)
36 | with pytest.raises(ValueError):
37 | executor.execute(1.1)
38 |
--------------------------------------------------------------------------------
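Note: the parametrized cases above encode the gripper convention these tests
assume: the Panda configuration is 9-dimensional, its last two entries are the
finger joints, "open" maps to 0.04, "close" to 0.0, and commands addressing
more than one gripper are rejected. A hedged sketch of that behaviour (the
helper and its names are illustrative, not the executor's actual code):

    import numpy as np

    _FINGER_POSITION = {"open": 0.04, "close": 0.0}

    def apply_gripper_position(config: np.ndarray, grippers: dict) -> np.ndarray:
        # Mirrors test_actuate_gripper_executor_raises_error: commands with
        # two grippers are unsupported and raise ValueError.
        if len(grippers) != 1:
            raise ValueError("only a single gripper is supported")
        (position,) = grippers.values()
        config[-2:] = _FINGER_POSITION[position]
        return config

--------------------------------------------------------------------------------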
/tests/test_plan_executor/test_interpolator.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pytest
3 |
4 | from lisdf.plan_executor.interpolator import LinearInterpolator, NearestTimeInterpolator
5 | from lisdf.planner_output.command import JointSpacePath
6 |
7 |
8 | @pytest.fixture
9 | def joint_space_path(panda_waypoints) -> JointSpacePath:
10 | return JointSpacePath(
11 | waypoints=panda_waypoints,
12 | duration=6.0,
13 | label="complex_joint_space_path",
14 | )
15 |
16 |
17 | NEAREST_TIME_INTERPOLATOR_TEST_CASES = [
18 | # (time, expected configuration index)
19 | pytest.param(-1.0, 0, id="time_before_start"),
20 | pytest.param(0.4999, 0, id="initial_configuration"),
21 | pytest.param(0.5, 0, id="tie_breaking"),
22 | pytest.param(0.5001, 1, id="edge_case_second_configuration"),
23 | pytest.param(0.999, 1, id="second_configuration"),
24 | pytest.param(5.501, -1, id="edge_case_last_configuration"),
25 | pytest.param(6.0, -1, id="last_configuration"),
26 | pytest.param(999, -1, id="time_beyond_duration"),
27 | ]
28 |
29 |
30 | @pytest.mark.parametrize(
31 | "interpolator_cls", [LinearInterpolator, NearestTimeInterpolator]
32 | )
33 | def test_interpolator_raises_error(interpolator_cls):
34 | # Number of timesteps does not equal number of configurations
35 | with pytest.raises(ValueError):
36 | interpolator_cls(t_all=np.array([0.0, 1.0]), confs=np.array([[0.0, 0.0, 0.1]]))
37 |
38 | # Timesteps are not sorted
39 | with pytest.raises(ValueError):
40 | interpolator_cls(
41 | t_all=np.array([0.2, 0.5, 0.3]), confs=np.array([[0.0], [0.1], [0.2]])
42 | )
43 |
44 |
45 | @pytest.mark.parametrize(
46 | "time, expected_configuration_idx",
47 | NEAREST_TIME_INTERPOLATOR_TEST_CASES,
48 | )
49 | def test_nearest_time_interpolator(
50 | time, expected_configuration_idx, panda, joint_space_path
51 | ):
52 | # NearestTimeInterpolator interpolates to the waypoint with the nearest time.
53 | # Ties (i.e., at 0.5) are broken by choosing the waypoint that came first in time.
54 | interpolator = NearestTimeInterpolator(
55 | t_all=np.linspace(
56 | 0.0, joint_space_path.duration, joint_space_path.num_waypoints
57 | ),
58 | confs=joint_space_path.waypoints_as_np_array(panda.joint_ordering),
59 | )
60 | assert np.allclose(
61 | interpolator.value(time),
62 | joint_space_path.waypoint_as_np_array(
63 | expected_configuration_idx, panda.joint_ordering
64 | ),
65 | )
66 |
67 |
68 | def test_linear_interpolator(joint_space_path, panda):
69 | interpolator = LinearInterpolator(
70 | t_all=np.linspace(
71 | 0.0, joint_space_path.duration, joint_space_path.num_waypoints
72 | ),
73 | confs=joint_space_path.waypoints_as_np_array(panda.joint_ordering),
74 | )
75 |
76 | def waypoint_at(idx_):
77 | return joint_space_path.waypoint_as_np_array(idx_, panda.joint_ordering)
78 |
79 | # At time 0.0 or before, the configuration is the first waypoint.
80 | assert np.allclose(interpolator.value(0.0), waypoint_at(0))
81 | assert np.allclose(interpolator.value(-999), waypoint_at(0))
82 |
83 | # In between first and second waypoint (i.e., idx = 0 and 1),
84 | # check that it linearly interpolates
85 | slope = (waypoint_at(1) - waypoint_at(0)) / (interpolator.t_all[1])
86 | for time in np.linspace(0.0, interpolator.t_all[1], 50):
87 | assert np.allclose(interpolator.value(time), waypoint_at(0) + slope * time)
88 |
89 | # In between second and third waypoint (i.e., idx = 1 and 2),
90 | # check that it linearly interpolates
91 | slope = (waypoint_at(2) - waypoint_at(1)) / (
92 | interpolator.t_all[2] - interpolator.t_all[1]
93 | )
94 | for time in np.linspace(interpolator.t_all[1], interpolator.t_all[2], 50):
95 | assert np.allclose(
96 | interpolator.value(time),
97 | waypoint_at(1) + slope * (time - interpolator.t_all[1]),
98 | )
99 |
100 | # At end of command (and beyond), the configuration is the last waypoint.
101 | assert np.allclose(interpolator.value(joint_space_path.duration), waypoint_at(-1))
102 | assert np.allclose(interpolator.value(999), waypoint_at(-1))
103 |
104 | # Check that the interpolator returns the exact configuration at each
105 | # waypoint time, i.e., the robot should hit every waypoint at its scheduled timestep
106 | for idx, time in enumerate(interpolator.t_all):
107 | assert np.allclose(interpolator.value(time), interpolator.confs[idx])
108 |
--------------------------------------------------------------------------------
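Note: taken together, the cases above fix the semantics of both interpolators:
nearest-time lookup with ties broken toward the earlier waypoint, and
piecewise-linear interpolation that clamps to the first/last configuration
outside the time range. A compact sketch under those assumptions (function
names are illustrative, not the library's API):

    import numpy as np

    def nearest_time_value(t_all, confs, time):
        # np.argmin returns the first minimizer, which resolves the 0.5 tie in
        # NEAREST_TIME_INTERPOLATOR_TEST_CASES toward the earlier waypoint and
        # clamps queries outside [t_all[0], t_all[-1]].
        return confs[int(np.argmin(np.abs(t_all - time)))]

    def linear_value(t_all, confs, time):
        # Per-joint piecewise-linear interpolation; np.interp clamps beyond
        # the ends, matching the -999 / 999 assertions above.
        return np.array(
            [np.interp(time, t_all, confs[:, d]) for d in range(confs.shape[1])]
        )

--------------------------------------------------------------------------------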
/tests/test_plan_executor/test_joint_space_path_executor.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pytest
3 |
4 | from lisdf.plan_executor.interpolator import NearestTimeInterpolator
5 | from lisdf.plan_executor.joint_space_path_executor import JointSpacePathExecutor
6 | from lisdf.planner_output.command import JointSpacePath
7 | from tests.test_plan_executor.test_interpolator import (
8 | NEAREST_TIME_INTERPOLATOR_TEST_CASES,
9 | )
10 |
11 |
12 | @pytest.fixture
13 | def joint_space_path(panda_waypoints) -> JointSpacePath:
14 | return JointSpacePath(
15 | waypoints=panda_waypoints,
16 | duration=6.0,
17 | label="complex_joint_space_path",
18 | )
19 |
20 |
21 | @pytest.fixture
22 | def executor(panda, joint_space_path) -> JointSpacePathExecutor:
23 | return JointSpacePathExecutor(
24 | panda, joint_space_path, 0.0, interpolator_cls=NearestTimeInterpolator
25 | )
26 |
27 |
28 | def test_joint_space_path_executor_parses_duration(executor, joint_space_path):
29 | assert executor.duration == joint_space_path.duration
30 |
31 |
32 | def test_joint_space_path_executor_creates_interpolator(
33 | executor, panda, joint_space_path
34 | ):
35 | interpolator = executor._interpolator
36 | assert np.allclose(
37 | interpolator.t_all, np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0])
38 | )
39 | assert np.allclose(
40 | interpolator.confs, joint_space_path.waypoints_as_np_array(panda.joint_ordering)
41 | )
42 |
43 |
44 | @pytest.mark.parametrize(
45 | "time, expected_configuration_idx",
46 | # Since we use the NearestTimeInterpolator, we can reuse test cases here
47 | NEAREST_TIME_INTERPOLATOR_TEST_CASES,
48 | )
49 | def test_joint_space_path_executor_execute(
50 | time, expected_configuration_idx, executor, panda, joint_space_path
51 | ):
52 | executor.execute(time)
53 | assert np.allclose(
54 | panda.joint_configuration,
55 | joint_space_path.waypoint_as_np_array(
56 | expected_configuration_idx, panda.joint_ordering
57 | ),
58 | )
59 |
--------------------------------------------------------------------------------
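Note: the t_all assertion above follows from spacing the path's waypoints
evenly over its duration; with the 7-waypoint fixture and duration 6.0 the
arithmetic works out to one waypoint per second:

    import numpy as np

    t_all = np.linspace(0.0, 6.0, 7)  # 7 waypoints over 6.0 seconds
    assert np.allclose(t_all, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0])

--------------------------------------------------------------------------------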
/tests/test_plan_executor/test_lisdf_executor.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from mock import Mock, call
3 |
4 | from lisdf.plan_executor.gripper_executor import ActuateGripperExecutor
5 | from lisdf.plan_executor.interpolator import NearestTimeInterpolator
6 | from lisdf.plan_executor.joint_space_path_executor import JointSpacePathExecutor
7 | from lisdf.plan_executor.lisdf_executor import LISDFPlanExecutor, NoExecutorFoundError
8 | from lisdf.planner_output.command import ActuateGripper, GripperPosition, JointSpacePath
9 | from lisdf.planner_output.plan import LISDFPlan
10 |
11 |
12 | @pytest.fixture
13 | def plan(panda_waypoints) -> LISDFPlan:
14 | reversed_waypoints = {
15 | joint: list(reversed(confs)) for joint, confs in panda_waypoints.items()
16 | }
17 |
18 | return LISDFPlan(
19 | lisdf_problem="my_cool_problem",
20 | commands=[
21 | JointSpacePath(
22 | waypoints=panda_waypoints,
23 | duration=3.0,
24 | label="move_to_pick",
25 | ),
26 | ActuateGripper(
27 | configurations={"gripper_1": GripperPosition.close}, label="pick"
28 | ),
29 | JointSpacePath(
30 | waypoints=reversed_waypoints,
31 | duration=6.0,
32 | label="move_to_place",
33 | ),
34 | ActuateGripper(
35 | configurations={"gripper_1": GripperPosition.open}, label="place"
36 | ),
37 | ],
38 | )
39 |
40 |
41 | @pytest.fixture
42 | def executor(panda, plan) -> LISDFPlanExecutor:
43 | return LISDFPlanExecutor(
44 | robot=panda,
45 | plan=plan,
46 | path_interpolator_cls=NearestTimeInterpolator,
47 | start_time=0.0,
48 | )
49 |
50 |
51 | def test_lisdf_plan_executor(executor, plan, panda):
52 | # 3.0 + 6.0 + 2 * 1.0 (for gripper)
53 | assert ActuateGripperExecutor.DEFAULT_DURATION == 1.0 # test relies on this for now
54 | assert executor.duration == 3.0 + 6.0 + 2.0
55 |
56 | # Check executors created conform to plan
57 | for idx, ((expected_type, expected_start_time), command_executor) in enumerate(
58 | zip(
59 | [
60 | (JointSpacePathExecutor, 0.0),
61 | (ActuateGripperExecutor, 3.0),
62 | (JointSpacePathExecutor, 4.0),
63 | (ActuateGripperExecutor, 10.0),
64 | ],
65 | executor._executors,
66 | )
67 | ):
68 | assert type(command_executor) is expected_type
69 | assert command_executor.command == plan.commands[idx]
70 | assert command_executor.start_time == expected_start_time
71 | assert (
72 | command_executor.end_time == expected_start_time + command_executor.duration
73 | )
74 |
75 | command_executor.execute = Mock()
76 |
77 | # Execute the LISDFPlanExecutor for given times
78 | expected_times = [
79 | [0.0, 0.2, 1.0, 2.0, 2.99], # move to pick
80 | [3.0, 3.2, 3.99], # pick
81 | [4.0, 5.0, 7.0, 8.5, 9.95], # move to place
82 | [10.0, 11.0], # place
83 | ]
84 | for idx, times in enumerate(expected_times):
85 | for current_time in times:
86 | executor.execute(current_time)
87 |
88 | # Check calls were made to the underlying execute method in the expected order
89 | command_executor = executor._executors[idx]
90 | command_executor.execute.assert_has_calls([call(t) for t in times])
91 |
92 | # Check no additional calls
93 | for idx, command_executor in enumerate(executor._executors):
94 | assert command_executor.execute.call_count == len(expected_times[idx])
95 |
96 | # Error raised when time goes backwards
97 | with pytest.raises(RuntimeError):
98 | executor.execute(1.0)
99 |
100 | # Raise error when we try to execute at a time beyond the plan
101 | with pytest.raises(NoExecutorFoundError):
102 | executor.execute(999)
103 |
--------------------------------------------------------------------------------
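Note: the expected start times in the test above are the running sum of
command durations: each executor begins where the previous one ended. A quick
standalone check of that bookkeeping (durations taken from the plan fixture,
with the 1.0 gripper default the test asserts):

    durations = [3.0, 1.0, 6.0, 1.0]  # move_to_pick, pick, move_to_place, place
    start_times, t = [], 0.0
    for d in durations:
        start_times.append(t)
        t += d
    assert start_times == [0.0, 3.0, 4.0, 10.0]
    assert t == 11.0  # total plan duration: 3.0 + 6.0 + 2 * 1.0

--------------------------------------------------------------------------------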
/tests/test_planner_output/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Learning-and-Intelligent-Systems/lisdf/478191a56b207608b0ea6707368f9a09176b801c/tests/test_planner_output/__init__.py
--------------------------------------------------------------------------------
/tests/test_planner_output/conftest.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List
2 |
3 | import pytest
4 |
5 | from lisdf.planner_output.command import (
6 | ActuateGripper,
7 | Command,
8 | GripperPosition,
9 | JointSpacePath,
10 | )
11 |
12 |
13 | def generate_complex_commands() -> List[Command]:
14 | return [
15 | JointSpacePath(
16 | waypoints={
17 | "joint_1": [0.0, 0.25, 0.5],
18 | "joint_2": [0.0, 0.1, 0.2],
19 | "joint_3": [0.0, 0.15, 0.3],
20 | },
21 | duration=5.0,
22 | label="move_to_pick",
23 | ),
24 | ActuateGripper(
25 | configurations={"gripper_1": GripperPosition.close}, label="pick"
26 | ),
27 | JointSpacePath(
28 | waypoints={
29 | "joint_1": [0.5, 0.2],
30 | "joint_2": [0.2, 0.6],
31 | "joint_3": [0.3, 0.15],
32 | },
33 | duration=3.0,
34 | label="move_to_place",
35 | ),
36 | ActuateGripper(
37 | configurations={"gripper_1": GripperPosition.open}, label="place"
38 | ),
39 | JointSpacePath(
40 | waypoints={
41 | "joint_1": [0.2, 0.0],
42 | "joint_2": [0.6, 0.0],
43 | "joint_3": [0.15, 0.0],
44 | },
45 | duration=2.5,
46 | label="go_to_zero",
47 | ),
48 | ]
49 |
50 |
51 | @pytest.fixture
52 | def complex_commands() -> List[Command]:
53 | return generate_complex_commands()
54 |
55 |
56 | @pytest.fixture
57 | def expected_complex_lisdf_plan_dict(lisdf_problem, version) -> Dict:
58 | """Expected plain python dict representation of a complex LISDF plan."""
59 | return {
60 | "commands": [
61 | {
62 | "duration": 5.0,
63 | "label": "move_to_pick",
64 | "type": "JointSpacePath",
65 | "waypoints": {
66 | "joint_1": [0.0, 0.25, 0.5],
67 | "joint_2": [0.0, 0.1, 0.2],
68 | "joint_3": [0.0, 0.15, 0.3],
69 | },
70 | },
71 | {
72 | "configurations": {"gripper_1": "close"},
73 | "label": "pick",
74 | "type": "ActuateGripper",
75 | },
76 | {
77 | "duration": 3.0,
78 | "label": "move_to_place",
79 | "type": "JointSpacePath",
80 | "waypoints": {
81 | "joint_1": [0.5, 0.2],
82 | "joint_2": [0.2, 0.6],
83 | "joint_3": [0.3, 0.15],
84 | },
85 | },
86 | {
87 | "configurations": {"gripper_1": "open"},
88 | "label": "place",
89 | "type": "ActuateGripper",
90 | },
91 | {
92 | "duration": 2.5,
93 | "label": "go_to_zero",
94 | "type": "JointSpacePath",
95 | "waypoints": {
96 | "joint_1": [0.2, 0.0],
97 | "joint_2": [0.6, 0.0],
98 | "joint_3": [0.15, 0.0],
99 | },
100 | },
101 | ],
102 | "lisdf_problem": lisdf_problem,
103 | "version": version,
104 | }
105 |
--------------------------------------------------------------------------------
/tests/test_planner_output/test_common.py:
--------------------------------------------------------------------------------
1 | import json
2 | from dataclasses import dataclass
3 | from typing import Dict
4 |
5 | import yaml
6 | from mock.mock import mock_open, patch
7 |
8 | from lisdf.planner_output.common import OutputElement
9 | from lisdf.planner_output.config import DEFAULT_JSON_INDENT
10 |
11 |
12 | @dataclass
13 | class _ConcreteOutputElement(OutputElement):
14 | my_dict: Dict
15 | validated: bool = False
16 |
17 | def validate(self):
18 | self.validated = True
19 |
20 | @classmethod
21 | def from_json_dict(cls, json_dict: Dict) -> "_ConcreteOutputElement":
22 | return cls(**json_dict)
23 |
24 |
25 | def test_output_element():
26 | element = _ConcreteOutputElement({"lis": "mit", "counter": 999})
27 |
28 | # Check that the element automatically validates itself after init
29 | assert element.validated
30 |
31 | # Check to_dict, to_json, to_yaml
32 | expected_dict_to_dump = {"my_dict": element.my_dict, "validated": True}
33 | assert element.to_dict() == expected_dict_to_dump
34 | assert element.to_json() == json.dumps(
35 | expected_dict_to_dump, indent=DEFAULT_JSON_INDENT
36 | )
37 | assert element.to_yaml() == yaml.safe_dump(expected_dict_to_dump)
38 |
39 | # Check write_json, write_yaml. Mock open so we don't actually write to disk
40 | with patch("builtins.open", mock_open()) as mock_file:
41 | element.write_json("/tmp/test.json")
42 | mock_file.assert_called_with("/tmp/test.json", "w")
43 | mock_file().write.assert_called_with(
44 | element.to_json(),
45 | )
46 |
47 | element.write_yaml("/tmp/test.yaml")
48 | mock_file.assert_called_with("/tmp/test.yaml", "w")
49 | mock_file().write.assert_called_with(
50 | element.to_yaml(),
51 | )
52 |
53 | assert mock_file.call_count == 4  # 2 real opens + 2 mock_file() calls above
54 |
55 | # Check from_json_dict, from_json, from_yaml
56 | assert _ConcreteOutputElement.from_json_dict(element.to_dict()) == element
57 | assert _ConcreteOutputElement.from_json(element.to_json()) == element
58 | assert _ConcreteOutputElement.from_yaml(element.to_yaml()) == element
59 |
--------------------------------------------------------------------------------
/tests/test_planner_output/test_plan.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from typing import Dict
4 |
5 | import pytest
6 | import yaml
7 |
8 | from lisdf.planner_output.command import (
9 | ActuateGripper,
10 | Command,
11 | GripperPosition,
12 | JointSpacePath,
13 | )
14 | from lisdf.planner_output.config import CURRENT_LISDF_PLAN_VERSION
15 | from lisdf.planner_output.plan import LISDFPlan
16 |
17 | _CURRENT_DIR = os.path.dirname(__file__)
18 | _VALID_VERSION = CURRENT_LISDF_PLAN_VERSION
19 |
20 | _VALID_JOINT_SPACE_PATH = JointSpacePath(
21 | waypoints={"joint_1": [0.0, 1.0]}, duration=1.0
22 | )
23 | _VALID_COMMANDS = [_VALID_JOINT_SPACE_PATH]
24 |
25 |
26 | class _UnvalidatedCommand(Command, type="unvalidated_command_for_tests"):
27 | @classmethod
28 | def _from_json_dict(cls, json_dict: Dict) -> "Command":
29 | raise NotImplementedError
30 |
31 | def validate(self):
32 | pass
33 |
34 |
35 | @pytest.mark.parametrize(
36 | "lisdf_problem, version, commands",
37 | [
38 | pytest.param(
39 | "lisdf-non-existent-path-i-hope",
40 | _VALID_VERSION,
41 | _VALID_COMMANDS,
42 | id="lisdf_problem does not exist",
43 | # We're not checking paths at the moment, remove this mark once we do
44 | marks=pytest.mark.xfail,
45 | ),
46 | pytest.param(
47 | _CURRENT_DIR,
48 | "1.z",
49 | _VALID_COMMANDS,
50 | id="invalid version number",
51 | ),
52 | pytest.param(
53 | _CURRENT_DIR,
54 | "99.99",
55 | _VALID_COMMANDS,
56 | id="unsupported version",
57 | ),
58 | pytest.param(_CURRENT_DIR, _VALID_VERSION, [], id="empty commands"),
59 | pytest.param(
60 | _CURRENT_DIR,
61 | _VALID_VERSION,
62 | [
63 | _VALID_JOINT_SPACE_PATH,
64 | "abc",
65 | "its",
66 | "easy",
67 | ],
68 | id="invalid commands",
69 | ),
70 | pytest.param(
71 | _CURRENT_DIR,
72 | _VALID_VERSION,
73 | [
74 | JointSpacePath({"joint_1": [0.0, 1.0], "joint_2": [0.0, 2.0]}),
75 | JointSpacePath(
76 | {
77 | "joint_1": [0.0, 1.0],
78 | "joint_2": [0.0, 2.0],
79 | "joint_3": [0.2, 0.3],
80 | }
81 | ),
82 | ],
83 | id="joint space paths different joint dims",
84 | ),
85 | pytest.param(
86 | _CURRENT_DIR,
87 | _VALID_VERSION,
88 | [
89 | JointSpacePath({"joint_1": [0.0, 1.0], "joint_2": [0.0, 2.0]}),
90 | JointSpacePath(
91 | {
92 | "joint_1": [1.0, 0.25],
93 | "joint_2": [
94 | 1.99,
95 | 1.0,
96 | ], # first element should be 2.0 to match previous position
97 | }
98 | ),
99 | ],
100 | id="joint space paths inconsistent joint states",
101 | ),
102 | pytest.param(
103 | _CURRENT_DIR,
104 | _VALID_VERSION,
105 | [
106 | ActuateGripper({"gripper_1": GripperPosition.open}),
107 | ActuateGripper(
108 | {
109 | "gripper_1": GripperPosition.close,
110 | "gripper_2": GripperPosition.open,
111 | }
112 | ),
113 | ],
114 | id="actuate gripper commands different joint dims",
115 | ),
116 | pytest.param(
117 | _CURRENT_DIR,
118 | _VALID_VERSION,
119 | [
120 | ActuateGripper({"gripper_1": GripperPosition.open}),
121 | _UnvalidatedCommand(),
122 | ],
123 | id="unvalidated command in plan",
124 | ),
125 | ],
126 | )
127 | def test_lisdf_plan_raises_value_error(lisdf_problem, version, commands):
128 | with pytest.raises(ValueError):
129 | LISDFPlan(lisdf_problem=lisdf_problem, commands=commands, version=version)
130 |
131 |
132 | @pytest.mark.parametrize(
133 | "commands",
134 | [
135 | _VALID_COMMANDS,
136 | [
137 | JointSpacePath({"joint_1": [0.0, 1.0], "joint_2": [0.0, 2.0]}),
138 | ActuateGripper({"gripper_1": GripperPosition.open}),
139 | JointSpacePath(
140 | {
141 | "joint_1": [1.0, 0.25],
142 | "joint_2": [
143 | 2.0,
144 | 1.0,
145 | ],
146 | }
147 | ),
148 | ],
149 | [
150 | ActuateGripper(
151 | {
152 | "gripper_1": GripperPosition.open,
153 | "gripper_2": GripperPosition.open,
154 | }
155 | ),
156 | ActuateGripper(
157 | {
158 | "gripper_1": GripperPosition.close,
159 | "gripper_2": GripperPosition.close,
160 | }
161 | ),
162 | ],
163 | ],
164 | )
165 | def test_lisdf_plan(commands):
166 | lisdf_plan = LISDFPlan(
167 | lisdf_problem=_CURRENT_DIR, commands=commands, version=_VALID_VERSION
168 | )
169 | assert lisdf_plan.lisdf_problem == _CURRENT_DIR
170 | assert lisdf_plan.version == _VALID_VERSION
171 | assert lisdf_plan.commands == commands
172 |
173 |
174 | def test_lisdf_plan_with_default_version():
175 | lisdf_plan = LISDFPlan(lisdf_problem=_CURRENT_DIR, commands=_VALID_COMMANDS)
176 | assert lisdf_plan.lisdf_problem == _CURRENT_DIR
177 | assert lisdf_plan.version == CURRENT_LISDF_PLAN_VERSION
178 | assert lisdf_plan.commands == _VALID_COMMANDS
179 |
180 |
181 | @pytest.mark.parametrize("lisdf_problem, version", [(_CURRENT_DIR, _VALID_VERSION)])
182 | def test_lisdf_plan_with_complex_commands(
183 | lisdf_problem, version, complex_commands, expected_complex_lisdf_plan_dict
184 | ):
185 | """Complex test case where we check entire functionality of LISDFPlan"""
186 | lisdf_plan = LISDFPlan(
187 | lisdf_problem=lisdf_problem, version=version, commands=complex_commands
188 | )
189 |
190 | # Check to_json() and from_json() work as expected
191 | json_str = lisdf_plan.to_json()
192 | assert json.loads(json_str) == expected_complex_lisdf_plan_dict
193 | assert LISDFPlan.from_json(json_str) == lisdf_plan
194 |
195 | # Check to_yaml() and from_yaml() work as expected
196 | yaml_str = lisdf_plan.to_yaml()
197 | assert yaml.safe_load(yaml_str) == expected_complex_lisdf_plan_dict
198 | assert LISDFPlan.from_yaml(yaml_str) == lisdf_plan
199 |
200 | # Check from_json_dict() works as expected
201 | assert LISDFPlan.from_json_dict(expected_complex_lisdf_plan_dict) == lisdf_plan
202 |
--------------------------------------------------------------------------------
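Note: the invalid cases above imply a cross-command consistency rule:
consecutive JointSpacePaths must use the same joint names, and each path must
start where the previous one ended. A hedged sketch of such a check (an
illustrative helper, not LISDFPlan's actual validator):

    def check_path_handover(prev_waypoints: dict, next_waypoints: dict) -> None:
        # "joint space paths different joint dims" case: names must match
        if set(prev_waypoints) != set(next_waypoints):
            raise ValueError("joint names differ between consecutive paths")
        # "inconsistent joint states" case: positions must hand over exactly
        for joint, confs in next_waypoints.items():
            if confs[0] != prev_waypoints[joint][-1]:
                raise ValueError(
                    f"{joint} does not start where the previous path ended"
                )

--------------------------------------------------------------------------------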
/tests/test_utils/test_transformation_more.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pytest
3 |
4 | from lisdf.utils.transformations import euler_matrix
5 | from lisdf.utils.transformations_more import lookat_rpy
6 |
7 |
8 | @pytest.mark.parametrize(
9 | "camera_pos, target_pos",
10 | [
11 | [[3, 8, 3], [0, 8, 1]],
12 | [[3, 8, 3], [0, 9, 1]],
13 | [[3, 8, 3], [0, 7, 1]],
14 | [[0, 8, 1], [3, 8, 3]],
15 | [[0, 9, 1], [3, 8, 3]],
16 | [[0, 7, 1], [3, 8, 3]],
17 | ],
18 | )
19 | def test_lookat_rpy(camera_pos, target_pos):
20 | camera_pos = np.array(camera_pos, dtype="float32")
21 | target_pos = np.array(target_pos, dtype="float32")
22 | delta = target_pos - camera_pos
23 | delta = delta / np.linalg.norm(delta)
24 |
25 | rpy = lookat_rpy(camera_pos, target_pos)
26 | mat = euler_matrix(*rpy)
27 | z = np.array([0, 0, 1, 1], dtype="float32")  # the unit-z vector (homogeneous).
28 | # the unit-z vector of the camera frame in the world frame.
29 | z_in_world = (mat @ z)[:-1]
30 |
31 | assert np.allclose(
32 | z_in_world / np.linalg.norm(z_in_world),
33 | delta / np.linalg.norm(delta),
34 | atol=1e-6,
35 | )
36 |
--------------------------------------------------------------------------------
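Note: a standalone restatement of the convention this test encodes:
lookat_rpy returns roll-pitch-yaw angles that rotate the camera's +z axis onto
the unit vector pointing from the camera to the target. For one concrete pair
from the parametrization (using only the two library functions the test
already imports):

    import numpy as np

    from lisdf.utils.transformations import euler_matrix
    from lisdf.utils.transformations_more import lookat_rpy

    camera = np.array([3.0, 8.0, 3.0])
    target = np.array([0.0, 8.0, 1.0])
    rot = euler_matrix(*lookat_rpy(camera, target))[:3, :3]
    direction = (target - camera) / np.linalg.norm(target - camera)
    # Rotation matrices preserve length, so rot @ z is already a unit vector.
    assert np.allclose(rot @ np.array([0.0, 0.0, 1.0]), direction, atol=1e-6)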