├── src
│   └── rosdiscover
│       ├── version.py
│       ├── acme
│       │   ├── __init__.py
│       │   └── lib
│       │       └── acme.standalone-ros.jar
│       ├── project
│       │   ├── __init__.py
│       │   └── models.py
│       ├── core
│       │   ├── __init__.py
│       │   ├── action.py
│       │   ├── service.py
│       │   └── topic.py
│       ├── recover
│       │   ├── __init__.py
│       │   ├── states_analyzer.py
│       │   ├── model.py
│       │   ├── analyzer.py
│       │   ├── database.py
│       │   └── call.py
│       ├── models
│       │   ├── plugins
│       │   │   ├── __init__.py
│       │   │   ├── controller_manager.py
│       │   │   └── navigation.py
│       │   ├── placeholder.py
│       │   ├── turtlebot3_teleop_key.py
│       │   ├── pdw_turtlebot3_teleop_key.py
│       │   ├── rviz.py
│       │   ├── joy.py
│       │   ├── twist_mux.py
│       │   ├── fetch_teleop.py
│       │   ├── gazebo_gui.py
│       │   ├── map_server.py
│       │   ├── kobuki_safety_controller.py
│       │   ├── robot_state_publisher.py
│       │   ├── pdw_turtlebot3_fake.py
│       │   ├── pdw_robot_state_publisher.py
│       │   ├── stage_ros.py
│       │   ├── image_transport.py
│       │   ├── rosdiscover_error_reproducer.py
│       │   ├── velocity_smoother.py
│       │   ├── diagnostic_aggregator.py
│       │   ├── cmd_vel_mux.py
│       │   ├── joint_state_publisher.py
│       │   ├── lap_stats.py
│       │   ├── rostopic.py
│       │   ├── topic_tools_mux.py
│       │   ├── ekf_localization.py
│       │   ├── image_proc_nodelets.py
│       │   ├── depth_image_proc_nodelets.py
│       │   ├── __init__.py
│       │   ├── gzserver.py
│       │   ├── amcl.py
│       │   ├── pointgrey_camera_driver.py
│       │   ├── autorally.py
│       │   ├── spawner.py
│       │   ├── spawn_model.py
│       │   └── move_base.py
│       ├── __init__.py
│       ├── interpreter
│       │   ├── provenance.py
│       │   ├── plugin.py
│       │   ├── __init__.py
│       │   ├── parameter.py
│       │   ├── model.py
│       │   ├── summary.py
│       │   └── interpreter.py
│       ├── observer
│       │   ├── __init__.py
│       │   ├── ros2.py
│       │   ├── observer.py
│       │   ├── ros1.py
│       │   └── nodeinfo.py
│       └── launch.py
├── MANIFEST.in
├── .dockerignore
├── example
│   ├── husky.yml
│   ├── rbcar.yml
│   ├── fetch.yml
│   ├── f1tenth.yml
│   ├── pr2.yml
│   ├── fetch-demo.yml
│   ├── turtlebot3.yml
│   ├── turtlebot3-tweaked.yml
│   ├── chatter.yml
│   ├── autoware.yml
│   └── autorally.yml
├── tests
│   ├── configs
│   │   ├── fetch.yml
│   │   ├── autoware.yml
│   │   └── turtlebot.yml
│   ├── test_recover.py
│   ├── conftest.py
│   ├── model.py
│   └── test_behavior_analysis.py
├── setup.py
├── .gitattributes
├── Dockerfile
├── .github
│   └── workflows
│       └── tox.yml
├── Pipfile
├── Dockerfile-dev
├── tox.ini
├── setup.cfg
├── .gitignore
├── README.rst
└── LICENSE
/src/rosdiscover/version.py:
--------------------------------------------------------------------------------
1 | __version__ = '0.0.1'
2 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | graft src/rosdiscover/acme/lib
2 | include requirements.txt
3 |
--------------------------------------------------------------------------------
/src/rosdiscover/acme/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from .acme import AcmeGenerator
3 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !Pipfile
3 | !Pipfile.lock
4 | !setup.py
5 | !setup.cfg
6 | !tox.ini
7 | !src
8 |
--------------------------------------------------------------------------------
/src/rosdiscover/project/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from .models import ProjectModels
3 |
--------------------------------------------------------------------------------
/src/rosdiscover/core/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from .action import Action
3 | from .service import Service
4 | from .topic import Topic
5 |
--------------------------------------------------------------------------------
/src/rosdiscover/acme/lib/acme.standalone-ros.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-rss-lab/rosdiscover/HEAD/src/rosdiscover/acme/lib/acme.standalone-ros.jar
--------------------------------------------------------------------------------
/src/rosdiscover/recover/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from .database import RecoveredNodeModelDatabase
3 | from .tool import NodeRecoveryTool
4 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/plugins/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from . import controller_manager
3 | from . import gazebo
4 | from . import navigation
5 |
--------------------------------------------------------------------------------
/example/husky.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/husky
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/husky/husky_base/launch/base.launch
7 |
--------------------------------------------------------------------------------
/example/rbcar.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/rbcar
2 | sources:
3 | - /opt/ros/kinetic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/rbcar_sim/rbcar_sim_bringup/launch/rbcar_complete.launch
7 |
--------------------------------------------------------------------------------
/example/fetch.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/fetch
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/fetch_gazebo/fetch_gazebo/launch/pickplace_playground.launch
7 |
--------------------------------------------------------------------------------
/tests/configs/fetch.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/fetch
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/fetch_gazebo/fetch_gazebo/launch/pickplace_playground.launch
7 |
--------------------------------------------------------------------------------
/src/rosdiscover/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from . import models
3 | from .config import Config
4 | from .interpreter import Interpreter, NodeModel
5 | from .recover import NodeRecoveryTool
6 | from .version import __version__
7 |
--------------------------------------------------------------------------------
/tests/test_recover.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import pytest
3 | 
4 | from rosdiscover.recover import NodeRecoveryTool
5 | 
6 | 
7 | @pytest.mark.parametrize('config', ['fetch'], indirect=True)
8 | def test_follow_waypoints(config) -> None:
9 |     recover = NodeRecoveryTool.for_config(config)
10 | 
--------------------------------------------------------------------------------
/src/rosdiscover/models/placeholder.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('PLACEHOLDER', 'PLACEHOLDER')
5 | def placeholder(c):
6 | # c.pub("/diagnostics", "diagnostic_msgs/DiagnosticArray")
7 | c.mark_placeholder()
8 |
--------------------------------------------------------------------------------
/example/f1tenth.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/f1tenth
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/f1tenth_gtc_tutorial/src/hector_slam/hector_imu_attitude_to_tf/launch/example.launch
7 |
--------------------------------------------------------------------------------
/example/pr2.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/pr2
2 | sources:
3 | - /opt/ros/kinetic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /opt/ros/kinetic/share/gazebo_ros/launch/empty_world.launch
7 | - /ros_ws/src/pr2_simulator/pr2_gazebo/launch/pr2.launch
8 |
--------------------------------------------------------------------------------
/example/fetch-demo.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/fetch
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/fetch_gazebo/fetch_gazebo/launch/playground.launch
7 | - /ros_ws/src/fetch_gazebo/fetch_gazebo_demo/launch/demo.launch
8 |
--------------------------------------------------------------------------------
/example/turtlebot3.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/turtlebot3
2 | sources:
3 | - /opt/ros/kinetic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | environment:
6 | TURTLEBOT3_MODEL: burger
7 | launches:
8 | - /ros_ws/src/turtlebot3_simulations/turtlebot3_gazebo/launch/turtlebot3_house.launch
9 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/turtlebot3_teleop_key.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('turtlebot3_teleop', 'turtlebot3_teleop_key')
6 | def turtlebot3_teleop_key(c):
7 | c.pub('cmd_vel', 'geometry_msgs/Twist')
8 | c.read('model', 'burger')
9 |
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/provenance.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ("Provenance",)
3 |
4 | import enum
5 |
6 |
7 | class Provenance(enum.Enum):
8 | RECOVERED = "recovered"
9 | PLACEHOLDER = "placeholder"
10 | HANDWRITTEN = "handwritten"
11 | UNKNOWN = "unknown"
12 |
--------------------------------------------------------------------------------
/example/turtlebot3-tweaked.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/turtlebot3
2 | sources:
3 | - /opt/ros/kinetic/setup.bash
4 | - /ros_ws/devel_isolated/setup.bash
5 | environment:
6 | TURTLEBOT3_MODEL: burger
7 | launches:
8 | - /ros_ws/src/turtlebot3_simulations/turtlebot3_gazebo/launch/turtlebot3_house.launch
9 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/pdw_turtlebot3_teleop_key.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('pdw_turtlebot3_teleop', 'pdw_turtlebot3_teleop_key')
6 | def pdw_turtlebot3_teleop_key(c):
7 | c.pub('cmd_vel', 'geometry_msgs/Twist')
8 | c.read('model', 'burger')
9 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | from glob import glob
4 | from setuptools import setup, find_packages
5 |
6 | path = os.path.join(os.path.dirname(__file__), 'src/rosdiscover/version.py')
7 | with open(path, 'r') as f:
8 | exec(f.read())
9 |
10 | setup(
11 | version=__version__,
12 | include_package_data=True
13 | )
14 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # We'll let Git's auto-detection algorithm infer if a file is text. If it is,
2 | # enforce LF line endings regardless of OS or git configurations.
3 | * text=auto eol=lf
4 |
5 | # Isolate binary files in case the auto-detection algorithm fails and
6 | # marks them as text files (which could corrupt them). Note that
7 | # .gitattributes patterns do not support brace expansion, so each
8 | # extension is listed separately.
9 | *.png binary
10 | *.jpg binary
11 | *.jpeg binary
12 | *.gif binary
13 | *.webp binary
14 | *.woff binary
15 | *.woff2 binary
16 | *.zip binary
17 | 
--------------------------------------------------------------------------------
/src/rosdiscover/models/rviz.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('rviz', 'rviz')
5 | def rviz(c):
6 | c.provide('reload_shaders', 'std_srvs/Empty')
7 |
8 | # https://github.com/ros-visualization/rviz/blob/070835c426b8982e304b38eb4a9c6eb221155d5f/src/rviz/default_plugin/marker_array_display.cpp
9 | # FIXME
10 | c.sub('visualization_marker_array', 'visualization_msgs/MarkerArray')
11 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/joy.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # from ..interpreter import model
3 |
4 | # Removed from handwritten because we can recover this node statically
5 | # @model('joy', 'joy_node')
6 | def joy(c):
7 | c.pub('diagnostics', 'diagnostic_msgs/DiagnosticArray')
8 | c.pub('joy', 'sensor_msgs/Joy')
9 | c.sub('/clock', 'rosgraph_msgs/Clock')
10 | c.sub('set_feedback', 'unknown type') # FIXME
11 |
--------------------------------------------------------------------------------
/src/rosdiscover/observer/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | This module is used to model the architecture of ROS systems running in a container.
4 |
5 | The main class within this module is :class:`Observer`, which acts as a mediator for
6 | executing commands to extract the architecture
7 | """
8 | from .observer import ExecutionError, Observer
9 | from .ros1 import ROS1Observer
10 | from .ros2 import ROS2Observer
11 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/twist_mux.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('twist_mux', 'twist_mux')
6 | def twist_mux(c):
7 | topics = c.read("~topics", [])
8 | locks = c.read("~locks", [])
9 | c.pub('~cmd_vel', 'geometry_msgs/Twist')
10 | c.pub('diagnostics', 'diagnostic_msgs/DiagnosticArray')
11 |
12 | for topic in topics + locks:
13 | c.sub(f"/{topic['topic']}", "any")
14 |
--------------------------------------------------------------------------------
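For reference, the ~topics and ~locks parameters read by the twist_mux model above are lists of dictionaries, and the model only consumes each entry's 'topic' key. A minimal sketch with hypothetical topic names:

# Hypothetical parameter values as the twist_mux model would read them;
# only the 'topic' key of each entry is used by the model above.
topics = [{"topic": "joystick_cmd_vel"}, {"topic": "navigation_cmd_vel"}]
locks = [{"topic": "e_stop"}]

# Mirrors the subscription loop in the model:
for entry in topics + locks:
    print(f"would subscribe to /{entry['topic']}")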
/src/rosdiscover/models/fetch_teleop.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('fetch_teleop', 'fetch_telop')
5 | def fetch_teleop(c):
6 | ...
7 | # Parameter usage
8 | c.read("use_max", True)
9 |
10 | # Topic subscriptions
11 | c.sub("/odom", "nav_msgs/Odometry")
12 | c.pub("/teleop/cmd_vel", "geometry_msgs/Twist")
13 |
14 | # Services called/provided
15 | c.call("/cmd_vel_mux", "topic_tools/MuxSelect")
16 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import pytest
3 |
4 | import os
5 |
6 | from loguru import logger
7 | import rosdiscover
8 |
9 | DIR_HERE = os.path.dirname(__file__)
10 |
11 | logger.enable('rosdiscover')
12 |
13 |
14 | @pytest.fixture
15 | def config(request) -> rosdiscover.Config:
16 | name = request.param
17 | filepath = os.path.join(DIR_HERE, 'configs', f'{name}.yml')
18 | return rosdiscover.Config.load(filepath)
19 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/gazebo_gui.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('gazebo_ros', 'gzclient')
6 | def gazebo_gui(c):
7 |     c.pub('/gazebo_gui/parameter_description', 'dynamic_reconfigure/ConfigDescription')
8 | c.pub('/gazebo_gui/parameter_updates', 'dynamic_reconfigure/Config')
9 | c.provide('/gazebo_gui/set_parameters', 'unknown type') # FIXME
10 | c.sub('/clock', 'rosgraph_msgs/Clock')
11 |
--------------------------------------------------------------------------------
/example/chatter.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/chatter
2 | sources:
3 | - /opt/ros/kinetic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | launches:
6 | - /ros_ws/src/ros_tutorials/roscpp_tutorials/launch/talker_listener.launch
7 | node_sources:
8 | - package: roscpp_tutorials
9 | node: listener
10 | sources:
11 | - listener/listener.cpp
12 | - package: roscpp_tutorials
13 | node: talker
14 | sources:
15 | - talker/talker.cpp
16 |
--------------------------------------------------------------------------------
/src/rosdiscover/observer/ros2.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import typing as t
3 |
4 | from dockerblade.popen import Popen
5 |
6 | from .observer import Observer
7 | from ..interpreter import SystemSummary
8 |
9 |
10 | class ROS2Observer(Observer):
11 |
12 | def observe(self) -> SystemSummary:
13 | raise NotImplementedError
14 |
15 | def launch_from_config(self, sleep_time: float) -> t.Sequence[Popen]:
16 | raise NotImplementedError
17 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/map_server.py:
--------------------------------------------------------------------------------
1 | # from ..interpreter import model
2 |
3 |
4 | # Commented out so that static node recovery is used instead
5 | # @model('map_server', 'map_server')
6 | def map_server(c):
7 | c.read('~frame_id', 'map')
8 | c.read('~negate', 0)
9 | c.read('~occupied_thresh', 0.65)
10 | c.read('~free_thresh', 0.196)
11 |
12 | c.provide('static_map', 'nav_msgs/GetMap')
13 |
14 | c.pub('map_metadata', 'nav_msgs/MapMetaData')
15 | c.pub('map', 'nav_msgs/OccupancyGrid')
16 |
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/plugin.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('ModelPlugin',)
3 |
4 | import abc
5 | import typing
6 |
7 | if typing.TYPE_CHECKING:
8 | from .interpreter import Interpreter
9 |
10 |
11 | class ModelPlugin(abc.ABC):
12 | """Models the architectural effects of a dynamically-loaded node plugin
13 | (e.g., a Gazebo plugin)."""
14 | @abc.abstractmethod
15 | def load(self, interpreter: 'Interpreter') -> None:
16 | """Simulates the effects of loading this plugin in a given context."""
17 | ...
18 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/kobuki_safety_controller.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('kobuki_safety_controller', 'SafetyControllerNodelet')
5 | def safety_controller(c):
6 | c.read("~time_to_extend_bump_cliff_events", 0.0)
7 | c.sub("~enable", "std_msgs/Empty")
8 | c.sub("~disable", "std_msgs/Empty")
9 | c.sub("~events/bumper", 'kobuki_msgs/BumperEvent')
10 | c.sub("~events/cliff", 'kobuki_msgs/CliffEvent')
11 | c.sub("~events/wheel_drop", 'kobuki_msgs/WheelDropEvent')
12 | c.sub("~reset", "std_msgs/Empty")
13 | c.pub("~cmd_vel", "geometry_msgs/Twist")
14 |
--------------------------------------------------------------------------------
/src/rosdiscover/core/action.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('Action',)
3 |
4 | from typing import Any, Dict
5 |
6 | import attr
7 |
8 |
9 | @attr.s(frozen=True, slots=True, auto_attribs=True)
10 | class Action:
11 | """Describes an action.
12 |
13 | Attributes
14 | ----------
15 | name: str
16 | The fully-qualified name of the action.
17 | format: str
18 | The name of the format used by the action.
19 | """
20 | name: str
21 | format: str
22 |
23 | def to_dict(self) -> Dict[str, Any]:
24 | return {'name': self.name, 'format': self.format}
25 |
--------------------------------------------------------------------------------
/src/rosdiscover/core/service.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('Service',)
3 |
4 | from typing import Any, Dict
5 |
6 | import attr
7 |
8 |
9 | @attr.s(frozen=True, slots=True, auto_attribs=True)
10 | class Service:
11 | """Describes a service.
12 |
13 | Attributes
14 | ----------
15 | name: str
16 | The fully-qualified name of the service.
17 | format: str
18 | The name of the format used by the service.
19 | """
20 | name: str
21 | format: str
22 |
23 | def to_dict(self) -> Dict[str, Any]:
24 | return {'name': self.name, 'format': self.format}
25 |
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | This module is used to model the architectural consequences of particular
4 | ROS commands (e.g., launching a given :code:`.launch` file via
5 | :code:`roslaunch`).
6 |
7 | The main class within this module is :class:`Interpreter`, which acts as a
8 | model evaluator / virtual machine for a ROS architecture.
9 | """
10 | from .context import NodeContext
11 | from .model import model, NodeModel
12 | from .parameter import ParameterServer
13 | from .plugin import ModelPlugin
14 | from .summary import NodeSummary, SystemSummary
15 | from .interpreter import Interpreter
16 |
--------------------------------------------------------------------------------
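For orientation, the handwritten models that appear later in this dump (under src/rosdiscover/models/) all follow the same pattern: a function decorated with @model(package, node_type) receives a NodeContext and declares the node's parameter reads, publications, subscriptions, and provided services. A minimal sketch with hypothetical package, node, and topic names:

from rosdiscover.interpreter import model, NodeContext


# Hypothetical handwritten model; the package and node names below do not
# correspond to any real model in this repository.
@model('example_pkg', 'example_node')
def example_node(c: NodeContext):
    c.read('~rate', 10.0)                    # read a parameter with a default
    c.sub('scan', 'sensor_msgs/LaserScan')   # subscribe to a topic
    c.pub('cmd_vel', 'geometry_msgs/Twist')  # publish to a topic
    c.provide('reset', 'std_srvs/Empty')     # provide a service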
/src/rosdiscover/models/robot_state_publisher.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('robot_state_publisher', 'robot_state_publisher')
5 | def robot_state_publisher(c):
6 | # FIXME required!
7 | # c.read('robot_description')
8 |
9 | c.pub('/tf', 'tf2_msgs/TFMessage')
10 | c.pub('/tf_static', 'tf2_msgs/TFMessage')
11 |
12 | c.read('~publish_frequency', 50.0)
13 | c.read('~use_tf_static', True)
14 | c.read('~ignore_timestamp', False)
15 |
16 | # FIXME implement searchParam
17 | # http://wiki.ros.org/roscpp/Overview/Parameter%20Server
18 | # tf_prefix_key = c.search('~tf_prefix')
19 | # c.read(tf_prefix_key, '')
20 |
21 | c.sub('joint_states', 'sensor_msgs/JointState')
22 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/pdw_turtlebot3_fake.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('pdw_turtlebot3_fake', 'pdw_turtlebot3_fake_node')
6 | def turtlebot3_fake_node(c):
7 | c.read('tb3_model', '')
8 | c.read('wheel_left_joint_name', 'wheel_left_joint')
9 | c.read('wheel_right_joint_name', 'wheel_right_joint')
10 | c.read('joint_states_frame', 'base_footprint')
11 | c.read('odom_frame', 'odom')
12 |     c.read('base_frame', 'base_footprint')
13 | 
14 |     c.pub('joint_states', 'sensor_msgs/JointState')
15 |     c.pub('odom', 'nav_msgs/Odometry')
16 |     c.sub('cmd_vel', 'geometry_msgs/Twist')
17 | 
18 |     c.pub('/tf', 'tf2_msgs/TFMessage')
19 | 
--------------------------------------------------------------------------------
/src/rosdiscover/models/pdw_robot_state_publisher.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('pdw_robot_state_publisher', 'pdw_robot_state_publisher')
5 | def pdw_robot_state_publisher(c):
6 | # FIXME required!
7 | # c.read('robot_description')
8 |
9 | c.pub('/tf', 'tf2_msgs/TFMessage')
10 | c.pub('/tf_static', 'tf2_msgs/TFMessage')
11 |
12 | c.read('~publish_frequency', 50.0)
13 | c.read('~use_tf_static', True)
14 | c.read('~ignore_timestamp', False)
15 |
16 | # FIXME implement searchParam
17 | # http://wiki.ros.org/roscpp/Overview/Parameter%20Server
18 | # tf_prefix_key = c.search('~tf_prefix')
19 | # c.read(tf_prefix_key, '')
20 |
21 | c.sub('joint_states', 'sensor_msgs/JointState')
22 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9-alpine
2 | RUN apk add --no-cache \
3 | git \
4 | python3-dev \
5 | libffi-dev \
6 | musl-dev \
7 | libressl-dev \
8 | openssl-dev \
9 | cargo \
10 | py3-pip \
11 | build-base \
12 | docker \
13 | gcc \
14 | linux-headers \
15 | openjdk8 \
16 | bash \
17 | && rm -rf /tmp/*
18 | ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1
19 | #RUN /bin/sh -c python3 -m ensurepip --upgrade
20 | RUN CRYPTOGRAPHY_DONT_BUILD_RUST=1 pip3 install --no-binary cryptography pipenv cryptography
21 | RUN mkdir -p /opt/rosdiscover/
22 | WORKDIR /opt/rosdiscover/
23 | COPY . /opt/rosdiscover/
24 | RUN pipenv install
25 | #ENTRYPOINT ["/bin/bash"]
26 | ENTRYPOINT ["pipenv", "run", "rosdiscover"]
27 |
--------------------------------------------------------------------------------
/src/rosdiscover/core/topic.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('Topic',)
3 |
4 | from typing import Any, Dict
5 |
6 | import attr
7 |
8 |
9 | @attr.s(frozen=True, slots=True, auto_attribs=True)
10 | class Topic:
11 | """Describes a topic.
12 |
13 | Attributes
14 | ----------
15 | name: str
16 | The fully-qualified name of the topic.
17 | format: str
18 | The name of the format used by messages published to the topic.
19 | implicit: bool
20 | If :code:`True`, the topic is implicitly created by an associated
21 | action server.
22 | """
23 | name: str
24 | format: str
25 | implicit: bool
26 |
27 | def to_dict(self) -> Dict[str, Any]:
28 | return {'name': self.name, 'format': self.format, 'implicit': self.implicit}
29 |
--------------------------------------------------------------------------------
/tests/configs/autoware.yml:
--------------------------------------------------------------------------------
1 | type: recovery
2 | subject: autoware
3 | image: rosdiscover-experiments/autoware:static
4 | docker:
5 | type: custom
6 | image: rosdiscover-experiments/autoware:static
7 | filename: ../../../../docker/Dockerfile.autoware
8 | context: ../../../../docker
9 | distro: kinetic
10 | build_command: ./catkin_make_release -DCMAKE_EXPORT_COMPILE_COMMANDS=1
11 | sources:
12 | - /opt/ros/kinetic/setup.bash
13 | - /home/autoware/Autoware/ros/devel/setup.bash
14 | launches:
15 | - /root/.autoware/my_launch/my_detection.launch
16 | - /root/.autoware/my_launch/my_localization.launch
17 | - /root/.autoware/my_launch/my_map.launch
18 | - /root/.autoware/my_launch/my_mission_planning.launch
19 | - /root/.autoware/my_launch/my_motion_planning.launch
20 | - /root/.autoware/my_launch/my_sensing.launch
21 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/stage_ros.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('stage_ros', 'stageros')
6 | def stage_ros(c):
7 | c.write('/use_sim_time', True)
8 |
9 | # FIXME implement mapName logic
10 | # for now, this only supports a single robot and assumes that omitRobotID
11 | # is true
12 |
13 | c.pub('/odom', 'nav_msgs/Odometry')
14 | c.pub('/base_pose_ground_truth', 'nav_msgs/Odometry')
15 | c.pub('/base_scan', 'sensor_msgs/LaserScan')
16 | c.pub('/image', 'sensor_msgs/Image')
17 | c.pub('/depth', 'sensor_msgs/Image')
18 | c.pub('/camera_info', 'sensor_msgs/CameraInfo')
19 | c.pub('/clock', 'rosgraph_msgs/Clock')
20 |
21 | c.sub('/cmd_vel', 'geometry_msgs/Twist')
22 |
23 | c.provide('reset_positions', 'std_srvs/Empty')
24 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/image_transport.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 |
3 | from ..interpreter import model
4 |
5 |
6 | @model('image_transport', 'republish')
7 | def republish(c):
8 | args = c.args.split(' ')
9 | intopic: str = 'in'
10 | outopic: str = 'out'
11 | img_format: str = 'raw'
12 |
13 | for a in args:
14 | if a in ("raw", "compressed", "theora"):
15 | img_format = a
16 | elif a.startswith("in:="):
17 | intopic = a[4:]
18 | elif a.startswith("out:="):
19 | outopic = a[5:]
20 | else:
21 | logger.error(f'"{a}" is not a valid argument for image_transport/republish')
22 | logger.info(f'The format for republish is {img_format}')
23 | c.sub(intopic, 'sensor_msgs/Image')
24 | c.pub(outopic, 'sensor_msgs/Image')
25 |
--------------------------------------------------------------------------------
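As a worked illustration of the argument handling in the republish model above, with hypothetical topic names:

# Hypothetical args string for image_transport/republish, parsed the same
# way as in the model above.
args = "compressed in:=camera/image_raw out:=camera/image_repub".split(' ')
intopic, outopic, img_format = 'in', 'out', 'raw'
for a in args:
    if a in ("raw", "compressed", "theora"):
        img_format = a
    elif a.startswith("in:="):
        intopic = a[4:]
    elif a.startswith("out:="):
        outopic = a[5:]
print(img_format, intopic, outopic)  # compressed camera/image_raw camera/image_repub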
/.github/workflows/tox.yml:
--------------------------------------------------------------------------------
1 | name: Tox lint checking
2 | on: [pull_request]
3 | jobs:
4 | build:
5 | runs-on: ubuntu-20.04
6 | steps:
7 | - uses: actions/checkout@v2
8 | - name: Install Python
9 | uses: actions/setup-python@v2
10 | with:
11 | python-version: 3.9.5
12 | - name: Install pipenv
13 | run: pip install pipenv==2021.5.29
14 | - id: cache-pipenv
15 | uses: actions/cache@v2
16 | with:
17 | path: ~/.local/share/virtualenvs
18 | key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
19 | - name: Install package
20 | if: steps.cache-pipenv.outputs.cache-hit != 'true'
21 | run: |
22 | pipenv install --dev
23 | - name: Flake8
24 | run: pipenv run flake8 src
25 | - name: MyPy
26 | run: pipenv run mypy src
27 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/rosdiscover_error_reproducer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model, NodeContext
3 |
4 |
5 | @model('*', 'error_reproducer')
6 | def error_reproducer(c: NodeContext):
7 | """
8 |     This model is intended for evaluating rosdiscover's ability to
9 |     detect misconfigurations. IT SHOULD NOT BE USED IN GENERAL.
10 | 
11 |     The model subscribes and publishes to the topics that are passed
12 |     in as parameters. This makes it easy to construct a node that
13 |     reproduces a configuration error that exists in our
14 |     database.
15 | """
16 | topics = c.read("~topic", [])
17 | for t in topics:
18 | if t["direction"] == 'pub':
19 | c.pub(t["name"], t["type"])
20 | elif t["direction"] == 'sub':
21 | c.sub(t["name"], t["type"])
22 |
--------------------------------------------------------------------------------
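For reference, the ~topic parameter consumed by error_reproducer above is a list of dictionaries with 'direction', 'name', and 'type' keys. A hypothetical example value, written as a Python literal:

# Hypothetical value of the "~topic" parameter; 'direction' is either
# 'pub' or 'sub', mirroring the branches in the model above.
topics = [
    {"direction": "pub", "name": "/cmd_vel", "type": "geometry_msgs/Twist"},
    {"direction": "sub", "name": "/odom", "type": "nav_msgs/Odometry"},
]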
/src/rosdiscover/models/velocity_smoother.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('yocs_velocity_smoother', 'VelocitySmootherNodelet')
5 | def velocity_smoother(c):
6 | c.read("~frequency", 20.0)
7 | c.read("~decel_factor", 1.0)
8 | c.read("~robot_feedback", 0)
9 |
10 | # FIXME must be present!
11 | # c.read("~speed_lim_v")
12 | # c.read("~speed_lim_w")
13 | # c.read("~accel_lim_v")
14 | # c.read("~accel_lim_w")
15 |
16 | c.sub("~odometry", 'nav_msgs/Odometry')
17 | c.sub("~robot_cmd_vel", 'geometry_msgs/Twist')
18 | c.sub("~raw_cmd_vel", 'geometry_msgs/Twist')
19 |
20 | c.pub("~smooth_cmd_vel", "geometry_msgs/Twist")
21 |
22 | # dynamic reconfigure
23 | c.pub('~parameter_descriptions', 'dynamic_reconfigure/ConfigDescription')
24 | c.pub('~parameter_updates', 'dynamic_reconfigure/Config')
25 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | rosdiscover = {editable = true, path = "."}
8 | flake8 = "==3.8.2"
9 | pep8-naming = "==0.10.0"
10 | mypy = "==0.910"
11 | pytest = "==5.4.2"
12 | ptpython = "==3.0.2"
13 | tox = "==3.15.1"
14 | coveralls = "==2.0.0"
15 | twine = "==3.1.1"
16 | types-setuptools = "==57.0.0"
17 | types-PyYAML = "==5.4.3"
18 | roswire = {editable = true, ref = "613749e510b9d75044963430af9dac11f9ca2914", git = "https://github.com/rosqual/roswire"}
19 |
20 | [dev-packages]
21 | flake8 = "==3.8.2"
22 | pep8-naming = "==0.10.0"
23 | mypy = "==0.910"
24 | pytest = "==5.4.2"
25 | ptpython = "==3.0.2"
26 | tox = "==3.15.1"
27 | coveralls = "==2.0.0"
28 | twine = "==3.1.1"
29 | types-setuptools = "==57.0.0"
30 | types-PyYAML = "==5.4.3"
31 |
32 | [requires]
33 | python_version = "3.9"
34 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/diagnostic_aggregator.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 |
5 | @model('diagnostic_aggregator', 'aggregator_node')
6 | def diagnostic_aggregator(c):
7 | base_path = c.read('~base_path', '')
8 |     if not base_path.startswith('/'):
9 |         base_path = '/' + base_path
10 |
11 | c.read('~pub_rate', 1.0)
12 |
13 | c.read('~other_as_errors', False)
14 |
15 | # TODO create analyzer group
16 | # https://github.com/ros/diagnostics/blob/indigo-devel/diagnostic_aggregator/src/analyzer_group.cpp
17 |
18 | c.sub('/diagnostics', 'diagnostic_msgs/DiagnosticArray')
19 | c.pub('/diagnostics_agg', 'diagnostic_msgs/DiagnosticArray')
20 | c.pub('/diagnostics_toplevel_state', 'diagnostic_msgs/DiagnosticStatus')
21 |
22 | c.provide('/diagnostics_agg/add_diagnostics',
23 | 'diagnostic_msgs/AddDiagnostics')
24 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/cmd_vel_mux.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | from loguru import logger
3 |
4 | from ..interpreter import model, NodeContext
5 |
6 |
7 | @model('yocs_cmd_vel_mux', 'CmdVelMuxNodelet')
8 | def cmd_vel_mux(c: NodeContext):
9 | # FIXME handle IO
10 | fn = c.read("~yaml_cfg_file", None)
11 | if not fn:
12 | logger.error("Couldn't read ~yaml_cfg_file")
13 | return
14 | logger.debug(f"Reading parameters from '{fn}'")
15 |     yml = yaml.safe_load(c.read_file(fn))
16 | c.pub(f"~{yml.get('publisher', 'output')}", 'geometry_msgs/Twist')
17 | for sub_desc in yml['subscribers']:
18 | c.sub(f"~{sub_desc['topic']}", 'geometry_msgs/Twist')
19 |
20 | c.pub("~active", "std_msgs/String")
21 |
22 | # dynamic reconfigure
23 | c.pub('~parameter_descriptions', 'dynamic_reconfigure/ConfigDescription')
24 | c.pub('~parameter_updates', 'dynamic_reconfigure/Config')
25 |
--------------------------------------------------------------------------------
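The file named by ~yaml_cfg_file is expected to provide a 'publisher' name and a list of 'subscribers', each carrying a 'topic' key; any other keys in a real yocs_cmd_vel_mux configuration are ignored by the model above. A minimal sketch with hypothetical topic names:

import yaml

# Hypothetical ~yaml_cfg_file contents, parsed the same way as in the model.
example_cfg = """
publisher: output/cmd_vel
subscribers:
  - topic: input/teleop
  - topic: input/navigation
"""
yml = yaml.safe_load(example_cfg)
print(yml.get('publisher', 'output'))            # output/cmd_vel
print([s['topic'] for s in yml['subscribers']])  # ['input/teleop', 'input/navigation']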
/Dockerfile-dev:
--------------------------------------------------------------------------------
1 | FROM python:3.9-alpine
2 | RUN apk add --no-cache \
3 | git \
4 | python3-dev \
5 | libffi-dev \
6 | musl-dev \
7 | libressl-dev \
8 | openssl-dev \
9 | cargo \
10 | py3-pip \
11 | build-base \
12 | docker \
13 | gcc \
14 | linux-headers \
15 | openjdk8 \
16 | bash \
17 | less \
18 | && rm -rf /tmp/*
19 |
20 | WORKDIR /code
21 | ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1
22 | RUN python -m ensurepip --upgrade
23 | RUN CRYPTOGRAPHY_DONT_BUILD_RUST=1 pip3 install --no-binary cryptography pipenv cryptography
24 |
25 | ENTRYPOINT ["/bin/bash"]
26 |
27 | # docker run --rm -v d:/ROSDiscover/rosdiscover:/code -v d:/ROSDiscover/cache:/root/.roswire -v //var/run/docker.sock:/var/run/docker.sock -it build/rosdiscover-dev
28 | # pip3 install -e .
29 | # rosdiscover acme --acme fetch-demo.acme --check example/fetch-demo.yml
30 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | minversion = 3.4.0
3 | envlist = py39
4 |
5 | [pytest]
6 | testpaths = tests
7 | addopts = -rx -v
8 |
9 | [flake8]
10 | ignore = W605
11 | max-line-length = 140
12 | per-file-ignores =
13 |     src/rosdiscover/__init__.py:F401
14 |     src/rosdiscover/acme/__init__.py:F401
15 |     src/rosdiscover/core/__init__.py:F401
16 |     src/rosdiscover/interpreter/__init__.py:F401
17 |     src/rosdiscover/models/__init__.py:F401
18 |     src/rosdiscover/models/plugins/__init__.py:F401
19 |     src/rosdiscover/observer/__init__.py:F401
20 |     src/rosdiscover/project/__init__.py:F401
21 |     src/rosdiscover/recover/__init__.py:F401
22 | 
23 | [testenv]
24 | install_command = python -m pip install --no-binary cryptography {opts} {packages}
25 | setenv =
26 |     CRYPTOGRAPHY_DONT_BUILD_RUST = 1
27 | deps =
28 |     -rrequirements.dev.txt
29 | commands =
30 |     flake8 src
31 |     mypy src
32 | 
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/parameter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('ParameterServer',)
3 |
4 | from typing import Any, Dict, Iterator, MutableMapping, Optional
5 |
6 |
7 | class ParameterServer(MutableMapping[str, Any]):
8 | """Models the state of the parameter server."""
9 | def __init__(self) -> None:
10 | self.__contents: Dict[str, Any] = {}
11 |
12 | def __len__(self) -> int:
13 | return len(self.__contents)
14 |
15 | def __delitem__(self, key: str) -> None:
16 | del self.__contents[key]
17 |
18 | def __iter__(self) -> Iterator[str]:
19 | return self.__contents.__iter__()
20 |
21 | def __getitem__(self, key: str) -> Any:
22 | return self.__contents[key]
23 |
24 | def __contains__(self, key: Any) -> bool:
25 | return isinstance(key, str) and key in self.__contents
26 |
27 | def __setitem__(self, key: str, val: Any) -> None:
28 | self.__contents[key] = val
29 |
30 | def get(self, key: Any, default: Optional[Any] = None) -> Optional[Any]:
31 | return self.__contents.get(key, default)
32 |
--------------------------------------------------------------------------------
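ParameterServer behaves like an ordinary mutable mapping from fully-qualified parameter names to values; a minimal usage sketch with hypothetical keys:

from rosdiscover.interpreter import ParameterServer

params = ParameterServer()
params['/use_sim_time'] = True            # __setitem__
assert '/use_sim_time' in params          # __contains__
assert params.get('/missing', 42) == 42   # get with a default
for key in params:                        # __iter__ / __getitem__
    print(key, params[key])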
/src/rosdiscover/models/joint_state_publisher.py:
--------------------------------------------------------------------------------
1 | from roswire import ROSDistribution
2 |
3 | from ..interpreter import model, NodeContext
4 |
5 |
6 | @model('joint_state_publisher', 'joint_state_publisher')
7 | def joint_state_publisher(c: NodeContext):
8 | # https://github.com/ros/joint_state_publisher/blob/kinetic-devel/joint_state_publisher/joint_state_publisher/joint_state_publisher
9 |
10 | # joint_state_publisher#L33
11 | def get_param(name, value=None):
12 | private = "~{}".format(name)
13 | return c.read(private, c.read(name, value))
14 |
15 | get_param("robot_description")
16 | get_param("dependent_joints", {})
17 | get_param("use_mimic_tags", True)
18 | get_param("use_smallest_joint_limits", True)
19 | get_param("zeros")
20 |
21 | get_param("publish_default_positions", True)
22 | get_param("publish_default_velocities", False)
23 | get_param("publish_default_efforts", False)
24 |
25 | if c.ros_distro < ROSDistribution.NOETIC:
26 | get_param("use_gui", False)
27 |
28 | for source in get_param("source_list", []):
29 | c.sub(source, 'sensor_msgs/JointState')
30 |
31 | c.pub('joint_states', 'sensor_msgs/JointState')
32 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = rosdiscover
3 | author = Christopher Timperley
4 | author_email = christimperley@googlemail.com
5 | url = https://github.com/ChrisTimperley/rosdiscover
6 | description = Statically analyse and repair run-time architectures for ROS
7 | long_description = file: README.rst
8 | keywords = ros, docker, analysis, repair, architecture
9 | license = Apache License, Version 2.0
10 | include_package_data = True
11 | classifiers =
12 | Natural Language :: English
13 | Intended Audience :: Developers
14 | Programming Language :: Python
15 | Programming Language :: Python :: 3
16 | Programming Language :: Python :: 3.6
17 | Programming Language :: Python :: 3.7
18 |
19 | [options]
20 | python_requires = >= 3.6
21 | install_requires =
22 | attrs ~= 19.3.0
23 | dockerblade ~= 0.5.2
24 | pyyaml ~= 5.3.1
25 | # we should either switch this to a VCS dependency or try to release ROSWire 2.0.0 on PyPI
26 | # roswire ~= 2.0.0
27 | sourcelocation ~= 1.0.2
28 | typing-extensions >= 3.7.2
29 | package_dir =
30 | =src
31 | packages = find:
32 |
33 | [options.entry_points]
34 | console_scripts =
35 | rosdiscover = rosdiscover.cli:main
36 |
37 | [options.packages.find]
38 | where = src
39 |
--------------------------------------------------------------------------------
/example/autoware.yml:
--------------------------------------------------------------------------------
1 | image: autoware-static_2
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /home/autoware/Autoware/install/setup.bash
5 | launches:
6 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/my_localization.launch
7 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/my_motion_planning.launch
8 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/default.rviz
9 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/my_map.launch
10 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/my_sensing.launch
11 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/my_detection.launch
12 | - /home/autoware/Autoware/src/autoware/documentation/autoware_quickstart_examples/launch/rosbag_demo/my_mission_planning.launch
13 | # TODO we need to fill these in
14 | node_sources:
15 | - package: roscpp_tutorials
16 | node: listener
17 | sources:
18 | - listener/listener.cpp
19 | - package: roscpp_tutorials
20 | node: talker
21 | sources:
22 | - talker/talker.cpp
23 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/lap_stats.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from roswire import ROSDistribution
3 |
4 | from ..interpreter import model, NodeContext
5 |
6 |
7 | @model('autorally_control', 'lap_stats')
8 | def lap_stats(c: NodeContext) -> None:
9 | # For indigo and kinetic
10 | prefix = "/stat_tracker/controller_type"
11 | c.read(f"{prefix}/hz")
12 | c.read(f"{prefix}/num_timesteps")
13 | c.read(f"{prefix}/tag")
14 | c.read(f"{prefix}/gamma")
15 | c.read(f"{prefix}/num_iters")
16 | c.read(f"{prefix}/init_steering")
17 | c.read(f"{prefix}/init_throttle")
18 | c.read(f"{prefix}/steering_var")
19 | c.read(f"{prefix}/throttle_var")
20 | c.read(f"{prefix}/max_throttle")
21 | c.read(f"{prefix}/desired_speed")
22 | c.read(f"{prefix}/speed_coefficient")
23 | c.read(f"{prefix}/track_coefficient")
24 | c.read(f"{prefix}/max_slip_angle")
25 | c.read(f"{prefix}/slip_penalty")
26 | c.read(f"{prefix}/track_slop")
27 | c.read(f"{prefix}/crash_coeff")
28 | c.read(f"{prefix}/map_path")
29 |
30 | c.pub('lap_stats', 'autorally_msgs/pathIntegralStats')
31 | if c.ros_distro < ROSDistribution.MELODIC:
32 | c.sub('/pose_estimate', 'nav_msgs/Odometry')
33 | else:
34 | c.sub('/mppi_controller/subscribedPose', 'nav_msgs/Odometry')
35 |
--------------------------------------------------------------------------------
/tests/configs/turtlebot.yml:
--------------------------------------------------------------------------------
1 | type: recovery
2 | subject: turtlebot
3 | docker:
4 | type: templated
5 | image: rosdiscover-experiments/turtlebot:2.4.2
6 | image: rosdiscover-experiments/turtlebot:2.4.2
7 | distro: kinetic
8 | build_command: catkin_make_isolated -DCMAKE_EXPORT_COMPILE_COMMANDS=1
9 | apt_packages:
10 | - ros-kinetic-bfl
11 | - ros-kinetic-orocos-kdl
12 | exclude_ros_packages:
13 | - bfl
14 | repositories:
15 | - name: turtlebot_simulator
16 | url: https://github.com/turtlebot/turtlebot_simulator.git
17 | version: 2.2.3
18 | - name: turtlebot_apps
19 | url: https://github.com/turtlebot/turtlebot_apps.git
20 | version: 2.3.7
21 | environment:
22 | TURTLEBOT_GAZEBO_WORLD_FILE: /ros_ws/src/turtlebot_simulator/turtlebot_gazebo/worlds/playground.world
23 | TURTLEBOT_3D_SENSOR: kinect
24 | TURTLEBOT_GAZEBO_MAP_FILE: /ros_ws/src/turtlebot_simulator/turtlebot_gazebo/maps/playground.yaml
25 | sources:
26 | - /opt/ros/kinetic/setup.bash
27 | - /ros_ws/devel_isolated/setup.bash
28 | launches:
29 | - /ros_ws/src/turtlebot_simulator/turtlebot_gazebo/launch/turtlebot_world.launch
30 | - /ros_ws/src/turtlebot_simulator/turtlebot_gazebo/launch/amcl_demo.launch
31 | run_script: >
32 | rostopic pub /move_base_simple/goal geometry_msgs/PoseStamped '{header: {stamp: now, frame_id: "map"}, pose: {position: {x: 2.0, y: 2.0, z: 0.0}, orientation: {w: 1.0}}}'
33 |
34 |
--------------------------------------------------------------------------------
/example/autorally.yml:
--------------------------------------------------------------------------------
1 | image: therobotcooperative/autorally
2 | sources:
3 | - /opt/ros/melodic/setup.bash
4 | - /ros_ws/devel/setup.bash
5 | - /ros_ws/src/autorally/autorally_util/setupEnvLocal.sh
6 | launches:
7 | - /ros_ws/src/autorally/autorally_gazebo/launch/autoRallyTrackGazeboSim.launch
8 | - /ros_ws/src/autorally/autorally_control/launch/waypointFollower.launch
9 | - /ros_ws/src/autorally/autorally_control/launch/constantSpeedController.launch
10 | node_sources:
11 | - package: autorally_control
12 | node: gpsWaypoint
13 | sources:
14 | - src/gpsWaypoint/gpsWaypoint.cpp
15 | restrict-analysis-to-paths:
16 | - /ros_ws/devel/include/autorally_control
17 | - /ros_ws/src/autorally/autorally_control
18 | - package: autorally_control
19 | node: joystickController
20 | sources:
21 | - src/joystick/JoystickControl.cpp
22 | - src/joystick/joystickControlMain.cpp
23 | restrict-analysis-to-paths:
24 | - /ros_ws/devel/include/autorally_control
25 | - /ros_ws/src/autorally/autorally_control
26 | - package: autorally_control
27 | node: ConstantSpeedController
28 | type: nodelet
29 | entrypoint: autorally_control::ConstantSpeedController::onInit
30 | sources:
31 | - src/ConstantSpeedController/ConstantSpeedController.cpp
32 | restrict-analysis-to-paths:
33 | - /ros_ws/devel/include/autorally_control
34 | - /ros_ws/src/autorally/autorally_control
35 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/rostopic.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import argparse
3 | import shlex
4 |
5 | from ..interpreter import model, NodeContext
6 |
7 |
8 | # Models a call to rostopic from within a launch file
9 | @model('rostopic', 'rostopic')
10 | def rostopic(c: NodeContext) -> None:
11 | parser = argparse.ArgumentParser()
12 |
13 | def publish(args: argparse.Namespace):
14 | c.pub(args.topic, args.type)
15 |
16 | def subscribe(args: argparse.Namespace):
17 |         c.sub(args.topic, 'any')
18 |
19 | subparsers = parser.add_subparsers()
20 | p = subparsers.add_parser('pub', help='Publishes a message')
21 | p.add_argument('-l', '--latch', action='store_true')
22 | p.add_argument('-r', metavar='rate', type=int)
23 | p.add_argument('-1', '--once', action='store_true')
24 | p.add_argument('topic', type=str)
25 | p.add_argument('type', type=str)
26 | p.add_argument('msg', type=str)
27 | p.set_defaults(func=publish)
28 |
29 | p = subparsers.add_parser('echo', help='Subscribes to a topic')
30 | p.add_argument('--offset', action='store_true')
31 | p.add_argument('--filter', type=str)
32 | p.add_argument('-c', metavar='clear')
33 | p.add_argument('-b', type=str, metavar='bag')
34 | p.add_argument('-p', action='store_true', metavar='print_friendly')
35 | p.add_argument('-w', type=int, metavar='width_fixed')
36 | p.add_argument('--nostr', action='store_true')
37 | p.add_argument('--noarr', action='store_true')
38 | p.add_argument('-n', type=int, metavar='count')
39 | p.add_argument("topic", type=str)
40 | p.set_defaults(func=subscribe)
41 |
42 | args = parser.parse_args(shlex.split(c.args))
43 | if 'func' in args:
44 | args.func(args)
45 |
--------------------------------------------------------------------------------
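For illustration, the args string of a rostopic invocation in a launch file is tokenized with shlex before it reaches the parser above; the topic, type, and message below are hypothetical:

import shlex

# Hypothetical c.args value for a rostopic pub node in a launch file.
args = "pub -r 10 /cmd_vel geometry_msgs/Twist '{linear: {x: 0.1}}'"
print(shlex.split(args))
# ['pub', '-r', '10', '/cmd_vel', 'geometry_msgs/Twist', '{linear: {x: 0.1}}']
# The 'pub' subcommand maps this to c.pub('/cmd_vel', 'geometry_msgs/Twist');
# an 'echo <topic>' invocation maps to c.sub(<topic>, 'any').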
/.gitignore:
--------------------------------------------------------------------------------
1 | .notes
2 | *.swp
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 | *$py.class
8 |
9 | # C extensions
10 | *.so
11 |
12 | # Distribution / packaging
13 | .Python
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 |
62 | # Flask stuff:
63 | instance/
64 | .webassets-cache
65 |
66 | # Scrapy stuff:
67 | .scrapy
68 |
69 | # Sphinx documentation
70 | docs/_build/
71 |
72 | # PyBuilder
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # pyenv
79 | .python-version
80 |
81 | # celery beat schedule file
82 | celerybeat-schedule
83 |
84 | # SageMath parsed files
85 | *.sage.py
86 |
87 | # Environments
88 | .env
89 | .venv
90 | env/
91 | venv/
92 | ENV/
93 | env.bak/
94 | venv.bak/
95 |
96 | # Spyder project settings
97 | .spyderproject
98 | .spyproject
99 |
100 | # Rope project settings
101 | .ropeproject
102 |
103 | # mkdocs documentation
104 | /site
105 |
106 | # mypy
107 | .mypy_cache/
108 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/topic_tools_mux.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 | import argparse
5 |
6 |
7 | @model("topic_tools", "mux")
8 | def mux(c):
9 | c.read("~lazy", False)
10 | c.read("~initial_topic", "")
11 |
12 | c.provide("mux/select", "topic_tools/MuxSelect")
13 | c.provide("mux/add", "topic_tools/MuxAdd")
14 | c.provide("mux/delete", "topic_tools/MuxDelete")
15 |
16 | parser = argparse.ArgumentParser("topic_tools/mux")
17 | parser.add_argument("outopic", type=str)
18 | parser.add_argument("intopic", nargs="+", type=str)
19 | topics = parser.parse_args(c.args.split())
20 |
21 | c.pub(topics.outopic, "any")
22 | for in_topic in topics.intopic:
23 | c.sub(in_topic, "any")
24 |
25 |
26 | @model("topic_tools", "relay")
27 | def relay(c):
28 | c.read("~unreliable", False)
29 | c.read('~lazy', False)
30 |
31 | c.read('~monitor_rate', 1.0)
32 |
33 | parser = argparse.ArgumentParser("topic_tools/relay")
34 | parser.add_argument("intopic", type=str)
35 | parser.add_argument("outtopic", type=str)
36 | topics = parser.parse_args(c.args.split())
37 |
38 | c.pub(topics.outtopic, 'any')
39 | c.sub(topics.intopic, 'any')
40 |     # In stealth mode, relay monitors a topic (by default the same as
41 |     # intopic, but configurable via a parameter) and only relays it
42 |     # while that monitored topic has a subscriber:
43 |     # http://wiki.ros.org/topic_tools/relay
44 |     # This behaviour is dynamic and therefore out of scope for the model.
45 |     # The code below is just here as a reminder that we do not handle
46 |     # this and how we might.
47 | 
48 | # stealth = c.read('~stealth', False)
49 |
50 | # topic = c.read('~monitor_topic', 'intopic')
51 | # if stealth:
52 | # c.pub(topic, 'any')
53 |
--------------------------------------------------------------------------------
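An illustration of the command-line arguments the mux model above expects (the relay model is analogous, with a single input topic); the topic names are hypothetical:

import argparse

# Same parser structure as in the mux model above: the output topic
# followed by one or more input topics.
parser = argparse.ArgumentParser("topic_tools/mux")
parser.add_argument("outopic", type=str)
parser.add_argument("intopic", nargs="+", type=str)
topics = parser.parse_args("cmd_vel_out cmd_vel_teleop cmd_vel_nav".split())
print(topics.outopic)  # cmd_vel_out
print(topics.intopic)  # ['cmd_vel_teleop', 'cmd_vel_nav']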
/src/rosdiscover/launch.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('Launch',)
3 |
4 | import typing as t
5 |
6 | import attr
7 |
8 |
9 | @attr.s(frozen=True, slots=True, auto_attribs=True)
10 | class Launch:
11 | """
12 |     Data class storing the file name of a launch file and its arguments.
13 | 
14 |     Attributes
15 |     ----------
16 |     filename: str
17 |         The name of the launch file that should be launched.
18 |     arguments: t.Mapping[str, str]
19 |         A mapping of launch arguments that should be passed to roslaunch.
20 | """
21 | filename: str
22 | arguments: t.Mapping[str, str]
23 |
24 | def get_argv(self) -> t.List[str]:
25 | return [f'{argk}:={self.arguments.get(argk)}' for argk in self.arguments.keys()]
26 |
27 | def to_dict(self) -> t.Dict[str, t.Any]:
28 | return {
29 | "filename": self.filename,
30 | "arguments": dict(self.arguments),
31 | }
32 |
33 | @classmethod
34 | def from_dict(cls, dict_: t.Mapping[str, t.Any]) -> 'Launch':
35 | """
36 | Raises
37 | ------
38 | ValueError
39 | If 'filename' is undefined in configuration.
40 | """
41 | if 'filename' not in dict_:
42 | raise ValueError("'filename' is undefined in configuration")
43 |
44 | if not isinstance(dict_['filename'], str):
45 | raise ValueError("expected 'filename' to be a string")
46 |
47 | has_arguments = 'arguments' in dict_
48 | if has_arguments and not isinstance(dict_['arguments'], dict):
49 | raise ValueError("expected 'arguments' to be a mapping")
50 |
51 | filename: str = dict_['filename']
52 | arguments: t.Mapping[str, str] = dict(dict_.get('arguments', {}))
53 | return Launch(filename=filename,
54 | arguments=arguments)
55 |
--------------------------------------------------------------------------------
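A short usage sketch of Launch.from_dict and get_argv; the file name and arguments are hypothetical:

from rosdiscover.launch import Launch

launch = Launch.from_dict({
    "filename": "/ros_ws/src/example/launch/example.launch",
    "arguments": {"gui": "false", "world": "playground"},
})
print(launch.get_argv())  # ['gui:=false', 'world:=playground']
print(launch.to_dict())   # {'filename': ..., 'arguments': {...}}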
/src/rosdiscover/models/ekf_localization.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model, NodeContext
3 |
4 |
5 | @model('robot_localization', 'ekf_localization_node')
6 | def ekf_localization(c: NodeContext):
7 | c.pub("odometry/filtered", "nav_msgs/Odometry")
8 | c.pub("accel/filtered", "geometry_messages/AccelWithCovarianceStamped")
9 |
10 | topic_index = 0
11 | odom_topic_name = f"~odom{topic_index}"
12 | while c.has_param(odom_topic_name):
13 | c.sub(c.read(odom_topic_name, "\\unknown"), "nav_msgs/Odometry")
14 | topic_index += 1
15 | odom_topic_name = f"~odom{topic_index}"
16 |
17 | topic_index = 0
18 | pose_topic_name = f"~pose{topic_index}"
19 | while c.has_param(pose_topic_name):
20 | c.sub(c.read(pose_topic_name, "\\unknown"), "geometry_msgs/PoseWithCovarianceStamped")
21 | topic_index += 1
22 | pose_topic_name = f"~pose{topic_index}"
23 |
24 | topic_index = 0
25 | twist_topic_name = f"~twist{topic_index}"
26 | while c.has_param(twist_topic_name):
27 | c.sub(c.read(twist_topic_name, "\\unknown"), "geometry_msgs/TwistWithCovarianceStamped")
28 | topic_index += 1
29 | twist_topic_name = f"~twist{topic_index}"
30 |
31 | topic_index = 0
32 | imu_topic_name = f"~imu{topic_index}"
33 | while c.has_param(imu_topic_name):
34 |         c.sub(c.read(imu_topic_name, "\\unknown"), "sensor_msgs/Imu")
35 | topic_index += 1
36 | imu_topic_name = f"~imu{topic_index}"
37 |
38 | stamped_control = c.read("~stamped_control", False)
39 | stamped = f"geometry_msgs::Twist{'Stamped' if stamped_control else ''}"
40 | c.sub("set_pose", "geometry_msgs/PoseWithCovarianceStamped")
41 | c.sub("cmd_vel", stamped)
42 | c.provide("~set_pose", "geometry_msgs/PoseWithCovarianceStamped")
43 | c.provide("~toggle", "robot_localization/ToggleFilterProcessing")
44 | c.provide("~enable", "std_srvs/Empty")
45 |
--------------------------------------------------------------------------------
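The loops above enumerate ~odomN, ~poseN, ~twistN, and ~imuN parameters until the first missing index. A hedged illustration of matching parameters, with hypothetical topic names following the robot_localization convention:

# Hypothetical private parameters for ekf_localization_node that the
# enumeration loops above would pick up.
example_params = {
    "~odom0": "/wheel/odometry",
    "~odom1": "/visual/odometry",
    "~imu0": "/imu/data",
}
# The model would then subscribe to /wheel/odometry and /visual/odometry
# as nav_msgs/Odometry and to /imu/data as sensor_msgs/Imu.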
/tests/model.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from rosdiscover.workspace import Workspace
4 | from rosdiscover.vm import Model, NodeContext, VM
5 |
6 |
7 | def test_joint_state_publisher():
8 | import rosdiscover.models.joint_state_publisher
9 | m = Model.find('joint_state_publisher', 'joint_state_publisher')
10 | ctx = NodeContext('joint_state_publisher')
11 | m.eval(ctx)
12 |
13 |
14 | def test_amcl():
15 | import rosdiscover.models.amcl
16 | m = Model.find('amcl', 'amcl')
17 | ctx = NodeContext('amcl')
18 | m.eval(ctx)
19 |
20 |
21 | def test_launch():
22 | workspace = Workspace('/home/chris/brass/tbot')
23 | fn_launch = "/home/chris/brass/tbot/turtlebot_simulator/launch/turtlebot_in_stage.launch"
24 | vm = VM(workspace)
25 | vm.launch(fn_launch)
26 |
27 |
28 | def test_fake_launch():
29 | import rosdiscover.models.amcl
30 | import rosdiscover.models.joint_state_publisher
31 | import rosdiscover.models.move_base
32 | import rosdiscover.models.stage_ros
33 |
34 | workspace = Workspace('/home/chris/brass/tbot')
35 | vm = VM(workspace)
36 | vm.load('stage_ros', 'stage_ros', 'stage_ros')
37 | vm.load('joint_state_publisher', 'joint_state_publisher', 'joint_state_publisher')
38 | vm.load('amcl', 'amcl', 'amcl')
39 | vm.load('move_base', 'move_base', 'move_base')
40 |
41 | # FIXME nodelets
42 | vm.load('yocs_velocity_smoother',
43 | 'velocity_smoother',
44 | 'navigation_velocity_smoother')
45 | vm.load('kobuki_safety_controller',
46 | 'kobuki_safety_controller',
47 | 'safety_controller')
48 |
49 | print(list(vm.nodes))
50 | # print(list(vm.topics))
51 |
52 |
53 | if __name__ == '__main__':
54 | log_to_stdout = logging.StreamHandler()
55 | logging.getLogger('rosdiscover').addHandler(log_to_stdout)
56 |
57 | # test_joint_state_publisher()
58 | # test_amcl()
59 |
60 | test_fake_launch()
61 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/image_proc_nodelets.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 | IMAGE_TOPIC_TYPE = 'sensor_msgs/Image'
5 | IMAGE_PROC_PKG = 'image_proc'
6 |
7 |
8 | @model(IMAGE_PROC_PKG, "image_proc")
9 | def image_proc(c):
10 | c.sub('image_raw', IMAGE_TOPIC_TYPE)
11 | c.sub('camera_info', 'sensor_msgs/CameraInfo')
12 |
13 | c.pub('image_mono', IMAGE_TOPIC_TYPE)
14 | c.pub('image_rect', IMAGE_TOPIC_TYPE)
15 | c.pub('image_color', IMAGE_TOPIC_TYPE)
16 | c.pub('image_rect_color', IMAGE_TOPIC_TYPE)
17 |
18 | c.read('~queue_size', 5)
19 |
20 |
21 | @model(IMAGE_PROC_PKG, "debayer")
22 | def debayer(c):
23 | c.mark_nodelet()
24 | c.pub('image_mono', IMAGE_TOPIC_TYPE)
25 | c.pub('image_color', IMAGE_TOPIC_TYPE)
26 |
27 | c.sub('image_raw', IMAGE_TOPIC_TYPE)
28 |
29 | c.read('~debayer', 0)
30 |
31 |
32 | @model(IMAGE_PROC_PKG, 'rectify')
33 | def rectify(c):
34 | c.mark_nodelet()
35 |
36 | c.sub('image_mono', IMAGE_TOPIC_TYPE)
37 | c.sub('camera_info', 'sensor_msgs/CameraInfo')
38 |
39 | c.pub('image_rect', IMAGE_TOPIC_TYPE)
40 |
41 | c.read('~queue_size', 5)
42 | c.read('~interpolation', 1)
43 |
44 |
45 | @model(IMAGE_PROC_PKG, 'crop_decimate')
46 | def crop_decimate(c):
47 | c.mark_nodelet()
48 |
49 | c.sub('camera/image_raw', IMAGE_TOPIC_TYPE)
50 | c.sub('camera/camera_info', 'sensor_msgs/CameraInfo')
51 |
52 | c.pub('camera_out/image_raw', IMAGE_TOPIC_TYPE)
53 | c.pub('camera_out/camera_info', 'sensor_msgs/CameraInfo')
54 |
55 | c.read('~queue_size', 5)
56 | c.read('~decimation_x', 1, True)
57 | c.read('~decimation_y', 1, True)
58 | c.read('~x_offset', 0, True)
59 | c.read('~y_offset', 0, True)
60 | c.read('~width', 0, True)
61 | c.read('~height', 0, True)
62 | c.read('~interpolation', 0, True)
63 |
64 |
65 | @model(IMAGE_PROC_PKG, 'resize')
66 | def resize(c):
67 | c.mark_nodelet()
68 |
69 | c.sub('image', IMAGE_TOPIC_TYPE)
70 | c.sub('camera_info', 'sensor_msgs/CameraInfo')
71 |
72 | c.pub('~image', IMAGE_TOPIC_TYPE)
73 | c.pub('~camera_info', 'sensor_msgs/CameraInfo')
74 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/depth_image_proc_nodelets.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model
3 |
4 | POINTCLOUD2 = 'sensor_msgs/PointCloud2'
5 | MSGS_CAMERA_INFO = 'sensor_msgs/CameraInfo'
6 | IMAGE_TOPIC_TYPE = 'sensor_msgs/Image'
7 | DEPTH_IMAGE_PROC_PKG = 'depth_image_proc'
8 |
9 |
10 | @model(DEPTH_IMAGE_PROC_PKG, 'convert_metric')
11 | def convert_metric(c):
12 | c.mark_nodelet()
13 |
14 | c.sub('~image_raw', IMAGE_TOPIC_TYPE)
15 | c.pub('~image', IMAGE_TOPIC_TYPE)
16 |
17 |
18 | @model(DEPTH_IMAGE_PROC_PKG, 'disparity')
19 | def disparity(c):
20 | c.mark_nodelet()
21 |
22 | c.sub('~left/image_rect', IMAGE_TOPIC_TYPE)
23 | c.sub('~right/camera_info', MSGS_CAMERA_INFO)
24 |
25 | c.read('min_range', 0.0)
26 | c.read('max_range', 0.0)
27 | c.read('delta_d', 0.125)
28 | c.read('queue_size', 5)
29 |
30 |
31 | @model(DEPTH_IMAGE_PROC_PKG, 'point_cloud_xyz')
32 | def point_cloud_xyz(c):
33 | c.mark_nodelet()
34 |
35 | c.sub('~camera_info', MSGS_CAMERA_INFO)
36 | c.sub('~image_rect', IMAGE_TOPIC_TYPE)
37 |
38 | c.pub('~points', POINTCLOUD2)
39 |
40 | c.read('queue_size', 5)
41 |
42 |
43 | @model(DEPTH_IMAGE_PROC_PKG, 'point_cloud_xyzrgb')
44 | def point_cloud_xyzrgb(c):
45 | c.mark_nodelet()
46 |
47 | c.sub('~rgb/camera_info', MSGS_CAMERA_INFO)
48 | c.sub('~rgb/image_rect_color', IMAGE_TOPIC_TYPE)
49 | c.sub('~depth_registered/image_rect', IMAGE_TOPIC_TYPE)
50 |
51 | c.pub('~depth_registered/points', POINTCLOUD2)
52 |
53 | c.read('queue_size', 5)
54 |
55 |
56 | @model(DEPTH_IMAGE_PROC_PKG, 'register')
57 | def register(c):
58 | c.mark_nodelet()
59 |
60 | c.sub('~rgb/camera_info', MSGS_CAMERA_INFO)
61 | c.sub('~depth/camera_info', MSGS_CAMERA_INFO)
62 | c.sub('~depth/image_rect', IMAGE_TOPIC_TYPE)
63 |
64 | c.pub('~depth_registered/camera_info', MSGS_CAMERA_INFO)
65 | c.pub('~depth_registered/image_rect', IMAGE_TOPIC_TYPE)
66 |
67 | c.read('~queue_size', 5)
68 |
69 | # TODO: The documentation mentions that this TF should exist
70 | # but we don't have a way to record it
71 |     # This is here to give an idea for when we decide to
72 |     # record it
73 | # c.tf('depth_optical_frame', '/rgb_optical_frame')
74 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | This module provides architectural models for various ROS packages.
4 |
5 | Each file within this directory defines a single model for a particular ROS
6 | node type. For now, models are implemented as Python functions that accept a
7 | single argument, which is used to supply a :code:`NodeContext` for evaluating
8 | the various architectural commands within the model definition (e.g.,
9 | subscribing to a particular topic).
10 | The :code:`model` decorator must be used to indicate that a particular
11 | function is used to supply a model definition, as shown below.
12 |
13 | .. code:: python
14 |
15 | from ..interpreter import model
16 |
17 |     @model('amcl', 'amcl')
18 | def amcl(c):
19 | ...
20 |
21 | Note that the module (i.e., file) responsible for defining a particular model
22 | must be imported below in order for that model to be loaded. For example, to
23 | ensure that the model provided by :code:`joint_state_publisher.py` is loaded,
24 | the following :code:`import` statement must be added to this file:
25 |
26 | .. code:: python
27 |
28 | from . import joint_state_publisher
29 |
30 | """
31 | from . import amcl
32 | from . import autorally
33 | from . import cmd_vel_mux
34 | from . import depth_image_proc_nodelets
35 | from . import diagnostic_aggregator
36 | from . import gazebo_gui
37 | from . import gzserver
38 | from . import image_proc_nodelets
39 | from . import image_transport
40 | from . import joint_state_publisher
41 | from . import joy
42 | from . import kobuki_safety_controller
43 | from . import map_server
44 | from . import move_base
45 | from . import pdw_robot_state_publisher
46 | from . import pdw_turtlebot3_fake
47 | from . import pdw_turtlebot3_teleop_key
48 | from . import placeholder
49 | from . import robot_state_publisher
50 | from . import rviz
51 | from . import spawn_model
52 | from . import spawner
53 | from . import stage_ros
54 | from . import topic_tools_mux
55 | from . import turtlebot3_teleop_key
56 | from . import twist_mux
57 | from . import velocity_smoother
58 | from . import pointgrey_camera_driver
59 | from . import lap_stats
60 | from . import rostopic
61 | from . import ekf_localization
62 | from . import rosdiscover_error_reproducer
63 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/gzserver.py:
--------------------------------------------------------------------------------
1 | from ..interpreter import model
2 |
3 |
4 | @model('gazebo_ros', 'gzserver')
5 | def gzserver(c):
6 | # TODO: Most of these parameters should come from plugins, not be handled here
7 | c.pub('/clock', 'rosgraph_msgs/Clock')
8 |     c.pub('~link_states', 'gazebo_msgs/LinkStates')
9 | c.pub('~model_states', 'gazebo_msgs/ModelStates')
10 |
11 | c.sub('~set_link_state', 'gazebo_msgs/LinkState')
12 | c.sub('~set_model_state', 'gazebo_msgs/ModelState')
13 |
14 | c.provide('~spawn_sdf_model', 'gazebo_msgs/SpawnModel')
15 | c.provide('~spawn_urdf_model', 'gazebo_msgs/SpawnModel')
16 | c.provide('~delete_model', 'gazebo_msgs/DeleteModel')
17 | c.provide('~delete_light', 'gazebo_msgs/DeleteLight')
18 | c.provide('~get_model_properties', 'gazebo_msgs/GetModelProperties')
19 | c.provide('~get_model_state', 'gazebo_msgs/GetModelState')
20 | c.provide('~get_world_properties', 'gazebo_msgs/GetWorldProperties')
21 | c.provide('~get_joint_properties', 'gazebo_msgs/GetJointProperties')
22 | c.provide('~get_link_properties', 'gazebo_msgs/GetLinkProperties')
23 | c.provide('~get_link_state', 'gazebo_msgs/GetLinkState')
24 |     c.provide('~set_link_state', 'gazebo_msgs/SetLinkState')
25 | c.provide('~get_light_properties', 'gazebo_msgs/GetLightProperties')
26 | c.provide('~set_light_properties', 'gazebo_msgs/SetLightProperties')
27 | c.provide('~get_physics_properties', 'gazebo_msgs/GetPhysicsProperties')
28 | c.provide('~set_link_properties', 'gazebo_msgs/SetLinkProperties')
29 | c.provide('~set_physics_properties', 'gazebo_msgs/SetPhysicsProperties')
30 | c.provide('~set_model_state', 'gazebo_msgs/SetModelState')
31 | c.provide('~set_model_configuration', 'gazebo_msgs/SetModelConfiguration')
32 | c.provide('~set_joint_properties', 'gazebo_msgs/SetJointProperties')
33 | c.provide('~unpause_physics', 'std_srvs/Empty')
34 | c.provide('~pause_physics', 'std_srvs/Empty')
35 | c.provide('~apply_body_wrench', 'gazebo_msgs/ApplyBodyWrench')
36 | c.provide('~apply_joint_effort', 'gazebo_msgs/ApplyJointEffort')
37 | c.provide('~clear_joint_forces', 'gazebo_msgs/JointRequest')
38 | c.provide('~clear_body_wrenches', 'gazebo_msgs/BodyRequest')
39 | c.provide('~reset_simulation', 'std_srvs/Empty')
40 | c.provide('~reset_world', 'std_srvs/Empty')
41 |
42 | c.write('~use_sim_time', True)
43 | c.read('~pub_clock_frequency')
44 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/amcl.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # from ..interpreter import model
3 |
4 | M_PI = 3.14159265358979323846
5 |
6 |
7 | # Commented out to make static node recovery be used
8 | # @model('amcl', 'amcl')
9 | def amcl(c):
10 | c.read('~use_map_topic', False)
11 | c.read('~first_map_only', False)
12 | c.read('~gui_publish_rate', -1.0)
13 | c.read('~save_pose_rate', 0.5)
14 | c.read("~gui_publish_rate", -1.0)
15 | c.read("~save_pose_rate", 0.5)
16 | c.read("~laser_min_range", -1.0)
17 | c.read("~laser_max_range", -1.0)
18 | c.read("~laser_max_beams", 30)
19 | c.read("~min_particles", 100)
20 | c.read("~max_particles", 5000)
21 | c.read("~kld_err", 0.01)
22 | c.read("~kld_z", 0.99)
23 | c.read("~odom_alpha1", 0.2)
24 | c.read("~odom_alpha2", 0.2)
25 | c.read("~odom_alpha3", 0.2)
26 | c.read("~odom_alpha4", 0.2)
27 | c.read("~odom_alpha5", 0.2)
28 | c.read("~do_beamskip", False)
29 | c.read("~beam_skip_distance", 0.5)
30 | c.read("~beam_skip_threshold", 0.3)
31 | c.read("~beam_skip_error_threshold_", 0.9)
32 | c.read("~laser_z_hit", 0.95)
33 | c.read("~laser_z_short", 0.1)
34 | c.read("~laser_z_max", 0.05)
35 | c.read("~laser_z_rand", 0.05)
36 | c.read("~laser_sigma_hit", 0.2)
37 | c.read("~laser_lambda_short", 0.1)
38 | c.read("~laser_likelihood_max_dist", 2.0)
39 | c.read("~laser_model_type", "likelihood_field")
40 | c.read("~odom_model_type", "diff")
41 | c.read("~update_min_d", 0.2)
42 | c.read("~update_min_a", M_PI / 6.0)
43 | c.read("~odom_frame_id", "odom")
44 | c.read("~base_frame_id", "base_link")
45 | c.read("~global_frame_id", "map")
46 | c.read("~resample_interval", 2)
47 | c.read("~transform_tolerance", 0.1)
48 | c.read("~recovery_alpha_slow", 0.001)
49 | c.read("~recovery_alpha_fast", 0.1)
50 | c.read("~tf_broadcast", True)
51 | c.read("~bag_scan_period", -1.0)
52 |
53 | c.pub('amcl_pose', 'geometry_msgs/PoseWithCovarianceStamped')
54 | c.pub('particlecloud', 'geometry_msgs/PoseArray')
55 |
56 | c.sub('scan', 'sensor_msgs/LaserScan')
57 | c.sub('map', 'nav_msgs/OccupancyGrid')
58 | c.sub('initialpose', 'geometry_msgs/PoseWithCovarianceStamped')
59 |
60 | c.provide('global_localization', 'std_srvs/Empty')
61 | c.provide('request_nomotion_update', 'std_srvs/Empty')
62 | c.provide('set_map', 'nav_msgs/SetMap')
63 |
64 | # TODO add dynamic reconfigure helper
65 | c.pub('~parameter_descriptions', 'dynamic_reconfigure/ConfigDescription')
66 | c.pub('~parameter_updates', 'dynamic_reconfigure/Config')
67 |
--------------------------------------------------------------------------------
/src/rosdiscover/recover/states_analyzer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import annotations
3 |
4 | from .call import Subscriber
5 |
6 | __all__ = (
7 | "SymbolicStatesAnalyzer"
8 | )
9 |
10 | import typing as t
11 | import attr
12 | from functools import cached_property
13 |
14 | from .symbolic import (
15 | SymbolicAssignment,
16 | SymbolicFunction,
17 | SymbolicProgram,
18 | SymbolicVariableReference,
19 | )
20 | from .analyzer import SymbolicProgramAnalyzer
21 |
22 |
23 | @attr.s(auto_attribs=True) # Can't use slots with cached_property
24 | class SymbolicStatesAnalyzer:
25 |
26 | program: SymbolicProgram
27 | program_analyzer: SymbolicProgramAnalyzer
28 |
29 | @cached_property
30 | def state_vars(self) -> t.List[SymbolicVariableReference]:
31 | var_refs: t.List[SymbolicVariableReference] = []
32 | for pub in self.program_analyzer.publish_calls:
33 | cond = self.program_analyzer.inter_procedual_condition(pub)
34 | for expr in cond.decendents(True):
35 | if isinstance(expr, SymbolicVariableReference) and expr.variable in self.program_analyzer.assigned_vars:
36 | pub_func = self.program.func_of_stmt(pub)
37 | assign_funcs: t.Set[SymbolicFunction] = set()
38 | for assign in self.program_analyzer.assignments_of_var(expr.variable):
39 | assign_funcs.add(self.program.func_of_stmt(assign))
40 | if len(assign_funcs - {pub_func}) > 0 and expr not in var_refs:
41 | var_refs.append(expr)
42 | return var_refs
43 |
44 | @cached_property
45 | def _state_var_assigns(self) -> t.List[SymbolicAssignment]:
46 | result: t.List[SymbolicAssignment] = []
47 | for var in self.state_vars:
48 | for assign in self.program_analyzer.assignments_of_var(var.variable):
49 | if assign not in result:
50 | result.append(assign)
51 | return result
52 |
53 | @cached_property
54 | def sub_state_var_assigns(self) -> t.List[t.Tuple[Subscriber, SymbolicAssignment]]:
55 | result: t.List[t.Tuple[Subscriber, SymbolicAssignment]] = []
56 | for assign in self._state_var_assigns:
57 | for (sub, callback) in self.program_analyzer.subscriber_callbacks_map:
58 | if callback.body.contains(assign, self.program.functions) and (sub, assign) not in result:
59 | result.append((sub, assign))
60 | return result
61 |
62 | @cached_property
63 | def main_state_var_assigns(self) -> t.List[SymbolicAssignment]:
64 | result: t.List[SymbolicAssignment] = []
65 | for assign in self._state_var_assigns:
66 | if self.program.entrypoint.body.contains(assign, self.program.functions):
67 | result.append(assign)
68 | return result
69 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/pointgrey_camera_driver.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model, NodeContext
3 |
4 |
5 | @model("pointgrey_camera_driver", "PointGreyCameraNodelet")
6 | def pointgrey_camera_driver(c: NodeContext) -> None:
7 | c.mark_nodelet()
8 |
9 | # How to derive dynamic parameters from:
10 | # https://github.com/ros-drivers/pointgrey_camera_driver/blob/1c71a654bea94f59396361cd735ef718f8f07011/pointgrey_camera_driver/src/nodelet.cpp#L270
11 | c.read('serial', 0)
12 | c.read('packet_size', 1400)
13 | c.read('auto_packet_size', True)
14 | c.read('packet_delay', 4000)
15 | c.read('camera_info_url', '')
16 | c.read('frame_id', 'camera')
17 |
18 | c.read('desired_freq', 7.0)
19 | c.read('min_freq', 7.0)
20 | c.read('max_freq', 7.0)
21 | c.read('freq_tolerance', 0.1)
22 | c.read('window_size', 100)
23 | c.read('min_acceptable_delay', 0.0)
24 | c.read('max_acceptable_delay', 0.2)
25 |
26 | # Dynamic parameters read from
27 | # https://github.com/ros-drivers/pointgrey_camera_driver/blob/1c71a654bea94f59396361cd735ef718f8f07011/pointgrey_camera_driver/cfg/PointGrey.cfg
28 | c.read("video_mode", "format7_mode0", dynamic=True)
29 | c.read("frame_rate", 7, dynamic=True)
30 | c.read("auto_exposure", True, dynamic=True)
31 | c.read("exposure", 1.35, dynamic=True)
32 | c.read("auto_shutter", True, dynamic=True)
33 | c.read("shutter_speed", 0.03, dynamic=True)
34 | c.read("auto_gain", True, dynamic=True)
35 | c.read("gain", 0, dynamic=True)
36 | c.read("pan", 0, dynamic=True)
37 | c.read("tilt", 0, dynamic=True)
38 | c.read("brightness", 0.0, dynamic=True)
39 | c.read("gamma", 1.0, dynamic=True)
40 | c.read("auto_white_balance", True, dynamic=True)
41 | c.read("white_balance_blue", 800, dynamic=True)
42 | c.read("white_balance_red", 550, dynamic=True)
43 | c.read("format7_roi_width", 0,)
44 | c.read("format7_roi_height", 0, dynamic=True)
45 | c.read("format7_x_offset", 0, dynamic=True)
46 | c.read("format7_y_offset", 0, dynamic=True)
47 | c.read("format7_color_coding", "raw8", dynamic=True)
48 | c.read("enable_trigger", False, dynamic=True)
49 | c.read("trigger_mode", "mode0", dynamic=True)
50 | c.read("trigger_source", "gpio0", dynamic=True)
51 | c.read("trigger_polarity", 0, dynamic=True)
52 | c.read("enable_trigger_delay", False, dynamic=True)
53 | c.read("trigger_delay", 0.0, dynamic=True)
54 | c.read("trigger_parameter", 0, dynamic=True)
55 | c.read("enable_strobe1", False, dynamic=True)
56 | c.read("strobe1_polarity", 0, dynamic=True)
57 | c.read("strobe1_delay", 0.0, dynamic=True)
58 | c.read("strobe1_duration", 0.0, dynamic=True)
59 |
60 | for image_topic in [("", "sensor_msgs/Image"), ("/compressed", "sensor_msgs/CompressedImage"),
61 | ("/compressedDepth", "sensor_msgs/CompressedImage"),
62 | ("/theora", "theora_image_transport/Packet")]:
63 | c.pub("~image_raw" + image_topic[0], image_topic[1])
64 | c.pub("~camera_info", "sensor_msgs/CameraInfo")
65 |     c.pub('/diagnostics', 'diagnostic_msgs/DiagnosticArray')
66 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/autorally.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from ..interpreter import model, NodeContext
3 |
4 | # Python models for autorally_gazebo
5 | # Derived from: https://github.com/AutoRally/autorally/tree/melodic-devel/autorally_gazebo/nodes
6 |
7 |
8 | @model('autorally_gazebo', 'autorally_controller.py')
9 | def autorally_controller(c: NodeContext) -> None:
10 |
11 | c.read("~left_front_wheel/steering_link_name", "left_steering_link")
12 | c.read("~right_front_wheel/steering_link_name", "right_steering_link")
13 | left_steering_controller_name = \
14 | c.read("~left_front_wheel/steering_controller_name", "left_steering_controller")
15 | assert isinstance(left_steering_controller_name, str)
16 | right_steering_controller_name = \
17 | c.read("~right_front_wheel/steering_controller_name", "right_steering_controller")
18 | assert isinstance(right_steering_controller_name, str)
19 | c.read("~left_rear_wheel/link_name", "left_wheel")
20 | c.read("~right_rear_wheel/link_name", "right_wheel")
21 |
22 | left_front_axle_controller_name = \
23 | c.read("~left_front_wheel/axle_controller_name")
24 | assert isinstance(left_front_axle_controller_name, str)
25 |
26 | right_front_axle_controller_name = \
27 | c.read("~right_front_wheel/axle_controller_name")
28 | assert isinstance(right_front_axle_controller_name, str)
29 |
30 | left_rear_axle_controller_name = \
31 | c.read("~left_rear_wheel/axle_controller_name")
32 | assert isinstance(left_rear_axle_controller_name, str)
33 |
34 | right_rear_axle_controller_name = \
35 | c.read("~right_rear_wheel/axle_controller_name")
36 | assert isinstance(right_rear_axle_controller_name, str)
37 |
38 | c.read("~left_front_wheel/diameter", 1.0)
39 | c.read("~right_front_wheel/diameter")
40 | c.read("~left_rear_wheel/diameter")
41 | c.read("~right_rear_wheel/diameter")
42 |
43 | # https://github.com/AutoRally/autorally/blob/c2692f2970da6874ad9ddfeea3908adaf05b4b09/autorally_gazebo/nodes/autorally_controller.py#L258
44 | chassis_command_priorities = \
45 | c.parameter_keys("~chassisCommandProirities") # Note, misspelling is deliberate
46 |
47 | shock_absorbers = c.read("~shock_absorbers", [])
48 |
49 | c.read("~cmd_timeout", 0.5)
50 | c.read("~publishing_frequency", 30.0)
51 |
52 | c.sub("runstop", "autorally_msgs/runstop")
53 |
54 | c.pub(f"{left_steering_controller_name}/command", "std_msgs/Float64")
55 | c.pub(f"{right_steering_controller_name}/command", "std_msgs/Float64")
56 | c.pub(f"{left_front_axle_controller_name}/command", "std_msgs/Float64")
57 | c.pub(f"{right_front_axle_controller_name}/command", "std_msgs/Float64")
58 | c.pub(f"{left_rear_axle_controller_name}/command", "std_msgs/Float64")
59 | c.pub(f"{right_rear_axle_controller_name}/command", "std_msgs/Float64")
60 |
61 | for shocker in shock_absorbers:
62 | assert isinstance(shocker, dict)
63 | assert 'controller_name' in shocker
64 | c.pub(f"{shocker['controller_name']}/command", "std_msgs/Float64") # latched = True
65 |
66 | c.pub("wheelSpeeds", "autorally_msgs/wheelSpeeds")
67 | c.pub("chassisState", "autorally_msgs/chassisState")
68 |
69 | for cmd in chassis_command_priorities:
70 | cmd = cmd.split("/")[-1]
71 | c.sub(f"{cmd}/chassisCommand", "autorally_msgs/chassisCommand")
72 |
73 | c.sub('joint_states', "sensor_msgs/JointState")
74 |
75 | c.provide('~/list_controllers', "controller_manager_msgs/ListControllers")
76 |
77 |
78 | @model('autorally_gazebo', 'ground_truth_republisher.py')
79 | def ground_truth_republisher(c: NodeContext) -> None:
80 | c.pub('/ground_truth/state', 'nav_msgs/Odometry')
81 | c.sub('/ground_truth/state_raw', 'nav_msgs/Odometry')
82 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/spawner.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import argparse
3 | import os
4 | import typing as t
5 |
6 | import yaml
7 | import attr
8 | from loguru import logger
9 | from roswire.name import namespace_join
10 |
11 | from .plugins.controller_manager import ControllerManagerPlugin
12 | from ..interpreter import model, NodeContext
13 |
14 |
15 | @attr.s(frozen=True, slots=True, auto_attribs=True)
16 | class _Controller:
17 | name: str = attr.ib()
18 | type_: str = attr.ib()
19 |
20 |
21 | @model('controller_manager', 'spawner')
22 | def spawner(c: NodeContext):
23 | parser = argparse.ArgumentParser("spawner")
24 | # FIXME this is not quite how this argument works:
25 | # https://github.com/ros-controls/ros_control/blob/5db3baaa71c9dcd8a2fadb3b2c0b5085ea49b3a1/controller_manager/scripts/spawner#L174-L185
26 | parser.add_argument("--namespace", default="/")
27 | parser.add_argument("controllers", nargs="+")
28 | args = parser.parse_args(c.args.split())
29 |
30 | controllers_to_spawn: t.List[_Controller] = []
31 | controller_manager_namespace = "/"
32 |
33 | for controller_name_or_filename in args.controllers:
34 | if os.path.isabs(controller_name_or_filename):
35 | try:
36 | contents = c.app.files.read(controller_name_or_filename)
37 | controllers_yml = yaml.safe_load(contents)
38 | for name, info in controllers_yml.items():
39 | controller_type = info['type']
40 | controllers_to_spawn.append(_Controller(name=name, type_=controller_type))
41 | except Exception:
42 | m = f"Error reading controllers from a config file: {controller_name_or_filename}"
43 | logger.error(m)
44 | raise
45 |
46 | else:
47 | controller_name = controller_name_or_filename
48 | logger.debug(f"finding type for controller [{controller_name}]")
49 | controller_namespace = namespace_join(controller_manager_namespace, controller_name)
50 | controller_type = c.read(f"{controller_namespace}/type")
51 | logger.debug(f"found type for controller [{controller_name}]: {controller_type}")
52 | controller = _Controller(name=controller_name, type_=controller_type)
53 | controllers_to_spawn.append(controller)
54 |
55 | # the spawner interacts with the controller_manager services
56 | load_controller_service = namespace_join(args.namespace, "controller_manager/load_controller")
57 | unload_controller_service = namespace_join(args.namespace, "controller_manager/unload_controller")
58 | switch_controller_service = namespace_join(args.namespace, "controller_manager/switch_controller")
59 |
60 | c.use(load_controller_service, "controller_manager_msgs/LoadController")
61 | c.use(unload_controller_service, "controller_manager_msgs/UnloadController")
62 | c.use(switch_controller_service, "controller_manager_msgs/SwitchController")
63 |
64 | # FIXME load each controller as a plugin for the associated controller_manager
65 | # this is a little tricky since we need to find the correct controller_manager
66 | # going forward, we can (after solving some ordering/dependency issues) iterate
67 | # over the system to find the node that provides the necessary services
68 | #
69 |     # for now, I'm hardcoding gazebo so that we can crack on with our experiments
70 | controller_manager_node = "/gazebo"
71 |
72 | for controller in controllers_to_spawn:
73 |         # construct a plugin that attaches this controller to its controller_manager
74 | plugin = ControllerManagerPlugin.from_type(
75 | controller_name=controller.name,
76 | controller_type=controller.type_,
77 | controller_manager_node=controller_manager_node,
78 | )
79 | c.load_plugin(plugin)
80 |
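81 | # Illustrative sketch (assumption): when given an absolute path, the spawner reads a
82 | # ros_control-style YAML file in which each top-level key names a controller and
83 | # supplies its type; the controller names and types below are hypothetical.
84 | #
85 | #   joint_state_controller:
86 | #     type: joint_state_controller/JointStateController
87 | #   arm_controller:
88 | #     type: position_controllers/JointTrajectoryController
89 | #
90 | # Each entry becomes a _Controller and is then loaded as a ControllerManagerPlugin.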
--------------------------------------------------------------------------------
/src/rosdiscover/models/spawn_model.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ("spawn_model",)
3 |
4 | import argparse
5 | import os
6 | import typing as t
7 | import xml.etree.ElementTree as ET # noqa
8 |
9 | from loguru import logger
10 |
11 | from .plugins.gazebo import GazeboPlugin
12 | from ..interpreter import model
13 |
14 |
15 | def _repair_urdf_contents(contents: str) -> str:
16 | # Workaround to find the begin tag
17 | begin_tag = ""
18 | begin_tag_starts_at = contents.find(begin_tag)
19 | if begin_tag_starts_at == -1:
20 | begin_tag_starts_at = contents.find(" t.Optional[ET.Element]:
34 | logger.debug(f'spawning model using parameter [{parameter_name}]')
35 | urdf_contents = c.read(parameter_name).strip()
36 | logger.debug(f'parsing URDF model from parameter [{parameter_name}]:'
37 | f'\n{urdf_contents}')
38 | urdf_contents = _repair_urdf_contents(urdf_contents)
39 | return ET.fromstring(urdf_contents)
40 |
41 |
42 | def _load_urdf_xml_from_file(c, filename: str) -> t.Optional[ET.Element]:
43 | if not os.path.isabs(filename):
44 | logger.error(f"unable to load URDF XML from file [{filename}]: expected absolute path")
45 | return None
46 |
47 | logger.debug(f"loading URDF XML from file [{filename}]")
48 | urdf_contents = c.read_file(filename)
49 | logger.debug(f"loaded URDF XML contents from file [{filename}]:\n{urdf_contents}")
50 | urdf_contents = _repair_urdf_contents(urdf_contents)
51 | logger.debug("parsing URDF XML contents")
52 | xml = ET.fromstring(urdf_contents)
53 | logger.debug("parsed UDF XML contents")
54 | return xml
55 |
56 |
57 | @model('gazebo_ros', 'spawn_model')
58 | def spawn_model(c):
59 | parser = argparse.ArgumentParser('spawn_model')
60 | parser.add_argument('-urdf', action='store_true')
61 | parser.add_argument('-model', type=str)
62 | parser.add_argument('-file', type=str)
63 | parser.add_argument('-param', type=str)
64 | parser.add_argument('-unpause', action='store_true')
65 | parser.add_argument('-wait', action='store_true')
66 | parser.add_argument('-x', type=float)
67 | parser.add_argument('-y', type=float)
68 | parser.add_argument('-z', type=float)
69 | parser.add_argument('-R', type=float)
70 | parser.add_argument('-P', type=float)
71 | parser.add_argument('-Y', type=float)
72 | parser.add_argument('-J', type=float)
73 | parser.add_argument('-gazebo_namespace', default='/gazebo')
74 | args = parser.parse_args(c.args.split())
75 |
76 | ns_gz = args.gazebo_namespace
77 | c.sub(f'{ns_gz}/model_states', 'gazebo_msgs/ModelStates')
78 | c.use(f'{ns_gz}/unpause_physics', 'std_srvs/Empty')
79 | c.use(f'{ns_gz}/delete_model', 'gazebo_msgs/DeleteModel')
80 | c.use(f'{ns_gz}/spawn_urdf_model', 'gazebo_msgs/SpawnModel')
81 | c.use(f'{ns_gz}/spawn_sdf_model', 'gazebo_msgs/SpawnModel')
82 | c.use(f'{ns_gz}/set_model_configuration', 'gazebo_msgs/SetModelConfiguration')
83 |
84 | # load the URDF file into an XML object
85 | urdf_xml: t.Optional[ET.Element] = None
86 | if args.file:
87 | urdf_xml = _load_urdf_xml_from_file(c, args.file)
88 | elif args.param:
89 | urdf_xml = _load_urdf_xml_from_parameter(c, args.param)
90 |
91 | if not urdf_xml:
92 | logger.error("failed to load URDF XML")
93 | return
94 |
95 | for plugin_xml in urdf_xml.findall('.//plugin'):
96 | try:
97 | plugin = GazeboPlugin.from_xml(plugin_xml)
98 | c.load_plugin(plugin)
99 | except ValueError:
100 | logger.exception(f"failed to load gazebo plugin [skipping!]: {plugin_xml}")
101 |
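102 | # Illustrative sketch (assumption): the URDF fragments matched by the `.//plugin`
103 | # search above typically look like the following (names are hypothetical); each
104 | # matched <plugin> element is handed to GazeboPlugin.from_xml.
105 | #
106 | #   <gazebo>
107 | #     <plugin name="gazebo_ros_control" filename="libgazebo_ros_control.so">
108 | #       <robotNamespace>/</robotNamespace>
109 | #     </plugin>
110 | #   </gazebo>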
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/model.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('HandwrittenModel', 'PlaceholderModel', 'model', 'NodeModel')
3 |
4 | import abc
5 | import typing as t
6 |
7 | from loguru import logger
8 | import attr
9 |
10 | from .context import NodeContext
11 |
12 |
13 | class NodeModel(abc.ABC):
14 | """
15 | Provides an executable description of the run-time architecture of a given node type
16 | that can be used to obtain the actual run-time architectural interactions of a node
17 | within a concrete context.
18 | """
19 | @abc.abstractmethod
20 | def eval(self, context: NodeContext) -> None:
21 | """Obtain the concrete definition for a node within a given context."""
22 | ...
23 |
24 |
25 | @attr.s(frozen=True, slots=True, auto_attribs=True)
26 | class PlaceholderModel(NodeModel):
27 | """Used in place of a missing node model. Has no architectural effects."""
28 | package: str
29 | name: str
30 |
31 | def eval(self, context: NodeContext) -> None:
32 | context.mark_placeholder()
33 |
34 |
35 | @attr.s(frozen=True, slots=True)
36 | class HandwrittenModel(NodeModel):
37 | """Models the architectural interactions of a node type.
38 |
39 | Attributes
40 | ----------
41 | package: str
42 | The name of the package to which the associated node type belongs.
43 | name: str
44 | The name of the node type.
45 | _definition: t.Callable[[NodeContext], None]
46 | A callable (e.g., a [lambda] function) that implements the architectural
47 | semantics of this node type in the form of a function that accepts and
48 | appropriately mutates a given node context. I.e., this attribute is
49 | used to provide an implementation of the :meth:`NodeModel.eval` method.
50 | """
51 | package: str = attr.ib()
52 | name: str = attr.ib()
53 | _definition: t.Callable[[NodeContext], None] = attr.ib()
54 |
55 | _models: t.ClassVar[t.Dict[t.Tuple[str, str], "HandwrittenModel"]] = {}
56 |
57 | @staticmethod
58 | def register(
59 | package: str,
60 | name: str,
61 | definition: t.Callable[[NodeContext], None],
62 | ) -> None:
63 | key = (package, name)
64 | models = HandwrittenModel._models
65 | if key in models:
66 | m = f"model [{name}] already registered for package [{package}]"
67 | raise Exception(m)
68 | models[key] = HandwrittenModel(package, name, definition)
69 | logger.debug(f"registered model [{name}] for package [{package}]")
70 |
71 | @staticmethod
72 | def exists(package: str, name: str) -> bool:
73 | """Determines whether there exists a handwritten model for a given node type.
74 |
75 | Parameters
76 | ----------
77 | package: str
78 | The name of the package to which the node belongs.
79 | name: str
80 | The name of the node type.
81 |
82 | Returns
83 | -------
84 | bool
85 |             :code:`True` if there is a handwritten model for the given node type, else :code:`False`.
86 | """
87 | return (package, name) in HandwrittenModel._models
88 |
89 | @staticmethod
90 | def fetch(package: str, name: str) -> NodeModel:
91 | """Fetches the prewritten model for a given node.
92 |
93 | Raises
94 | ------
95 | ValueError
96 | if no handwritten model for the given node exists.
97 | """
98 | package_and_name = (package, name)
99 | if package_and_name not in HandwrittenModel._models:
100 | msg = f"no handwritten model exists for node [{name}] in package [{package}]"
101 | raise ValueError(msg)
102 |
103 | return HandwrittenModel._models[package_and_name]
104 |
105 | def eval(self, context: NodeContext) -> None:
106 | try:
107 | self._definition(context)
108 | context.mark_handwritten()
109 | except Exception:
110 | logger.error(f'Failed to load handwritten model {context.fullname} from {context.launch_filename}')
111 | raise
112 |
113 |
114 | def model(package: str, name: str) -> t.Any:
115 | def register(m: t.Callable[[NodeContext], None]) -> t.Any:
116 | HandwrittenModel.register(package, name, m)
117 | return m
118 | return register
119 |
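120 | # Illustrative sketch (assumption): a handwritten model is registered by decorating its
121 | # definition function with `model`; the package and node names below are hypothetical.
122 | #
123 | #   @model('my_package', 'my_node')
124 | #   def my_node(c: NodeContext) -> None:
125 | #       c.sub('scan', 'sensor_msgs/LaserScan')
126 | #
127 | #   HandwrittenModel.exists('my_package', 'my_node')  # True once registered
128 | #   HandwrittenModel.fetch('my_package', 'my_node').eval(context)  # replays the definition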
--------------------------------------------------------------------------------
/src/rosdiscover/recover/model.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import annotations
3 |
4 | __all__ = ("RecoveredNodeModel",)
5 |
6 | import json
7 | import typing as t
8 |
9 | from loguru import logger
10 | import attr
11 |
12 | from .loader import SymbolicProgramLoader
13 | from .symbolic import SymbolicProgram
14 | from ..interpreter import NodeModel, NodeContext
15 |
16 | from ..config import Config
17 |
18 |
19 | @attr.s(frozen=True, slots=True, auto_attribs=True)
20 | class CMakeListsInfo:
21 | filename: str
22 | lineno: int
23 |
24 | def to_dict(self) -> t.Dict[str, t.Any]:
25 | return {
26 | "filename": self.filename,
27 | "lineno": self.lineno
28 | }
29 |
30 | @classmethod
31 | def from_dict(cls, dict_: t.Dict[str, t.Any]) -> "CMakeListsInfo":
32 | assert 'filename' in dict_ and 'lineno' in dict_
33 | return CMakeListsInfo(filename=dict_["filename"], lineno=dict_["lineno"])
34 |
35 |
36 | @attr.s(frozen=True, slots=True, auto_attribs=True)
37 | class RecoveredNodeModel(NodeModel):
38 | """Provides a symbolic description of the (statically recovered) run-time
39 | architecture of a given node. This description can be executed via the symbolic
40 | interpreter to obtain the concrete run-time architecture for a given configuration.
41 |
42 | Attributes
43 | ----------
44 | image_sha256: str
45 | The SHA256 of the image from which this node model was recovered, represented
46 | as a hexadecimal string.
47 | package_name: str
48 | The name of the package to which this node belongs.
49 | package_abs_path: str
50 | The absolute path of the directory for the node's package.
51 | source_paths: t.Collection[str]
52 | The paths of the source files for this node, relative to the package
53 |         directory.
54 | node_name: str
55 | The name of the node.
56 | cmakelist_info: t.Optional[CMakeListsInfo]
57 |         Information about the CMakeLists.txt file from which the sources were taken.
58 | program: SymbolicProgram
59 | A parameterized, executable description of the node's architectural effects.
60 | """
61 | image_sha256: str
62 | package_name: str
63 | package_abs_path: str
64 | source_paths: t.Collection[str]
65 | node_name: str
66 | cmakelist_info: t.Optional[CMakeListsInfo]
67 | program: SymbolicProgram
68 |
69 | def save(self, filename: str) -> None:
70 | dict_ = self.to_dict()
71 | logger.debug(f"converted model to JSON-ready dict: {dict_}")
72 | with open(filename, "w") as f:
73 | json.dump(dict_, f, indent=2)
74 |
75 | def to_dict(self) -> t.Dict[str, t.Any]:
76 | dict_ = {
77 | "image": {
78 | "sha256": self.image_sha256,
79 | },
80 | "node-name": self.node_name,
81 | "package": {
82 | "name": self.package_name,
83 | "path": self.package_abs_path,
84 | },
85 | "sources": list(self.source_paths),
86 | "program": self.program.to_dict(),
87 | }
88 | if self.cmakelist_info:
89 | dict_["cmakeinfo"] = self.cmakelist_info.to_dict()
90 | return dict_
91 |
92 | @classmethod
93 | def load(cls, config: Config, filename: str) -> "RecoveredNodeModel":
94 | with open(filename, "r") as f:
95 | return cls.from_dict(config, json.load(f))
96 |
97 | @classmethod
98 | def from_dict(
99 | cls,
100 | config: Config,
101 | dict_: t.Dict[str, t.Any],
102 | ) -> "RecoveredNodeModel":
103 | image_sha256 = dict_["image"]["sha256"]
104 | node_name = dict_["node-name"]
105 | package_name = dict_["package"]["name"]
106 | package_abs_path = dict_["package"]["path"]
107 | source_paths = dict_["sources"]
108 | program = SymbolicProgramLoader.for_config(config).load(dict_["program"])
109 | cmakeinfo = None
110 | if 'cmakeinfo' in dict_:
111 | cmakeinfo = dict_["cmakeinfo"]
112 | return RecoveredNodeModel(
113 | image_sha256=image_sha256,
114 | node_name=node_name,
115 | package_name=package_name,
116 | package_abs_path=package_abs_path,
117 | source_paths=source_paths,
118 | program=program,
119 | cmakelist_info=CMakeListsInfo.from_dict(cmakeinfo) if cmakeinfo else None
120 | )
121 |
122 | def eval(self, context: NodeContext) -> None:
123 | context.mark_recovered()
124 | return self.program.eval(context)
125 |
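126 | # Illustrative sketch (assumption): `save`/`to_dict` produce JSON of roughly the shape
127 | # below, which `load`/`from_dict` accept; all values here are placeholders.
128 | #
129 | #   {
130 | #     "image": {"sha256": "..."},
131 | #     "node-name": "example_node",
132 | #     "package": {"name": "example_pkg", "path": "/ros_ws/src/example_pkg"},
133 | #     "sources": ["src/example_node.cpp"],
134 | #     "program": {...},
135 | #     "cmakeinfo": {"filename": "CMakeLists.txt", "lineno": 42}
136 | #   }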
--------------------------------------------------------------------------------
/src/rosdiscover/recover/analyzer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import annotations
3 |
4 | __all__ = (
5 | "SymbolicProgramAnalyzer"
6 | )
7 | from functools import cached_property
8 | import typing as t
9 |
10 | import attr
11 |
12 | from .symbolic import (
13 | AndExpr,
14 | SymbolicAssignment,
15 | SymbolicExpr,
16 | SymbolicProgram,
17 | SymbolicFunction,
18 | SymbolicFunctionCall,
19 | SymbolicWhile,
20 | )
21 |
22 | from .call import Publish, RateSleep
23 | from .call import Subscriber
24 |
25 |
26 | @attr.s(auto_attribs=True) # Can't use slots with cached_property
27 | class SymbolicProgramAnalyzer:
28 |
29 | program: SymbolicProgram
30 |
31 | @cached_property
32 | def assigned_vars(self) -> t.Set[str]:
33 | return {a.variable for a in self.assignments}
34 |
35 | def inter_procedual_condition(self, publish_call: Publish) -> SymbolicExpr:
36 | expr = publish_call.condition
37 | transitive_callers = self.program.transitive_callers(self.program.func_of_stmt(publish_call))
38 | for call in transitive_callers:
39 | expr = AndExpr.build(expr, call.condition)
40 | return expr
41 |
42 | def assignments_of_var(self, variable: str) -> t.Set[SymbolicAssignment]:
43 | result = set()
44 | for assign in self.assignments:
45 | if assign.variable == variable:
46 | result.add(assign)
47 |
48 | return result
49 |
50 | @cached_property
51 | def assignments(self) -> t.Set[SymbolicAssignment]:
52 | result = set()
53 | for func in self.program.functions.values():
54 | for stmt in func.body:
55 | if isinstance(stmt, SymbolicAssignment):
56 | result.add(stmt)
57 |
58 | return result
59 |
60 | @cached_property
61 | def subscribers(self) -> t.Set[Subscriber]:
62 | result = set()
63 | for func in self.program.functions.values():
64 | for stmt in func.body:
65 | if isinstance(stmt, Subscriber):
66 | result.add(stmt)
67 |
68 | return result
69 |
70 | @cached_property
71 | def rate_sleeps(self) -> t.Set[RateSleep]:
72 | result = set()
73 | for func in self.program.functions.values():
74 | for stmt in func.body:
75 | if isinstance(stmt, RateSleep):
76 | result.add(stmt)
77 |
78 | return result
79 |
80 | @cached_property
81 | def subscriber_callbacks_map(self) -> t.Set[t.Tuple[Subscriber, SymbolicFunction]]:
82 | result = set()
83 | for sub in self.subscribers:
84 | if sub.callback_name == "unknown":
85 | continue
86 | result.add((sub, self.program.functions[sub.callback_name]))
87 |
88 | return result
89 |
90 | @cached_property
91 | def subscriber_callbacks(self) -> t.Set[SymbolicFunction]:
92 | return set(callback for (sub, callback) in self.subscriber_callbacks_map)
93 |
94 | @cached_property
95 | def publish_calls(self) -> t.List[Publish]:
96 | result = []
97 | for func in self.program.functions.values():
98 | for stmt in func.body:
99 | if isinstance(stmt, Publish) and stmt not in result:
100 | result.append(stmt)
101 |
102 | return result
103 |
104 | @cached_property
105 | def publish_calls_in_sub_callback(self) -> t.List[Publish]:
106 | result = []
107 | for pub_call in self.publish_calls:
108 | for callback in self.subscriber_callbacks:
109 | if callback.body.contains(pub_call, self.program.functions) and pub_call not in result:
110 | result.append(pub_call)
111 |
112 | return result
113 |
114 | @cached_property
115 | def while_loops(self) -> t.List[SymbolicWhile]:
116 | result = []
117 | for func in self.program.functions.values():
118 | for stmt in func.body:
119 | if isinstance(stmt, SymbolicWhile) and stmt not in result:
120 | result.append(stmt)
121 |
122 | return result
123 |
124 | @cached_property
125 | def periodic_publish_calls(self) -> t.List[Publish]:
126 | result = []
127 | for pub_call in self.publish_calls:
128 | for while_stmt in self.while_loops:
129 | if while_stmt.body.contains(pub_call, self.program.functions):
130 | for rate in self.rate_sleeps:
131 | if while_stmt.body.contains(rate, self.program.functions) and pub_call not in result:
132 | result.append(pub_call)
133 |
134 | return result
135 |
136 | @cached_property
137 | def function_calls(self) -> t.List[SymbolicFunctionCall]:
138 | result = []
139 | for func in self.program.functions.values():
140 | for stmt in func.body:
141 | if isinstance(stmt, SymbolicFunctionCall) and stmt not in result:
142 | result.append(stmt)
143 |
144 | return result
145 |
--------------------------------------------------------------------------------
/src/rosdiscover/recover/database.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('RecoveredNodeModelDatabase',)
3 |
4 | import os
5 |
6 | from loguru import logger
7 | import attr
8 |
9 | from .model import RecoveredNodeModel
10 | from ..config import Config
11 |
12 |
13 | @attr.s
14 | class RecoveredNodeModelDatabase:
15 | """Provides an interface for caching recovered node models to and from disk.
16 |
17 | Attributes
18 | ----------
19 | path: str
20 | The absolute path of the database directory on the host file system.
21 |
22 | Raises
23 | ------
24 | ValueError
25 | If the given database path is not an absolute path.
26 | """
27 | path: str = attr.ib()
28 |
29 | @path.validator
30 | def validate_path_must_be_absolute(self, attribute, value) -> None:
31 | if not os.path.isabs(value):
32 | message = f"recovered node model database path must be absolute: {value}"
33 | raise ValueError(message)
34 |
35 | def __attrs_post_init__(self) -> None:
36 | logger.debug(f"ensuring that model database directory exists: {self.path}")
37 | os.makedirs(self.path, exist_ok=True)
38 | logger.debug(f"ensured that model database directory exists: {self.path}")
39 |
40 | def _path(self, image_sha256: str, package_dir: str, node_name: str) -> str:
41 | """Determines the absolute path of a recovered model on disk.
42 |
43 | Parameters
44 | ----------
45 | image_sha256: str
46 | The SHA256 of the image to which the node belongs, represented as a hex string.
47 | package_dir: str
48 | The absolute path of the node's corresponding package directory within its
49 | associated image.
50 | node_name: str
51 | The name of the node.
52 | """
53 | # strip the leading / in the package_dir
54 | assert package_dir[0] == "/"
55 | package_dir = package_dir[1:]
56 |
57 | rel_path = os.path.join(image_sha256, package_dir, f"{node_name}.json")
58 | return os.path.join(self.path, rel_path)
59 |
60 | def _path_from_config(
61 | self,
62 | config: Config,
63 | package_name: str,
64 | node_name: str,
65 | ) -> str:
66 | """Determines the absolute path of a recovered model on disk.
67 |
68 | Parameters
69 | ----------
70 | config: Config
71 | the configuration for the system to which the node belongs
72 | package_name: str
73 | the name of the package to which the node belongs
74 | node_name: str
75 | the name of the node
76 |
77 | Raises
78 | ------
79 | ValueError
80 | if no package exists with the given name inside the specified project
81 | """
82 | try:
83 | package = config.app.description.packages[package_name]
84 | except KeyError as err:
85 | message = f"failed to find package with given name: {package_name}"
86 | raise ValueError(message) from err
87 |
88 | return self._path(config.image_sha256, package.path, node_name)
89 |
90 | def contains(self, config: Config, package: str, node: str) -> bool:
91 | """Determines whether this database contains a recovered model for a given node.
92 |
93 | Parameters
94 | ----------
95 | config: Config
96 | the configuration for the system to which the node belongs
97 | package: str
98 | the name of the package to which the node belongs
99 | node: str
100 | the name of the node
101 |
102 | Returns
103 | -------
104 | bool
105 | True if the database contains a recovered model, or False if it does not.
106 | """
107 | model_path = self._path_from_config(config, package, node)
108 | return os.path.exists(model_path)
109 |
110 | def fetch(self, config: Config, package: str, node: str) -> RecoveredNodeModel:
111 | """Retrieves the model of a given node.
112 |
113 | Parameters
114 | ----------
115 | config: Config
116 | the configuration for the system to which the node belongs
117 | package: str
118 | the name of the package to which the node belongs
119 | node: str
120 | the name of the node
121 |
122 | Raises
123 | ------
124 | ValueError
125 | if no model exists for the given node
126 | """
127 | model_path = self._path_from_config(config, package, node)
128 |
129 | if not os.path.exists(model_path):
130 | message = (f"failed to fetch recovered model for node [{node}] "
131 | f"in package [{package}] for config [{config}]")
132 | raise ValueError(message)
133 |
134 | return RecoveredNodeModel.load(config, model_path)
135 |
136 | def store(self, model: RecoveredNodeModel) -> None:
137 | """Stores a given node model in this database."""
138 | model_path = self._path(model.image_sha256, model.package_abs_path, model.node_name)
139 | logger.info(f"storing recovered node model [{model}] on disk: {model_path}")
140 | package_models_dir = os.path.dirname(model_path)
141 | os.makedirs(package_models_dir, exist_ok=True)
142 | model.save(model_path)
143 | logger.info(f"stored recovered node model [{model}] on disk")
144 |
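145 | # Illustrative sketch (assumption): with a database rooted at /var/cache/rosdiscover,
146 | # a node "amcl" whose package directory inside the image is /ros_ws/src/amcl, and an
147 | # image SHA256 beginning "abc123", `_path` places the recovered model at:
148 | #
149 | #   /var/cache/rosdiscover/abc123.../ros_ws/src/amcl/amcl.json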
--------------------------------------------------------------------------------
/src/rosdiscover/observer/observer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ("Observer",)
3 |
4 | import os
5 | import time
6 | import typing as t
7 | from abc import ABC, abstractmethod
8 | from contextlib import contextmanager
9 |
10 | import roswire
11 | from dockerblade.popen import Popen
12 | from loguru import logger
13 | from roswire import App, AppInstance, ROSVersion
14 |
15 | from ..interpreter import SystemSummary
16 |
17 | if t.TYPE_CHECKING:
18 | from ..config import Config
19 |
20 |
21 | class ExecutionError(Exception):
22 | ...
23 |
24 |
25 | class Observer(ABC):
26 |
27 | @classmethod
28 | def for_container(cls,
29 | container: str,
30 | config: 'Config',
31 | ) -> 'Observer':
32 | """Constructs an Observer for a given running container.
33 |
34 | Parameters
35 | ----------
36 | container: str
37 |             The id or name of a container running a ROS system.
38 | config: Config
39 | A description of the configuration used by the running container.
40 |
41 | Returns
42 | -------
43 | Observer
44 | An observer that is appropriate for the kind of ROS system that is running in the
45 | container.
46 | """
47 | rsw = roswire.ROSWire()
48 | app = rsw.app(config.image, config.sources)
49 | instance = app.attach(container, require_description=True)
50 | observer = cls._build_observer(app, config, instance)
51 | return observer
52 |
53 | @classmethod
54 | def _build_observer(
55 | cls,
56 | app: App,
57 | config: 'Config',
58 | instance: AppInstance,
59 | ) -> 'Observer':
60 | if app.description.distribution.ros == ROSVersion.ROS1:
61 | from .ros1 import ROS1Observer
62 | return ROS1Observer(instance, config)
63 | else:
64 | from .ros2 import ROS2Observer
65 | return ROS2Observer(instance, config)
66 |
67 | @classmethod
68 | @contextmanager
69 | def for_image(cls,
70 | config: 'Config',
71 | start_script: t.Optional[str] = None) -> t.Iterator['Observer']:
72 | """Constructs an observer by starting an instance of an image
73 |
74 | Parameters
75 | ----------
76 | config: Config
77 | A description of the configuration used by the running container.
78 | Will contain the image name, sources, and environment variables to
79 | run
80 | start_script: str
81 | An optional script (on the container) to run after instance cretion
82 | Returns
83 | -------
84 | Observer
85 | An observer that is appropriate for the kind of ROS system that is running in the
86 | container.
87 | """
88 | rsw = roswire.ROSWire()
89 | app = rsw.app(config.image, config.sources)
90 | with app.launch(environment=config.environment) as instance:
91 | observer = cls._build_observer(app, config, instance)
92 | script: t.Optional[Popen] = None
93 | try:
94 | if start_script:
95 | logger.debug("Starting the container")
96 | cmd = start_script
97 | script = instance.shell.popen(cmd)
98 | time.sleep(5)
99 | if script.returncode and script.returncode != 1:
100 | for line in script.stream:
101 | logger.error(line)
102 | raise ExecutionError("Could not run start script")
103 | yield observer
104 | finally:
105 | if start_script and script:
106 | script.terminate()
107 |
108 | def __init__(self, app: AppInstance, config: 'Config') -> None:
109 | self._app_instance = app
110 | self._config = config
111 |
112 | @abstractmethod
113 | def observe(self) -> SystemSummary:
114 | """Dynamically observe the system and produce a summary of the architecture.
115 |
116 | Returns
117 | -------
118 |         SystemSummary
119 | An immutable representation of the system architecture.
120 | """
121 | ...
122 |
123 | def execute_script(self, path_on_host: str) -> Popen:
124 | """Executes a script on the executing container.
125 |
126 | Parameters
127 | ----------
128 | path_on_host: str
129 | The path to the script on the host (commands in the script should
130 | be relative to the container, not the host).
131 |
132 | Returns
133 | -------
134 | Popen
135 | The process instance that was started on the container
136 | """
137 | if not os.path.exists(path_on_host):
138 | raise FileNotFoundError(f"'{path_on_host}' not found.")
139 | assert self._app_instance is not None
140 |
141 | path_on_container = self._app_instance.files.mktemp('.sh')
142 | self._app_instance.files.copy_from_host(path_on_host, path_on_container)
143 |
144 | cmd = f"bash {path_on_container}"
145 |
146 | logger.debug(f"Running the script in the container: {cmd}")
147 | process = self._app_instance.shell.popen(cmd)
148 | return process
149 |
150 | @abstractmethod
151 | def launch_from_config(self, sleep_time: float) -> t.Sequence[Popen]:
152 | """
153 | Uses the launch file(s) in config to launch the ROS nodes. Waits until
154 | all nodes are started.
155 |
156 | Parameters
157 | ----------
158 | sleep_time: float
159 | The number of seconds to sleep after each launch (if there are multiple launches)
160 |
161 | Returns
162 | -------
163 |         Sequence[Popen]
164 |             The processes started for the launches, one per launch file, in the
165 |             order in which the launches were issued.
166 | """
167 | ...
168 |
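169 | # Illustrative sketch (assumption): a typical dynamic-observation workflow using the
170 | # context manager above; the config path is hypothetical and Config is imported from
171 | # rosdiscover.config.
172 | #
173 | #   config = Config.load("example/turtlebot3.yml")
174 | #   with Observer.for_image(config) as observer:
175 | #       observer.launch_from_config(sleep_time=30.0)
176 | #       summary = observer.observe()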
--------------------------------------------------------------------------------
/src/rosdiscover/observer/ros1.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('ROS1Observer',)
3 |
4 | import os
5 | import tempfile
6 | import time
7 | import typing as t
8 |
9 | from dockerblade.popen import Popen
10 | from loguru import logger
11 | from roswire.common import SystemState
12 |
13 | from .nodeinfo import NodeInfo
14 | from .observer import ExecutionError, Observer
15 | from ..interpreter import NodeContext, SystemSummary
16 | from ..launch import Launch
17 |
18 | _NODES_TO_FILTER_OUT = ('/rosout',)
19 | _TOPICS_TO_FILTER_OUT = ('/rosout', '/rosout_agg')
20 | _SERVICES_TO_FILTER_OUT = ('set_logger_level', 'get_loggers')
21 |
22 |
23 | class ROS1Observer(Observer):
24 |
25 | def observe(self) -> SystemSummary:
26 | """Observe the state of the running system and produce a summary of the architecture."""
27 | nodecontexts = []
28 | with self._app_instance.ros1() as ros:
29 | nodes = self._transform_state_to_nodeinfo(ros.state)
30 | for node in nodes:
31 | nodecontext = node.make_node_context(ros, self._app_instance)
32 | nodecontexts.append(nodecontext)
33 |
34 | return self._summarise(nodecontexts)
35 |
36 | def _summarise(self, contexts: t.Collection[NodeContext]) -> SystemSummary:
37 | """
38 | Produce an immutable description of the system architecture from the collection of
39 | NodeContexts.
40 |
41 | Parameters
42 | ----------
43 | contexts: Collection[NodeContext]
44 | A collection of contexts that are to be summarised.
45 |
46 | Returns
47 | -------
48 | SystemSummary
49 | A summary of the system architecture.
50 | """
51 | summaries = [node.summarise() for node in contexts]
52 | node_to_summary = {s.fullname: s for s in summaries}
53 | return SystemSummary(node_to_summary)
54 |
55 | def _transform_state_to_nodeinfo(self, state: SystemState) -> t.Collection[NodeInfo]:
56 | """
57 |         Produce information about the ROS system keyed by node.
58 |
59 |         Roswire state has information keyed by topic, which causes complex processing in
60 |         rosdiscover, which expects information keyed by node. We transform the roswire
61 |         information so that it is keyed by node.
62 |
63 | Parameters
64 | ----------
65 |         state: SystemState
66 |             State information from roswire
67 |
68 | Returns
69 | -------
70 | Collection[NodeInfo]
71 |             A collection of nodes with publishers, subscribers, and services attached
72 | """
73 | reorganized_nodes: t.Dict[str, NodeInfo] = dict()
74 | # Create the node placeholders
75 | for node_name in state.nodes:
76 | if node_name not in _NODES_TO_FILTER_OUT:
77 | node: NodeInfo = NodeInfo(
78 | name=node_name[1:] if node_name.startswith('/') else node_name
79 | )
80 | reorganized_nodes[node_name] = node
81 |
82 | # Add in topics
83 | for topic in state.publishers:
84 | if topic not in _TOPICS_TO_FILTER_OUT:
85 | for node_name in state.publishers[topic]:
86 | if node_name in reorganized_nodes:
87 | reorganized_nodes[node_name].publishers.add(topic)
88 |
89 | for topic in state.subscribers:
90 | if topic not in _TOPICS_TO_FILTER_OUT:
91 | for node_name in state.subscribers[topic]:
92 | if node_name in reorganized_nodes:
93 | reorganized_nodes[node_name].subscribers.add(topic)
94 |
95 | # Add in services
96 | for service in state.services:
97 | if service.split('/')[-1] not in _SERVICES_TO_FILTER_OUT:
98 | for node_name in state.services[service]:
99 | if node_name in reorganized_nodes:
100 | reorganized_nodes[node_name].provides.add(service)
101 |
102 | return list(reorganized_nodes.values())
103 |
104 | def launch_from_config(self, sleep_time: float) -> t.Sequence[Popen]:
105 | # Eagerly check launch files exist - don't start any if one doesn't exist
106 | app_instance = self._app_instance
107 | for launch in self._config.launches:
108 | if not app_instance.files.exists(launch.filename):
109 | raise FileNotFoundError(launch.filename)
110 |
111 | processes = []
112 |
113 | for launch in self._config.launches:
114 | launch_script = self._generate_launch_script_on_host(launch)
115 | cmd = f"/bin/bash {launch_script}"
116 | process = app_instance.shell.popen(cmd)
117 | processes.append(process)
118 | time.sleep(sleep_time)
119 | if process.returncode and process.returncode != 0:
120 | raise ExecutionError(f"Could not run '{cmd}' on container.")
121 | return processes
122 |
123 | def _generate_launch_script_on_host(self, launch: Launch) -> str:
124 | script_path = None
125 | try:
126 | tmp = tempfile.NamedTemporaryFile(suffix=".sh", delete=False)
127 | with open(tmp.name, 'w') as script:
128 | for source in self._config.sources:
129 | script.write(f"source {source}\n")
130 | for var, val in self._config.environment.items():
131 | script.write(f"export {var}={val}\n")
132 | launch_cmd = f"roslaunch {launch.filename}"
133 | for arg in launch.get_argv():
134 | launch_cmd += f" {arg}"
135 |                 logger.info(f"Launching with '{launch_cmd}'")
136 | script.write(f"{launch_cmd}\n")
137 | script_path = script.name
138 | script_on_container = self._app_instance.files.mktemp(suffix='.sh')
139 | self._app_instance.files.copy_from_host(script_path, script_on_container)
140 | return script_on_container
141 | finally:
142 | if script_path:
143 | os.remove(script_path)
144 |
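145 | # The script generated by _generate_launch_script_on_host and copied into the container
146 | # looks roughly like the following (the paths, variables, and launch file here are
147 | # illustrative; the real values come from the YAML configuration):
148 | #
149 | #     source /opt/ros/kinetic/setup.bash
150 | #     source /ros_ws/devel/setup.bash
151 | #     export TURTLEBOT3_MODEL=burger
152 | #     roslaunch /ros_ws/src/turtlebot3_simulations/turtlebot3_gazebo/launch/turtlebot3_house.launch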
--------------------------------------------------------------------------------
/tests/test_behavior_analysis.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import os
4 | import unittest
5 |
6 | from rosdiscover.config import Config
7 | from rosdiscover.recover import NodeRecoveryTool
8 | from rosdiscover.recover.analyzer import SymbolicProgramAnalyzer
9 | from rosdiscover.recover.model import RecoveredNodeModel
15 |
16 | DIR_HERE = os.path.dirname(__file__)
17 |
18 | class TestBehaviorAnalysis(unittest.TestCase):
19 |
20 | autoware_file = os.path.join(DIR_HERE, 'configs', 'autoware.yml')
21 | turtlebot_file = os.path.join(DIR_HERE, 'configs', 'turtlebot.yml')
22 |
23 | def get_model(self, config_path: str, package:str, node:str) -> RecoveredNodeModel:
24 | config = Config.load(config_path)
25 | with NodeRecoveryTool.for_config(config) as tool:
26 | print(f"spun up container: {tool}")
27 | return tool.recover_using_cmakelists(package, node)
28 |
29 |     def assert_publish_calls(self, analyzer: SymbolicProgramAnalyzer, publishers):
30 |         publish_calls = set()
31 |         for p in analyzer.publish_calls:
32 |             publish_calls.add(p.publisher)
33 |
34 |         self.assertSetEqual(publish_calls, publishers)
35 |
36 |     def assert_publish_calls_in_sub_callback(self, analyzer: SymbolicProgramAnalyzer, publishers):
37 |         publish_calls_in_sub_callback = set()
38 |         for p in analyzer.publish_calls_in_sub_callback:
39 |             publish_calls_in_sub_callback.add(p.publisher)
40 |
41 |         self.assertSetEqual(publish_calls_in_sub_callback, publishers)
42 |
43 |     def assert_periodic_publish_calls(self, analyzer: SymbolicProgramAnalyzer, publishers):
44 |         periodic_publish_calls = set()
45 |         for p in analyzer.periodic_publish_calls:
46 |             periodic_publish_calls.add(p.publisher)
47 |
48 |         self.assertSetEqual(periodic_publish_calls, publishers)
49 |
50 |     def assert_sub_callbacks(self, analyzer: SymbolicProgramAnalyzer, callbacks):
51 |         sub_callback = set()
52 |         for c in analyzer.subscriber_callbacks:
53 |             sub_callback.add(c.name)
54 |
55 |         self.assertSetEqual(sub_callback, callbacks)
56 |
57 |     def assert_rate_sleeps(self, analyzer: SymbolicProgramAnalyzer, sleeps):
58 |         rate_sleeps = set()
59 |         for r in analyzer.rate_sleeps:
60 |             rate_sleeps.add(r.rate.value)
61 |
62 |         self.assertSetEqual(rate_sleeps, sleeps)
63 |
64 | def test_velocity_set(self):
65 |         analyzer = SymbolicProgramAnalyzer(self.get_model(self.autoware_file, "astar_planner", "velocity_set").program)
66 |
67 |         self.assert_publish_calls_in_sub_callback(analyzer, set())
68 |
69 |         self.assert_rate_sleeps(analyzer, {10.0})
70 |
71 |         self.assert_sub_callbacks(analyzer,
72 | {
73 | "VelocitySetPath::waypointsCallback",
74 | "VelocitySetPath::currentVelocityCallback",
75 | "VelocitySetInfo::configCallback",
76 | "VelocitySetInfo::pointsCallback",
77 | "VelocitySetInfo::localizerPoseCallback",
78 | "VelocitySetInfo::controlPoseCallback",
79 | "VelocitySetInfo::obstacleSimCallback",
80 | "VelocitySetInfo::detectionCallback",
81 | "CrossWalk::crossWalkCallback",
82 | "CrossWalk::areaCallback",
83 | "CrossWalk::lineCallback",
84 | "CrossWalk::pointCallback",
85 | }
86 | )
87 |
88 |         self.assert_periodic_publish_calls(analyzer,
89 | {
90 | "obstacle_pub",
91 | "obstacle_waypoint_pub",
92 | "detection_range_pub",
93 | "final_waypoints_pub"
94 | }
95 | )
96 |
97 | def test_obj_reproj(self):
98 | analyzer = SymbolicProgramAnalyzer(self.get_model(self.autoware_file, "obj_reproj", "obj_reproj").program)
99 |
100 | self.assert_publish_calls_in_sub_callback(
101 | analyzer,
102 | {'pub', 'marker_pub', 'jsk_bounding_box_pub'}
103 | )
104 |
105 | self.assert_rate_sleeps(analyzer,set())
106 |
107 | self.assert_sub_callbacks(analyzer,
108 | {
109 | "obj_pos_xyzCallback",
110 | "projection_callback",
111 | "camera_info_callback"
112 | }
113 | )
114 |
115 | self.assert_periodic_publish_calls(analyzer, set())
116 |
117 | def test_wf_simulator(self):
118 | analyzer = SymbolicProgramAnalyzer(self.get_model(self.autoware_file, "waypoint_follower", "wf_simulator").program)
119 |
120 | self.assert_publish_calls(
121 | analyzer,
122 | {'odometry_publisher_', 'velocity_publisher_'}
123 | )
124 |
125 | self.assert_rate_sleeps(analyzer, {50.0})
126 |
127 | self.assert_sub_callbacks(analyzer,
128 | {
129 | "(anonymous namespace)::CmdCallBack",
130 | "(anonymous namespace)::controlCmdCallBack",
131 | "(anonymous namespace)::waypointCallback",
132 | "(anonymous namespace)::callbackFromClosestWaypoint",
133 | "(anonymous namespace)::initialposeCallback",
134 |                 "(anonymous namespace)::callbackFromPoseStamped"
136 | }
137 | )
138 |
139 | self.assert_periodic_publish_calls(analyzer,
140 | {
141 | "odometry_publisher_",
142 | "velocity_publisher_",
143 | }
144 | )
145 |
146 | def test_turtlebot_move_action_server(self):
147 | analyzer = SymbolicProgramAnalyzer(self.get_model(self.turtlebot_file, "turtlebot_actions", "turtlebot_move_action_server").program)
148 |
149 | self.assert_publish_calls_in_sub_callback(
150 | analyzer,
151 | set()
152 | )
153 |
154 | self.assert_rate_sleeps(analyzer, {25.0})
155 |
156 | self.assert_sub_callbacks(analyzer,
157 | set()
158 | )
159 |
160 | self.assert_periodic_publish_calls(analyzer,
161 | {
162 | "cmd_vel_pub_",
163 | }
164 | )
165 |
166 | if __name__ == '__main__':
167 | unittest.main()
168 |
--------------------------------------------------------------------------------
/src/rosdiscover/project/models.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ("ProjectModels",)
3 |
4 | import os
5 | import types
6 | import typing as t
7 |
8 | import attr
9 | from loguru import logger
10 |
11 | from ..config import Config
12 | from ..interpreter.model import NodeModel, HandwrittenModel, PlaceholderModel
13 | from ..recover import NodeRecoveryTool, RecoveredNodeModelDatabase
14 |
15 |
16 | @attr.s(slots=True, auto_attribs=True)
17 | class ProjectModels:
18 | """Provides an interface for accessing and recovering the models for a given project.
19 |
20 | Attributes
21 | ----------
22 | config: Config
23 | The configuration for the project
24 | allow_recovery: bool
25 | Allows models to be recovered from source if :code:`True`.
26 | allow_placeholders: bool
27 | If :code:`True`, uses a placeholder model if there is no handwritten
28 | model available and recovery is disabled or not possible.
29 | """
30 | config: Config
31 | allow_recovery: bool = attr.ib(default=True)
32 | allow_placeholders: bool = attr.ib(default=True)
33 | _recovered_models: RecoveredNodeModelDatabase = attr.ib(init=False)
34 | _recovery_tool: NodeRecoveryTool = attr.ib(init=False)
35 | # TODO add: use_model_cache
36 | # TODO add: prefer_recovered
37 |
38 | def __attrs_post_init__(self) -> None:
39 | # TODO allow model database path to be specified
40 | recovered_model_database_path = os.path.expanduser("~/.rosdiscover/recovered-models")
41 | self._recovered_models = RecoveredNodeModelDatabase(recovered_model_database_path)
42 | self._recovery_tool = NodeRecoveryTool(app=self.config.app)
43 |
44 | def __enter__(self) -> "ProjectModels":
45 | self.open()
46 | return self
47 |
48 | def __exit__(
49 | self,
50 | ex_type: t.Optional[t.Type[BaseException]],
51 | ex_val: t.Optional[BaseException],
52 | ex_tb: t.Optional[types.TracebackType],
53 | ) -> None:
54 | self.close()
55 |
56 | def open(self) -> None:
57 | self._recovery_tool.open()
58 |
59 | def close(self) -> None:
60 | self._recovery_tool.close()
61 |
62 | def _recover(self, package: str, node: str) -> t.Optional[NodeModel]:
63 | # have we already recovered this model?
64 | logger.debug(f"Recovering {package}/{node}")
65 | if self._recovered_models.contains(self.config, package, node):
66 | return self._recovered_models.fetch(self.config, package, node)
67 |
68 |         # no explicit sources were provided for this node: fall back to recovery via its CMakeLists.txt
69 | if (package, node) not in self.config.node_sources:
70 | try:
71 | logger.info(f"Attempting to recover {package}/{node} from CMakeLists.txt")
72 | model = self._recovery_tool.recover_using_cmakelists(package, node)
73 | except ValueError:
74 | logger.exception(f"Error recovering {package}/{node}")
75 | return None
76 | except RuntimeError:
77 | logger.exception(f"Static recovery failed for {package}/{node}")
78 | return None
79 | else:
80 | logger.info(f"Attempting to use passed in node_sources for {package}/{node}")
81 | node_info = self.config.node_sources[(package, node)]
82 | cmake_filename_and_line = node_info.origin.split(':') if node_info.origin else ["", "-1"]
83 |
84 | # use the recovery tool to recover the model before saving it to the database
85 | model = self._recovery_tool.recover(
86 | package,
87 | node,
88 | node_info.entrypoint,
89 | node_info.sources,
90 | node_info.restrict_to_paths,
91 | cmake_filename_and_line[0],
92 | int(cmake_filename_and_line[1])
93 | )
94 | self._recovered_models.store(model)
95 | return model
96 |
97 | def _fetch_handwritten(self, package: str, node: str) -> t.Optional[NodeModel]:
98 | logger.debug(f"Handwritten {package}/{node}")
99 | if HandwrittenModel.exists(package, node):
100 | return HandwrittenModel.fetch(package, node)
101 | # '*' is a placeholder for a package name. It's admittedly a hack, but it
102 | # saves looking for packages that aren't on the system. It is used when
103 | # we use dummy nodes for misconfiguration detection
104 | # FIXME: Do something more principled.
105 | if HandwrittenModel.exists("*", node):
106 | return HandwrittenModel.fetch("*", node)
107 | return None
108 |
109 | def _fetch_placeholder(self, package: str, node: str) -> NodeModel:
110 | return PlaceholderModel(package, node)
111 |
112 | def fetch(self, package: str, node: str) -> NodeModel:
113 | """Retrieves the model for a given node.
114 | By default, the handwritten model for this node will be returned, if
115 | available, since this is likely to be the most accurate. If there is no
116 | handwritten model for the given node, then the model will be statically
117 | recovered from source if model recovery is allowed. Finally, if there
118 | is no handwritten model and model recovery is disabled or not possible,
119 | a placeholder model will be returned unless placeholders are disabled.
120 |
121 | Parameters
122 | ----------
123 | package: str
124 | The name of the package to which the node belongs.
125 | node: str
126 | The name of the type of node.
127 |
128 | Returns
129 | -------
130 | NodeModel
131 |             When searching for a given model, highest preference will be given to
132 |             handwritten models. If there is no handwritten model, then this method will
133 |             attempt to return a recovered model. If there is also no recovered model,
134 |             then a placeholder model will be returned instead.
135 |
136 | Raises
137 | ------
138 | ValueError
139 |             If no model could be fetched for the given node and placeholders
140 | have been disabled.
141 | """
142 | # TODO this exists to make it easier to customize preferences later on
143 | # e.g., to prefer recovered models over handwritten ones
144 | model_sources: t.List[t.Callable[[str, str], t.Optional[NodeModel]]] = [
145 | self._fetch_handwritten,
146 | ]
147 | if self.allow_recovery:
148 | model_sources.append(self._recover)
149 |
150 | fetched_model: t.Optional[NodeModel] = None
151 | for model_source in model_sources:
152 | fetched_model = model_source(package, node)
153 | if fetched_model:
154 | return fetched_model
155 |
156 | if self.allow_placeholders:
157 | return self._fetch_placeholder(package, node)
158 | else:
159 | raise ValueError(f"failed to fetch model for node [{node}] in package [{package}]")
160 |
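161 | # Illustrative usage sketch (the package/node pair is hypothetical; recovered models are
162 | # cached under ~/.rosdiscover/recovered-models by default):
163 | #
164 | #     config = Config.load("example/fetch.yml")
165 | #     with ProjectModels(config, allow_recovery=True) as models:
166 | #         model = models.fetch("amcl", "amcl")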
--------------------------------------------------------------------------------
/src/rosdiscover/models/plugins/controller_manager.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import annotations
3 |
4 | __all__ = ("ControllerManagerPlugin",)
5 |
6 | import abc
7 | import typing as t
8 |
9 | import attr
10 | from loguru import logger
11 | from roswire.name import namespace_join
12 | from ...interpreter import Interpreter, ModelPlugin, NodeContext
13 |
14 |
15 | @attr.s(frozen=True, slots=True)
16 | class ControllerManagerPlugin(ModelPlugin, abc.ABC):
17 | type_name: t.ClassVar[str]
18 |
19 | controller_name: str = attr.ib()
20 | controller_manager_node: str = attr.ib()
21 |
22 | @classmethod
23 | def from_type(
24 | cls,
25 | controller_type: str,
26 | controller_name: str,
27 | controller_manager_node: str,
28 | ) -> ControllerManagerPlugin:
29 | """Loads a controller manager plugin.
30 |
31 | Parameters
32 | ----------
33 | controller_type: str
34 | The type of the controller
35 | controller_name: str
36 | The name of the controller
37 | controller_manager_node: str
38 | The fully qualified name of the node that hosts the associated control manager
39 | """
40 | logger.debug(
41 | f"loading controller_manager plugin [{controller_name}] "
42 | f"with type [{controller_type}] "
43 | f"via node [{controller_manager_node}]"
44 | )
45 |
46 | # find the class associated with this type
47 | # TODO iterate over subclasses and inspect type_name
48 | type_to_class: t.Mapping[str, t.Type[ControllerManagerPlugin]] = {
49 | 'joint_state_controller/JointStateController': JointStateControllerPlugin,
50 | 'diff_drive_controller/DiffDriveController': DiffDriveControllerPlugin,
51 | 'effort_controllers/JointEffortController': JointEffortController,
52 | "effort_controllers/JointPositionController": JointPositionController,
53 | }
54 |         plugin_cls = type_to_class[controller_type]
55 |
56 |         plugin = plugin_cls.build(controller_name, controller_manager_node)
57 | logger.debug(f'loaded controller_manager plugin [{controller_name}]')
58 | return plugin
59 |
60 | @classmethod
61 | def build(cls, controller_name: str, controller_manager_node: str) -> ControllerManagerPlugin:
62 | return cls(
63 | controller_name=controller_name,
64 | controller_manager_node=controller_manager_node,
65 | )
66 |
67 | @property
68 | def namespace(self) -> str:
69 | """The associated namespace for this controller."""
70 | return namespace_join(self.controller_manager_node, self.controller_name)
71 |
72 | def load(self, interpreter: Interpreter) -> None:
73 | context = interpreter.nodes[self.controller_manager_node]
74 | self._load(interpreter, context)
75 |
76 | @abc.abstractmethod
77 | def _load(self, interpreter: Interpreter, context: NodeContext) -> None:
78 | """Simulates the architectural effects of this controller.
79 |
80 | Parameters
81 | ----------
82 | interpreter: Interpreter
83 | provides access to the simulated architecture for the entire ROS system
84 | context: NodeContext
85 | provides access to the node context for the associated controller_manager node
86 | that hosts this controller
87 | """
88 | ...
89 |
90 |
91 | @attr.s(auto_attribs=True, frozen=True, slots=True)
92 | class JointStateControllerPlugin(ControllerManagerPlugin):
93 | def _load(self, interpreter: Interpreter, context: NodeContext) -> None:
94 | context.read("joints", None)
95 | context.read("publish_rate")
96 | context.read("extra_joints")
97 | context.pub("joint_states", "sensor_msgs::JointState")
98 |
99 |
100 | @attr.s(auto_attribs=True, frozen=True, slots=True)
101 | class DiffDriveControllerPlugin(ControllerManagerPlugin):
102 | def _load(self, interpreter: Interpreter, context: NodeContext) -> None:
103 | # FIXME this is only a partial model!
104 | # https://github.com/ros-controls/ros_controllers/blob/noetic-devel/diff_drive_controller/src/diff_drive_controller.cpp
105 |
106 | # TODO get wheel names
107 |
108 | context.read("publish_rate", 50.0)
109 | context.read("open_loop", False)
110 | context.read("wheel_separation_multiplier", 1.0)
111 |
112 | if context.has_param("wheel_radius_multiplier"):
113 | context.read("wheel_radius_multiplier")
114 | else:
115 | context.read("left_wheel_radius_multiplier", 1.0)
116 | context.read("right_wheel_radius_multiplier", 1.0)
117 |
118 | context.read("velocity_rolling_window_size", 10)
119 | context.read("cmd_vel_timeout", 0.5)
120 | context.read("allow_multiple_cmd_vel_publishers", True)
121 |
122 | context.read("base_frame_id", "base_link")
123 | context.read("odom_frame_id", "odom")
124 | context.read("enable_odom_tf", True)
125 |
126 | context.read("linear/x/has_velocity_limits", False)
127 | context.read("linear/x/has_acceleration_limits", False)
128 | context.read("linear/x/has_jerk_limits", False)
129 | context.read("linear/x/max_velocity", 0.0)
130 | context.read("linear/x/min_velocity", 0.0)
131 | context.read("linear/x/max_acceleration", 0.0)
132 | context.read("linear/x/min_acceleration", 0.0)
133 | context.read("linear/x/max_jerk", 0.0)
134 | context.read("linear/x/min_jerk", 0.0)
135 |
136 | context.read("angular/z/has_velocity_limits", False)
137 | context.read("angular/z/has_acceleration_limits", False)
138 | context.read("angular/z/has_jerk_limits", False)
139 | context.read("angular/z/max_velocity", 0.0)
140 | context.read("angular/z/min_velocity", 0.0)
141 | context.read("angular/z/max_acceleration", 0.0)
142 | context.read("angular/z/min_acceleration", 0.0)
143 | context.read("angular/z/max_jerk", 0.0)
144 | context.read("angular/z/min_jerk", 0.0)
145 |
146 | should_publish_command = context.read("publish_cmd", False)
147 | should_publish_joint_state = context.read("publish_wheel_joint_controller_state", False)
148 |
149 | context.read("wheel_separation", 0.0)
150 | context.read("wheel_radius", 0.0)
151 |
152 | context.sub("cmd_vel", "geometry_msgs/Twist")
153 |
154 | if should_publish_command:
155 | context.pub("cmd_vel_out", "geometry_msgs/TwistStamped")
156 |
157 | if should_publish_joint_state:
158 | context.pub("wheel_joint_controller_state", "control_msgs/JointTrajectoryControllerState")
159 |
160 | context.pub("odom", "nav_msgs/Odometry")
161 | context.pub("/tf", "tf/tfMessage")
162 |
163 | # TODO this node supports dynamic reconfiguration
164 | logger.warning("plugin model is likely incomplete: DiffDriveControllerPlugin")
165 |
166 |
167 | @attr.s(auto_attribs=True, frozen=True, slots=True)
168 | class ForwardCommandController(ControllerManagerPlugin):
169 |
170 | @property
171 | def namespace(self) -> str:
172 | return self.controller_name
173 |
174 | def _load(self, interpreter: Interpreter, context: NodeContext) -> None:
175 | ns = self.namespace
176 | # https://github.com/ros-controls/ros_controllers/blob/melodic-devel/forward_command_controller/include/forward_command_controller/forward_command_controller.h
177 | context.sub(f"{ns}/command", "std_msgs/Float64")
178 |
179 |
180 | class JointEffortController(ForwardCommandController):
181 | def _load(self, interpreter: Interpreter, context: NodeContext) -> None:
182 | super()._load(interpreter, context)
183 |
184 |
185 | @attr.s(auto_attribs=True, frozen=True, slots=True)
186 | class JointPositionController(ControllerManagerPlugin):
187 |
188 | @property
189 | def namespace(self) -> str:
190 | return self.controller_name
191 |
192 | def _load(self, interpreter: Interpreter, context: NodeContext) -> None:
193 | ns = self.namespace
194 | # https://github.com/ros-controls/ros_controllers/blob/melodic-devel/forward_command_controller/include/forward_command_controller/forward_command_controller.h
195 | context.sub(f"{ns}/command", "std_msgs/Float64")
196 |         context.pub(f"{ns}/state", "control_msgs/JointControllerState")
197 |
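198 | # Illustrative example (the controller and controller_manager node names are hypothetical):
199 | #
200 | #     plugin = ControllerManagerPlugin.from_type(
201 | #         "diff_drive_controller/DiffDriveController",
202 | #         "mobile_base_controller",
203 | #         "/controller_manager",
204 | #     )
205 | #     plugin.load(interpreter)  # subscribes cmd_vel and publishes odom on the hosting node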
--------------------------------------------------------------------------------
/src/rosdiscover/recover/call.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = (
3 |     "DeleteParam",
4 |     "HasParam",
5 |     "Publish", "Publisher",
6 |     "RateSleep", "ReadParam",
7 |     "ReadParamWithDefault",
8 |     "RosInit",
9 |     "ServiceCaller",
10 |     "ServiceProvider",
11 |     "Subscriber",
12 |     "SymbolicRosApiCall",
13 |     "WriteParam",
14 | )
15 |
16 | import abc
17 | import typing as t
18 |
19 | import attr
20 | from loguru import logger
21 |
22 | from .symbolic import (
23 | SymbolicBool,
24 | SymbolicContext,
25 | SymbolicExpr,
26 | SymbolicFloat,
27 | SymbolicStatement,
28 | SymbolicString,
29 | SymbolicValue,
30 | )
31 |
32 |
33 | class SymbolicRosApiCall(SymbolicStatement, abc.ABC):
34 | """Represents a symbolic call to a ROS API."""
35 | @abc.abstractmethod
36 | def is_unknown(self) -> bool:
37 | """Determines whether the target of this API call is unknown."""
38 | ...
39 |
40 | def children(self) -> t.Set[SymbolicExpr]:
41 | return set()
42 |
43 |
44 | @attr.s(frozen=True, auto_attribs=True, slots=True)
45 | class RosInit(SymbolicRosApiCall):
46 | name: SymbolicString
47 |
48 | def to_dict(self) -> t.Dict[str, t.Any]:
49 | return {
50 | "kind": "ros-init",
51 | "name": self.name.to_dict(),
52 | }
53 |
54 | def eval(self, context: SymbolicContext) -> None:
55 | return None
56 |
57 | def is_unknown(self) -> bool:
58 | return self.name.is_unknown()
59 |
60 |
61 | @attr.s(frozen=True, auto_attribs=True, slots=True)
62 | class Publisher(SymbolicRosApiCall):
63 | topic: SymbolicString
64 | format_: str
65 |
66 | def to_dict(self) -> t.Dict[str, t.Any]:
67 | return {
68 | "kind": "publishes-to",
69 | "name": self.topic.to_dict(),
70 | "format": self.format_,
71 | }
72 |
73 | def eval(self, context: SymbolicContext) -> None:
74 | topic = self.topic.eval(context)
75 | context.node.pub(topic, self.format_)
76 |
77 | def is_unknown(self) -> bool:
78 | return self.topic.is_unknown()
79 |
80 |
81 | @attr.s(frozen=True, auto_attribs=True, slots=True)
82 | class Publish(SymbolicRosApiCall):
83 | publisher: str
84 | condition: SymbolicExpr
85 |
86 | def to_dict(self) -> t.Dict[str, t.Any]:
87 | return {
88 | "kind": "publish",
89 | "publisher": self.publisher,
90 | "path_condition": self.condition.to_dict(),
91 | }
92 |
93 | def eval(self, context: SymbolicContext) -> None:
94 |         logger.warning("TODO: Add analysis to Publish.eval")
95 | return
96 |
97 | def is_unknown(self) -> bool:
98 | return self.publisher == "unknown"
99 |
100 |
101 | @attr.s(frozen=True, auto_attribs=True, slots=True)
102 | class RateSleep(SymbolicRosApiCall):
103 | rate: SymbolicFloat
104 |
105 | def to_dict(self) -> t.Dict[str, t.Any]:
106 | return {
107 | "kind": "ratesleep",
108 | "rate": self.rate.to_dict(),
109 | }
110 |
111 | def eval(self, context: SymbolicContext) -> None:
112 |         logger.warning("TODO: Add analysis to RateSleep.eval")
113 | return
114 |
115 | def is_unknown(self) -> bool:
116 | return self.rate.is_unknown()
117 |
118 |
119 | @attr.s(frozen=True, auto_attribs=True, slots=True)
120 | class Subscriber(SymbolicRosApiCall):
121 | topic: SymbolicString
122 | format_: str
123 | callback_name: str
124 |
125 | def to_dict(self) -> t.Dict[str, t.Any]:
126 | return {
127 | "kind": "subscribes-to",
128 | "name": self.topic.to_dict(),
129 | "format": self.format_,
130 | "callback-name": self.callback_name
131 | }
132 |
133 | def eval(self, context: SymbolicContext) -> None:
134 | topic = self.topic.eval(context)
135 | context.node.sub(topic, self.format_)
136 |
137 | def is_unknown(self) -> bool:
138 | return self.topic.is_unknown()
139 |
140 |
141 | @attr.s(frozen=True, auto_attribs=True, slots=True)
142 | class ServiceProvider(SymbolicRosApiCall):
143 | service: SymbolicString
144 | format_: str
145 |
146 | def to_dict(self) -> t.Dict[str, t.Any]:
147 | return {
148 | "kind": "provides-service",
149 | "name": self.service.to_dict(),
150 | "format": self.format_,
151 | }
152 |
153 | def eval(self, context: SymbolicContext) -> None:
154 | service = self.service.eval(context)
155 | context.node.provide(service, self.format_)
156 |
157 | def is_unknown(self) -> bool:
158 | return self.service.is_unknown()
159 |
160 |
161 | @attr.s(frozen=True, auto_attribs=True, slots=True)
162 | class ServiceCaller(SymbolicRosApiCall):
163 | service: SymbolicValue
164 | format_: str
165 |
166 | def to_dict(self) -> t.Dict[str, t.Any]:
167 | return {
168 | "kind": "calls-service",
169 | "name": self.service.to_dict(),
170 | "format": self.format_,
171 | }
172 |
173 | def eval(self, context: SymbolicContext) -> None:
174 | service = self.service.eval(context)
175 | context.node.use(service, self.format_)
176 |
177 | def is_unknown(self) -> bool:
178 | return self.service.is_unknown()
179 |
180 |
181 | @attr.s(frozen=True, auto_attribs=True, slots=True)
182 | class WriteParam(SymbolicRosApiCall):
183 | param: SymbolicString
184 | value: SymbolicValue
185 |
186 | def to_dict(self) -> t.Dict[str, t.Any]:
187 | return {
188 | "kind": "writes-to-param",
189 | "name": self.param.to_dict(),
190 | "value": self.value.to_dict(),
191 | }
192 |
193 | def eval(self, context: SymbolicContext) -> None:
194 | param = self.param.eval(context)
195 | value = self.value.eval(context)
196 | context.node.write(param, value)
197 |
198 | def is_unknown(self) -> bool:
199 | # NOTE we don't consider the parameter value
200 | # we should discuss whether or not we should
201 | return self.param.is_unknown()
202 |
203 |
204 | @attr.s(frozen=True, auto_attribs=True, slots=True, str=False)
205 | class ReadParam(SymbolicRosApiCall, SymbolicValue):
206 | param: SymbolicString
207 |
208 | def to_dict(self) -> t.Dict[str, t.Any]:
209 | return {
210 | "kind": "reads-param",
211 | "name": self.param.to_dict(),
212 | }
213 |
214 | def eval(self, context: SymbolicContext) -> None:
215 | param = self.param.eval(context)
216 | context.node.read(param)
217 |
218 | def is_unknown(self) -> bool:
219 | return self.param.is_unknown()
220 |
221 | def __str__(self) -> str:
222 | return f"ros::param::read(param={self.param})"
223 |
224 |
225 | @attr.s(frozen=True, auto_attribs=True, slots=True, str=False)
226 | class ReadParamWithDefault(SymbolicRosApiCall, SymbolicValue):
227 | param: SymbolicString
228 | default: SymbolicValue
229 |
230 | def to_dict(self) -> t.Dict[str, t.Any]:
231 | return {
232 | "kind": "reads-param-with-default",
233 | "name": self.param.to_dict(),
234 | "default": self.default.to_dict(),
235 | }
236 |
237 | def eval(self, context: SymbolicContext) -> None:
238 | param = self.param.eval(context)
239 | default = self.default.eval(context)
240 | context.node.read(param, default)
241 |
242 | def is_unknown(self) -> bool:
243 | # NOTE same comment about default
244 | return self.param.is_unknown()
245 |
246 | def __str__(self) -> str:
247 | return f"ros::param::read(param={self.param}, default={self.default})"
248 |
249 |
250 | @attr.s(frozen=True, auto_attribs=True, slots=True, str=False)
251 | class HasParam(SymbolicRosApiCall, SymbolicBool):
252 | param: SymbolicString
253 |
254 | def to_dict(self) -> t.Dict[str, t.Any]:
255 | return {
256 | "kind": "checks-for-param",
257 | "name": self.param.to_dict(),
258 | }
259 |
260 | def eval(self, context: SymbolicContext) -> None:
261 | param = self.param.eval(context)
262 | context.node.has_param(param)
263 |
264 | def is_unknown(self) -> bool:
265 | return self.param.is_unknown()
266 |
267 | def __str__(self) -> str:
268 | return f"ros::param::has(param={self.param})"
269 |
270 |
271 | @attr.s(frozen=True, auto_attribs=True, slots=True)
272 | class DeleteParam(SymbolicRosApiCall):
273 | param: SymbolicString
274 |
275 | def to_dict(self) -> t.Dict[str, t.Any]:
276 | return {
277 | "kind": "deletes-param",
278 | "name": self.param.to_dict(),
279 | }
280 |
281 | def eval(self, context: SymbolicContext) -> None:
282 | param = self.param.eval(context)
283 | context.node.delete_param(param)
284 |
285 | def is_unknown(self) -> bool:
286 | return self.param.is_unknown()
287 |
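288 | # Every API call in this module follows the same contract: to_dict() serialises the call for
289 | # the recovered-model database, eval() replays its architectural effect on the node held by
290 | # the SymbolicContext (e.g., Publisher.eval registers a publication on the evaluated topic
291 | # name), and is_unknown() reports whether the call's target (topic, service, or parameter)
292 | # could not be resolved statically.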
--------------------------------------------------------------------------------
/src/rosdiscover/observer/nodeinfo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import re
3 | from typing import Collection, Set
4 |
5 | import attr
6 | from loguru import logger
7 | from roswire import AppInstance, ROS1
8 |
9 | from rosdiscover.interpreter import NodeContext, ParameterServer
10 |
11 | _GOAL = re.compile(r'(.*)/goal')
12 | _GOAL_FMT = re.compile(r'(.*)Goal')
13 | _CANCEL = re.compile(r'(.*)/cancel')
14 | _STATUS = re.compile(r'(.*)/status')
15 | _FEEDBACK = re.compile(r'(.*)/feedback')
16 | _FEEDBACK_FMT = re.compile(r'(.*)Feedback')
17 | _RESULT = re.compile(r'(.*)/result')
18 | _RESULT_FMT = re.compile(r'(.*)Result')
19 |
20 |
21 | @attr.s(slots=True, auto_attribs=True)
22 | class _Action:
23 | """Class for action information."""
24 | name: str
25 | fmt: str
26 |
27 |
28 | @attr.s(slots=True, auto_attribs=True)
29 | class NodeInfo:
30 | """Class for partial node information."""
31 | name: str
32 | publishers: Set[str] = attr.ib(factory=set)
33 | subscribers: Set[str] = attr.ib(factory=set)
34 | provides: Set[str] = attr.ib(factory=set)
35 |
36 | def _identify_action_servers(self, ros: ROS1) -> Collection[_Action]:
37 | """Identify the action servers for the node.
38 |
39 | Looks in the set of publishers for topics related to results, feedback, status and in the
40 | set of subscribers for cancel, goal. If these exist, then create an action server and
41 | remove the topics from the node.
42 |
43 | Returns
44 | -------
45 | Collection[_Action]
46 | A collection of information about each action server
47 | """
48 | return NodeInfo._filter_topics_for_action(ros, self.subscribers, self.publishers)
49 |
50 | def _identify_action_clients(self, ros: ROS1) -> Collection[_Action]:
51 | """Identify the action clients for the node.
52 |
53 | Looks in the set of subscribers for topics related to results, feedback, status and in the
54 | set of publishers for cancel, goal. If these exist, then create an action client and
55 | remove the topics from the node.
56 |
57 | Returns
58 | -------
59 | Collection[_Action]
60 | A collection of information about each action client
61 | """
62 | return NodeInfo._filter_topics_for_action(ros, self.publishers, self.subscribers)
63 |
64 | @classmethod
65 | def _filter_topics_for_action(cls,
66 | ros: ROS1,
67 | goal_related_topics: Set[str],
68 | result_related_topics: Set[str]) -> Collection[_Action]:
69 | """
70 | Filter the topics in :code:`goal_related_topics` and `:code:result_related_topics` to
71 | pull out action-related topics.
72 |
73 | Parameters
74 | ----------
75 | ros: ROS1
76 | Information about the state of the ROS system.
77 | goal_related_topics: Set[str]
78 | The topics where the action goal is meant to be. Servers will subscribe to this,
79 | clients will publish.
80 | result_related_topics: Set[str]
81 | The topics where the action result should be. Servers will publish, clients will
82 | subscribe.
83 |
84 | Returns
85 | -------
86 | A collection of actions that were removed from the topic collections.
87 | """
88 | actions = []
89 | # Copy the topic set containing the goal because we are going to mutate it
90 | topic_copy = set(goal_related_topics)
91 |
92 | # Process the nodes for topics that are action related
93 | for topic in topic_copy:
94 | goal_match = _GOAL.match(topic)
95 |             if goal_match:  # The topic might be an action goal
96 | fmt_match = _GOAL_FMT.match(ros.topic_to_type[topic])
97 | if fmt_match: # The topic is an action goal
98 | # Have the right goal and format matches. Check if other topics are there
99 | action = goal_match.group(1)
100 | fmt = fmt_match.group(1)
101 | if cls._has_all_action_topics(action, fmt,
102 | ros, goal_related_topics, result_related_topics):
103 | # Remove the topics from the right collections and replace as an action
104 | goal_related_topics.remove(f"{action}/goal")
105 | goal_related_topics.remove(f"{action}/cancel")
106 | result_related_topics.remove(f"{action}/status")
107 | result_related_topics.remove(f"{action}/feedback")
108 | result_related_topics.remove(f"{action}/result")
109 | actions.append(_Action(action, fmt))
110 | return actions
111 |
112 | @classmethod
113 | def _has_all_action_topics(cls,
114 | action: str,
115 | fmt: str,
116 | ros: ROS1,
117 | goal_related_topics: Collection[str],
118 | result_related_topics: Collection[str]) -> bool:
119 | """Check that the non-goal related topics are in the right collections."""
120 | return cls._has_cancel(action, ros, goal_related_topics) and \
121 | cls._has_status(action, ros, result_related_topics) and \
122 | cls._has_feedback(action, fmt, ros, result_related_topics) and \
123 | cls._has_result(action, fmt, ros, result_related_topics)
124 |
125 | @classmethod
126 | def _has_cancel(cls, action: str, ros: ROS1, topics: Collection[str]) -> bool:
127 | cancel = f"{action}/cancel"
128 | try:
129 | return cancel in topics and ros.topic_to_type[cancel] == "actionlib_msgs/GoalID"
130 | except KeyError:
131 | logger.error(f"Topic {cancel} does not have a type.")
132 | return False
133 |
134 | @classmethod
135 | def _has_status(cls, action: str, ros: ROS1, topics: Collection[str]) -> bool:
136 | status = f"{action}/status"
137 | try:
138 | return status in topics and ros.topic_to_type[
139 | status] == "actionlib_msgs/GoalStatusArray"
140 | except KeyError:
141 | logger.error(f"Topic {status} does not have a type.")
142 | return False
143 |
144 | @classmethod
145 | def _has_feedback(cls, action: str, fmt: str, ros: ROS1, topics: Collection[str]) -> bool:
146 | feedback = f"{action}/feedback"
147 | try:
148 | return feedback in topics and ros.topic_to_type[feedback] == f"{fmt}Feedback"
149 | except KeyError:
150 | logger.error(f"Topic {feedback} does not have a type.")
151 | return False
152 |
153 | @classmethod
154 | def _has_result(cls, action: str, fmt: str, ros: ROS1, topics: Collection[str]) -> bool:
155 | result = f"{action}/result"
156 | try:
157 | return result in topics and ros.topic_to_type[result] == f"{fmt}Result"
158 | except KeyError:
159 | logger.error(f"Topic {result} does not have a type.")
160 | return False
161 |
162 | def make_node_context(self, ros: ROS1, app_instance: AppInstance) -> NodeContext:
163 | """
164 | Construct a NodeContext used in rosdiscover from the processed information from roswire.
165 | NodeContext is required by rosdiscover to produce the system summary.
166 | See Also :class:`rosdiscover.interpreter.context.NodeContext`
167 |
168 | Parameters
169 | ----------
172 | ros: ROS1
173 | Information about the state (used for getting types)
174 | app_instance: AppInstance
175 | The instance the context will be associated with
176 |
177 | Returns
178 | -------
179 | A NodeContext
180 | """
181 | nodecontext = NodeContext(
182 | name=self.name,
183 | namespace="",
184 | kind="",
185 | package="unknown",
186 | args="unknown",
187 | remappings={},
188 | launch_filename="unknown",
189 | app=app_instance,
190 | files=app_instance.files,
191 | params=ParameterServer()
192 | )
193 |
194 |         # Process the action servers and clients, which mutates the publishers and subscribers
195 | act_srvrs = self._identify_action_servers(ros)
196 | act_clnts = self._identify_action_clients(ros)
197 |
198 | for action in act_srvrs:
199 | nodecontext.action_server(action.name, action.fmt)
200 |
201 | for action in act_clnts:
202 | nodecontext.action_client(action.name, action.fmt)
203 |
204 | # Process the topics
205 | for topic in self.publishers:
206 | try:
207 | nodecontext.pub(topic, ros.topic_to_type[topic])
208 | except KeyError:
209 | logger.error(f"Topic {topic} does not have a type.")
210 |
211 | for topic in self.subscribers:
212 | try:
213 | nodecontext.sub(topic, ros.topic_to_type[topic])
214 | except KeyError:
215 | logger.error(f"Topic {topic} does not have a type.")
216 |
217 | # Process the service information. Note, ROS1 state has no knowledge of clients
218 | for service in self.provides:
219 | nodecontext.provide(service, ros.services[service].format.fullname)
220 |
221 | return nodecontext
222 |
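223 | # Illustrative example (topic names are hypothetical): a node that publishes
224 | # /move_base/status, /move_base/feedback, and /move_base/result and subscribes to
225 | # /move_base/goal and /move_base/cancel, with the matching *Goal/*Feedback/*Result message
226 | # types, is collapsed into a single action server "/move_base" rather than reported as five
227 | # separate topics; the mirror-image pattern is collapsed into an action client.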
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | rosdiscover
2 | ===========
3 |
4 | .. image:: https://github.com/rosqual/rosdiscover/actions/workflows/tox.yml/badge.svg
5 | :target: https://github.com/rosqual/rosdiscover/actions/workflows/tox.yml
6 |
7 | Description
8 | -----------
9 |
10 | rosdiscover is a system for extracting the run-time software architecture from ROS system code.
11 | Current ROS tools like rosgraph or rosdoctor require the system to be running and the node
12 | interconnections to be observed from the system. In contrast, rosdiscover analyzes the launch
13 | files and source code
14 | of ROS systems to derive the anticipated run-time architecture (the nodes that will be started,
15 | the topic, service, and action connections between them, as well as parameters that are read and
16 | written). This architecture information can be used by downstream tools to support various kinds
17 | of architectural analysis, such as configuration error checking.
18 |
19 | This project is a research project within the Institute for Software Research at Carnegie Mellon
20 | University.
21 |
22 | The currently supported output formats are a simple YML file listing the nodes in a similar way
23 | to what might be reported by running :code:`rosnode list` and :code:`rosnode info` but without the
24 | need to run the system, and an architecture description language called Acme, which is used
25 | within our research group.
27 |
28 | The system assumes that the ROS system is contained inside a Docker container, and uses the
29 | roswire library to interact with the container.
30 |
31 | We are developing support for ROS 1 and ROS 2, and both static and dynamic extraction.
32 |
33 | Installation
34 | ------------
35 |
36 | See below for two different methods of installing rosdiscover.
37 | In general, the native installation should be preferred, but for some cases
38 | (e.g., machines that run Mac OS or Windows), the Docker-based method is
39 | ideal.
40 |
41 | Both methods require that Docker is installed on your machine and that your
42 | user belongs to the :code:`docker` group (i.e., :code:`sudo` isn't required
43 | to run :code:`docker` commands). For instructions on installing Docker,
44 | refer to: https://docs.docker.com/install/
45 |
46 |
47 | Native Installation
48 | ...................
49 |
50 | The ideal way to install :code:`rosdiscover` is to install to a virtual environment
51 | running on your host machine. This method requires that your host machine is
52 | running Python 3.6 or greater. If that isn't the case, the safest way to install
53 | a newer version of Python on your machine is via pyenv,
54 | which allows you to manage multiple installations of Python.
55 |
56 | We strongly recommend that you install :code:`rosdiscover` inside a Python virtual
57 | environment (via virtualenv or pipenv) to avoid interfering with the rest of
58 | your system (i.e., to avoid Python’s equivalent of DLL hell).
59 | To install :code:`rosdiscover` from source within a virtual environment using :code:`pipenv`:
60 |
61 | .. code::
62 |
63 |     $ git clone git@github.com:cmu-rss-lab/rosdiscover rosdiscover
64 | $ cd rosdiscover
65 | $ pipenv shell
66 |
67 | (rosdiscover) $ pip install -e .
68 |
69 |
70 | (Alternative) Docker Installation
71 | .................................
72 |
73 | **(WARNING: This approach is more complex than the native installation:
74 | Where possible, you should try to stick to the native installation.)**
75 |
76 | In some cases, it may not be possible to install :code:`rosdiscover` natively on
77 | your machine (e.g., Mac OS or Windows machines). :code:`rosdiscover` can be
78 | installed on such systems by building (or downloading) and using a Docker
79 | image for :code:`rosdiscover`.
80 |
81 | To build the Docker image for :code:`rosdiscover`:
82 |
83 | .. code::
84 |
85 | $ docker build -t rosdiscover .
86 |
87 | To run :code:`rosdiscover` commands via Docker, replace :code:`rosdiscover` in all of the
88 | commands shown below with the following prefix:
89 |
90 | .. code::
91 |
92 | $ docker run --rm \
93 | -v /var/run/docker.sock:/var/run/docker.sock \
94 | -it rosdiscover
95 |
96 | where :code:`-v /var/run/docker.sock:/var/run/docker.sock` is used to mount the
97 | host's Docker socket inside the :code:`rosdiscover` container.
98 | Optionally, you may also want to use volume mounting to persist (and reuse) the cache:
99 |
100 | **TODO: requires careful handling of users/permissions.**
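101 |
102 | A minimal sketch (assuming the cache lives under :code:`/root/.roswire` inside the
103 | container, as in the development setup below, and setting aside the user/permission
104 | caveat above):
105 |
106 | .. code::
107 |
108 |    $ docker run --rm \
109 |    -v /var/run/docker.sock:/var/run/docker.sock \
110 |    -v $HOME/.roswire:/root/.roswire \
111 |    -it rosdiscover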
101 |
102 |
103 | Getting Started
104 | ------------------
105 |
106 | ROSDiscover offers a number of commands for interacting with Docker images,
107 | all of which accept the path to a YAML configuration file as one of their
108 | parameters. Several example YAML configuration files can be found under the
109 | :code:`example` directory at the root of this repository. Below is an example of
110 | one of those configuration files, :code:`example/fetch.yml`, a configuration file
111 | for the Fetch mobile robot.
112 |
113 | .. code:: yaml
114 |
115 | image: fetch
116 | sources:
117 | - /opt/ros/melodic/setup.bash
118 | - /ros_ws/devel/setup.bash
119 | launches:
120 | - /ros_ws/src/fetch_gazebo/fetch_gazebo/launch/pickplace_playground.launch
121 |
122 | The :code:`image` property specifies the name of the Docker image that is used
123 | to provide the robot. :code:`sources` gives an ordered list of the scripts that
124 | must be sourced to setup the correct working environment for the robot;
125 | in most cases, :code:`sources` will look as it does above, except the term
126 | :code:`melodic` may be replaced with the name of another ROS distribution
127 | (e.g., :code:`indigo` or :code:`kinetic`).
128 | :code:`launches` gives an ordered list of the XML
129 | launch files that should be used to launch the software for the robot.
130 | For now, each element in this list is an absolute path to a launch file
131 | inside the container. In the near future, support for relative paths
132 | (e.g., name of package + name of launch file) and passing command-line
133 | arguments will be added. There is also an additional :code:`environment` property,
134 | exemplified below, which accepts a mapping from names of environment
135 | variables to their respective values. This is useful in a small number of
136 | cases (e.g., specifying :code:`TURTLEBOT3_MODEL` for TurtleBot3).
137 |
138 | .. code:: yaml
139 |
140 | image: turtlebot3
141 | sources:
142 | - /opt/ros/kinetic/setup.bash
143 | - /ros_ws/devel/setup.bash
144 | environment:
145 | TURTLEBOT3_MODEL: burger
146 | launches:
147 | - filename: /ros_ws/src/turtlebot3_simulations/turtlebot3_gazebo/launch/turtlebot3_house.launch
148 | arg1: value
149 | arg2: value
150 | arg3: value
151 |
152 |
153 | Commands
154 | ........
155 |
156 | To see a complete list of commands that are supported by ROSDiscover,
157 | run the following:
158 |
159 | .. code::
160 |
161 | $ rosdiscover --help
162 | usage: rosdiscover [-h] {launch,rostopic,rosservice,acme} ...
163 |
164 | discovery of ROS architectures
165 |
166 | positional arguments:
167 | {launch,rostopic,rosservice,acme}
168 | launch simulates the effects of a roslaunch.
169 | rostopic simulates the output of rostopic for a given
170 | configuration.
171 | rosservice simulates the output of rosservice for a given
172 | configuration.
173 | acme generates Acme from a source file
174 |
175 | optional arguments:
176 | -h, --help show this help message and exit
177 |
178 | The :code:`launch` command is used to simulate the effects of a sequence of
179 | :code:`roslaunch` calls for a robot application:
180 |
181 | .. code::
182 |
183 | $ pipenv run rosdiscover launch example/fetch.yml
184 |
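185 | The other subcommands follow the same pattern; for example, the following (illustrative)
186 | invocation simulates the output of :code:`rostopic` for the Fetch configuration:
187 |
188 | .. code::
189 |
190 |    $ pipenv run rosdiscover rostopic example/fetch.yml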
185 |
186 | Docker Development Setup (for Windows 10)
187 | -----------------------------------------
188 |
189 | If you are planning to develop on Windows 10, then you will need to mount
190 | rosdiscover source directories into a Docker container. You can use your
191 | favorite Python IDE in Windows, but run and test rosdiscover inside the
192 | container.
193 |
194 | We provide a Docker build file for setting up this development environment. To
195 | run inside the image you need to mount (i) the source directory that is the top
196 | of this repository as :code:`/code` in the container, (ii) the socket/port that the
197 | host docker daemon connects to (so that rosdiscover can find and interact with other containers), and (iii)
198 | (recommended) a host folder that can be used to cache some of the rosdiscover
199 | builds, so that there is no need to start from scratch with rosdiscover each
200 | time.
201 |
202 | To run rosdiscover on Windows 10, where the source code is mounted on
203 | :code:`D:/rosdiscover`, and you want to store the cache on :code:`D:/cache`:
204 |
205 | 1. Ensure that the folders to mount are shared. This needs to be done through
206 | the Docker settings on your host. (This is done through the Dashboard or
207 | through settings on Windows Docker)
208 | 2. Build the development docker image:
209 |
210 | .. code::
211 |
212 | $ docker build -t build/rosdiscover-dev -f .\Dockerfile-dev .
213 |
214 | 3. Run the docker image:
215 |
216 | .. code::
217 |
218 | $ docker run --rm -v d:/rosdiscover:/code -v d:/cache:/root/.roswire -v //var/run/docker.sock:/var/run/docker.sock -it build/rosdiscover-dev
219 |
220 | 4. Once the shell has started and you are inside the container, you will need to install `rosdiscover` locally:
221 |
222 | .. code::
223 |
224 | bash-4.4# pip install -e .
225 |
226 | You will then be able to run `rosdiscover` from the command line.
227 |
--------------------------------------------------------------------------------
/src/rosdiscover/models/move_base.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 |
3 | from ..interpreter import model
4 | from .plugins.navigation import NavigationPlugin
5 |
6 |
7 | @model('move_base', 'move_base')
8 | def move_base(c):
9 | c.read("~base_global_planner", "navfn/NavfnROS")
10 | c.read("~base_local_planner", "base_local_planner/TrajectoryPlannerROS")
11 | c.read("~global_costmap/robot_base_frame", "base_link")
12 | c.read("~global_costmap/global_frame", "/map")
13 | c.read("~planner_frequency", 0.0)
14 | c.read("~controller_frequency", 20.0)
15 | c.read("~planner_patience", 5.0)
16 | c.read("~controller_patience", 15.0)
17 | c.read("~max_planning_retries", -1)
18 | c.read("~oscillation_timeout", 0.0)
19 | c.read("~oscillation_distance", 0.5)
20 |
21 | c.action_server("move_base", "move_base_msgs/MoveBaseAction")
22 |
23 | c.sub("move_base_simple/goal", "geometry_msgs/PoseStamped")
24 | c.pub("~goal", "geometry_msgs/PoseStamped")
25 |
26 | c.pub("cmd_vel", "geometry_msgs/Twist")
27 | c.pub("~current_goal", "geometry_msgs/PoseStamped")
28 |
29 | c.read("~local_costmap/inscribed_radius", 0.325)
30 | circumscribed_radius = c.read("~local_costmap/circumscribed_radius", 0.46)
31 | c.read("~clearing_radius", circumscribed_radius)
32 | c.read("~conservative_reset_dist", 3.0)
33 | c.read("~shutdown_costmaps", False)
34 | c.read("~clearing_rotation_allowed", True)
35 | c.read("~recovery_behavior_enabled", True)
36 |
37 | # FIXME load costmap plugins
38 | # SEE http://wiki.ros.org/costmap_2d?distro=melodic
39 | def create_costmap(name):
40 | logger.debug(f"Creating costmap: {name}")
41 | c.sub(f"~{name}/footprint", 'geometry_msgs/Polygon')
42 | c.pub(f"~{name}/costmap", 'nav_msgs/OccupancyGrid')
43 | c.pub(f"~{name}/costmap_updates", 'nav_msgs/OccupancyGridUpdate')
44 | # c.pub(f"~{name}/voxel_grid", 'costmap_2d/VoxelGrid')
45 |
46 | c.read(f"~{name}/global_frame", "/map")
47 |         c.read(f"~{name}/robot_base_frame", "base_link")
48 | c.read(f"~{name}/transform_tolerance", 0.2)
49 | c.read(f"~{name}/update_frequency", 5.0)
50 | c.read(f"~{name}/publish_frequency", 0.0)
51 | c.read(f"~{name}/rolling_window", False)
52 | c.read(f"~{name}/always_send_full_costmap", True)
53 |
54 | c.read(f"~{name}/width", 10)
55 | c.read(f"~{name}/height", 10)
56 | c.read(f"~{name}/resolution", 0.05)
57 | c.read(f"~{name}/origin_x", 0.0)
58 | c.read(f"~{name}/origin_y", 0.0)
59 |
60 | create_costmap("global_costmap")
61 | create_costmap("local_costmap")
62 |
63 | # load the global planner plugin
64 | def plugin_navfn():
65 | name = "NavfnROS"
66 | c.pub(f"~{name}/plan", "nav_msgs/Path")
67 | c.provide(f"~{name}/make_plan", "nav_msgs/GetPlan")
68 | c.read(f"~{name}/allow_unknown", True)
69 | c.read(f"~{name}/planner_window_x", 0.0)
70 | c.read(f"~{name}/planner_window_y", 0.0)
71 | c.read(f"~{name}/default_tolerance", 0.0)
72 | c.read(f"~{name}/visualize_potential", False)
73 |
74 | type_global_planner = c.read("~base_global_planner", "navfn/NavfnROS")
75 | assert type_global_planner == 'navfn/NavfnROS'
76 | plugin_navfn()
77 |
78 | # load the local planner plugin
79 | def base_plugin_local(name):
80 | # type: (str) -> None
81 | c.pub(f"~{name}/global_plan", "nav_msgs/Path")
82 | c.pub(f"~{name}/local_plan", "nav_msgs/Path")
83 | c.sub("odom", "nav_msgs/Odometry")
84 |
85 | def plugin_DWAPlannerROS(): # noqa
86 | name = "DWAPlannerROS"
87 | base_plugin_local(name)
88 |
89 | c.read(f"~{name}/acc_lim_x", 2.5)
90 | c.read(f"~{name}/acc_lim_y", 2.5)
91 | c.read(f"~{name}/acc_lim_th", 3.2)
92 |
93 | c.read(f"~{name}/min_trans_vel", 0.1)
94 | c.read(f"~{name}/max_trans_vel", 0.55)
95 |
96 | c.read(f"~{name}/max_vel_x", 0.55)
97 | c.read(f"~{name}/min_vel_x", 0.0)
98 | c.read(f"~{name}/max_vel_y", 0.1)
99 | c.read(f"~{name}/min_vel_y", -0.1)
100 |
101 | c.read(f"~{name}/max_rot_vel", 1.0)
102 | c.read(f"~{name}/min_rot_vel", 0.4)
103 |
104 | c.read(f"~{name}/yaw_goal_tolerance", 0.05)
105 | c.read(f"~{name}/xy_goal_tolerance", 0.10)
106 | c.read(f"~{name}/latch_xy_goal_tolerance", False)
107 |
108 | c.read(f"~{name}/sim_time", 1.7)
109 | c.read(f"~{name}/sim_granularity", 0.025)
110 | c.read(f"~{name}/vx_samples", 3)
111 | c.read(f"~{name}/vy_samples", 10)
112 | c.read(f"~{name}/vth_samples", 20)
113 | c.read(f"~{name}/controller_frequency", 20.0)
114 |
115 | c.read(f"~{name}/path_distance_bias", 32.0)
116 | c.read(f"~{name}/goal_distance_bias", 24.0)
117 | c.read(f"~{name}/occdist_scale", 0.01)
118 | c.read(f"~{name}/forward_point_distance", 0.325)
119 | c.read(f"~{name}/stop_time_buffer", 0.2)
120 | c.read(f"~{name}/scaling_speed", 0.25)
121 | c.read(f"~{name}/max_scaling_factor", 0.2)
122 |
123 | if c.read(f"~{name}/publish_cost_grid", False) or c.read(f"~{name}/publish_cost_grid_pc", False):
124 | c.pub(f"~{name}/cost_cloud", "sensor_msgs/PointCloud2")
125 |
126 | if c.read(f"~{name}/publish_traj_pc", False):
127 | c.pub(f"~{name}/trajectory_cloud", "base_local_planner/MapGridCostPoint")
128 |
129 | c.read(f"~{name}/oscillation_reset_dist", 0.05)
130 | c.read(f"~{name}/prune_plan", True)
131 |
132 | def plugin_TrajectoryPlannerROS(): # noqa
133 | name = "TrajectoryPlannerROS"
134 | base_plugin_local(name)
135 |
136 | if c.read(f"~{name}/publish_cost_grid_pc", False):
137 | c.pub(f"~{name}/cost_cloud", "sensor_msgs/PointCloud2")
138 |
139 | c.read(f"~{name}/acc_lim_x", 2.5)
140 | c.read(f"~{name}/acc_lim_y", 2.5)
141 | c.read(f"~{name}/acc_lim_theta", 2.5)
142 | c.read(f"~{name}/max_vel_x", 2.5)
143 | c.read(f"~{name}/min_vel_x", 2.5)
144 | c.read(f"~{name}/max_vel_theta", 2.5)
145 | c.read(f"~{name}/min_vel_theta", 2.5)
146 |         c.read(f"~{name}/min_in_place_vel_theta", 0.4)
147 |
148 | # replaces ~/backup_vel since v1.3.1 of navigation stack
149 | c.read(f"~{name}/escape_vel", -0.1)
150 |
151 | if c.read(f"~{name}/holonomic_robot", True):
152 | c.read(f"~{name}/y_vels", [-0.3, -0.1, 0.1, 0.3])
153 |
154 | c.read(f"~{name}/yaw_goal_tolerance", 0.05)
155 | c.read(f"~{name}/xy_goal_tolerance", 0.10)
156 | c.read(f"~{name}/latch_xy_goal_tolerance", False)
157 |
158 | c.read(f"~{name}/sim_time", 1.0)
159 | sim_granularity = c.read(f"~{name}/sim_granularity", 0.025)
160 | c.read(f"~{name}/angular_sim_granularity", sim_granularity)
161 |
162 | c.read(f"~{name}/vx_samples", 3)
163 | c.read(f"~{name}/vtheta_samples", 20)
164 |
165 | # FIXME see http://wiki.ros.org/base_local_planner?distro=melodic
166 | # searches parent namespaces for controller_frequency if not present
167 | # in private namespace
168 | c.read(f"~{name}/controller_frequency", 20.0)
169 |
170 | c.read(f"~{name}/meter_scoring", False)
171 | c.read(f"~{name}/pdist_scale", 0.6)
172 | c.read(f"~{name}/gdist_scale", 0.8)
173 | c.read(f"~{name}/occdist_scale", 0.01)
174 | c.read(f"~{name}/heading_lookahead", 0.325)
175 | c.read(f"~{name}/heading_scoring", False)
176 | c.read(f"~{name}/heading_scoring_timestep", 0.8)
177 | c.read(f"~{name}/dwa", True)
178 | c.read(f"~{name}/global_frame_id", "odom")
179 |
180 |         c.read(f"~{name}/oscillation_reset_dist", 0.05)
181 |
182 |         c.read(f"~{name}/prune_plan", True)
183 |
184 | type_local_planner = c.read("~base_local_planner",
185 | "base_local_planner/TrajectoryPlannerROS")
186 | if type_local_planner == 'base_local_planner/TrajectoryPlannerROS':
187 | plugin_TrajectoryPlannerROS()
188 | elif type_local_planner == 'dwa_local_planner/DWAPlannerROS':
189 | plugin_DWAPlannerROS()
190 | else:
191 | m = f"unsupported local planner: {type_local_planner}"
192 | raise Exception(m)
193 |
194 | c.provide("~make_plan", 'nav_msgs/GetPlan')
195 | # ! MELODIC c.provide("~clear_unknown_space", 'std_srvs/Empty')
196 | c.provide("~clear_costmaps", 'std_srvs/Empty')
197 |
198 | # move_base/src/move_base.cpp:1054
199 | # load_plugin('', 'clear_costmap_recovery/ClearCostmapRecovery') [conservative_reset]
200 | def load_recovery(name):
201 | # type: (str) -> None
202 | nh_p = f"~{name}"
203 | nh_blp = "~TrajectoryPlannerROS"
204 | c.read(f"{nh_p}/sim_granularity", 0.017)
205 | c.read(f"{nh_p}/frequency", 20.0)
206 | c.read(f"{nh_blp}/acc_lim_th", 3.2)
207 | c.read(f"{nh_blp}/max_rotational_vel", 1.0)
208 | c.read(f"{nh_blp}/min_in_place_rotational_vel", 0.4)
209 | c.read(f"{nh_blp}/yaw_goal_tolerance", 0.10)
210 |
211 | load_recovery('conservative_reset')
212 | load_recovery('aggressive')
213 |
214 | # Load navigation plugins
215 | global_plugins = c.read("~global_costmap/plugins")
216 | if global_plugins is not None:
217 | assert isinstance(global_plugins, list)
218 | for plugin_dict in global_plugins:
219 | assert isinstance(plugin_dict, dict)
220 | plugin = NavigationPlugin.from_dict(plugin_dict, c.name, "global_costmap")
221 | c.load_plugin(plugin)
222 |
223 | local_plugins = c.read("~local_costmap/plugins")
224 | if isinstance(local_plugins, list):
225 | for plugin_dict in local_plugins:
226 | assert isinstance(plugin_dict, dict)
227 | plugin = NavigationPlugin.from_dict(plugin_dict, c.name, "local_costmap")
228 | c.load_plugin(plugin)
229 |
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/summary.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __all__ = ('NodeSummary', 'SystemSummary')
3 |
4 | from typing import Any, Collection, Dict, Iterator, List, Mapping, Set, Tuple
5 |
6 | import attr
7 | from loguru import logger
8 |
9 | from .provenance import Provenance
10 | from ..core import Action, Service, Topic
11 |
12 |
13 | @attr.s(frozen=True, slots=True, auto_attribs=True)
14 | class NodeSummary:
15 | """Summarises the architectural effects of a given node."""
16 | name: str
17 | fullname: str
18 | namespace: str
19 | kind: str
20 | package: str
21 | nodelet: bool
22 | filename: str
23 |     # Provenance indicates where the model comes from.
24 |     # PLACEHOLDER indicates that the node was not actually discovered, but
25 |     # was put in place to "complete" the architecture. A placeholder is used
26 |     # when the component template could not be found in the library, either
27 |     # because it is not a predefined model or because its interactions were
28 |     # not discovered by other means. Typically, placeholders carry no
29 |     # information about topics, services, etc.
30 |     # HANDWRITTEN indicates that the node was derived from a handwritten
31 |     # model.
32 |     # RECOVERED indicates that the node was recovered through static analysis.
33 | provenance: Provenance
34 | pubs: Collection[Topic]
35 | subs: Collection[Topic]
36 | # The tuple is (name, dynamic) where name is the name of the parameter
37 | # and dynamic is whether the node reacts to updates to the parameter via reconfigure
38 | reads: Collection[Tuple[str, bool]]
39 | writes: Collection[str]
40 | uses: Collection[Service]
41 | provides: Collection[Service]
42 | action_servers: Collection[Action]
43 | action_clients: Collection[Action]
44 |
45 | def __attrs_post_init__(self) -> None:
46 | object.__setattr__(self, 'pubs', frozenset(self.pubs))
47 | object.__setattr__(self, 'subs', frozenset(self.subs))
48 | object.__setattr__(self, 'reads', frozenset(self.reads))
49 | object.__setattr__(self, 'writes', frozenset(self.writes))
50 | object.__setattr__(self, 'uses', frozenset(self.uses))
51 | object.__setattr__(self, 'provides', frozenset(self.provides))
52 | object.__setattr__(self, 'action_servers', frozenset(self.action_servers))
53 | object.__setattr__(self, 'action_clients', frozenset(self.action_clients))
54 |
55 | @classmethod
56 | def merge(cls, lhs: 'NodeSummary', rhs: 'NodeSummary') -> 'NodeSummary':
57 | def merge_collections(s1: Collection[Any], s2: Collection[Any]) -> Collection[Any]:
58 | s: Set[Any] = set()
59 | s.update(s1)
60 | s.update(s2)
61 | return s
62 |
63 | reads = merge_collections(lhs.reads, rhs.reads)
64 | writes = merge_collections(lhs.writes, rhs.writes)
65 | pubs = merge_collections(lhs.pubs, rhs.pubs)
66 | subs = merge_collections(lhs.subs, rhs.subs)
67 | uses = merge_collections(lhs.uses, rhs.uses)
68 | provides = merge_collections(lhs.provides, rhs.provides)
69 |         action_servers = merge_collections(lhs.action_servers, rhs.action_servers)
70 | action_clients = merge_collections(lhs.action_clients, rhs.action_clients)
71 |
72 | if lhs.name != rhs.name and lhs.name:
73 | logger.warning(f"Merging two nodes that are named differently: {lhs.name} & {rhs.name}")
74 | name = lhs.name if lhs.name else rhs.name
75 |
76 | if lhs.package != rhs.package and lhs.package:
77 | logger.warning(f"{lhs.name} from package {rhs.package} retaining original package {lhs.package}")
78 | package = lhs.package if lhs.package else rhs.package
79 |
80 |         if lhs.fullname != rhs.fullname and lhs.fullname:
81 |             logger.warning(f"{lhs.name} fullname {rhs.fullname} retaining original fullname {lhs.fullname}")
82 | fullname = lhs.fullname if lhs.fullname else rhs.fullname
83 |
84 | if lhs.namespace != rhs.namespace and lhs.namespace:
85 |             logger.warning(f"{lhs.name} namespace {rhs.namespace} retaining original namespace {lhs.namespace}")
86 | namespace = lhs.namespace if lhs.namespace else rhs.namespace
87 |
88 | if lhs.kind != rhs.kind and lhs.kind:
89 |             logger.warning(f"{lhs.name} kind {rhs.kind} retaining original kind {lhs.kind}")
90 | kind = lhs.kind if lhs.kind else rhs.kind
91 |
92 | if lhs.provenance != rhs.provenance:
93 | logger.warning(f"{lhs.name} provenance {rhs.provenance} retaining original provenance {lhs.provenance}")
94 | provenance = lhs.provenance
95 |
96 | if lhs.filename != rhs.filename:
97 | logger.warning(f"{lhs.name} filename {rhs.filename} retaining original filename {lhs.filename}")
98 | filename = lhs.filename if lhs.filename else rhs.filename
99 |
100 | return NodeSummary(
101 | name=name,
102 | fullname=fullname,
103 | namespace=namespace,
104 | kind=kind,
105 | package=package,
106 | nodelet=lhs.nodelet,
107 | filename=filename,
108 | provenance=provenance,
109 | reads=reads,
110 | writes=writes,
111 | provides=provides,
112 | uses=uses,
113 |             action_servers=action_servers,
114 | action_clients=action_clients,
115 | pubs=pubs,
116 | subs=subs,
117 | )
118 |
119 | def to_dict(self) -> Dict[str, Any]:
120 | pubs = [t.to_dict() for t in self.pubs]
121 | subs = [t.to_dict() for t in self.subs]
122 | provides = [s.to_dict() for s in self.provides]
123 | uses = [s.to_dict() for s in self.uses]
124 | action_servers = [a.to_dict() for a in self.action_servers]
125 | action_clients = [a.to_dict() for a in self.action_clients]
126 | reads = [{'name': n, 'dynamic': d} for (n, d) in self.reads]
127 | return {'name': self.name,
128 | 'fullname': self.fullname,
129 | 'namespace': self.namespace,
130 | 'kind': self.kind,
131 | 'package': self.package,
132 | 'nodelet': self.nodelet,
133 | 'filename': self.filename,
134 | 'provenance': self.provenance.value,
135 | 'reads': reads,
136 | 'writes': list(self.writes),
137 | 'provides': provides,
138 | 'uses': uses,
139 | 'action-servers': action_servers,
140 | 'action-clients': action_clients,
141 | 'pubs': pubs,
142 | 'subs': subs}
143 |
144 | @classmethod
145 | def from_dict(cls, dict: Dict[str, Any]) -> 'NodeSummary':
146 | name = dict["name"]
147 | fullname = dict.get('fullname', name)
148 |         namespace = dict.get('namespace', '')
149 | kind = dict.get('kind', '')
150 | package = dict.get('package', '')
151 | nodelet = dict.get('nodelet', False)
152 | filename = dict.get('filename', '')
153 | provenance = Provenance(dict.get('provenance', Provenance.PLACEHOLDER))
154 | reads = [(p['name'], p['dynamic']) for p in dict.get('reads', [])]
155 | writes = dict.get('writes', [])
156 | pubs = [Topic(name=t['name'], format=t['format'], implicit=t.get('implicit', False))
157 | for t in dict.get('pubs', [])]
158 | subs = [Topic(name=t['name'], format=t['format'], implicit=t.get('implicit', False))
159 | for t in dict.get('subs', [])]
160 | provides = [Service(name=s['name'], format=s['format'])
161 | for s in dict.get('provides', [])]
162 | uses = [Service(name=s['name'], format=s['format'])
163 | for s in dict.get('uses', [])]
164 | action_servers = [Action(name=a['name'], format=a['format'])
165 | for a in dict.get('action-servers', [])]
166 | action_clients = [Action(name=a['name'], format=a['format'])
167 | for a in dict.get('action-clients', [])]
168 | return NodeSummary(name=name,
169 | fullname=fullname,
170 |                            namespace=namespace,
171 | kind=kind,
172 | package=package,
173 | nodelet=nodelet,
174 | filename=filename,
175 | provenance=provenance,
176 | reads=reads,
177 | writes=writes,
178 | pubs=pubs,
179 | subs=subs,
180 | provides=provides,
181 | uses=uses,
182 | action_servers=action_servers,
183 | action_clients=action_clients)
184 |
185 |
186 | @attr.s(frozen=True, slots=True, auto_attribs=True)
187 | class SystemSummary(Mapping[str, NodeSummary]):
188 | """Summarises the architectural effects of all nodes in a system.
189 | Provides a mapping from node names to the architectural effects of those
190 | nodes."""
191 | _node_to_summary: Mapping[str, NodeSummary]
192 |
193 | def __len__(self) -> int:
194 | return len(self._node_to_summary)
195 |
196 | def __iter__(self) -> Iterator[str]:
197 | yield from self._node_to_summary
198 |
199 | def __getitem__(self, name: str) -> NodeSummary:
200 | return self._node_to_summary[name]
201 |
202 | def to_dict(self) -> List[Dict[str, Any]]:
203 | return [n.to_dict() for n in self.values()]
204 |
205 | @classmethod
206 | def from_dict(cls, arr: Collection[Any]) -> 'SystemSummary':
207 | summaries = [NodeSummary.from_dict(s) for s in arr]
208 | return SystemSummary(node_to_summary={summary.name: summary for summary in summaries})
209 |
210 | @property
211 | def unresolved(self) -> Iterator[NodeSummary]:
212 | for n in self._node_to_summary.values():
213 | if n.provenance == Provenance.PLACEHOLDER:
214 | yield n
215 |
216 | @classmethod
217 | def merge(cls, lhs: 'SystemSummary', rhs: 'SystemSummary') -> 'SystemSummary':
218 | node_summaries: Dict[str, NodeSummary] = {}
219 | for key, summary in lhs.items():
220 | if key not in rhs:
221 | node_summaries[key] = summary
222 | else:
223 | node_summaries[key] = NodeSummary.merge(summary, rhs[key])
224 | for key, summary in rhs.items():
225 | if key not in lhs:
226 | node_summaries[key] = summary
227 | return SystemSummary(node_to_summary=node_summaries)
228 |
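229 |
230 | # The block below is an illustrative sketch only (not part of the public API).
231 | # It shows how a NodeSummary round-trips through to_dict/from_dict; the node
232 | # name, package, and filename are made up for the example. Run it as a module,
233 | # e.g. `python -m rosdiscover.interpreter.summary`, so the relative imports
234 | # above resolve.
235 | if __name__ == '__main__':
236 |     example = NodeSummary(
237 |         name='talker',
238 |         fullname='/talker',
239 |         namespace='/',
240 |         kind='talker',
241 |         package='demo_pkg',
242 |         nodelet=False,
243 |         filename='/ros_ws/src/demo_pkg/launch/talker.launch',
244 |         provenance=Provenance.PLACEHOLDER,
245 |         pubs=[Topic(name='/chatter', format='std_msgs/String', implicit=False)],
246 |         subs=[],
247 |         reads=[('~rate', False)],
248 |         writes=[],
249 |         uses=[],
250 |         provides=[],
251 |         action_servers=[],
252 |         action_clients=[])
253 |     roundtripped = NodeSummary.from_dict(example.to_dict())
254 |     print(roundtripped.to_dict() == example.to_dict())  # expected: True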
--------------------------------------------------------------------------------
/src/rosdiscover/models/plugins/navigation.py:
--------------------------------------------------------------------------------
1 | __all__ = ('NavigationPlugin',)
2 |
3 | import abc
4 | import typing as t
5 | from typing import Mapping, Type
6 |
7 | import attr
8 | from loguru import logger
9 | from roswire.name import canonical_name, namespace_join
10 |
11 | from ...interpreter import Interpreter, ModelPlugin, NodeContext
12 |
13 |
14 | class NavigationPlugin(ModelPlugin):
15 | # e.g., {name: static_map, type: "costmap_2d::StaticLayer"}
16 | @classmethod
17 | def from_dict(cls, dict_: Mapping[str, str],
18 | node_name: str,
19 | reference_name: t.Optional[str] = None) -> 'NavigationPlugin':
20 | cpp_class = dict_['type']
21 | plugin_name = dict_['name']
22 |         logger.debug(f'loading navigation plugin [{plugin_name}] of class [{cpp_class}]')
23 | cpp_to_cls: Mapping[str, Type[NavigationPlugin]] = {
24 | 'costmap_2d::StaticLayer': StaticLayerPlugin,
25 | 'costmap_2d::FetchDepthLayer': FetchDepthLayerPlugin,
26 | 'costmap_2d::InflationLayer': InflationLayerPlugin,
27 | 'costmap_2d::ObstacleLayer': ObstacleLayerPlugin,
28 | 'costmap_2d::VoxelLayer': VoxelLayerPlugin
29 | }
30 |
31 |         plugin_cls = cpp_to_cls[cpp_class]
32 |         plugin = plugin_cls.build(plugin_name, node_name, reference_name)
33 |         logger.debug(f'loaded navigation plugin [{plugin_name}] of class [{cpp_class}]: {plugin}')
34 | return plugin
35 |
36 | @classmethod
37 | @abc.abstractmethod
38 | def build(cls, plugin_name: str, node_name: str, reference_name: t.Optional[str]) -> 'NavigationPlugin':
39 | ...
40 |
41 |
42 | def get_move_base(i: Interpreter, node_name: str) -> 'NodeContext':
43 | if not node_name.startswith('/'):
44 | node_name = f'/{node_name}'
45 |     if node_name not in i.nodes:
46 |         raise ModuleNotFoundError(f'Could not find node {node_name} in configuration')
47 |     move_base = i.nodes[node_name]
48 |     if move_base is None:
49 |         raise ModuleNotFoundError(f'Could not find node {node_name} in configuration')
50 | return move_base
51 |
52 |
53 | @attr.s(frozen=True, slots=True)
54 | class StaticLayerPlugin(NavigationPlugin):
55 | """
56 | The static map layer represents a largely unchanging portion of the costmap, like those generated by SLAM.
57 |
58 | http://wiki.ros.org/costmap_2d/hydro/staticmap
59 | """
60 | class_name = 'costmap_2d::StaticLayer'
61 | name: str = attr.ib()
62 | node_name: str = attr.ib()
63 | reference_name: t.Optional[str] = attr.ib()
64 |
65 | def load(self, interpreter: Interpreter) -> None:
66 | move_base = get_move_base(interpreter, self.node_name)
67 |
68 | move_base.read('~unknown_cost_value', -1)
69 | move_base.read('~lethal_cost_value', 100)
70 | map_topic = move_base.read('~map_topic', '/map')
71 | move_base.read('~first_map_only', False)
72 | subscribe_to_updates = move_base.read('~subscribe_to_updates', False)
73 | move_base.read('~track_unknown_space', True)
74 | move_base.read('~use_maximum', False)
75 | move_base.read('~trinary_costmap', True)
76 |
77 | move_base.sub(map_topic, 'nav_msgs/OccupancyGrid')
78 | if subscribe_to_updates:
79 | move_base.sub(f'{map_topic}_updates', 'map_msgs/OccupancyGridUpdate')
80 |
81 | @classmethod
82 | def build(cls, name: str, node_name: str, reference_name: t.Optional[str]) -> NavigationPlugin:
83 | return StaticLayerPlugin(name=name, node_name=node_name, reference_name=reference_name)
84 |
85 |
86 | @attr.s(frozen=True, slots=True)
87 | class InflationLayerPlugin(NavigationPlugin):
88 | """
89 | The inflation layer is an optimization that adds new values around lethal obstacles (i.e. inflates the obstacles)
90 | in order to make the costmap represent the configuration space of the robot.
91 |
92 | http://wiki.ros.org/costmap_2d/hydro/inflation
93 | """
94 | class_name = 'costmap_2d::InflationLayer'
95 | name: str = attr.ib()
96 | node_name: str = attr.ib()
97 | reference_name: t.Optional[str] = attr.ib()
98 |
99 | def load(self, interpreter: 'Interpreter') -> None:
100 | move_base = get_move_base(interpreter, self.node_name)
101 | move_base.read('~inflation_radius', 0.55)
102 | move_base.read('~cost_scaling_factor', 10.0)
103 |
104 | @classmethod
105 | def build(cls, name: str, node_name: str, reference_name: t.Optional[str]) -> NavigationPlugin:
106 | return InflationLayerPlugin(name=name, node_name=node_name, reference_name=reference_name)
107 |
108 |
109 | @attr.s(frozen=True, slots=True)
110 | class FetchDepthLayerPlugin(NavigationPlugin):
111 | """
112 | TODO: This should really be generated
113 |
114 | from /ros_ws/src/fetch_ros/fetch_depth_layer/src/depth_layer.cpp of the Fetch container from TheRobotCooperative
115 | """
116 | class_name = 'costmap_2d::FetchDepthLayer'
117 | name: str = attr.ib()
118 | node_name: str = attr.ib()
119 | reference_name: t.Optional[str] = attr.ib()
120 |
121 | def load(self, interpreter: 'Interpreter') -> None:
122 | move_base = get_move_base(interpreter, self.node_name)
123 |
124 | publish_observations = move_base.read('publish_observations', False)
125 | move_base.read('~observations_separation_threshold', 0.06)
126 | move_base.read('~find_ground_plane', True)
127 | move_base.read('~ground_orientation_threshold', 0.9)
128 | move_base.read('~min_obstacle_height', 0.0)
129 | move_base.read('~max_obstacle_height', 2.0)
130 | move_base.read('~min_clearing_height', float('inf'))
131 | move_base.read('~max_clearing_height', float('inf'))
132 | move_base.read('~skip_rays_bottom', 20)
133 | move_base.read('~skip_rays_top', 20)
134 | move_base.read('~skip_rays_left', 20)
135 | move_base.read('~skip_rays_right', 20)
136 | move_base.read('~clear_with_skipped_rays', False)
137 | move_base.read('~observation_persistence', 0.0)
138 | move_base.read('~expected_update_rate', 0.0)
139 | move_base.read('~transform_tolerance', 0.5)
140 | move_base.read('~obstacle_range', None) # TODO is this the right way to handle this?
141 | move_base.read('~raytrace_range', None)
142 |
143 | if publish_observations:
144 | move_base.pub('clearing_obs', 'sensor_msgs/PointCloud')
145 | move_base.pub('marking_obs', 'sensor_msgs/PointCloud')
146 |
147 | depth_topic = move_base.read('depth_topic', '/head_camera/depth_downsample/image_raw')
148 | info_topic = move_base.read('info_topic', "/head_camera/depth_downsample/camera_info")
149 |
150 | move_base.sub(depth_topic, 'sensor_msgs/Image')
151 | move_base.sub(info_topic, 'sensor_msgs/CameraInfo')
152 |
153 | @classmethod
154 | def build(cls, name: str, node_name: str, reference_name: t.Optional[str]) -> 'NavigationPlugin':
155 | return FetchDepthLayerPlugin(name=name, node_name=node_name, reference_name=reference_name)
156 |
157 |
158 | @attr.s(frozen=True, slots=True)
159 | class ObstacleLayerPlugin(NavigationPlugin):
160 | """
161 | The obstacle layer tracks the obstacles as read by the sensor data. The ObstacleCostmapPlugin marks and raytraces
162 | obstacles in two dimensions, while the VoxelCostmapPlugin does so in three dimensions.
163 |
164 | http://wiki.ros.org/costmap_2d/hydro/obstacles
165 | """
166 | class_name = 'costmap_2d::ObstacleLayer'
167 | name: str = attr.ib()
168 | node_name: str = attr.ib()
169 | reference_name: t.Optional[str] = attr.ib()
170 |
171 | def load(self, interpreter: 'Interpreter') -> None:
172 | move_base = get_move_base(interpreter, self.node_name)
173 | top_ns = move_base.name if not self.reference_name else self.reference_name
174 | observation_sources_param = canonical_name(f"/{top_ns}/{self.name}/observation_sources")
175 |         observation_sources = move_base.read(observation_sources_param, "")  # space-separated source names
176 |
177 | for os in observation_sources.split(" "):
178 | if not os:
179 | continue
180 | os_ns = f"/{top_ns}/{self.name}/{os}"
181 | topic = move_base.read(canonical_name(f"{os_ns}/topic"), os)
182 | move_base.read(namespace_join(os_ns, 'sensor_frame'), "")
183 | move_base.read(namespace_join(os_ns, 'observation_persistence'), 0.0)
184 | move_base.read(namespace_join(os_ns, 'expected_update_rate'), 0.0)
185 | data_type = move_base.read(canonical_name(f"{os_ns}/data_type"), "PointCloud")
186 | move_base.read(namespace_join(os_ns, 'min_obstacle_height'), 0.0)
187 | move_base.read(namespace_join(os_ns, 'max_obstacle_height'), 2.0)
188 | move_base.read(namespace_join(os_ns, 'inf_is_valid'), False)
189 | move_base.read(namespace_join(os_ns, 'clearing'), False)
190 | move_base.read(namespace_join(os_ns, 'marking'), True)
191 |
192 | assert data_type in ['PointCloud', 'LaserScan', 'PointCloud2']
193 |
194 | if data_type == 'LaserScan':
195 | move_base.sub(topic, "sensor_msgs/LaserScan")
196 | elif data_type == 'PointCloud2':
197 | move_base.sub(topic, 'sensor_msgs/PointCloud2')
198 | elif data_type == 'PointCloud':
199 | move_base.sub(topic, 'sensor_msgs/PointCloud')
200 |
201 | @classmethod
202 | def build(cls, name: str, node_name: str, reference_name: t.Optional[str]) -> 'NavigationPlugin':
203 | return ObstacleLayerPlugin(name=name, node_name=node_name, reference_name=reference_name)
204 |
205 |
206 | @attr.s(frozen=True, slots=True)
207 | class VoxelLayerPlugin(ObstacleLayerPlugin):
208 | """
209 | Tracks obstacles in three dimensions
210 | """
211 | class_name = "costmap_2d::VoxelLayer"
212 | name: str = attr.ib()
213 | node_name: str = attr.ib()
214 | reference_name: t.Optional[str] = attr.ib()
215 |
216 | def load(self, interpreter: 'Interpreter') -> None:
217 | ObstacleLayerPlugin.load(self, interpreter)
218 | move_base = get_move_base(interpreter, self.node_name)
219 | top_ns = move_base.name if not self.reference_name else self.reference_name
220 | publish_voxel_sources_param = canonical_name(f"/{top_ns}/{self.name}/publish_voxel_map")
221 | publish_voxel = move_base.read(publish_voxel_sources_param, False)
222 | if publish_voxel:
223 | publish_voxel_topic = canonical_name(f"{top_ns}/{self.name}/voxel_grid")
224 | move_base.pub(publish_voxel_topic, 'costmap_2d/VoxelGrid')
225 | clearing_endpoints_topic = canonical_name(f"{top_ns}/{self.name}/clearing_endpoints")
226 | move_base.pub(f"~{clearing_endpoints_topic}", 'sensor_msgs/PointCloud')
227 |
228 | @classmethod
229 | def build(cls, name: str, node_name: str, reference_name: t.Optional[str]) -> 'NavigationPlugin':
230 | return VoxelLayerPlugin(name=name, node_name=node_name, reference_name=reference_name)
231 |
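232 |
233 | # Illustrative sketch only (not part of the plugin library): shows how a single
234 | # costmap plugin specification, as read from the parameter server by the
235 | # move_base model, is dispatched to one of the plugin classes above. The node
236 | # and costmap names are made up for the example. Run it as a module, e.g.
237 | # `python -m rosdiscover.models.plugins.navigation`, so the relative imports
238 | # above resolve.
239 | if __name__ == '__main__':
240 |     spec = {'name': 'static_map', 'type': 'costmap_2d::StaticLayer'}
241 |     plugin = NavigationPlugin.from_dict(spec, node_name='move_base',
242 |                                         reference_name='global_costmap')
243 |     print(plugin)  # a StaticLayerPlugin bound to the move_base node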
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2020 Christopher S. Timperley
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/src/rosdiscover/interpreter/interpreter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from typing import Dict, Iterator, Optional
3 | import contextlib
4 | import types
5 | import typing as t
6 |
7 | from loguru import logger
8 | import roswire
9 | from roswire import AppInstance, ROSVersion
10 | from roswire.common.launch.config import NodeConfig
11 | from roswire.ros1.launch.reader import ROS1LaunchFileReader
12 | from roswire.ros2.launch.reader import ROS2LaunchFileReader
13 |
14 | from .context import NodeContext
15 | from .model import PlaceholderModel
16 | from .summary import SystemSummary
17 | from .parameter import ParameterServer
18 | from ..config import Config
19 | from ..launch import Launch
20 | from ..project import ProjectModels
21 |
22 |
23 | class Interpreter:
24 | """
25 | Attributes
26 | ----------
27 | params: ParameterServer
28 | The simulated parameter server for this interpreter.
29 | """
30 | @classmethod
31 | @contextlib.contextmanager
32 | def for_config(cls,
33 | config: Config
34 | ) -> Iterator['Interpreter']:
35 | """Constructs an interpreter for a given configuration"""
36 | rsw = roswire.ROSWire() # TODO don't maintain multiple instances
37 | with rsw.launch(config.image, config.sources, environment=config.environment) as app:
38 | with Interpreter(config, app) as interpreter:
39 | yield interpreter
40 |
41 | def __init__(
42 | self,
43 | config: Config,
44 | app: roswire.System,
45 | ) -> None:
46 | self._app = app
47 | self.params = ParameterServer()
48 | self.nodes: Dict[str, NodeContext] = {}
49 | self.models = ProjectModels(config, allow_recovery=True)
50 |
51 | def open(self) -> None:
52 | self.models.open()
53 |
54 | def close(self) -> None:
55 | self.models.close()
56 |
57 | def __enter__(self) -> "Interpreter":
58 | self.open()
59 | return self
60 |
61 | def __exit__(
62 | self,
63 | ex_type: t.Optional[t.Type[BaseException]],
64 | ex_val: t.Optional[BaseException],
65 | ex_tb: t.Optional[types.TracebackType],
66 | ) -> None:
67 | self.close()
68 |
69 | @property
70 | def app(self) -> AppInstance:
71 | return self._app
72 |
73 | def summarise(self) -> SystemSummary:
74 | """Produces an immutable description of the system architecture."""
75 | node_summaries = [node.summarise() for node in self.nodes.values()]
76 | node_to_summary = {s.fullname: s for s in node_summaries}
77 | return SystemSummary(node_to_summary)
78 |
79 | def launch(self, launch_description: Launch) -> None:
80 | """Simulates the effects of `roslaunch` using a given launch file."""
81 | # NOTE this method also supports command-line arguments
82 | if self._app.description.distribution.ros == ROSVersion.ROS1:
83 | reader = ROS1LaunchFileReader.for_app_instance(self._app)
84 | else:
85 | reader = ROS2LaunchFileReader.for_app_instance(self._app)
86 | logger.debug(f"get_argv: {launch_description.get_argv()}")
87 | config = reader.read(launch_description.filename, launch_description.get_argv())
88 |
89 | for param in config.params.values():
90 | self.params[param.name] = param.value
91 |
92 | def key(x: NodeConfig) -> str:
93 | if not x.args:
94 | return "a"
95 | assert isinstance(x.args, str)
96 | return "z" if x.typ == "nodelet" and x.args.strip() != 'manager' else "a"
97 |
98 |         # Sort nodes so that nodelets are loaded after their nodelet managers
99 | sorted_nodes = sorted(config.nodes, key=key)
100 |
101 | for node in sorted_nodes:
102 | if not node.filename:
103 | m = ("unable to determine associated launch file for "
104 | f"node: {node}")
105 | raise Exception(m)
106 | logger.debug(f"launching node: {node.name} from {node.filename}")
107 | try:
108 | args = node.args or ''
109 | remappings = {old: new for (old, new) in node.remappings}
110 | self._load(pkg=node.package,
111 | nodetype=node.typ,
112 | name=node.name,
113 | namespace=node.namespace, # FIXME
114 | launch_filename=node.filename,
115 | remappings=remappings,
116 | args=args
117 | )
118 | # FIXME this is waaay too permissive
119 | except Exception:
120 | logger.exception(f"failed to launch node: {node.name}")
121 | raise
122 |
123 | # now that all nodes have been initialised, load all plugins
124 | for node_context in self.nodes.values():
125 | for plugin in node_context._plugins:
126 | plugin.load(self)
127 |
128 | def _create_nodelet_manager(self,
129 | name: str,
130 | namespace: str,
131 | manager: str,
132 | launch_filename: str,
133 | remappings: t.Mapping[str, str]) -> None:
134 | """Creates a nodelet manager with a given name."""
135 | logger.info(f'launched nodelet manager: {manager} as {name}')
136 | ctx = NodeContext(name=name,
137 | namespace=namespace,
138 | kind="nodelet",
139 | package="nodelet",
140 | launch_filename=launch_filename,
141 | remappings=remappings,
142 | files=self._app.files,
143 | params=self.params,
144 | app=self._app,
145 | args='')
146 | self.nodes[ctx.fullname] = ctx
147 |
148 | def _load_nodelet(self,
149 | pkg: str,
150 | nodetype: str,
151 | name: str,
152 | namespace: str,
153 | launch_filename: str,
154 | remappings: Dict[str, str],
155 | manager: Optional[str] = None
156 | ) -> None:
157 | """Loads a nodelet using the provided instructions.
158 |
159 | Parameters
160 | ----------
161 | pkg: str
162 | the name of the package to which the nodelet belongs.
163 | nodetype: str
164 | the name of the type of nodelet that should be loaded.
165 | name: str
166 | the name that should be assigned to the nodelet.
167 | namespace: str
168 | the namespace into which the nodelet should be loaded.
169 | launch_filename: str
170 | the absolute path to the XML launch file where this node
171 | was declared.
172 | remappings: Dict[str, str]
173 | a dictionary of name remappings that should be applied
174 | to this nodelet, where keys correspond to old names and values
175 | correspond to new names.
176 | manager: Optional[str]
177 | the name of the manager, if any, for this nodelet. If
178 | this nodelet is standalone, :code:`manager` should be set to
179 | :code:`None`.
180 |
181 | Raises
182 | ------
183 | Exception
184 | if there is no model for the given nodelet type.
185 | """
186 | if manager:
187 | logger.info(f'launching nodelet [{name}] '
188 | f'inside manager [{manager}] from {launch_filename}')
189 |
190 | return self._load(pkg=pkg,
191 | nodetype=nodetype,
192 | name=name,
193 | namespace=namespace,
194 | launch_filename=launch_filename,
195 | remappings=remappings,
196 | args=f'manager {manager}'
197 | )
198 | else:
199 | logger.info(f'launching standalone nodelet [{name}]')
200 | return self._load(pkg=pkg,
201 | nodetype=nodetype,
202 | name=name,
203 | namespace=namespace,
204 | launch_filename=launch_filename,
205 | remappings=remappings,
206 | args=''
207 | )
208 |
209 | def _load(self,
210 | pkg: str,
211 | nodetype: str,
212 | name: str,
213 | namespace: str,
214 | launch_filename: str,
215 | remappings: Dict[str, str],
216 | args: str,
217 | ) -> None:
218 | """Loads a node using the provided instructions.
219 |
220 | Parameters
221 | ----------
222 | pkg: str
223 | the name of the package to which the node belongs.
224 | nodetype: str
225 | the name of the type of node that should be loaded.
226 | name: str
227 | the name that should be assigned to the node.
228 | namespace: str
229 | the namespace into which the node should be loaded.
230 | launch_filename: str
231 | the absolute path to the XML launch file where this node
232 | was declared.
233 | remappings: Dict[str, str]
234 | a dictionary of name remappings that should be applied
235 | to this node, where keys correspond to old names and values
236 | correspond to new names.
237 | args: str
238 | a string containing command-line arguments to the node.
239 |
240 | Raises
241 | ------
242 | Exception
243 | if there is no model for the given node type.
244 | """
245 | args = args.strip()
246 | split_args = args.split(" ")
247 |         if nodetype == 'nodelet':  # args: "manager", "standalone <pkg>/<type>", or "load <pkg>/<type> <manager> [args]"
248 | if args.startswith('manager'):
249 | manager = args.partition(' ')[2]
250 | return self._create_nodelet_manager(name, namespace, manager, launch_filename, remappings)
251 | elif args.startswith('standalone '):
252 | pkg_and_nodetype = args.partition(' ')[2]
253 | pkg, _, nodetype = pkg_and_nodetype.partition('/')
254 | return self._load_nodelet(pkg=pkg,
255 | nodetype=nodetype,
256 | name=name,
257 | namespace=namespace,
258 | launch_filename=launch_filename,
259 | remappings=remappings
260 | )
261 | else:
262 | load = split_args[0] # noqa: F841
263 | pkg_and_nodetype = split_args[1]
264 | mgr = split_args[2]
265 | nodelet_args = "".join(split_args[3:]) # noqa: F841
266 | pkg, _, nodetype = pkg_and_nodetype.partition('/')
267 | return self._load_nodelet(pkg=pkg,
268 | nodetype=nodetype,
269 | name=name,
270 | namespace=namespace,
271 | launch_filename=launch_filename,
272 | remappings=remappings,
273 | manager=mgr
274 | )
275 |
276 | if remappings:
277 | logger.info(f"using remappings: {remappings}")
278 |
279 | try:
280 | model = self.models.fetch(pkg, nodetype)
281 |             # This is to handle nodelet strangeness:
282 |             # if we cannot find a model by node type, look for one by name
283 | if isinstance(model, PlaceholderModel) and name != nodetype:
284 | model = self.models.fetch(pkg, name)
285 | except Exception:
286 | m = (f"failed to find model for node type [{nodetype}] "
287 | f"in package [{pkg}]")
288 | logger.warning(m)
289 | raise
290 | if args.startswith('manager'):
291 | # This is being loaded into an existing manager, so find that as the context
292 | manager_name = args.split(" ")[1]
293 | if namespace:
294 | manager_name = f"{namespace}/{manager_name}"
295 | manager_name = manager_name.replace('//', '/')
296 | if manager_name in self.nodes:
297 | manager_context = self.nodes[manager_name]
298 | elif f"/{manager_name}" in self.nodes:
299 | manager_context = self.nodes[f"/{manager_name}"]
300 | else:
301 | raise ValueError(f"The nodelet manager {manager_name} has not been launched")
302 | # Create a context for the nodelet
303 | ctx = NodeContext(name=name,
304 | namespace=namespace,
305 | kind=nodetype,
306 | package=pkg,
307 | args=args,
308 | launch_filename=launch_filename,
309 | remappings=remappings,
310 | files=self._app.files,
311 | params=self.params,
312 | app=self._app)
313 | model.eval(ctx)
314 | manager_context.load_nodelet(ctx)
315 |             # Also record the nodelet as a regular node so that it appears in the summary
316 |             # TODO: This needs to be rethought -- we should have a separate NodeletManagerContext
317 |             # that can contain NodeletContexts. This would better model the NodeletManager/
318 |             # Nodelet relationship and would preserve traceability between topics.
319 | self.nodes[ctx.fullname] = ctx
320 | else:
321 | ctx = NodeContext(name=name,
322 | namespace=namespace,
323 | kind=nodetype,
324 | package=pkg,
325 | args=args,
326 | launch_filename=launch_filename,
327 | remappings=remappings,
328 | files=self._app.files,
329 | params=self.params,
330 | app=self._app)
331 | self.nodes[ctx.fullname] = ctx
332 | model.eval(ctx)
333 |
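334 |
335 | # Illustrative sketch only (not part of the class above): the typical call
336 | # sequence for recovering an architecture with the Interpreter, assuming the
337 | # caller has already built a Config and a Launch description (both imported
338 | # above; their constructors are defined elsewhere in this package).
339 | def _example_recover_architecture(config: Config, launch: Launch) -> SystemSummary:
340 |     with Interpreter.for_config(config) as interpreter:
341 |         interpreter.launch(launch)      # simulate roslaunch for the launch file
342 |         return interpreter.summarise()  # immutable summary of the architecture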
--------------------------------------------------------------------------------