├── demos └── hackathon │ ├── .env │ ├── config │ ├── simulation.yaml │ ├── records.yaml │ ├── wgs84_anchor.yaml │ ├── robot │ │ ├── devices │ │ │ ├── remote_controller.joystick.yaml │ │ │ ├── ouster.lidar.yaml │ │ │ ├── robot_view.camera.yaml │ │ │ ├── keyboard.joystick.yaml │ │ │ ├── lms151.lidar.yaml │ │ │ ├── xsens.imu.yaml │ │ │ ├── realsense.rgbd_camera.yaml │ │ │ ├── cultivator.implement.yaml │ │ │ └── septentrio.gps.yaml │ │ ├── teleop.yaml │ │ ├── base.yaml │ │ ├── path_following.yaml │ │ ├── devices.yaml │ │ └── localisation.yaml │ └── evaluation.rviz │ ├── compose.yaml │ └── README.md ├── doc ├── media │ ├── rviz.jpg │ ├── gazebo.jpg │ ├── records.jpg │ ├── teleop_joystick.jpg │ ├── base_control_graph.jpg │ ├── localisation_graph.jpg │ ├── path_following_graph.jpg │ └── path_following_joystick.jpg ├── challenge_configuration.md ├── plots_surveying.md ├── robot_control.md ├── challenge.md ├── robot_configuration.md └── devices_configuration.md ├── .gitignore ├── scripts ├── pull ├── update ├── bind_docker_humble └── update_models ├── docker ├── build.yaml ├── ros_setup.sh ├── ros_entrypoint.sh ├── common.yaml ├── Dockerfile ├── repositories └── repositories.private ├── compose.yaml └── README.md /demos/hackathon/.env: -------------------------------------------------------------------------------- 1 | ../../.env -------------------------------------------------------------------------------- /demos/hackathon/config/simulation.yaml: -------------------------------------------------------------------------------- 1 | world_package: hackathon_bringup 2 | world_name: demo.world 3 | -------------------------------------------------------------------------------- /doc/media/rviz.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/rviz.jpg -------------------------------------------------------------------------------- /doc/media/gazebo.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/gazebo.jpg -------------------------------------------------------------------------------- /doc/media/records.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/records.jpg -------------------------------------------------------------------------------- /demos/hackathon/config/records.yaml: -------------------------------------------------------------------------------- 1 | directory: /tmp/records 2 | config: true 3 | debug: true 4 | log: true 5 | vcs: true 6 | -------------------------------------------------------------------------------- /doc/media/teleop_joystick.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/teleop_joystick.jpg -------------------------------------------------------------------------------- /doc/media/base_control_graph.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/base_control_graph.jpg -------------------------------------------------------------------------------- /doc/media/localisation_graph.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/localisation_graph.jpg -------------------------------------------------------------------------------- /doc/media/path_following_graph.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/path_following_graph.jpg 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /src/ 2 | /build/ 3 | /install/ 4 | /log/ 5 | /gazebo/ 6 | .cache/ 7 | .venv/ 8 | .env 9 | *.dat 10 | recorded.traj 11 | -------------------------------------------------------------------------------- /doc/media/path_following_joystick.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FiraHackathon/hackathon2025_ws/HEAD/doc/media/path_following_joystick.jpg -------------------------------------------------------------------------------- /scripts/pull: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd "$(dirname -- "${BASH_SOURCE[0]}")/.." 3 | 4 | git pull --ff-only 5 | cd src 6 | vcs -sw6 custom --args pull --ff-only 7 | -------------------------------------------------------------------------------- /demos/hackathon/config/wgs84_anchor.yaml: -------------------------------------------------------------------------------- 1 | # unused: already defined in the .world file 2 | latitude: 46.339159 3 | longitude: 3.433923 4 | altitude: 278.142 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/remote_controller.joystick.yaml: -------------------------------------------------------------------------------- 1 | name: "joystick" 2 | driver: 3 | package: "joy" 4 | executable: "joy_node" 5 | parameters: 6 | autorepeat_rate: 10.0 7 | deadzone: 0.1 8 | configuration: 9 | type: xbox 10 | records: 11 | joy: true 12 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/teleop.yaml: -------------------------------------------------------------------------------- 1 | cmd_output: 2 | message_type: romea_mobile_base_msgs/TwoAxleSteeringCommand 3 
| message_priority: 100 4 | cmd_range: 5 | maximal_linear_speed: 6 | slow_mode: 2.0 7 | turbo_mode: 4.0 8 | #maximal_front_steering_angle: 9 | #maximal_rear_steering_angle: 10 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/ouster.lidar.yaml: -------------------------------------------------------------------------------- 1 | name: "lidar" 2 | configuration: 3 | type: ouster 4 | model: os1_32 5 | rate: 10 6 | resolution: 0.17578125 7 | geometry: 8 | parent_link: "base_link" 9 | xyz: [1.2, 0.0, 1.1] 10 | rpy: [0.0, 0.0, 0.0] 11 | records: 12 | scan: true 13 | cloud: false 14 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/robot_view.camera.yaml: -------------------------------------------------------------------------------- 1 | name: robot_view 2 | configuration: 3 | type: axis 4 | model: p1346 5 | resolution: 1280x720 6 | geometry: 7 | parent_link: base_link 8 | xyz: [-4.0, 0.0, 3.2] 9 | rpy: [0.0, 36.0, 0.0] 10 | records: 11 | camera_info: false 12 | image_raw: false 13 | 14 | -------------------------------------------------------------------------------- /docker/build.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | devel: 3 | image: ghcr.io/tirrex-roboterrium/tirrex_workspace:hackathon 4 | build: 5 | context: . 
6 | target: dever 7 | ssh: [default] 8 | args: 9 | - REPOS_FILE=${REPOS_FILE:-repositories.private} 10 | # - FROM_IMAGE=osrf/ros:galactic-desktop 11 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/base.yaml: -------------------------------------------------------------------------------- 1 | name: "base" 2 | configuration: 3 | type: adap2e 4 | model: fat 5 | records: 6 | joint_states: true 7 | controller/odom: true 8 | controller/odometry: true 9 | controller/kinematic: true 10 | simulation: 11 | initial_xyz: [107.795, 123.508, 1.15] 12 | initial_rpy: [0.014, 0.021, 0.372] 13 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/keyboard.joystick.yaml: -------------------------------------------------------------------------------- 1 | name: "keyboard" 2 | driver: 3 | package: "pynput_teleop" 4 | executable: "pynput_joy" 5 | parameters: 6 | up_down: 7 | increment: 0.1 8 | left_right: 9 | value_max: 0.3 10 | publish: 11 | rate: 10 12 | configuration: 13 | type: keyboard 14 | records: 15 | joy: true 16 | -------------------------------------------------------------------------------- /docker/ros_setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | source /usr/share/gazebo/setup.sh 4 | source /opt/ros/$ROS_DISTRO/setup.bash 5 | source $WORKSPACE/install/local_setup.bash || true 6 | 7 | export RCUTILS_COLORIZED_OUTPUT=1 8 | export RCUTILS_CONSOLE_OUTPUT_FORMAT="[{severity}] {name}: {message}" 9 | export GAZEBO_RESOURCE_PATH="$WORKSPACE/gazebo:$GAZEBO_RESOURCE_PATH" 10 | export GAZEBO_MODEL_PATH="$WORKSPACE/gazebo/models:$GAZEBO_MODEL_PATH" 11 | 12 | exec $@ 13 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/lms151.lidar.yaml: 
-------------------------------------------------------------------------------- 1 | name: "lidar2d" 2 | driver: 3 | package: "sick_scan" 4 | executable: "sick_generic_caller" 5 | parameters: 6 | hostname: "192.168.1.112" 7 | port: 2112 8 | configuration: 9 | type: sick 10 | model: lms151 11 | rate: 50 12 | resolution: 0.5 13 | geometry: 14 | parent_link: "base_link" 15 | xyz: [2.02, 0.0, 0.34] 16 | rpy: [0.0, 0.0, 0.0] 17 | records: 18 | scan: true 19 | cloud: false 20 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/xsens.imu.yaml: -------------------------------------------------------------------------------- 1 | name: "imu" 2 | driver: 3 | package: "xsens_driver" 4 | executable: mtnode.py 5 | # package: "bluespace_ai_xsens_mti_driver" 6 | # executable xsens_mti_node 7 | parameters: 8 | device: "/dev/ttyUSB0" 9 | baudrate: 115200 10 | configuration: 11 | type: xsens 12 | model: mti 13 | rate: 100 14 | geometry: 15 | parent_link: "base_link" 16 | xyz: [0.0, 0.0, 0.7] 17 | rpy: [0.0, 0.0, 0.0] 18 | records: 19 | data: true 20 | -------------------------------------------------------------------------------- /docker/ros_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [[ -z "$WORKSPACE" ]] ; then 5 | echo >&2 "Missing environment variable 'WORKSPACE'" 6 | exit 1 7 | fi 8 | cd -- "$WORKSPACE" 9 | 10 | if [[ -z "$USER" ]] ; then 11 | echo >&2 "Missing environment variable 'USER'" 12 | exit 2 13 | fi 14 | 15 | # joy_node needs to access joysticks via event interface 16 | if [[ -d "/dev/input" ]] ; then 17 | chmod -R o+rw /dev/input 18 | fi 19 | 20 | # execute command as USER 21 | exec sudo -snEHu "$USER" -- /ros_setup.sh $@ 22 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/realsense.rgbd_camera.yaml: 
-------------------------------------------------------------------------------- 1 | name: "rgbd_camera" 2 | configuration: 3 | type: realsense 4 | model: d435 5 | rgb_camera: 6 | resolution: 1280x720 7 | infrared_camera: 8 | resolution: 1280x720 9 | depth_camera: 10 | resolution: 1280x720 11 | geometry: 12 | parent_link: "base_link" 13 | xyz: [1.42, 0.0, 1.14] 14 | rpy: [0.0, 20.0, 0.0] 15 | records: 16 | rgb/camera_info: false 17 | rgb/image_raw: true 18 | depth/camera_info: false 19 | depth/image_raw: true 20 | point_cloud/points: true 21 | 22 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/cultivator.implement.yaml: -------------------------------------------------------------------------------- 1 | name: implement 2 | control: 3 | controller_manager: 4 | update_rate: 10 5 | configuration: 6 | pkg: romea_implement_bringup 7 | file: config/hitch_controller_manager.yaml 8 | controllers: 9 | selected: [position_controller] 10 | configuration: 11 | pkg: romea_implement_bringup 12 | file: config/hitch_controllers.yaml 13 | configuration: 14 | type: hitch 15 | model: cultivator 16 | geometry: 17 | parent_link: "base_link" 18 | xyz: [-0.5, 0.0, 1.] 
19 | rpy: [0.0, 0.0, 0.0] 20 | records: 21 | joint_states: true 22 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices/septentrio.gps.yaml: -------------------------------------------------------------------------------- 1 | name: "gps" 2 | driver: 3 | package: romea_gps_driver 4 | executable: tcp_client_node 5 | parameters: 6 | ip: 192.168.0.50 7 | nmea_port: 1001 8 | rtcm_port: 1002 9 | ntrip: 10 | package: ntrip_client 11 | executable: ntrip_ros.py 12 | parameters: 13 | host: caster.centipede.fr 14 | port: 2101 15 | username: centipede 16 | password: centipede 17 | mountpoint: MAGC 18 | configuration: 19 | type: drotek 20 | model: f9p 21 | rate: 10 22 | dual_antenna: true 23 | geometry: 24 | parent_link: "base_link" 25 | xyz: [0.0, 0.3, 1.5] 26 | records: 27 | nmea_sentence: true 28 | gps_fix: false 29 | vel: false 30 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/path_following.yaml: -------------------------------------------------------------------------------- 1 | sampling_period: 10.0 2 | 3 | # configuration of longitudinal control 4 | longitudinal_control: 5 | minimal_linear_speed: 0.3 6 | 7 | # configuration for classic and predictive lateral control laws 8 | lateral_control: 9 | selected: classic 10 | classic: 11 | gains: 12 | front_kd: 0.7 13 | rear_kd: 0.5 14 | predictive: 15 | gains: 16 | front_kd: 0.7 17 | rear_kd: 0.4 18 | prediction: 19 | horizon: 10 20 | a0: 0.1642 21 | a1: 0.1072 22 | b1: 1.0086 23 | b2: -0.2801 24 | 25 | # control setpoint 26 | setpoint: 27 | desired_linear_speed: 2.0 28 | desired_lateral_deviation: 0.0 29 | desired_course_deviation: 0.0 30 | 31 | #configuration for cmd_mux auto connection 32 | cmd_output: 33 | message_type: romea_mobile_base_msgs/TwoAxleSteeringCommand 34 | priority: 10 35 | rate: 10. 
36 | 37 | autostart: true 38 | debug: true 39 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/devices.yaml: -------------------------------------------------------------------------------- 1 | # This file allows to enable / disable sensors of the robot. It is also possible to add new sensor 2 | # by adding lines in this file and creating the corresponding file in the "devices" directory. 3 | # Possible values for 'available_mode': [none, simulation, live, all] 4 | 5 | remote_controller: 6 | type: joystick 7 | available_mode: all 8 | # keyboard: 9 | # type: joystick 10 | # available_mode: all 11 | 12 | lms151: 13 | type: lidar 14 | available_mode: none 15 | ouster: 16 | type: lidar 17 | available_mode: all 18 | septentrio: 19 | type: gps 20 | available_mode: all 21 | xsens: 22 | type: imu 23 | available_mode: all 24 | realsense: 25 | type: rgbd_camera 26 | available_mode: all 27 | 28 | cultivator: 29 | type: implement 30 | available_mode: all 31 | 32 | # add a camera to look at the robot using a third-person view 33 | robot_view: 34 | type: camera 35 | available_mode: all 36 | -------------------------------------------------------------------------------- /scripts/update: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | REPOS_FILE='repositories' 3 | 4 | set -e 5 | cd "$(dirname -- "${BASH_SOURCE[0]}")/.." 6 | 7 | # load variables in .env to set correct value of REPOS_FILE 8 | if [[ -r '.env' ]] ; then 9 | eval $(grep -v '^[UG]ID=' .env) 10 | fi 11 | 12 | 13 | # only add non-existing variables in '.env' file 14 | add_variable_to_env_file() { 15 | if ! 
grep -q "$1=" .env 2>/dev/null ; then 16 | echo " - Add $1=$2 to .env" 17 | echo "$1=$2" >>.env 18 | fi 19 | } 20 | 21 | initialize_env_file() { 22 | echo 'Initialize ".env" file' 23 | add_variable_to_env_file UID "$(id -u)" 24 | add_variable_to_env_file GID "$(id -g)" 25 | add_variable_to_env_file USER "$(whoami)" 26 | add_variable_to_env_file WORKSPACE "$(pwd)" 27 | } 28 | 29 | import_packages() { 30 | echo 'Import packages' 31 | vcs import --recursive -w6 <"docker/$REPOS_FILE" 32 | } 33 | 34 | 35 | initialize_env_file 36 | import_packages 37 | ./scripts/update_models 38 | echo 'Installation completed successfully' 39 | -------------------------------------------------------------------------------- /scripts/bind_docker_humble: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Reverse bind of /opt/ros/humble from the docker to the host (for IDE auto-completion) 3 | if [[ "${USER}" != 'root' ]] ; then 4 | echo >&2 'This command must be run as root' 5 | exit 1 6 | fi 7 | 8 | docker_image='tirrex:latest' 9 | ros_path='/opt/ros/humble' 10 | 11 | cd "$(dirname -- "${BASH_SOURCE[0]}")" 12 | 13 | echo "Start docker container '${docker_image}'" 14 | docker run -d --rm --entrypoint tail "${docker_image}" -f /dev/null 15 | sleep 1 16 | 17 | container_ID="$(docker ps | grep "${docker_image}" | head -1 | cut -d' ' -f1)" 18 | root_path="$(docker inspect --format='{{.GraphDriver.Data.MergedDir}}' "${container_ID}")" 19 | 20 | at_exit() { 21 | echo 'Interrupted. Stopping the container...' 
22 | umount "${ros_path}" 23 | docker kill "${container_ID}" 24 | rmdir "${ros_path}" 25 | } 26 | 27 | echo "Bind '${ros_path}' from the container to the host" 28 | mkdir -p "${ros_path}" 29 | mount --bind "${root_path}${ros_path}" "${ros_path}" 30 | 31 | echo 'You can now open your IDE' 32 | trap at_exit INT QUIT TERM 33 | tail -f /dev/null 34 | -------------------------------------------------------------------------------- /demos/hackathon/config/robot/localisation.yaml: -------------------------------------------------------------------------------- 1 | core: 2 | pkg: romea_robot_to_world_localisation_core 3 | launch: robot_to_world_localisation.launch.py 4 | # configuration: 5 | # filter_type: kalman 6 | # state_pool_size: 1000 7 | # number_of_particle: 200 8 | # maximal_dead_recknoning_travelled_distance: 2. 9 | # maximal_dead_recknoning_elapsed_time: 10. 10 | # publish_rate: 10 11 | # debug: true 12 | plugins: 13 | odo: 14 | pkg: romea_localisation_odo_plugin 15 | launch: odo_plugin.launch.py 16 | # configuration: 17 | # controller_topic: kinematic 18 | # restamping: false 19 | imu: 20 | pkg: romea_localisation_imu_plugin 21 | launch: imu_plugin.launch.py 22 | input_sensor_meta_description_filename: xsens 23 | # configuration: 24 | # restamping: false 25 | gps: 26 | pkg: romea_localisation_gps_plugin 27 | launch: gps_plugin.launch.py 28 | input_sensor_meta_description_filename: septentrio 29 | # configuration: 30 | # restamping: false 31 | # minimal_fix_quality: 4 32 | # minimal_speed_over_ground: 0.5 33 | -------------------------------------------------------------------------------- /scripts/update_models: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | MODELS_ARCHIVE='hackathon2025.tar.xz' 3 | 4 | set -e 5 | cd "$(dirname -- "${BASH_SOURCE[0]}")/.." 
6 | 7 | import_gazebo_models() { 8 | echo 'Import gazebo models' 9 | mkdir -p gazebo 10 | 11 | nextcloud_download='https://nextcloud.inrae.fr/s/4PRo8ebgiLyf788/download' 12 | archive_hash="${MODELS_ARCHIVE}.sha1" 13 | remote_hash="/tmp/${archive_hash}" 14 | local_hash="gazebo/${archive_hash}" 15 | archive="/tmp/${MODELS_ARCHIVE}" 16 | 17 | echo " download hash (${archive_hash})" 18 | wget -q -O "${remote_hash}" "${nextcloud_download}?path=%2F&files=${archive_hash}" 19 | 20 | # download archive only if the remote hash differs from the local one 21 | if [[ ! -r "${local_hash}" ]] || ! cmp -s "${remote_hash}" "${local_hash}" ; then 22 | wget -O "${archive}" "${nextcloud_download}?path=%2F&files=${MODELS_ARCHIVE}" 23 | if tar -C gazebo -xvf "${archive}" ; then 24 | mv -f "${remote_hash}" "${local_hash}" 25 | else 26 | rm "${remote_hash}" 27 | fi 28 | rm "${archive}" 29 | else 30 | echo " models are up-to-date" 31 | rm "${remote_hash}" 32 | fi 33 | } 34 | 35 | import_gazebo_models 36 | -------------------------------------------------------------------------------- /compose.yaml: -------------------------------------------------------------------------------- 1 | x-extends: 2 | x11: &x11 # Create 'x11' aliases that can be referenced using '*x11' 3 | extends: 4 | file: docker/common.yaml 5 | service: x11_base 6 | profiles: [optional] 7 | 8 | base: &base 9 | extends: 10 | file: docker/common.yaml 11 | service: base 12 | profiles: [optional] 13 | 14 | 15 | services: 16 | compile: 17 | <<: *base 18 | environment: # remove annoying warning of setuptools 19 | - PYTHONWARNINGS=ignore:::setuptools.command.easy_install 20 | command: 'colcon --log-base /dev/null build --symlink-install --event-handlers event_log- log-' 21 | 22 | compile_with_log: 23 | <<: *base 24 | environment: # remove annoying warning of setuptools 25 | - PYTHONWARNINGS=ignore:::setuptools.command.easy_install 26 | command: 'colcon build --symlink-install' 27 | 28 | bash: 29 | <<: *x11 30 | command: 'bash 
--rcfile /etc/bash.bashrc' 31 | 32 | adap2e_test: 33 | <<: *x11 34 | command: 'ros2 launch adap2e_bringup adap2e_test.launch.py' 35 | 36 | alpo_test: 37 | <<: *x11 38 | command: 'ros2 launch alpo_bringup alpo_test.launch.py' 39 | 40 | ceol_test: 41 | <<: *x11 42 | command: 'ros2 launch ceol_bringup ceol_test.launch.py' 43 | 44 | robufast_test: 45 | <<: *x11 46 | command: 'ros2 launch robucar_bringup robucar_test.launch.py' 47 | 48 | path_following: 49 | <<: *x11 50 | command: >- 51 | ros2 launch tirrex_path_following path_following.launch.py 52 | robot:=adap2e path:=field.txt 53 | -------------------------------------------------------------------------------- /docker/common.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | base: 3 | build: # use tirrex_workspace image but add the host user in it 4 | dockerfile_inline: | 5 | FROM ghcr.io/tirrex-roboterrium/tirrex_workspace:hackathon 6 | RUN groupadd -g ${GID-} $USER && \ 7 | useradd -u ${UID-} -g ${GID-} -s /bin/bash -d $HOME -m -G dialout $USER 8 | image: hackathon2025_ws:$USER # add username to avoid image collision on multi-user systems 9 | volumes: 10 | - $HOME/.ros:$HOME/.ros:Z # for logs and records 11 | - $WORKSPACE:$WORKSPACE:Z 12 | - $HOME/.config/ros.org:$HOME/.config/ros.org:Z # for rqt tools 13 | env_file: 14 | - ../.env 15 | environment: 16 | - ROS_LOCALHOST_ONLY=1 # avoid sending multicast on the local network of your office 17 | network_mode: host 18 | pid: host # fix a multicast problem, PID are used by DDS to generate unique GUID 19 | ipc: host # used by ROS2 and X11 (for shared memory) 20 | privileged: false 21 | stop_signal: SIGINT # Correctly forward SIGINT on services started with 'compose up' 22 | 23 | x11_base: 24 | extends: 25 | service: base 26 | volumes: 27 | - /tmp/.X11-unix:/tmp/.X11-unix:Z 28 | - $XDG_RUNTIME_DIR:$XDG_RUNTIME_DIR:Z 29 | environment: 30 | - DISPLAY 31 | - XDG_RUNTIME_DIR 32 | - GAZEBO_RESOURCE_PATH 33 | - 
GAZEBO_MODEL_PATH 34 | devices: 35 | - /dev/input/ # for joypads 36 | - /dev/dri/ # for graphic card 37 | 38 | x11_gpu: 39 | extends: 40 | service: base 41 | volumes: 42 | - /tmp/.X11-unix:/tmp/.X11-unix:Z 43 | - $XDG_RUNTIME_DIR:$XDG_RUNTIME_DIR:Z 44 | environment: # these variables allows to use the GPU when the nvidia profile is 'on-demand' 45 | - __NV_PRIME_RENDER_OFFLOAD=1 46 | - __GLX_VENDOR_LIBRARY_NAME=nvidia 47 | - NVIDIA_DRIVER_CAPABILITIES=all 48 | - DISPLAY 49 | - XDG_RUNTIME_DIR 50 | - GAZEBO_RESOURCE_PATH 51 | - GAZEBO_MODEL_PATH 52 | devices: 53 | - /dev/input/ # for joypads 54 | - /dev/dri/ # for graphic card 55 | deploy: 56 | resources: 57 | reservations: 58 | devices: 59 | - driver: nvidia 60 | count: 1 61 | capabilities: [gpu] 62 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG FROM_IMAGE=osrf/ros:humble-desktop 2 | ARG WORKSPACE=/opt/tirrex_ws 3 | ARG REPOS_FILE=./repositories 4 | 5 | # multi-stage for caching 6 | FROM $FROM_IMAGE AS cacher 7 | 8 | ARG WORKSPACE 9 | ARG REPOS_FILE 10 | WORKDIR $WORKSPACE 11 | COPY $REPOS_FILE repos 12 | RUN --mount=type=ssh \ 13 | mkdir -pm 700 ~/.ssh && \ 14 | ssh-keyscan github.com >>~/.ssh/known_hosts && \ 15 | ssh-keyscan gitlab-ssh.irstea.fr >>~/.ssh/known_hosts && \ 16 | ssh-keyscan forgemia.inra.fr >>~/.ssh/known_hosts && \ 17 | vcs import --recursive -w6 /dev/null || true 24 | 25 | 26 | # multi-stage for installing dependencies 27 | FROM $FROM_IMAGE AS installer 28 | 29 | ENV DEBIAN_FRONTEND=noninteractive 30 | RUN --mount=type=cache,target=/var/lib/apt/lists \ 31 | apt-get update && \ 32 | apt-get install -y --no-install-recommends \ 33 | # you can add some ubuntu packages here \ 34 | python3-pip \ 35 | nlohmann-json3-dev \ 36 | libgsl-dev && \ 37 | pip3 install --no-cache-dir \ 38 | # you can add some pip packages here \ 39 | numpy \ 40 | matplotlib \ 41 | 
geojson \ 42 | pyserial \ 43 | transforms3d \ 44 | pymap3d && \ 45 | rm -rf /var/lib/apt/lists/* 46 | 47 | ARG WORKSPACE 48 | WORKDIR $WORKSPACE 49 | 50 | # install package dependencies 51 | COPY --from=cacher /tmp/$WORKSPACE/src ./src 52 | RUN --mount=type=cache,target=/var/lib/apt/lists \ 53 | apt-get update && \ 54 | rosdep install -iyr --from-paths src && \ 55 | rm -rf /var/lib/apt/lists/* 56 | 57 | # fix a bug when rendering shadows in gazebo 58 | RUN sed -i '7i \ \ depth += 0.001; // added to fix a bug' \ 59 | /usr/share/gazebo-11/media/materials/programs/shadow_caster_fp.glsl 60 | 61 | # multi-stage for developing 62 | FROM installer as dever 63 | 64 | ARG WORKSPACE 65 | WORKDIR $WORKSPACE 66 | ENV SHELL=/bin/bash 67 | # Remove the cloned projects to use a volume instead 68 | RUN rm -rf "$WORKSPACE" 69 | 70 | COPY --chmod=755 ros_entrypoint.sh ros_setup.sh / 71 | 72 | RUN echo 'source <(register-python-argcomplete3 ros2)' >>/etc/bash.bashrc 73 | -------------------------------------------------------------------------------- /doc/challenge_configuration.md: -------------------------------------------------------------------------------- 1 | # Hackathon configuration 2 | 3 | In the hackathon configuration directory, you will find two subdirectories (`robot` and `path`) and three main files: `records.yaml`, `simulation.yaml`, and `wgs84_anchor.yaml`. The `robot` directory contains the [configuration details](robot_configuration.md) for the robot, including specifications for the mobile base, onboard sensors, tools, as well as localization and path-following algorithms provided by INRAE. The `path` directory holds predefined trajectories for the path following algorithm. 4 | 5 | #### Simulation configuration: 6 | 7 | For the hackathon, the **Gazebo** simulator was selected for the qualification phase. The configuration file `simulation.yaml` allows you to specify the virtual world where the competition takes place. 
In this case (see below), the selected world is located in the `world` directory of the ROS 2 package `hackathon_bringup` and is named `demo.world`: 8 | 9 | ```yaml 10 | world_package: hackathon_bringup 11 | world_name: demo.world 12 | ``` 13 | 14 | ![gazebo](media/gazebo.jpg) 15 | 16 | #### WGS84 anchor configuration: 17 | 18 | The `wgs84_anchor.yaml` file (see below) defines the geodetic reference coordinates for the challenge. This reference point serves two main purposes: 1) it enables the simulator to generate NMEA frames from the GPS plugin (see the `romea_gps_plugin` ROS2 package), and 2) it allows the localization algorithm to transform GPS data within an ENU frame. 19 | 20 | ```yaml 21 | latitude: 46.339159 22 | longitude: 3.433923 23 | altitude: 278.142 24 | ``` 25 | 26 | #### Records configuration 27 | 28 | The `records.yaml` file configures the recording options. It allows you to specify the directory where the data will be saved (e.g., `/tmp/records/hackathon`) and its contents (see below). In addition to the ROS 2 bag, it is possible to record debug information, logs, configurations, and code versions with their differences. 29 | 30 | ```yaml 31 | directory: /tmp/records 32 | config: true 33 | debug: true 34 | log: true 35 | vcs: true 36 | ``` 37 | 38 | For each recording, a new directory is created with a name based on the date and time when the demo is launched. 
To replay the recorded data, you can use the following command: 39 | 40 | ```bash 41 | docker compose run --rm bash ros2 launch tirrex_demo replay.launch.py replay_directory:=/tmp/records/hackathon/24-12-12-15-51 use_recorded_config:=true 42 | ``` 43 | 44 | ![records](media/records.jpg) 45 | -------------------------------------------------------------------------------- /demos/hackathon/compose.yaml: -------------------------------------------------------------------------------- 1 | x-yaml-anchors: # create anchors "&something" that can be referenced using "*something" 2 | base: &base 3 | extends: 4 | file: ../../docker/common.yaml 5 | service: x11_base 6 | # service: x11_gpu # use this service if you want to use nvidia GPU 7 | volumes: 8 | - ./config:/config:Z 9 | # - /tmp:/tmp # uncomment to read generated files in /tmp 10 | 11 | 12 | services: 13 | bash: # this service can be used to open a shell in the ROS2 environment 14 | <<: [*base] # add elements of 'x-yaml-anchors.base' 15 | profiles: [optional] # avoid starting this service when 'compose up' 16 | command: bash --rcfile /etc/bash.bashrc 17 | 18 | simulator: # start the simulator and spawn the robot 19 | <<: [*base] 20 | command: >- 21 | ros2 launch hackathon_bringup simulator.launch.py 22 | demo_config_directory:=/config 23 | robot_config_directory:=/config/robot 24 | mode:=simulation_gazebo_classic 25 | robot_namespace:=robot 26 | 27 | localisation: # compute absolute localization using GNSS, IMU and odometry measurements 28 | <<: [*base] 29 | depends_on: [simulator] 30 | command: >- 31 | ros2 launch tirrex_demo robot_localisation.launch.py 32 | demo_config_directory:=/config 33 | robot_config_directory:=/config/robot 34 | mode:=simulation_gazebo_classic 35 | robot_namespace:=robot 36 | 37 | path_following: # control the robot to follow the specified trajectory file 38 | <<: [*base] 39 | depends_on: [localisation] 40 | command: >- 41 | ros2 launch tirrex_demo robot_path_following.launch.py 42 | 
demo_config_directory:=/config 43 | robot_config_directory:=/config/robot 44 | mode:=simulation_gazebo_classic 45 | robot_namespace:=robot 46 | trajectory_filename:=test01.traj 47 | 48 | path_recorder: # record a trajectory file from the current localization 49 | <<: [*base] 50 | depends_on: [localisation] 51 | profiles: [optional] 52 | command: >- 53 | ros2 launch tirrex_demo robot_path_recorder.launch.py 54 | demo_config_directory:=/config 55 | robot_config_directory:=/config/robot 56 | mode:=simulation_gazebo_classic 57 | robot_namespace:=robot 58 | trajectory_filename:=/config/paths/recorded.traj 59 | 60 | robot_view: # open a third-person view of the robot 61 | <<: [*base] 62 | stop_signal: SIGTERM 63 | depends_on: [simulator] 64 | profiles: [optional] 65 | command: ros2 run rqt_image_view rqt_image_view /robot/robot_view/image_raw 66 | 67 | evaluation: # start evaluation nodes and publish information for rviz 68 | <<: [*base] 69 | depends_on: [simulator] 70 | command: >- 71 | ros2 launch hackathon_bringup evaluation.launch.py 72 | demo_config_directory:=/config 73 | robot_namespace:=robot 74 | 75 | rviz: # start rviz with the evaluation display configuration 76 | <<: [*base] 77 | depends_on: [simulator] 78 | command: rviz2 -d /config/evaluation.rviz 79 | -------------------------------------------------------------------------------- /doc/plots_surveying.md: -------------------------------------------------------------------------------- 1 | In order to generate a path for the robot, the survey of the agricultural plots is available in 2 | different file formats in the `data` directory of the `hackathon_bringup` package: 3 | 4 | * `field_surveying.json` 5 | * `field_surveying.geojson` 6 | * `field_surveying.csv` 7 | 8 | One of these files should be automatically loaded in your program to compute the path. 9 | They contain lists of points which, if taken in pairs, correspond to the rows of crops to be 10 | treated with the robot implement. 
11 | However, the lines of crops are not straight, so you cannot use them to generate the exact trajectory 12 | to follow. 13 | 14 | 15 | ## Description of the JSON file 16 | 17 | Here is an example of a survey file: 18 | ```json 19 | { 20 | "origin": [ 46.339159, 3.433923 ], 21 | "fields": { 22 | "mixed_field": [ 23 | [ 46.340248, 3.435536, 124.242, 121.087 ], 24 | [ 46.340156, 3.435170, 96.017, 110.921 ], 25 | [ 46.340131, 3.435183, 97.033, 108.099 ], 26 | [ 46.340222, 3.435549, 125.258, 118.265 ], 27 | [ 46.340197, 3.435563, 126.274, 115.442 ], 28 | [ 46.340106, 3.435196, 98.050, 105.277 ] 29 | ], 30 | "sloping_field": [ 31 | [ 46.340913, 3.433783, -10.755, 195.028 ], 32 | [ 46.341092, 3.434376, 34.940, 214.967 ] 33 | ] 34 | } 35 | } 36 | ``` 37 | 38 | Description of the fields of this file: 39 | * __`origin`__: the `[latitude, longitude]` (in WGS84) of the (0,0) point of the world in the local 40 | East-North-Up (ENU) coordinates system used by the gazebo simulator and the localization node (its 41 | frame ID is `map`) 42 | * __`fields`__: the list of fields that the robot has to cover. Each field is an object where the 43 | key is the name of the field and the value is a list of points. Each point is a 4D element corresponding 44 | to `[latitude, longitude, x, y]` (in degrees and meters). If taken in pairs, the points correspond 45 | to the rows of crops to treat. 46 | 47 | 48 | ## Description of the GeoJSON file 49 | 50 | The GeoJSON file contains a set of features of type _MultiLineString_. 51 | Each feature corresponds to a field and each segment of the _MultiLineString_ corresponds to a row 52 | of crops to follow. 53 | The feature also contains properties to store the name of the field and the coordinates of the 54 | points in the (x,y) coordinates of the world.
55 | Here is an example of a feature: 56 | ```json 57 | { 58 | "type": "Feature", 59 | "geometry": { 60 | "type": "MultiLineString", 61 | "coordinates": [ 62 | [ [ 3.433783, 46.340 ], [ 3.434376, 46.341 ] ] 63 | ] 64 | }, 65 | "properties": { 66 | "field_name": "sloping_field", 67 | "xy": [ 68 | [ [ -10.755, 195.028 ], [ 34.940, 214.967 ] ] 69 | ] 70 | } 71 | } 72 | ``` 73 | 74 | ## Description of the CSV file 75 | 76 | The CSV file contains the points of the rows of crops to follow. 77 | A point corresponds to the following columns: 78 | * __`field_name`__: the name of the field 79 | * __`latitude`__: latitude coordinate (WGS84) 80 | * __`longitude`__: longitude coordinate (WGS84) 81 | * __`x`__: _x_ coordinate in the simulator (frame ID: `map`) 82 | * __`y`__: _y_ coordinate in the simulator (frame ID: `map`) 83 | -------------------------------------------------------------------------------- /demos/hackathon/README.md: -------------------------------------------------------------------------------- 1 | This directory corresponds to the entry point of the simulation. 2 | It contains a `compose.yaml` file that contains all the services to start: 3 | * `simulator`: start the simulator and spawn the robot 4 | * `localisation`: compute absolute localization using GNSS, IMU and odometry measurements 5 | * `path_following`: control the robot to follow the specified trajectory file 6 | * `evaluation`: start evaluation nodes and publish information for rviz 7 | * `rviz`: start rviz with the evaluation display configuration 8 | 9 | 10 | ## Running 11 | 12 | To start the demo, just execute the following command (from this directory): 13 | ``` 14 | docker compose up 15 | ``` 16 | 17 | This command will start all the docker services defined in the `compose.yaml` file. 18 | The `bash` service is however not started automatically because it is only used to open a shell in 19 | the docker.
20 | 21 | 22 | ### Keeping the simulator open 23 | 24 | You can start the services individually by specifying their name after the `compose up` arguments. 25 | It can be useful to keep the simulator open while restarting the robot programs. 26 | You can start only the simulator by executing: 27 | ``` 28 | docker compose up -d simulator 29 | ``` 30 | The `-d` option starts it in the background. 31 | After that, you can start everything else using the command 32 | ``` 33 | docker compose up 34 | ``` 35 | This command can be interrupted by typing _Ctrl+C_ and can be executed again while keeping the 36 | simulator open. 37 | When you no longer need the simulator, you can stop it with 38 | ``` 39 | docker compose stop simulator 40 | ``` 41 | 42 | ### Opening a shell in the docker 43 | 44 | If you want to open execute some ROS command, you can open a shell in the ROS2 environment using 45 | ``` 46 | docker compose run bash 47 | ``` 48 | 49 | 50 | ### Re-starting already created containers 51 | 52 | The command `up` allows creating the containers and starting them. 53 | After the containers are created, you can use `start` command to run one or several services in 54 | detached mode and `stop` command to stop them. 55 | At the end, if you want to remove the containers, you can use the `down` command. 56 | 57 | 58 | ### Services with `profiles: [optional]` attribute 59 | 60 | The services that specify a `profiles` attribute are not enabled by default. 61 | Usually, this parameter is used to start certain services by specifying the profile name on the 62 | command line with the `-p optional` option. 63 | In our case, this is used to disable automatic startup of these services. 64 | If you want to start them, you have to specify their name in the command line. 
65 | For example, to start the view of the robot, you have to execute 66 | ``` 67 | docker compose up -d robot_view 68 | ``` 69 | This will also start the simulator (because of the `depends_on` attribute) if it is not already 70 | started. 71 | 72 | 73 | ## Documentation 74 | 75 | * [Scenario of the challenge](/doc/challenge.md) 76 | * [Configuration of the simulation](/doc/challenge_configuration.md) 77 | * [Configuration of the robot](/doc/robot_configuration.md) 78 | * [Configuration of the devices](/doc/devices_configuration.md) 79 | * [Description of the robot control node](/doc/robot_control.md) 80 | * [Description of the provided data files](/doc/plots_surveying.md) 81 | -------------------------------------------------------------------------------- /doc/robot_control.md: -------------------------------------------------------------------------------- 1 | ## Robot control 2 | 3 | #### Base control: 4 | 5 | The mobile base control system operates via a ROS2 controller node called `/robot/base/mobile_base_controller`. To command the robot's movement, simply publish messages into the `/robot/controller/cmd_two_axle_steering` topic using the `romea_mobile_base_msgs/msg/TwoAxleSteeringCommand` message type. This message includes the following parameters: 6 | 7 | ``` 8 | float64 longitudinal_speed # linear speed of robot 9 | float64 front_steering_angle # virtual front steering angle 10 | float64 rear_steering_angle # virtual rear steering angle 11 | ``` 12 | 13 | When multiple algorithms send control commands, a command multiplexer is required to establish priority between them. In our provided example, both the teleoperation node (`/robot/base/teleop`) and the path-following (`/robot/path_following`) node send commands to the controller through the multiplexer node (`/robot/base/cmd_mux`).
As shown in the [configuration file](robot_configuration.md) of these nodes, the teleoperation node has a priority of 100, while the path-following node has a priority of 10. This means that teleoperation commands will take precedence over path-following commands. 14 | 15 | ![base_control](media/base_control_graph.jpg) 16 | 17 | The multiplexer operates by subscribing and unsubscribing topics through the services `/robot/base/cmd_mux/subscribe` and `/robot/base/cmd_mux/unsubscribe`. For instance, if you have an additional command node called `robot/foo` that publishes on `/robot/foo/cmd_two_axle_steering` topic, you can make the multiplexer listen to this new command by calling the `subscribe` service as shown below: 18 | 19 | ```bash 20 | docker compose run --rm bash 21 | ros2 service call /robot/base/cmd_mux/unsubscribe romea_cmd_mux_msgs/srv/Unsubscribe 22 | ros2 service call /robot/base/cmd_mux/subscribe romea_cmd_mux_msgs/srv/Subscribe "{topic: /foo/cmd_two_axle_steering, priority: 50, timeout: 0.1}" 23 | ros2 node info /robot/base/cmd_mux 24 | exit 25 | ``` 26 | 27 | In this configuration, the multiplexer will now listen to `/foo/cmd_two_axle_steering` (as seen below), with a priority higher than the path-following commands but lower than the teleoperation commands.
28 | 29 | ```bash 30 | /robot/base/cmd_mux 31 | Subscribers: 32 | /clock: rosgraph_msgs/msg/Clock 33 | /foo/cmd_two_axle_steering: romea_mobile_base_msgs/msg/TwoAxleSteeringCommand 34 | /parameter_events: rcl_interfaces/msg/ParameterEvent 35 | /robot/base/teleop/cmd_two_axle_steering: romea_mobile_base_msgs/msg/TwoAxleSteeringCommand 36 | /robot/path_following/cmd_two_axle_steering: romea_mobile_base_msgs/msg/TwoAxleSteeringCommand 37 | Publishers: 38 | /diagnostics: diagnostic_msgs/msg/DiagnosticArray 39 | /parameter_events: rcl_interfaces/msg/ParameterEvent 40 | /robot/base/controller/cmd_two_axle_steering: romea_mobile_base_msgs/msg/TwoAxleSteeringCommand 41 | /rosout: rcl_interfaces/msg/Log 42 | Service Servers: 43 | /robot/base/cmd_mux/describe_parameters: rcl_interfaces/srv/DescribeParameters 44 | /robot/base/cmd_mux/get_parameter_types: rcl_interfaces/srv/GetParameterTypes 45 | /robot/base/cmd_mux/get_parameters: rcl_interfaces/srv/GetParameters 46 | /robot/base/cmd_mux/list_parameters: rcl_interfaces/srv/ListParameters 47 | /robot/base/cmd_mux/set_parameters: rcl_interfaces/srv/SetParameters 48 | /robot/base/cmd_mux/set_parameters_atomically: rcl_interfaces/srv/SetParametersAtomically 49 | /robot/base/cmd_mux/subscribe: romea_cmd_mux_msgs/srv/Subscribe 50 | /robot/base/cmd_mux/unsubscribe: romea_cmd_mux_msgs/srv/Unsubscribe 51 | Service Clients: 52 | 53 | Action Servers: 54 | 55 | Action Clients: 56 | ``` 57 | 58 | 59 | 60 | #### Implement control: 61 | 62 | -------------------------------------------------------------------------------- /doc/challenge.md: -------------------------------------------------------------------------------- 1 | # Scenario of the challenge 2 | 3 | The aim of this challenge is to develop a robot control program to perform an agricultural task 4 | autonomously.
5 | It is composed of the following steps: 6 | * follow a path provided by the organization team, 7 | * perform an agricultural task (weeding) in a crops field, 8 | * avoid static and mobile obstacles (objects, humans, vehicles, ditches), 9 | * indoor robot navigation (degraded GNSS signal inside buildings), 10 | * robot control and stability on sloping terrain. 11 | * Implement management 12 | 13 | During the preliminary phase, you will use a simulation environment based on _Gazebo Classic_ 14 | simulator. 15 | This environment corresponds to a farm containing a crop field, a robot you control and various 16 | elements. 17 | The robot is equipped with a hoeing implement to treat the plot and a set of sensors to perceive the 18 | environment and localize the robot. 19 | All the elements are interfaced using ROS2 topics, services and actions. 20 | The description of the ROS interface [is available here](robot_control.md). 21 | There is also a [documentation of the robot configuration](robot_configuration.md). 22 | 23 | The final phase of the competition, to be held at FIRA 2025, will replace the Gazebo simulator with 24 | 4DV simulator. 25 | The latter will run on a dedicated machine and provide more realistic sensor information. 26 | However, the robot interface will stay the same. 27 | 28 | You don't have to develop a solution capable of solving every challenge. 29 | The difficulty level will increase progressively. 30 | 31 | 32 | ## Details of the different steps 33 | 34 | ### Follow a path 35 | 36 | We provide a reference path that the robot have to follow throughout the challenge. 37 | This path passes through the farm plot, a building and a sloping area. 38 | You don't need to follow this path precisely, but you do need to stay within an interval around it. 39 | It is therefore possible to calculate more optimized trajectory to save time and improve your score. 40 | However, if the trajectory leaves the interval, this will result in a penalty on the score. 
41 | 42 | The path does not follow the crop fields. 43 | As a result, following perfectly the absolute path will lead to crushing plants of interest. 44 | You can generate a more precise path using the survey file provided. 45 | For more information about this file, please refer to [this documentation](plots_surveying.md). 46 | 47 | We also provide some ROS nodes: 48 | * `robot_to_world_localisation` to fuse odometry, GNSS and IMU using a Kalman filter 49 | * `path_matching` to compute the lateral and angular deviation to the path 50 | * `path_following` to compute the speed and steering angles to send to the robot 51 | 52 | You can modify these programs or use your own solutions. 53 | If you rewrite `path_matching`, you will need to load the provided path file yourself. 54 | This file is written using the [TIARA trajectory 55 | format](https://github.com/Romea/romea-ros-path-tools/blob/main/tiara_format.md). 56 | Some tools are provided in the [`romea_path_tools`](https://github.com/Romea/romea-ros-path-tools) 57 | package to visualize, convert or generate paths in this format using its python library. 58 | 59 | 60 | ### Weeding the agricultural plot 61 | 62 | The robot's path crosses a field containing various plants (beans, corn and wheat). 63 | You need to use the weeder to treat the entire plot. 64 | However, you will need to have a good perception of the crop rows to avoid driving over them because 65 | the lines are not straight. 66 | There is also an obstacle in the ground which requires to raise the implement of the robot. 67 | Collisions with crops and implement will result in penalties to the score. 68 | 69 | The localization of the plots is known before starting the simulation by reading [the plots 70 | surveying file](plots_surveying.md). 71 | It allows to know when the implement should be raised or lowered. 72 | It is mandatory to follow the rows in the order specified in the plots surveying file. 
73 | 74 | 75 | ### Avoiding obstacles 76 | 77 | Obstacles are placed all along the trajectory (except in the field and the sloping area). 78 | Some are static and prevent the robot from following its path. 79 | Some are mobile and move cross the robot path. 80 | There are also moving obstacles that can be represented by humans or other robots. 81 | Thus, you need to use the exteroceptive sensors of the robot and compute an avoidance path in real 82 | time. 83 | The sensors are configurable ([description of the configuration](devices_configuration.md)). 84 | Collisions with obstacles or any element of the environment will result in penalties to the score. 85 | 86 | 87 | ### Indoor localization 88 | 89 | A part of the robot's trajectory passes inside a building. 90 | To best represent reality, we simulate a loss of the GNSS signal when the robot enters the building 91 | by causing a random offset in the estimated position. 92 | For the final phase, the 4DV simulator performs a more realistic GNSS simulation by simulating the 93 | signal bouncing off the walls of the buildings. 94 | You will therefore need to take this degradation into account and try to maintain good localization 95 | by relying on the robot's other sensors. 96 | 97 | If you have not implemented this part of the challenge, it is possible to disable it (i.e. use the `main` branch of romea_gps instead of `fira_hackathon2025` in https://github.com/FiraHackathon/hackathon2025_ws/blob/main/docker/repositories#L68), but it will 98 | induce penalties on the score. 99 | 100 | 101 | ### Robot control on sloping terrain 102 | 103 | Another part of the trajectory passes through a sloped area. 104 | The goal of this part of the challenge is to control the robot so that it stays as close as possible 105 | to the reference trajectory, despite the slippery terrain. 106 | The score will be calculated based on the measurement of the lateral deviation, as measured by the 107 | `path_matching` node. 
108 | 109 | 110 | ## Score calculation 111 | 112 | The score calculation is based on: 113 | * the total time to reach the end of the path 114 | * the surface covered by the weeder in the field 115 | 116 | There is also different source of penalties: 117 | * crushing crops 118 | * colliding with obstacles 119 | * keeping implement down outside the crop fields 120 | * leaving the geofence 121 | -------------------------------------------------------------------------------- /docker/repositories: -------------------------------------------------------------------------------- 1 | repositories: 2 | # -------- core -------------------------- 3 | src/romea_core/romea_common: 4 | type: git 5 | url: https://github.com/Romea/romea-core-common.git 6 | version: main 7 | src/romea_core/romea_gps: 8 | type: git 9 | url: https://github.com/Romea/romea-core-gps.git 10 | version: main 11 | src/romea_core/romea_imu: 12 | type: git 13 | url: https://github.com/Romea/romea-core-imu.git 14 | version: main 15 | src/romea_core/romea_lidar: 16 | type: git 17 | url: https://github.com/Romea/romea-core-lidar.git 18 | version: main 19 | src/romea_core/romea_mobile_base: 20 | type: git 21 | url: https://github.com/Romea/romea-core-mobile-base.git 22 | version: main 23 | src/romea_core/romea_filtering: 24 | type: git 25 | url: https://github.com/Romea/romea-core-filtering.git 26 | version: main 27 | src/romea_core/romea_localisation: 28 | type: git 29 | url: https://github.com/Romea/romea-core-localisation.git 30 | version: main 31 | src/romea_core/romea_localisation_imu: 32 | type: git 33 | url: https://github.com/Romea/romea-core-localisation-imu.git 34 | version: main 35 | src/romea_core/romea_localisation_gps: 36 | type: git 37 | url: https://github.com/Romea/romea-core-localisation-gps.git 38 | version: main 39 | src/romea_core/romea_control: 40 | type: git 41 | url: https://github.com/Romea/romea-core-control.git 42 | version: main 43 | src/romea_core/romea_path: 44 | type: git 45 | 
url: https://github.com/Romea/romea-core-path.git 46 | version: main 47 | src/romea_core/romea_path_matching: 48 | type: git 49 | url: https://github.com/Romea/romea_core_path_matching.git 50 | version: main 51 | src/romea_core/romea_path_following: 52 | type: git 53 | url: https://github.com/Romea/romea-core-path-following.git 54 | version: main 55 | 56 | # -------- ros2 device interfaces -------------------------- 57 | src/romea_ros2/interfaces/implements/romea_implement: 58 | type: git 59 | url: https://github.com/Romea/romea-ros2-implement.git 60 | version: main 61 | src/romea_ros2/interfaces/manipulators/romea_arm: 62 | type: git 63 | url: https://github.com/Romea/romea-ros2-arm.git 64 | version: main 65 | src/romea_ros2/interfaces/sensors/romea_gps: 66 | type: git 67 | url: https://github.com/FiraHackathon/romea_gps.git 68 | version: fira_hackathon2025 69 | src/romea_ros2/interfaces/sensors/romea_imu: 70 | type: git 71 | url: https://github.com/Romea/romea-ros2-imu.git 72 | version: main 73 | src/romea_ros2/interfaces/sensors/romea_lidar: 74 | type: git 75 | url: https://github.com/Romea/romea-ros2-lidar.git 76 | version: main 77 | src/romea_ros2/interfaces/sensors/romea_camera: 78 | type: git 79 | url: https://github.com/Romea/romea-ros2-camera.git 80 | version: main 81 | src/romea_ros2/interfaces/sensors/romea_stereo_camera: 82 | type: git 83 | url: https://github.com/Romea/romea-ros2-stereo-camera.git 84 | version: main 85 | src/romea_ros2/interfaces/sensors/romea_rgbd_camera: 86 | type: git 87 | url: https://github.com/Romea/romea-ros2-rgbd-camera.git 88 | version: main 89 | src/romea_ros2/interfaces/teleoperation/romea_joy: 90 | type: git 91 | url: https://github.com/Romea/romea-ros2-joy.git 92 | version: main 93 | src/romea_ros2/interfaces/teleoperation/romea_teleop: 94 | type: git 95 | url: https://github.com/Romea/romea-ros2-teleop.git 96 | version: main 97 | src/romea_ros2/interfaces/teleoperation/pynput_teleop: 98 | type: git 99 | url: 
https://forgemia.inra.fr/tscf/ros2/pynput_teleop.git 100 | version: main 101 | 102 | # -------- ros2 vehicle interfaces -------------------------- 103 | src/romea_ros2/interfaces/vehicles/romea_mobile_base: 104 | type: git 105 | url: https://github.com/Romea/romea-ros2-mobile-base.git 106 | version: main 107 | src/romea_ros2/interfaces/vehicles/adap2e: 108 | type: git 109 | url: https://github.com/Romea/adap2e.git 110 | version: main 111 | src/romea_ros2/interfaces/vehicles/alpo: 112 | type: git 113 | url: https://github.com/Romea/alpo.git 114 | version: main 115 | src/romea_ros2/interfaces/vehicles/ceol: 116 | type: git 117 | url: https://github.com/Romea/ceol.git 118 | version: main 119 | src/romea_ros2/interfaces/vehicles/robufast: 120 | type: git 121 | url: https://github.com/Romea/robucar.git 122 | version: main 123 | 124 | # -------- algorithms-------------------------- 125 | src/romea_ros2/algorithms/localisation/romea_localisation_bringup: 126 | type: git 127 | url: https://github.com/Romea/romea-ros2-localisation-bringup.git 128 | version: main 129 | src/romea_ros2/algorithms/localisation/romea_localisation_msgs: 130 | type: git 131 | url: https://github.com/Romea/romea-ros2-localisation-msgs.git 132 | version: main 133 | src/romea_ros2/algorithms/localisation/romea_localisation_utils: 134 | type: git 135 | url: https://github.com/Romea/romea-ros2-localisation-utils.git 136 | version: main 137 | src/romea_ros2/algorithms/localisation/romea_localisation_odo: 138 | type: git 139 | url: https://github.com/Romea/romea-ros2-localisation-odo-plugin.git 140 | version: main 141 | src/romea_ros2/algorithms/localisation/romea_localisation_gps: 142 | type: git 143 | url: https://github.com/Romea/romea-ros2-localisation-gps-plugin.git 144 | version: main 145 | src/romea_ros2/algorithms/localisation/romea_localisation_imu: 146 | type: git 147 | url: https://github.com/Romea/romea-ros2-localisation-imu-plugin.git 148 | version: main 149 | 
src/romea_ros2/algorithms/localisation/romea_robot_to_world_localisation: 150 | type: git 151 | url: https://github.com/Romea/romea-ros2-robot-to-world-localisation-core.git 152 | version: main 153 | src/romea_ros2/algorithms/control/path_following/romea_path: 154 | type: git 155 | url: https://github.com/Romea/romea-ros2-path.git 156 | version: main 157 | src/romea_ros2/algorithms/control/path_following/romea_path_matching: 158 | type: git 159 | url: https://github.com/Romea/romea-ros2-path-matching.git 160 | version: main 161 | src/romea_ros2/algorithms/control/path_following/romea_path_following: 162 | type: git 163 | url: https://github.com/Romea/romea-ros2-path-following.git 164 | version: main 165 | 166 | # -------- tirrex -------------------------- 167 | 168 | src/romea_ros2/demos/tirrex/core: 169 | type: git 170 | url: https://github.com/Tirrex-Roboterrium/tirrex_demo.git 171 | version: main 172 | src/romea_ros2/demos/tirrex/robots/adap2e: 173 | type: git 174 | url: https://github.com/Tirrex-Roboterrium/tirrex_adap2e.git 175 | version: main 176 | src/romea_ros2/demos/tirrex/robots/alpo: 177 | type: git 178 | url: https://github.com/Tirrex-Roboterrium/tirrex_alpo.git 179 | version: main 180 | src/romea_ros2/demos/tirrex/robots/ceol: 181 | type: git 182 | url: https://github.com/Tirrex-Roboterrium/tirrex_ceol.git 183 | version: main 184 | src/romea_ros2/demos/tirrex/robots/robufast: 185 | type: git 186 | url: https://github.com/Tirrex-Roboterrium/tirrex_robufast.git 187 | version: main 188 | src/romea_ros2/demos/tirrex/path_following: 189 | type: git 190 | url: https://github.com/Tirrex-Roboterrium/tirrex_path_following.git 191 | version: main 192 | 193 | # -------- ros2 tools -------------------------- 194 | src/romea_ros2/tools/romea_common: 195 | type: git 196 | url: https://github.com/Romea/romea-ros2-common.git 197 | version: main 198 | src/romea_ros2/tools/romea_cmd_mux: 199 | type: git 200 | url: https://github.com/Romea/romea-ros2-cmd_mux.git 201 | 
version: main 202 | src/romea_ros2/tools/romea_path_tools: 203 | type: git 204 | url: https://github.com/Romea/romea-ros-path-tools.git 205 | version: main 206 | src/romea_ros2/tools/romea_simulation: 207 | type: git 208 | url: https://github.com/Romea/romea-ros2-simulation.git 209 | version: main 210 | 211 | # -------- third party -------------------------- 212 | src/third_party/serial: 213 | type: git 214 | url: https://github.com/Romea-Third-Party/serial-ros2.git 215 | version: romea 216 | src/third_party/nmea_navsat_driver: 217 | type: git 218 | url: https://github.com/Romea-Third-Party/nmea_navsat_driver.git 219 | version: romea 220 | src/third_party/rviz_2d_overlay_plugins: 221 | type: git 222 | url: https://github.com/teamspatzenhirn/rviz_2d_overlay_plugins.git 223 | version: main 224 | 225 | # -------- hackathon -------------------------- 226 | src/romea_ros2/tools/gazebo_classic_tools: 227 | type: git 228 | url: https://github.com/FiraHackathon/gazebo_classic_tools.git 229 | version: main 230 | src/romea_ros2/demos/hackathon_bringup: 231 | type: git 232 | url: https://github.com/FiraHackathon/hackathon_bringup.git 233 | version: main 234 | src/romea_ros2/tools/hackathon_evaluation: 235 | type: git 236 | url: https://github.com/FiraHackathon/hackathon_evaluation.git 237 | version: main 238 | # vim: ft=yaml 239 | -------------------------------------------------------------------------------- /docker/repositories.private: -------------------------------------------------------------------------------- 1 | repositories: 2 | # -------- core -------------------------- 3 | src/romea_core/romea_common: 4 | type: git 5 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_common.git 6 | version: main 7 | src/romea_core/romea_gps: 8 | type: git 9 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_gps.git 10 | version: main 11 | src/romea_core/romea_imu: 12 | type: git 13 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_imu.git 14 | version: main 15 | 
src/romea_core/romea_lidar: 16 | type: git 17 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_lidar.git 18 | version: main 19 | src/romea_core/romea_mobile_base: 20 | type: git 21 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_mobile_base.git 22 | version: main 23 | src/romea_core/romea_filtering: 24 | type: git 25 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_filtering.git 26 | version: main 27 | src/romea_core/romea_localisation: 28 | type: git 29 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_localisation.git 30 | version: main 31 | src/romea_core/romea_localisation_imu: 32 | type: git 33 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_localisation_imu.git 34 | version: main 35 | src/romea_core/romea_localisation_gps: 36 | type: git 37 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_localisation_gps.git 38 | version: main 39 | src/romea_core/romea_control: 40 | type: git 41 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_control.git 42 | version: main 43 | src/romea_core/romea_path: 44 | type: git 45 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_path.git 46 | version: main 47 | src/romea_core/romea_path_matching: 48 | type: git 49 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_path_matching.git 50 | version: main 51 | src/romea_core/romea_path_following: 52 | type: git 53 | url: git@gitlab-ssh.irstea.fr:romea_core/romea_path_following.git 54 | version: main 55 | 56 | # -------- ros2 interfaces -------------------------- 57 | src/romea_ros2/interfaces/implements/romea_implement: 58 | type: git 59 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/implements/romea_implement.git 60 | version: main 61 | src/romea_ros2/interfaces/manipulators/romea_arm: 62 | type: git 63 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/manipulators/romea_arm.git 64 | version: main 65 | src/romea_ros2/interfaces/sensors/romea_gps: 66 | type: git 67 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/sensors/romea_gps.git 68 | version: main 69 | 
src/romea_ros2/interfaces/sensors/romea_imu: 70 | type: git 71 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/sensors/romea_imu.git 72 | version: main 73 | src/romea_ros2/interfaces/sensors/romea_lidar: 74 | type: git 75 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/sensors/romea_lidar.git 76 | version: main 77 | src/romea_ros2/interfaces/sensors/romea_camera: 78 | type: git 79 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/sensors/romea_camera.git 80 | version: main 81 | src/romea_ros2/interfaces/sensors/romea_stereo_camera: 82 | type: git 83 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/sensors/romea_stereo_camera.git 84 | version: main 85 | src/romea_ros2/interfaces/sensors/romea_rgbd_camera: 86 | type: git 87 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/sensors/romea_rgbd_camera.git 88 | version: main 89 | src/romea_ros2/interfaces/teleoperation/romea_joy: 90 | type: git 91 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/teleoperation/romea_joy.git 92 | version: main 93 | src/romea_ros2/interfaces/teleoperation/romea_teleop: 94 | type: git 95 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/teleoperation/romea_teleop.git 96 | version: main 97 | src/romea_ros2/interfaces/teleoperation/pynput_teleop: 98 | type: git 99 | url: git@forgemia.inra.fr:tscf/ros2/pynput_teleop.git 100 | version: main 101 | 102 | # -------- ros2 vehicle interfaces -------------------------- 103 | src/romea_ros2/interfaces/vehicles/romea_mobile_base: 104 | type: git 105 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/vehicles/romea_mobile_base.git 106 | version: main 107 | src/romea_ros2/interfaces/vehicles/adap2e: 108 | type: git 109 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/vehicles/adap2e.git 110 | version: main 111 | src/romea_ros2/interfaces/vehicles/alpo: 112 | type: git 113 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/vehicles/alpo.git 114 | version: main 115 | src/romea_ros2/interfaces/vehicles/ceol: 116 | 
type: git 117 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/vehicles/ceol.git 118 | version: main 119 | src/romea_ros2/interfaces/vehicles/robufast: 120 | type: git 121 | url: git@gitlab-ssh.irstea.fr:romea_ros2/interfaces/vehicles/robucar.git 122 | version: main 123 | 124 | # -------- ros2 algorithms -------------------------- 125 | src/romea_ros2/algorithms/localisation/romea_localisation_bringup: 126 | type: git 127 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_localisation_bringup.git 128 | version: main 129 | src/romea_ros2/algorithms/localisation/romea_localisation_msgs: 130 | type: git 131 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_localisation_msgs.git 132 | version: main 133 | src/romea_ros2/algorithms/localisation/romea_localisation_utils: 134 | type: git 135 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_localisation_utils.git 136 | version: main 137 | src/romea_ros2/algorithms/localisation/romea_localisation_odo: 138 | type: git 139 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_localisation_odo_plugin.git 140 | version: main 141 | src/romea_ros2/algorithms/localisation/romea_localisation_gps: 142 | type: git 143 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_localisation_gps_plugin.git 144 | version: main 145 | src/romea_ros2/algorithms/localisation/romea_localisation_imu: 146 | type: git 147 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_localisation_imu_plugin.git 148 | version: main 149 | src/romea_ros2/algorithms/localisation/romea_robot_to_world_localisation: 150 | type: git 151 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/localisation/romea_robot_to_world_localisation_core.git 152 | version: main 153 | src/romea_ros2/algorithms/control/path_following/romea_path: 154 | type: git 155 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/control/path_following/romea_path.git 
156 | version: main 157 | src/romea_ros2/algorithms/control/path_following/romea_path_matching: 158 | type: git 159 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/control/path_following/romea_path_matching.git 160 | version: main 161 | src/romea_ros2/algorithms/control/path_following/romea_path_following: 162 | type: git 163 | url: git@gitlab-ssh.irstea.fr:romea_ros2/algorithms/control/path_following/romea_path_following2.git 164 | version: main 165 | 166 | 167 | # -------- tirrex -------------------------- 168 | src/romea_ros2/demos/tirrex/core: 169 | type: git 170 | url: git@gitlab-ssh.irstea.fr:romea_projects/tirrex/tirrex_demo.git 171 | version: main 172 | src/romea_ros2/demos/tirrex/robots/adap2e: 173 | type: git 174 | url: git@gitlab-ssh.irstea.fr:romea_projects/tirrex/adap2e.git 175 | version: main 176 | src/romea_ros2/demos/tirrex/robots/alpo: 177 | type: git 178 | url: git@gitlab-ssh.irstea.fr:romea_projects/tirrex/alpo.git 179 | version: main 180 | src/romea_ros2/demos/tirrex/robots/ceol: 181 | type: git 182 | url: git@gitlab-ssh.irstea.fr:romea_projects/tirrex/ceol.git 183 | version: main 184 | src/romea_ros2/demos/tirrex/robots/robufast: 185 | type: git 186 | url: git@gitlab-ssh.irstea.fr:romea_projects/tirrex/robufast.git 187 | version: main 188 | src/romea_ros2/demos/tirrex/path_following: 189 | type: git 190 | url: git@gitlab-ssh.irstea.fr:romea_projects/tirrex/path_following.git 191 | version: main 192 | 193 | # -------- ros2 tools -------------------------- 194 | src/romea_ros2/tools/romea_common: 195 | type: git 196 | url: git@gitlab-ssh.irstea.fr:romea_ros2/tools/romea_common.git 197 | version: main 198 | src/romea_ros2/tools/romea_cmd_mux: 199 | type: git 200 | url: git@gitlab-ssh.irstea.fr:romea_ros2/tools/romea_cmd_mux.git 201 | version: main 202 | src/romea_ros2/tools/romea_simulation: 203 | type: git 204 | url: git@gitlab-ssh.irstea.fr:romea_ros2/simulation/romea_simulation.git 205 | version: main 206 | 
src/romea_ros2/tools/romea_path_tools: 207 | type: git 208 | url: git@gitlab-ssh.irstea.fr:romea_ros2/tools/romea_path_tools.git 209 | version: main 210 | 211 | # -------- third party -------------------------- 212 | src/third_party/serial: 213 | type: git 214 | url: git@gitlab-ssh.irstea.fr:romea_ros2/third_party/serial.git 215 | version: romea 216 | src/third_party/nmea_navsat_driver: 217 | type: git 218 | url: git@gitlab-ssh.irstea.fr:romea_ros2/third_party/nmea_navsat_driver.git 219 | version: romea 220 | src/third_party/rviz_2d_overlay_plugins: 221 | type: git 222 | url: https://github.com/teamspatzenhirn/rviz_2d_overlay_plugins.git 223 | version: main 224 | 225 | # -------- hackathon -------------------------- 226 | src/romea_ros2/tools/gazebo_classic_tools: 227 | type: git 228 | url: git@github.com:FiraHackathon/gazebo_classic_tools.git 229 | version: main 230 | src/romea_ros2/demos/hackathon_bringup: 231 | type: git 232 | url: git@github.com:FiraHackathon/hackathon_bringup.git 233 | version: main 234 | src/romea_ros2/tools/hackathon_evaluation: 235 | type: git 236 | url: git@github.com:FiraHackathon/hackathon_evaluation.git 237 | version: main 238 | 239 | # vim: ft=yaml 240 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This project corresponds to the workspace of the FIRA 2025 hackathon. 2 | It allows installing everything required to run the challenges and demos of the hackathon. 3 | It is based on the [TIRREX workspace](https://github.com/Tirrex-Roboterrium/tirrex_workspace) 4 | developed for the [_roboterrium_ platform of the TIRREX project](https://tirrex.fr/plateforme/roboterrium/). 
5 | 6 | 7 | ## Contents 8 | 9 | * [Installation](#installation) 10 | * [Installation(for INRAE developers)](#installation-only-for-inrae-developers) 11 | * [Usage](#usage) 12 | * [Architecture of the workspace](#architecture-of-the-workspace) 13 | * [FAQ](#faq) 14 | 15 | Other documents available: 16 | 17 | * [Scenario of the challenge](/doc/challenge.md) 18 | * [Configuration of the simulation](/doc/challenge_configuration.md) 19 | * [Configuration of the robot](/doc/robot_configuration.md) 20 | * [Configuration of the devices](/doc/devices_configuration.md) 21 | * [Description of the robot control node](/doc/robot_control.md) 22 | * [Description of the provided data files](/doc/plots_surveying.md) 23 | 24 | 25 | ## Installation 26 | 27 | ### Create workspace 28 | 29 | You need to install `vcstool`. You can install it using pip: 30 | ``` 31 | pip3 install vcstool 32 | vcs --version 33 | ``` 34 | 35 | If the `vcs` command is not found, you can follow [these 36 | instructions](#the-vcs-command-is-not-found). 37 | Alternatively, if you use Ubuntu 20 or 22, you can install `vcs` using `apt`: 38 | ``` 39 | sudo apt install python3-vcstool 40 | ``` 41 | and for Ubuntu 24, the package is now named `vcstool`: 42 | ``` 43 | sudo apt install vcstool 44 | ``` 45 | 46 | Clone this project and go to the root: 47 | ``` 48 | git clone https://github.com/FiraHackathon/hackathon2025_ws.git 49 | cd hackathon2025_ws 50 | ``` 51 | 52 | All the following commands must be run from the root of this project. 53 | Execute this script to clone all the ROS packages and download the gazebo models: 54 | ``` 55 | ./scripts/update 56 | ``` 57 | 58 | ### Build the docker image and compile 59 | 60 | The recommended method for compiling the workspace and running the programs is to use docker. 61 | If you do not want to use docker, and you are using Ubuntu 22.04 and ROS Humble, you can skip these 62 | steps and directly use `colcon` and `ros2` commands. 
63 | However, the following instructions assume that docker is being used. 64 | 65 | You first need to install a recent version of docker compose by [following the instruction on the 66 | docker documentation](https://docs.docker.com/compose/install/linux/). 67 | If it is already installed, you can check that its version is greater or equal to `2.20` using the 68 | command: 69 | ``` 70 | docker compose version 71 | ``` 72 | 73 | By default, running docker is only available for root user. 74 | You have to add your user to the `docker` group (and create it if it does not exist) to execute 75 | docker commands with your own user. 76 | ``` 77 | sudo groupadd docker 78 | sudo usermod -aG docker $USER 79 | ``` 80 | Bear in mind that by executing these commands, you are giving your user privileges equivalent to 81 | those of root using docker commands. 82 | For more details, see [Docker Daemon Attack 83 | Surface](https://docs.docker.com/engine/security/#docker-daemon-attack-surface) 84 | 85 | Once your user is added to the `docker` group, you need to reboot (or log out / log in and restart 86 | the docker daemon). 87 | 88 | After that, you can build the image (the first time) and compile the workspace: 89 | ``` 90 | docker compose run --rm compile 91 | ``` 92 | This command will: 93 | * pull the latest image containing a ROS environment with all the workspace dependencies installed 94 | * build a local image based on the previous one but including a copy of your local 95 | user in order to execute every command using the same user as the host system 96 | * run the `compile` service that execute a `colcon build` command to compile everything 97 | 98 | If you modify some packages of the workspace, you need to re-execute this command regularly. 99 | 100 | 101 | ## Installation (only for INRAE developers) 102 | 103 | If you are an INRAE developer with access to the projects on gitlab.irstea.fr, you can use 104 | alternative repositories with ssh URLs. 
105 | To do that, you have to add `REPOS_FILE` in the `.env` file before creating the workspace and 106 | building the docker image. 107 | Execute the following commands: 108 | ```bash 109 | git clone git@github.com:FiraHackathon/hackathon2025_ws.git 110 | cd hackathon2025_ws 111 | echo 'REPOS_FILE=repositories.private' >> .env 112 | ./scripts/update 113 | docker compose run --rm --build compile 114 | ``` 115 | 116 | ## Usage 117 | 118 | You can run the hackathon demo by starting the services in `demos/hackathon` 119 | ``` 120 | cd demos/hackathon 121 | docker compose up 122 | ``` 123 | 124 | More details in the [README of the demo](/demos/hackathon/README.md). 125 | 126 | ### Run other commands in docker 127 | 128 | It is also possible to open a shell on the docker using the `bash` service: 129 | ``` 130 | docker compose run --rm bash 131 | ``` 132 | 133 | The option `--rm` automatically deletes the container when the command finishes. 134 | 135 | ### Updating 136 | 137 | You can update the ROS packages using: 138 | ``` 139 | ./scripts/pull 140 | ``` 141 | or simply 142 | ``` 143 | vcs pull -nw6 144 | ``` 145 | 146 | If you want to load repositories, switch to the correct branches and update gazebo models, you 147 | can re-run the installation script 148 | ``` 149 | ./scripts/update 150 | ``` 151 | 152 | 153 | ## Architecture of the workspace 154 | 155 | Here is a brief presentation of the main directories: 156 | 157 | * `build` contains the files generated by the compilation step 158 | * `docker` contains the configuration files for docker 159 | * `gazebo` contains the downloaded gazebo models that are too large to be versioned 160 | * `install` contains the shared files and executables of the ROS packages 161 | * `log` contains the logged information of the compilation 162 | * `scripts` contains some useful tools to prepare the workspace 163 | * `src` contains all the cloned ROS packages 164 | 165 | ### How the docker image works 166 | 167 | The
docker image corresponds to an Ubuntu 22.04 image with a complete installation of ROS2 Humble 168 | and all the dependencies of the ROS2 packages of this workspace. 169 | The image is built from the [Dockerfile](docker/Dockerfile) and can be edited to add other Ubuntu or 170 | pip packages. 171 | 172 | When a docker service is started, this workspace is mounted as a volume inside the docker 173 | container and the ROS commands are run using your own Linux user. 174 | This makes using the tools in docker similar to using them directly. 175 | This means that, if you have ROS Humble on your host system, you should be able to directly run ros2 176 | commands without using docker commands. 177 | 178 | ### Organization of the ROS packages 179 | 180 | In the `src` directory, the packages are organized in several sub-folders: 181 | 182 | * `romea_core` contains all packages that are independent of ROS 183 | * `romea_ros2` contains all ROS2 packages (in several sub-folders) 184 | * `third_party` contains packages that are not written by our team 185 | 186 | For more details about these packages, read their README. 187 | 188 | 189 | ## FAQ 190 | 191 | ### The `vcs` command is not found 192 | 193 | If you have installed this tools using `pip` or `pip3` outside a virtual env, then the executable 194 | are located in the `~/.local/bin` directory. 195 | You can make them available by adding this path into your `PATH` environment variable: 196 | ```bash 197 | export PATH="$PATH:$HOME/.local/bin" 198 | ``` 199 | 200 | 201 | ### The service `compile` does not exist 202 | 203 | If you are in a subdirectory that contains a `compose.yaml` file, the `docker compose` command will 204 | load this file instead of the `compose.yaml` at the root of the workspace. 205 | The `compile` service is only defined in the one at the root, so you need to move to the root before 206 | executing `docker compose run --rm compile`. 207 | 208 | ### How to use NVIDIA GPU? 
209 | 210 | You need to install the [NVIDIA container 211 | toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). 212 | 213 | After that, edit the file `compose.yaml` of your demo to replace the service `x11_base` by 214 | `x11_gpu`. 215 | For example, the beginning of `demos/hackathon/compose.yaml` should look like to this: 216 | ```yaml 217 | x-yaml-anchors: 218 | base: &base 219 | extends: 220 | file: ../../docker/common.yaml 221 | service: x11_gpu 222 | ... 223 | ``` 224 | These services are defined in the file `docker/common.yaml` of the workspace and provide all the 225 | required docker options. 226 | The `x11_gpu` service add special options to use the NVIDIA GPUs. 227 | 228 | ### Gazebo is slow 229 | 230 | Gazebo comprises a physics engine (`gzserver` program) and a rendering engine (`gzclient` program). 231 | You can check the rendering performance by looking at the _FPS_ (frame per seconds) value at the 232 | bottom of the gazebo window, and the physics performance by looking at the _real time factor_. 233 | The rendering engine is configured to run at 60 FPS and a real time factor of 1.0, but you may 234 | experience slow down if your computer is not powerful enough. 235 | By default, the docker does not use NVIDIA GPU, but it can significantly improve gazebo performance 236 | to use it. 237 | If you have one, you can refer to [How to use NVIDIA GPU?](#how-to-use-nvidia-gpu). 238 | If you do not have one, you can increase FPS by disabling shadows by following instructions of 239 | [Gazebo is dark](#gazebo-is-dark). 240 | You can also improve the _real time factor_ by increasing `max_step_size` in the `Physics` component 241 | of gazebo, but it will also degrade its behavior. 242 | 243 | ### Gazebo is dark 244 | 245 | When using Gazebo with no GPU, you may experience a graphical bug that seems to cause rendering to 246 | be darker than it should be. 
247 | This is due to the fact that shadow calculation is buggy with Intel graphics chipsets. 248 | You can disable shadow rendering by clicking on `Scene`, then unchecking `shadows`. 249 | If it is already unchecked, check it and uncheck it again (this is another bug). 250 | -------------------------------------------------------------------------------- /doc/robot_configuration.md: -------------------------------------------------------------------------------- 1 | ## Robot configuration 2 | 3 | #### Base configuration: 4 | 5 | For this project, the mobile base selected is the **Adap2e robot** developed by the **TSCF unit of INRAE**. The `base.yaml` file (see below) defines this configuration choice. This file also allows you to specify the robot’s initial position (`simulation.initial_xyz`) and orientation (`simulation.initial_rpy`) in the simulation environment. By default, these parameters are set to place the robot near the start of the demonstration trajectory. 6 | 7 | ```yaml 8 | name: base # name of the base 9 | configuration: 10 | type: adap2e # robot type 11 | model: fat # robot model 12 | records: 13 | joint_states: true # joint_states will be recorded into bag 14 | controller/odom: true # controller/odom will be recorded into bag 15 | controller/odometry: true # controller/odometry will be recorded into bag 16 | controller/kinematic: true # controller/kinematic will be recorded into bag 17 | simulation: 18 | initial_xyz: [107.795, 123.508, 1.15] # initial position (meters) of the robot in simulation world 19 | initial_rpy: [0.014, 0.021, 0.372] # initial orientation (degrees) of the robot in simulation world 20 | ``` 21 | 22 | The mobile base is controlled using `ros2_control`, and several pieces of information are published on four topics: 23 | 24 | - **`base/joint_states`** (`sensor_msgs/JointState`): Provides joint states (position, velocity, effort) for the robot’s actuators.
25 | - **`base/controller/odom`** (`nav_msgs/Odometry`): Outputs the pose calculated via dead reckoning. 26 | - **`base/controller/odometry`** (`romea_mobile_base_msgs/TwoAxleSteeringMeasure`): Provides displacement measures in the control space (linear speed + front & rear steering angles). 27 | - **`base/controller/kinematic`** (`romea_mobile_base_msgs/KinematicMeasure`): Publishes the twist + instantaneous curvature. 28 | 29 | In `base.yaml`, you can specify which topics should be included in the ROS bag during demo recording by setting each item in the `records` section to `true` or `false`. 30 | 31 | 32 | 33 | #### Devices configuration: 34 | 35 | The robot can be equipped with various devices (sensors, implements, robotic arms, etc.). The configuration for these onboard devices is split into two parts: the [individual device configurations](devices_configuration.md) (located in the `devices` directory) and the `devices.yaml` file (see below). This file enables you to easily activate or deactivate devices; each item corresponds to a file in the `devices` directory. For each device, specify the device type (`joystick`, `lidar`, `gps`, `imu`, `camera`, `rgbd_camera`, `stereo_camera`, or `implement`) and whether it is available and in which mode (`none`, `simulation`, `live`, or `all`): 36 | 37 | - **none**: The device is not supported. 38 | - **simulation**: The device is supported only in simulation mode. 39 | - **live**: The device is supported only in live mode. 40 | - **all**: The device is supported in both simulation and live modes. 41 | 42 | To add a new sensor, create a configuration file in the `devices` directory following the naming convention `device_name.device_type.yaml` and add a corresponding entry in `devices.yaml`. 
43 | 44 | ```yaml 45 | remote_controller: 46 | type: joystick 47 | available_mode: all 48 | 49 | lms151: 50 | type: lidar 51 | available_mode: none 52 | ouster: 53 | type: lidar 54 | available_mode: all 55 | septentrio: 56 | type: gps 57 | available_mode: all 58 | xsens: 59 | type: imu 60 | available_mode: all 61 | realsense: 62 | type: rgbd_camera 63 | available_mode: all 64 | 65 | cultivator: 66 | type: implement 67 | available_mode: all 68 | ``` 69 | 70 | You can visualize the position of the devices embedded on the robot using the Rviz visualization software. To do so, simply launch the Rviz Docker service with the following command: 71 | 72 | ```bash 73 | docker compose run rviz 74 | ``` 75 | 76 | This command starts both the simulator and the Rviz software. In Rviz, open the left panel, set the background color to black, expand the **TF** item, and select **show_names** to display the names of the robot's "mechanical" links. These links, represented by red, blue, and green markers, follow the naming convention `robot_device_name_link`. Their positions are specified in each device's individual configuration file, enabling you to adjust device placement on the robot as desired. 77 | 78 | ![rviz](media/rviz.jpg) 79 | 80 | Finally, you can visualize data from the sensors by selecting items in the **sensors** list on the left panel. 81 | 82 | #### Teleop configuration: 83 | 84 | The ADAP2E robot can be controlled using a remote controller via the ROS2 teleop node at `/robot/teleop`. This node is responsible for converting inputs from the remote controller into control messages that are sent to the command multiplexer. The configuration of this teleop node is based on the `teleop.yaml` file (see below). This file allows you to specify the type of messages sent to the command multiplexer along with their priority (set to 100 here). 
Additionally, it defines the maximum allowable command values based on the selected mode: in slow mode, the maximum speed is 2 m/s, and in turbo mode, it is 5 m/s. Since the limits for the rear and front steering angles are unspecified, they are inferred based on the robot's mechanical capabilities. 85 | 86 | ```yaml 87 | cmd_output: 88 | message_type: romea_mobile_base_msgs/TwoAxleSteeringCommand # command type 89 | message_priority: 100 # priority between [0-255] 90 | cmd_range: 91 | maximal_linear_speed: 92 | slow_mode: 2.0 # m/s 93 | turbo_mode: 5.0 # m/s 94 | #maximal_front_steering_angle: # radian 95 | #maximal_rear_steering_angle: # radian 96 | ``` 97 | 98 | The specific mapping of buttons and sticks on the remote controller is shown in the figure below, detailing how each control input corresponds to different actions on the robot. 99 | 100 | ![Joystick Mapping](media/teleop_joystick.jpg) 101 | 102 | ![Base control pipeline](media/base_control_graph.jpg) 103 | 104 | #### Localization configuration: 105 | 106 | The INRAE TSCF unit provides competitors with a localization algorithm based on a Kalman filter that fuses data from the robot's odometry, the Xsens inertial measurement unit (IMU), and the Septentrio GPS receiver. Four ROS2 nodes are launched to achieve this: a core node, `/robot/localisation/localisation_robot_to_world`, which performs data fusion, and three plugin nodes (`/robot/localisation/odo_plugin`, `/robot/localisation/gps_plugin`, and `/robot/localisation/imu_plugin`) that handle sensor data preprocessing (see pipeline below). These nodes can be configured via the `localisation.yaml` file (example configuration shown below). The filter outputs a ROS2 `nav_msgs/Odometry` message on the topic `/robot/localisation/filtered_odom`. 107 | 108 | ```yaml 109 | core: 110 | pkg: romea_robot_to_world_localisation_core 111 | launch: robot_to_world_localisation.launch.py 112 | # configuration: 113 | # maximal_dead_recknoning_travelled_distance: 2.
# maximal distance in dead reckoning before to reset filter 114 | # maximal_dead_recknoning_elapsed_time: 10. # maximal duration in dead reckoning before to reset filter 115 | # publish_rate: 10 # The rate of publishing the filter results (Hz) 116 | # debug: true # Enable debug logs 117 | plugins: 118 | odo: 119 | pkg: romea_localisation_odo_plugin 120 | launch: odo_plugin.launch.py 121 | # configuration: 122 | # controller_topic: kinematic 123 | # restamping: false 124 | imu: 125 | pkg: romea_localisation_imu_plugin 126 | launch: imu_plugin.launch.py 127 | input_sensor_meta_description_filename: xsens 128 | # configuration: 129 | # restamping: false 130 | gps: 131 | pkg: romea_localisation_gps_plugin 132 | launch: gps_plugin.launch.py 133 | input_sensor_meta_description_filename: septentrio 134 | # configuration: 135 | # restamping: false 136 | # minimal_fix_quality: 4 # Minimal fix quality to take into account gps fix into fusion 137 | # minimal_speed_over_ground: 0.5 # Minimal speed to take into account gps heading into fusion 138 | ``` 139 | 140 | 141 | 142 | ![Localisation pipeline](media/localisation_graph.jpg) 143 | 144 | #### Path following configuration: 145 | 146 | The INRAE TSCF unit provides a path following algorithm for participants. This algorithm uses both a reference trajectory and localization data to control the robot. Two ROS 2 nodes are launched to enable this process: the `/robot/path_following/path_matching` node calculates deviations from the trajectory, while the `/robot/path_following/path_following` node computes the commands to send to the robot through the command multiplexer. The path following configuration is specified in the `path_following.yaml` file. 147 | 148 | This configuration file allows you to select the control law for computing commands—either the classic [1] or predictive [2] control laws. It also specifies the type of messages sent to the command multiplexer along with their priority (set to 10). 
This priority is lower than that of teleoperation, meaning that path following messages will be ignored if the robot is controlled via the remote controller. Finally, the file allows you to specify whether path following should start automatically or not. 149 | 150 | ```yaml 151 | sampling_period: 10.0 152 | 153 | # configuration of longitudinal control 154 | longitudinal_control: 155 | minimal_linear_speed: 0.3 # minimal speed during maneuvers 156 | 157 | # configuration for classic and predictive lateral control laws 158 | lateral_control: 159 | selected: classic # selected control law see below 160 | classic: 161 | gains: 162 | front_kd: 0.7 163 | rear_kd: 0.5 164 | predictive: 165 | gains: 166 | front_kd: 0.7 167 | rear_kd: 0.4 168 | prediction: 169 | horizon: 10 170 | a0: 0.1642 171 | a1: 0.1072 172 | b1: 1.0086 173 | b2: -0.2801 174 | 175 | # control setpoint 176 | setpoint: 177 | desired_linear_speed: 2.0 178 | desired_lateral_deviation: 0.0 179 | desired_course_deviation: 0.0 180 | 181 | #configuration for cmd_mux auto connection 182 | cmd_output: 183 | message_type: romea_mobile_base_msgs/TwoAxleSteeringCommand # type of message send to robot 184 | priority: 10 # priority of these message between [0-255], must be lower than teleop 185 | rate: 10. # Rate (Hz) at which these messages are published 186 | 187 | autostart: false # autostart path following if path matching is OK 188 | debug: true # enable debug logs 189 | ``` 190 | 191 | If `autostart` is not selected, path following needs to be started manually using the remote controller. The image below shows the controller mapping: the X button starts path following, and the B button stops it. 
Note that even if path following starts automatically, manual control can always be taken via the remote controller 192 | 193 | ![path_following_joystick](media/path_following_joystick.jpg) 194 | 195 | .![path following pipeline](media/path_following_graph.jpg) 196 | -------------------------------------------------------------------------------- /doc/devices_configuration.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Devices configuration 4 | 5 | #### Remote controller configuration: 6 | 7 | The remote controller can be configured using the `remote_controller.joystick.yaml` file located in the `devices` directory. You can specify the type of controller (e.g., `xbox` or `dualshock4`; other joystick types can be added upon request). Additionally, you can assign a name to the controller, which will also serve as the ROS2 namespace under which the driver will launch. 8 | 9 | To activate the driver, specify the ROS 2 driver type you want to use to retrieve data from the joystick. This includes setting the ROS 2 package, executable, and any desired parameters. For details on driver options based on joystick type, please refer to the documentation for the `romea_joystick_bringup` package. Note that topic remappings for this node are predefined and cannot be changed; the topic provided by the driver is named `joystick/joy`. Finally, you can specify whether or not this topic should be added in ROS bag during the demo recording. 
10 | 11 | ```yaml 12 | name: joystick # name of the joystick 13 | driver: 14 | package: joy # ros2 driver package 15 | executable: joy_node # ros2 node launch 16 | parameters: # parameters of driver node 17 | autorepeat_rate: 10.0 18 | deadzone: 0.1 19 | configuration: 20 | type: xbox # joystick type 21 | records: 22 | joy: true # joy topic will be recorded into bag 23 | ``` 24 | 25 | 26 | 27 | #### Xsens IMU configuration: 28 | 29 | ```yaml 30 | name: imu 31 | driver: 32 | package: xsens_driver #ros2 driver package name 33 | executable: mtnode.py # node to be launched 34 | parameters: # node parameters 35 | device: "/dev/ttyUSB0" 36 | baudrate: 115200 37 | configuration: 38 | type: xsens # imu type 39 | model: mti #imu model 40 | rate: 100 # rate Hz 41 | geometry: 42 | parent_link: "base_link" # name of parent link where is located the imu sensor 43 | xyz: [0.0, 0.0, 0.7] # its position in meters 44 | rpy: [0.0, 0.0, 0.0] # its orientation in degrees 45 | records: 46 | data: true # data topic will be recorded in bag 47 | ``` 48 | 49 | 50 | 51 | #### Septentrio GPS configuration: 52 | 53 | An AsteRx GPS receiver from Septentrio is mounted on the rear top of the robot. To configure this setup, you’ll need to edit the `septentrio.gps.yaml` file located in the `devices` directory (see example below). 54 | 55 | In this file, it is possible to define the gps `type` and `model`, specified here as `septentrio` and `asterx`, along with its configuration parameters. For a list of supported types and models, please refer to the `romea_gps_bringup` package documentation. Two key settings are required for gps receiver configuration: `rate` and `dual_antenna` (set here to 10 Hz and true, respectively). These parameters may be optional if the sensor supports only one fixed configuration. For details on selectable parameters, please consult the specification file for each sensor in the `config` directory of the `romea_gps_description` package.
You can also specify the gps receiver antenna's position (`geometry.xyz`) on the mobile base. This positioning is relative to a "parent link" on the robot’s structure, commonly `base_link` or `base_footprint`. 56 | 57 | Additionally, you can assign a name for the gps receiver, which will also serve as the ROS 2 namespace under which the driver launches. To activate the driver, specify the ROS 2 driver package, executable, and any required parameters, note that certain parameters are derived automatically based on the selected configuration. For specific driver options based on gps receiver type, please refer to the `romea_gps_bringup` package documentation. Note that topic remappings for this node are predefined and cannot be modified; the driver publishes on the fixed topic `gps/name_sentence`, `gps/fix` and`gps/vel` , which is also the topics published by the simulator. Lastly, you can specify whether these topics should be recorded in the ROS bag during the demo. 58 | 59 | ```yaml 60 | name: gps # name of the lidar 61 | driver: 62 | package: romea_gps_driver #ros2 driver package name 63 | executable: tcp_client_node # node to be launched 64 | parameters: # node parameters 65 | ip: 192.168.0.50 66 | nmea_port: 1001 67 | rtcm_port: 1002 68 | ntrip: 69 | package: ntrip_client #ros2 driver package name 70 | executable: ntrip_ros.py # node to be launched 71 | parameters: # node parameters 72 | host: caster.centipede.fr 73 | port: 2101 74 | username: centipede 75 | password: centipede 76 | mountpoint: MAGC 77 | configuration: 78 | type: drotek # gps type 79 | model: f9p # gps model 80 | rate: 10 # nmea rate Hz 81 | dual_antenna: true # specify if gps use dual antenna 82 | geometry: 83 | parent_link: base_link # name of parent link where is located the gps sensor 84 | xyz: [0.0, 0.3, 1.5] # its position in meters 85 | records: 86 | nmea_sentence: true # nmea sentence topic will be recorded in bag 87 | fix: false # gps_fix topic will not be recorded in bag 88 | vel: 
false # vel topic will not be recorded in bag 89 | ``` 90 | 91 | 92 | 93 | #### LMS151 lidar configuration: 94 | 95 | A LMS151 lidar from Sick is mounted at the front of the robot. To configure this setup, you’ll need to edit the `lms151.lidar.yaml` file located in the `devices` directory (see example below). 96 | 97 | In this file, it is possible to define the lidar `type` and `model`, specified here as `sick` and `lms151`, along with its configuration parameters. For a list of supported types and models, please refer to the `romea_lidar_bringup` package documentation. Two key settings are required for lidar configuration: `rate` and `resolution` (set here to 50 Hz and 0.5°, respectively). These parameters may be optional if the sensor supports only one fixed configuration. For details on selectable parameters, please consult the specification file for each sensor in the `config` directory of the `romea_lidar_description` package. You can also specify the lidar's position (`geometry.xyz`) and orientation (`geometry.rpy`) on the mobile base. This positioning is relative to a "parent link" on the robot’s structure, commonly `base_link` or `base_footprint`. 98 | 99 | Additionally, you can assign a name for the lidar, which will also serve as the ROS 2 namespace under which the driver launches. To activate the driver, specify the ROS 2 driver package, executable, and any required parameters; note that certain parameters are derived automatically based on the selected configuration. For specific driver options based on lidar type, please refer to the `romea_lidar_bringup` package documentation. Note that topic remappings for this node are predefined and cannot be modified; the driver publishes on the fixed topic `lidar2d/scan`, which is also the topic published by the simulator. Lastly, you can specify whether the `scan` topic should be recorded in the ROS bag during the demo.
100 | 101 | ```yaml 102 | name: lidar2d # name of the lidar 103 | driver: 104 | package: sick_scan #ros2 driver package name 105 | executable: sick_generic_caller # node to be launched 106 | parameters: # node parameters 107 | hostname: 192.168.1.112 #device ip 108 | port: 2112 #communication port 109 | configuration: 110 | type: sick # lidar type 111 | model: lms151 # lidar model 112 | rate: 50 # hz (optional according lidar model) 113 | resolution: 0.5 # deg (optional according lidar model) 114 | geometry: 115 | parent_link: base_link # name of parent link where is located the LIDAR senor 116 | xyz: [2.02, 0.0, 0.34] # its position in meters 117 | rpy: [0.0, 0.0, 0.0] # its orienation in degrees 118 | records: 119 | scan: true # scan topic will be recorded in bag 120 | ``` 121 | 122 | 123 | 124 | #### Ouster lidar configuration: 125 | 126 | An OS1 lidar from Ouster is mounted on the top of the robot. To configure this setup, you’ll need to edit the `ouster.lidar.yaml` file located in the `devices` directory (see example below). 127 | 128 | In this file, it is possible to define the lidar `type` and `model`, specified here as `ouster` and `os1_32`, along with its configuration parameters. For a list of supported types and models, please refer to the `romea_lidar_bringup` package documentation. Two key settings are required for lidar configuration: `rate` and `resolution` (set here to 10 Hz and 0.17578125°, respectively). These parameters may be optional if the sensor supports only one fixed configuration. For details on selectable parameters, please consult the specification file for each sensor in the `config` directory of the `romea_lidar_description` package. You can also specify the lidar's position (`geometry.xyz`) and orientation (`geometry.rpy`) on the mobile base. This positioning is relative to a "parent link" on the robot’s structure, commonly `base_link` or `base_footprint`. 
129 | 130 | Additionally, you can assign a name for the lidar, which will also serve as the ROS 2 namespace under which the driver launches. To activate the driver, specify the ROS 2 driver package, executable, and any required parameters, note that certain parameters are derived automatically based on the selected configuration. For specific driver options based on lidar type, please refer to the `romea_lidar_bringup` package documentation. Note that topic remappings for this node are predefined and cannot be modified; the driver publishes on the fixed topic `lidar2d/cloud` , which is also the topic published by the simulator. Lastly, you can specify whether the `cloud` topic should be recorded in the ROS bag during the demo. 131 | 132 | ```yaml 133 | name: lidar # name of the lidar 134 | driver: 135 | package: ouster_ros #ros2 driver package name 136 | executable: os_driver # node to be launched 137 | parameters: # node parameters 138 | sensor_hostname: '' #device ip automatic detection 139 | configuration: 140 | type: ouster # lidar type 141 | model: os1_32 # lidar model 142 | rate: 10 # hz (optional according lidar model) 143 | resolution: 0.17578125 # deg (optional according lidar model) 144 | geometry: 145 | parent_link: base_link # name of parent link where is located the LIDAR sensor 146 | xyz: [1.2, 0.0, 1.1] # its position in meters 147 | rpy: [0.0, 0.0, 0.0] # its orienation in degrees 148 | records: 149 | cloud: true # cloud topic will be recorded in bag 150 | ``` 151 | 152 | 153 | 154 | #### Robot view camera configuration: 155 | 156 | A RGB camera has been set above the robot to provide a bird’s-eye view, making it easier to monitor the robot's activities. To configure this setup, edit the `robot_view.camera.yaml` file located in the `devices` directory, as shown in the example below. 
157 | 158 | In this configuration file, you can define the camera’s `type` and `model`, here specified as `axis` and `p1346`, along with essential configuration parameters. For a full list of supported types and models, refer to the `romea_camera_bringup` package documentation. Depending on the camera model, several parameters may need to be set, with primary ones including `resolution`, `frame_rate`, `horizontal_fov`, `vertical_fov`, and `video_format`. These parameters may be optional if the sensor only supports a fixed configuration or if default values are available. For details on selectable parameter, please consult the specification files for each camera in the `config` directory of the `romea_camera_description` package. You can also specify the camera's physical location on the robot by setting its position (`geometry.xyz`) and orientation (`geometry.rpy`) relative to a "parent link" in the robot’s frame, commonly `base_link` or `base_footprint`. 159 | 160 | Additionally, you may assign a unique name for the camera, which also serves as the ROS 2 namespace under which the driver will be launched. To activate the driver, specify the ROS 2 driver package, executable, and any required parameters. Certain parameters may automatically be derived from the selected configuration. For specific driver options based on camera type, please refer to the `romea_camera_bringup` package documentation. Note that topic remappings for this node are predefined and cannot be modified. The driver publishes data on the fixed topics `robot_view/image_raw` and `robot_view/camera_info`, which are also used by the simulator. Finally, you can specify whether these topics should be recorded in the ROS bag during the demo. 
161 | 162 | ```yaml 163 | name: robot_view # name of the camera 164 | driver: 165 | package: usb_cam # driver ros2 package 166 | executable: usb_cam_node_exe # node to be launch 167 | parameters: # parameter of driver node 168 | video_device: /dev/video0 169 | configuration: 170 | type: axis # type of camera 171 | model: p146 # model of camera 172 | resolution: 1280x720 # resolution of image provided by camera (optional) default is 1280x720 173 | # horizontal_fov: 27 # horizontal field of view in degree (optional) default is 27 174 | # frame_rate: 30 # frame rate in hz (optional) depend of the selected resolution 175 | # video_format: h264 # output video codec (optional), default h264 176 | geometry: 177 | parent_link: base_link # name of parent link where is located the camera sensor 178 | xyz: [-4.5, 0.0, 2.7] # its position in meters 179 | rpy: [0.0, 22.0, 0.0] # its orienation in degrees 180 | records: 181 | camera_info: false # camera info topic will not be recorded in bag 182 | image_raw: false # image_raw topic will not be recorded in bag 183 | 184 | ``` 185 | 186 | 187 | 188 | #### Realsense camera configuration: 189 | 190 | An RGB-D camera, such as the Realsense D435, can be mounted on the mobile base to capture both color and depth information. In this example, a D435 camera is mounted on top of the robot. To configure this setup, edit the `realsense.rgbd_camera.yaml` file located in the `devices` directory, as shown below. 191 | 192 | In this configuration file, you can define the camera’s `type` and `model`, specified here as `realsense` and `d435`, along with relevant configuration parameters. For a list of supported types and models, please refer to the `romea_rgbd_camera_bringup` package documentation. An RGB-D camera is composed of multiple sensors: an RGB camera, an infrared camera, and a depth camera. 
Each of these sensors can be configured individually by specifying settings like `resolution`, `frame_rate`, `horizontal_fov`, and `vertical_fov`, etc.. . These parameters may be optional if the sensor only supports a fixed configuration or if default values are available. In this example, we use the same resolution for all sensors and leave the other parameters at their default values. For details on selectable parameters, refer to the specification files for each camera in the `config` directory of the `romea_rgbd_camera_description` package. You can also specify the camera's position (`geometry.xyz`) and orientation (`geometry.rpy`) on the mobile base. This positioning is relative to a "parent link" in the robot’s structure, commonly `base_link` or `base_footprint`. 193 | 194 | Additionally, you may assign a unique name for the camera, which will also serve as the ROS 2 namespace under which the driver will be launched. To activate the driver, specify the ROS 2 driver package, executable, and any necessary parameters. Certain parameters will automatically be derived from the selected configuration. For specific driver options based on camera type, refer to the `romea_camera_bringup` package documentation. Note that topic remappings for this node are predefined and cannot be modified. By default, the driver publishes on fixed topics such as `camera_info` and `image_raw` for each camera sensor. Additionally, the driver publishes a colorized point cloud topic `/rgbd_camera/point_cloud/points` generated from RGB and depth images. These topics are also used by the simulator. Finally, you can choose whether these topics should be recorded in the ROS bag during the demo. 
195 | 196 | ```yaml 197 | name: "rgbd_camera" 198 | driver: 199 | package: realsense2_camera # driver ros2 package 200 | executable: realsense2_camera_node # node to be launch 201 | parameters: # parameter of driver node 202 | enable_color: true 203 | spatial_filter.enable: true 204 | temporal_filter.enable: true 205 | configuration: 206 | type: realsense # type of camera 207 | model: d435 # model of camera 208 | rgb_camera: 209 | resolution: 1280x720 #resolution of image provided by RGB camera (optional) default is 1280x720 210 | infrared_camera: 211 | resolution: 1280x720: #resolution of image provided by infrared camera (optional) default is 1280x720 212 | depth_camera: 213 | resolution: 1280x720: #resolution of image provided by deapth camera (optional) default is 1280x720 214 | geometry: 215 | parent_link: "base_link" # name of parent link where is located the camera sensor 216 | xyz: [1.42, 0.0, 1.14] # its position in meters 217 | rpy: [0.0, 20.0, 0.0] # its orienation in degrees 218 | records: 219 | rgb/camera_info: false # RGB camera info topic will not be recorded in bag 220 | rgb/image_raw: true # RGB raw image will be recorded in bag 221 | depth/camera_info: false # depth camera info topic will not be recorded in bag 222 | depth/image_raw: true # depth raw_image topic will be recorded in bag 223 | point_cloud/points: true # point cloud topic will be recorded in bag 224 | 225 | ``` 226 | 227 | 228 | 229 | -------------------------------------------------------------------------------- /demos/hackathon/config/evaluation.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz_common/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /evaluation1 10 | Splitter Ratio: 0.49872124195098877 11 | Tree Height: 732 12 | - Class: rviz_common/Selection 13 | Name: Selection 14 | - Class: rviz_common/Tool Properties 15 | 
Expanded: 16 | - /2D Goal Pose1 17 | - /Publish Point1 18 | Name: Tool Properties 19 | Splitter Ratio: 0.5886790156364441 20 | - Class: rviz_common/Views 21 | Expanded: 22 | - /Current View1 23 | - /robot orbit1/Focal Point1 24 | Name: Views 25 | Splitter Ratio: 0.5 26 | - Class: rviz_common/Time 27 | Experimental: false 28 | Name: Time 29 | SyncMode: 0 30 | SyncSource: lidar 31 | Visualization Manager: 32 | Class: "" 33 | Displays: 34 | - Alpha: 0.10000000149011612 35 | Cell Size: 1 36 | Class: rviz_default_plugins/Grid 37 | Color: 160; 160; 164 38 | Enabled: true 39 | Line Style: 40 | Line Width: 0.029999999329447746 41 | Value: Lines 42 | Name: Grid 43 | Normal Cell Count: 0 44 | Offset: 45 | X: 0 46 | Y: 0 47 | Z: 0 48 | Plane: XY 49 | Plane Cell Count: 1000 50 | Reference Frame: 51 | Value: true 52 | - Class: rviz_default_plugins/TF 53 | Enabled: true 54 | Frame Timeout: 15 55 | Frames: 56 | All Enabled: false 57 | map: 58 | Value: true 59 | robot_base_footprint: 60 | Value: true 61 | robot_base_link: 62 | Value: true 63 | robot_front_left_wheel_spinning_link: 64 | Value: true 65 | robot_front_left_wheel_steering_link: 66 | Value: true 67 | robot_front_right_wheel_spinning_link: 68 | Value: true 69 | robot_front_right_wheel_steering_link: 70 | Value: true 71 | robot_gps_link: 72 | Value: true 73 | robot_implement_hitch_link: 74 | Value: true 75 | robot_implement_link: 76 | Value: true 77 | robot_imu_link: 78 | Value: true 79 | robot_inertial_link: 80 | Value: true 81 | robot_lidar_link: 82 | Value: true 83 | robot_rear_left_wheel_spinning_link: 84 | Value: true 85 | robot_rear_left_wheel_steering_link: 86 | Value: true 87 | robot_rear_right_wheel_spinning_link: 88 | Value: true 89 | robot_rear_right_wheel_steering_link: 90 | Value: true 91 | robot_rgbd_camera_left_infrared_camera_link: 92 | Value: false 93 | robot_rgbd_camera_left_infrared_camera_optical_frame: 94 | Value: false 95 | robot_rgbd_camera_link: 96 | Value: true 97 | 
robot_rgbd_camera_rgb_camera_link: 98 | Value: false 99 | robot_rgbd_camera_rgb_camera_optical_frame: 100 | Value: false 101 | robot_rgbd_camera_right_infrared_camera_link: 102 | Value: false 103 | robot_rgbd_camera_right_infrared_camera_optical_frame: 104 | Value: false 105 | robot_robot_view_link: 106 | Value: true 107 | robot_robot_view_optical_frame: 108 | Value: false 109 | Marker Scale: 1 110 | Name: TF 111 | Show Arrows: false 112 | Show Axes: true 113 | Show Names: false 114 | Tree: 115 | map: 116 | {} 117 | robot_base_footprint: 118 | robot_base_link: 119 | robot_front_left_wheel_steering_link: 120 | robot_front_left_wheel_spinning_link: 121 | {} 122 | robot_front_right_wheel_steering_link: 123 | robot_front_right_wheel_spinning_link: 124 | {} 125 | robot_gps_link: 126 | {} 127 | robot_implement_link: 128 | robot_implement_hitch_link: 129 | {} 130 | robot_imu_link: 131 | {} 132 | robot_inertial_link: 133 | {} 134 | robot_lidar_link: 135 | {} 136 | robot_rear_left_wheel_steering_link: 137 | robot_rear_left_wheel_spinning_link: 138 | {} 139 | robot_rear_right_wheel_steering_link: 140 | robot_rear_right_wheel_spinning_link: 141 | {} 142 | robot_rgbd_camera_link: 143 | robot_rgbd_camera_left_infrared_camera_link: 144 | robot_rgbd_camera_left_infrared_camera_optical_frame: 145 | {} 146 | robot_rgbd_camera_rgb_camera_link: 147 | robot_rgbd_camera_rgb_camera_optical_frame: 148 | {} 149 | robot_rgbd_camera_right_infrared_camera_link: 150 | robot_rgbd_camera_right_infrared_camera_optical_frame: 151 | {} 152 | robot_robot_view_link: 153 | robot_robot_view_optical_frame: 154 | {} 155 | Update Interval: 0 156 | Value: true 157 | - Alpha: 0.4000000059604645 158 | Class: rviz_default_plugins/RobotModel 159 | Collision Enabled: false 160 | Description File: "" 161 | Description Source: Topic 162 | Description Topic: 163 | Depth: 5 164 | Durability Policy: Volatile 165 | History Policy: Keep Last 166 | Reliability Policy: Reliable 167 | Value: /robot/robot_description 168 
| Enabled: true 169 | Links: 170 | All Links Enabled: true 171 | Expand Joint Details: false 172 | Expand Link Details: false 173 | Expand Tree: false 174 | Link Tree Style: Links in Alphabetic Order 175 | robot_base_footprint: 176 | Alpha: 1 177 | Show Axes: false 178 | Show Trail: false 179 | robot_base_link: 180 | Alpha: 1 181 | Show Axes: false 182 | Show Trail: false 183 | Value: true 184 | robot_front_left_wheel_spinning_link: 185 | Alpha: 1 186 | Show Axes: false 187 | Show Trail: false 188 | Value: true 189 | robot_front_left_wheel_steering_link: 190 | Alpha: 1 191 | Show Axes: false 192 | Show Trail: false 193 | Value: true 194 | robot_front_right_wheel_spinning_link: 195 | Alpha: 1 196 | Show Axes: false 197 | Show Trail: false 198 | Value: true 199 | robot_front_right_wheel_steering_link: 200 | Alpha: 1 201 | Show Axes: false 202 | Show Trail: false 203 | Value: true 204 | robot_gps_link: 205 | Alpha: 1 206 | Show Axes: false 207 | Show Trail: false 208 | Value: true 209 | robot_implement_hitch_link: 210 | Alpha: 1 211 | Show Axes: false 212 | Show Trail: false 213 | Value: true 214 | robot_implement_link: 215 | Alpha: 1 216 | Show Axes: false 217 | Show Trail: false 218 | robot_imu_link: 219 | Alpha: 1 220 | Show Axes: false 221 | Show Trail: false 222 | Value: true 223 | robot_inertial_link: 224 | Alpha: 1 225 | Show Axes: false 226 | Show Trail: false 227 | robot_lidar_link: 228 | Alpha: 1 229 | Show Axes: false 230 | Show Trail: false 231 | Value: true 232 | robot_rear_left_wheel_spinning_link: 233 | Alpha: 1 234 | Show Axes: false 235 | Show Trail: false 236 | Value: true 237 | robot_rear_left_wheel_steering_link: 238 | Alpha: 1 239 | Show Axes: false 240 | Show Trail: false 241 | Value: true 242 | robot_rear_right_wheel_spinning_link: 243 | Alpha: 1 244 | Show Axes: false 245 | Show Trail: false 246 | Value: true 247 | robot_rear_right_wheel_steering_link: 248 | Alpha: 1 249 | Show Axes: false 250 | Show Trail: false 251 | Value: true 252 | 
robot_rgbd_camera_left_infrared_camera_link: 253 | Alpha: 1 254 | Show Axes: false 255 | Show Trail: false 256 | robot_rgbd_camera_left_infrared_camera_optical_frame: 257 | Alpha: 1 258 | Show Axes: false 259 | Show Trail: false 260 | robot_rgbd_camera_link: 261 | Alpha: 1 262 | Show Axes: false 263 | Show Trail: false 264 | Value: true 265 | robot_rgbd_camera_rgb_camera_link: 266 | Alpha: 1 267 | Show Axes: false 268 | Show Trail: false 269 | robot_rgbd_camera_rgb_camera_optical_frame: 270 | Alpha: 1 271 | Show Axes: false 272 | Show Trail: false 273 | robot_rgbd_camera_right_infrared_camera_link: 274 | Alpha: 1 275 | Show Axes: false 276 | Show Trail: false 277 | robot_rgbd_camera_right_infrared_camera_optical_frame: 278 | Alpha: 1 279 | Show Axes: false 280 | Show Trail: false 281 | robot_robot_view_link: 282 | Alpha: 1 283 | Show Axes: false 284 | Show Trail: false 285 | Value: true 286 | robot_robot_view_optical_frame: 287 | Alpha: 1 288 | Show Axes: false 289 | Show Trail: false 290 | Mass Properties: 291 | Inertia: false 292 | Mass: false 293 | Name: RobotModel 294 | TF Prefix: "" 295 | Update Interval: 0 296 | Value: true 297 | Visual Enabled: true 298 | - Class: rviz_common/Group 299 | Displays: 300 | - Class: rviz_default_plugins/MarkerArray 301 | Enabled: true 302 | Name: crops 303 | Namespaces: 304 | crops: true 305 | crushed_crops: true 306 | Topic: 307 | Depth: 5 308 | Durability Policy: Volatile 309 | History Policy: Keep Last 310 | Reliability Policy: Reliable 311 | Value: /evaluation/evaluation/crops_marker 312 | Value: true 313 | - Class: rviz_default_plugins/MarkerArray 314 | Enabled: true 315 | Name: path 316 | Namespaces: 317 | curve: true 318 | path: true 319 | Topic: 320 | Depth: 1 321 | Durability Policy: Volatile 322 | History Policy: Keep Last 323 | Reliability Policy: Reliable 324 | Value: /robot/path_matching/markers 325 | Value: true 326 | - Class: rviz_default_plugins/MarkerArray 327 | Enabled: true 328 | Name: geofence shape 329 | 
Namespaces: 330 | geofence_markers: true 331 | Topic: 332 | Depth: 5 333 | Durability Policy: Volatile 334 | History Policy: Keep Last 335 | Reliability Policy: Reliable 336 | Value: /evaluation/evaluation/geofence_markers 337 | Value: true 338 | - Align Bottom: false 339 | Background Alpha: 0.800000011920929 340 | Background Color: 0; 0; 0 341 | Class: rviz_2d_overlay_plugins/TextOverlay 342 | Enabled: false 343 | Foreground Alpha: 0.800000011920929 344 | Foreground Color: 25; 255; 240 345 | Invert Shadow: false 346 | Name: info overlay 347 | Overtake BG Color Properties: false 348 | Overtake FG Color Properties: false 349 | Overtake Position Properties: false 350 | Topic: 351 | Depth: 5 352 | Durability Policy: Volatile 353 | History Policy: Keep Last 354 | Reliability Policy: Reliable 355 | Value: /evaluation/evaluation/info_overlay 356 | Value: false 357 | font: DejaVu Sans Mono 358 | height: 128 359 | hor_alignment: left 360 | hor_dist: 0 361 | line width: 2 362 | text size: 12 363 | ver_alignment: top 364 | ver_dist: 0 365 | width: 128 366 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 367 | Enabled: true 368 | Name: p1 cover. 369 | Topic: 370 | Depth: 5 371 | Durability Policy: Volatile 372 | History Policy: Keep Last 373 | Reliability Policy: Reliable 374 | Value: /evaluation/mixed_field/coverage 375 | Value: true 376 | auto color change: true 377 | background color: 0; 0; 0 378 | backround alpha: 0 379 | clockwise rotate direction: true 380 | foreground alpha: 0.699999988079071 381 | foreground alpha 2: 0.4000000059604645 382 | foreground color: 226; 60; 19 383 | left: 10 384 | max color: 53; 222; 15 385 | max color change threthold: 0.800000011920929 386 | max value: 100 387 | med color: 220; 191; 28 388 | med color change threthold: 0.4000000059604645 389 | min value: 0 390 | show caption: true 391 | size: 100 392 | text size: 12 393 | top: 10 394 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 395 | Enabled: true 396 | Name: p1 crush. 
397 | Topic: 398 | Depth: 5 399 | Durability Policy: Volatile 400 | History Policy: Keep Last 401 | Reliability Policy: Reliable 402 | Value: /evaluation/mixed_field/crushed 403 | Value: true 404 | auto color change: true 405 | background color: 0; 0; 0 406 | backround alpha: 0 407 | clockwise rotate direction: true 408 | foreground alpha: 0.699999988079071 409 | foreground alpha 2: 0.4000000059604645 410 | foreground color: 53; 222; 15 411 | left: 120 412 | max color: 226; 60; 19 413 | max color change threthold: 0.05000000074505806 414 | max value: 100 415 | med color: 220; 191; 28 416 | med color change threthold: 0.029999999329447746 417 | min value: 0 418 | show caption: true 419 | size: 100 420 | text size: 12 421 | top: 10 422 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 423 | Enabled: true 424 | Name: p2 cover. 425 | Topic: 426 | Depth: 5 427 | Durability Policy: Volatile 428 | History Policy: Keep Last 429 | Reliability Policy: Reliable 430 | Value: /evaluation/sloping_field/coverage 431 | Value: true 432 | auto color change: true 433 | background color: 0; 0; 0 434 | backround alpha: 0 435 | clockwise rotate direction: true 436 | foreground alpha: 0.699999988079071 437 | foreground alpha 2: 0.4000000059604645 438 | foreground color: 226; 60; 19 439 | left: 10 440 | max color: 53; 222; 15 441 | max color change threthold: 0.800000011920929 442 | max value: 100 443 | med color: 220; 191; 28 444 | med color change threthold: 0.4000000059604645 445 | min value: 0 446 | show caption: true 447 | size: 100 448 | text size: 12 449 | top: 140 450 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 451 | Enabled: true 452 | Name: p2 crush. 
453 | Topic: 454 | Depth: 5 455 | Durability Policy: Volatile 456 | History Policy: Keep Last 457 | Reliability Policy: Reliable 458 | Value: /evaluation/sloping_field/crushed 459 | Value: true 460 | auto color change: true 461 | background color: 0; 0; 0 462 | backround alpha: 0 463 | clockwise rotate direction: true 464 | foreground alpha: 0.699999988079071 465 | foreground alpha 2: 0.4000000059604645 466 | foreground color: 53; 222; 15 467 | left: 120 468 | max color: 226; 60; 19 469 | max color change threthold: 0.05000000074505806 470 | max value: 100 471 | med color: 220; 191; 28 472 | med color change threthold: 0.029999999329447746 473 | min value: 0 474 | show caption: true 475 | size: 100 476 | text size: 12 477 | top: 140 478 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 479 | Enabled: true 480 | Name: collisions 481 | Topic: 482 | Depth: 5 483 | Durability Policy: Volatile 484 | History Policy: Keep Last 485 | Reliability Policy: Reliable 486 | Value: /evaluation/evaluation/collisions_count 487 | Value: true 488 | auto color change: false 489 | background color: 0; 0; 0 490 | backround alpha: 0 491 | clockwise rotate direction: false 492 | foreground alpha: 0.699999988079071 493 | foreground alpha 2: 0.4000000059604645 494 | foreground color: 190; 75; 239 495 | left: 260 496 | max color: 255; 0; 0 497 | max color change threthold: 0 498 | max value: 1000 499 | med color: 255; 0; 0 500 | med color change threthold: 0 501 | min value: 0 502 | show caption: true 503 | size: 100 504 | text size: 12 505 | top: 10 506 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 507 | Enabled: true 508 | Name: impl. 
down 509 | Topic: 510 | Depth: 5 511 | Durability Policy: Volatile 512 | History Policy: Keep Last 513 | Reliability Policy: Reliable 514 | Value: /evaluation/field_coverage/implement_down_outside 515 | Value: true 516 | auto color change: false 517 | background color: 0; 0; 0 518 | backround alpha: 0 519 | clockwise rotate direction: false 520 | foreground alpha: 0.699999988079071 521 | foreground alpha 2: 0.4000000059604645 522 | foreground color: 190; 75; 239 523 | left: 370 524 | max color: 255; 0; 0 525 | max color change threthold: 0 526 | max value: 1 527 | med color: 255; 0; 0 528 | med color change threthold: 0 529 | min value: 0 530 | show caption: true 531 | size: 100 532 | text size: 12 533 | top: 10 534 | - Class: rviz_2d_overlay_plugins/PieChartOverlay 535 | Enabled: true 536 | Name: geofencing 537 | Topic: 538 | Depth: 5 539 | Durability Policy: Volatile 540 | History Policy: Keep Last 541 | Reliability Policy: Reliable 542 | Value: /evaluation/evaluation/safe_zone_exits_count 543 | Value: true 544 | auto color change: false 545 | background color: 0; 0; 0 546 | backround alpha: 0 547 | clockwise rotate direction: false 548 | foreground alpha: 0.699999988079071 549 | foreground alpha 2: 0.4000000059604645 550 | foreground color: 190; 75; 239 551 | left: 480 552 | max color: 255; 0; 0 553 | max color change threthold: 0 554 | max value: 1 555 | med color: 255; 0; 0 556 | med color change threthold: 0 557 | min value: 0 558 | show caption: true 559 | size: 100 560 | text size: 12 561 | top: 10 562 | Enabled: true 563 | Name: evaluation 564 | - Class: rviz_common/Group 565 | Displays: 566 | - Alpha: 0.20000000298023224 567 | Autocompute Intensity Bounds: true 568 | Autocompute Value Bounds: 569 | Max Value: 10 570 | Min Value: -10 571 | Value: true 572 | Axis: Z 573 | Channel Name: intensity 574 | Class: rviz_default_plugins/PointCloud2 575 | Color: 63; 63; 63 576 | Color Transformer: FlatColor 577 | Decay Time: 0 578 | Enabled: true 579 | Invert 
Rainbow: false 580 | Max Color: 255; 255; 255 581 | Max Intensity: 1 582 | Min Color: 0; 0; 0 583 | Min Intensity: 0 584 | Name: lidar 585 | Position Transformer: XYZ 586 | Selectable: true 587 | Size (Pixels): 3 588 | Size (m): 0.009999999776482582 589 | Style: Points 590 | Topic: 591 | Depth: 5 592 | Durability Policy: Volatile 593 | Filter size: 10 594 | History Policy: Keep Last 595 | Reliability Policy: Reliable 596 | Value: /robot/lidar/points 597 | Use Fixed Frame: true 598 | Use rainbow: true 599 | Value: true 600 | - Class: rviz_default_plugins/Image 601 | Enabled: false 602 | Max Value: 1 603 | Median window: 5 604 | Min Value: 0 605 | Name: D435 depth Image 606 | Normalize Range: true 607 | Topic: 608 | Depth: 5 609 | Durability Policy: Volatile 610 | History Policy: Keep Last 611 | Reliability Policy: Reliable 612 | Value: /robot/rgbd_camera/depth/image_raw 613 | Value: false 614 | - Class: rviz_default_plugins/Image 615 | Enabled: false 616 | Max Value: 1 617 | Median window: 5 618 | Min Value: 0 619 | Name: D435 RGB image 620 | Normalize Range: true 621 | Topic: 622 | Depth: 5 623 | Durability Policy: Volatile 624 | History Policy: Keep Last 625 | Reliability Policy: Reliable 626 | Value: /robot/rgbd_camera/rgb/image_raw 627 | Value: false 628 | Enabled: true 629 | Name: sensors 630 | Enabled: true 631 | Global Options: 632 | Background Color: 255; 255; 255 633 | Fixed Frame: robot_base_footprint 634 | Frame Rate: 30 635 | Name: root 636 | Tools: 637 | - Class: rviz_default_plugins/Interact 638 | Hide Inactive Objects: true 639 | - Class: rviz_default_plugins/MoveCamera 640 | - Class: rviz_default_plugins/Select 641 | - Class: rviz_default_plugins/FocusCamera 642 | - Class: rviz_default_plugins/Measure 643 | Line color: 128; 128; 0 644 | - Class: rviz_default_plugins/SetInitialPose 645 | Covariance x: 0.25 646 | Covariance y: 0.25 647 | Covariance yaw: 0.06853891909122467 648 | Topic: 649 | Depth: 5 650 | Durability Policy: Volatile 651 | History 
Policy: Keep Last 652 | Reliability Policy: Reliable 653 | Value: /initialpose 654 | - Class: rviz_default_plugins/SetGoal 655 | Topic: 656 | Depth: 5 657 | Durability Policy: Volatile 658 | History Policy: Keep Last 659 | Reliability Policy: Reliable 660 | Value: /goal_pose 661 | - Class: rviz_default_plugins/PublishPoint 662 | Single click: true 663 | Topic: 664 | Depth: 5 665 | Durability Policy: Volatile 666 | History Policy: Keep Last 667 | Reliability Policy: Reliable 668 | Value: /clicked_point 669 | Transformation: 670 | Current: 671 | Class: rviz_default_plugins/TF 672 | Value: true 673 | Views: 674 | Current: 675 | Class: rviz_default_plugins/ThirdPersonFollower 676 | Distance: 15.755678176879883 677 | Enable Stereo Rendering: 678 | Stereo Eye Separation: 0.05999999865889549 679 | Stereo Focal Distance: 1 680 | Swap Stereo Eyes: false 681 | Value: false 682 | Focal Point: 683 | X: -0.6902098655700684 684 | Y: 0.1107020378112793 685 | Z: -1.0967254638671875e-05 686 | Focal Shape Fixed Size: true 687 | Focal Shape Size: 0.05000000074505806 688 | Invert Z Axis: false 689 | Name: Current View 690 | Near Clip Distance: 0.009999999776482582 691 | Pitch: 1.5697963237762451 692 | Target Frame: robot_base_footprint 693 | Value: ThirdPersonFollower (rviz_default_plugins) 694 | Yaw: 3.1423871517181396 695 | Saved: 696 | - Class: rviz_default_plugins/Orbit 697 | Distance: 16.18014144897461 698 | Enable Stereo Rendering: 699 | Stereo Eye Separation: 0.05999999865889549 700 | Stereo Focal Distance: 1 701 | Swap Stereo Eyes: false 702 | Value: false 703 | Focal Point: 704 | X: 0 705 | Y: 0 706 | Z: 0 707 | Focal Shape Fixed Size: true 708 | Focal Shape Size: 0.05000000074505806 709 | Invert Z Axis: false 710 | Name: robot orbit 711 | Near Clip Distance: 0.009999999776482582 712 | Pitch: 0.8297972083091736 713 | Target Frame: robot_base_footprint 714 | Value: Orbit (rviz_default_plugins) 715 | Yaw: 0.0892154648900032 716 | - Angle: 0 717 | Class: 
rviz_default_plugins/TopDownOrtho 718 | Enable Stereo Rendering: 719 | Stereo Eye Separation: 0.05999999865889549 720 | Stereo Focal Distance: 1 721 | Swap Stereo Eyes: false 722 | Value: false 723 | Invert Z Axis: false 724 | Name: robot topdown 725 | Near Clip Distance: 0.009999999776482582 726 | Scale: 72.92960357666016 727 | Target Frame: robot_base_footprint 728 | Value: TopDownOrtho (rviz_default_plugins) 729 | X: 0 730 | Y: 0 731 | - Class: rviz_default_plugins/ThirdPersonFollower 732 | Distance: 7.127068519592285 733 | Enable Stereo Rendering: 734 | Stereo Eye Separation: 0.05999999865889549 735 | Stereo Focal Distance: 1 736 | Swap Stereo Eyes: false 737 | Value: false 738 | Focal Point: 739 | X: -0.6902098655700684 740 | Y: 0.1107020378112793 741 | Z: -1.0967254638671875e-05 742 | Focal Shape Fixed Size: true 743 | Focal Shape Size: 0.05000000074505806 744 | Invert Z Axis: false 745 | Name: robot third person 746 | Near Clip Distance: 0.009999999776482582 747 | Pitch: 1.5697963237762451 748 | Target Frame: robot_base_footprint 749 | Value: ThirdPersonFollower (rviz_default_plugins) 750 | Yaw: 3.1423871517181396 751 | Window Geometry: 752 | D435 RGB image: 753 | collapsed: false 754 | D435 depth Image: 755 | collapsed: false 756 | Displays: 757 | collapsed: true 758 | Height: 1023 759 | Hide Left Dock: true 760 | Hide Right Dock: true 761 | QMainWindow State: 
000000ff00000000fd00000004000000000000018900000365fc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073000000003b00000365000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb00000020004400340033003500200064006500700074006800200049006d00610067006500000001a0000001010000002800fffffffb0000001c0044003400330035002000520047004200200069006d006100670065000000023a000001660000002800ffffff000000010000017f00000365fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000365000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000003b60000003efc0100000002fb0000000800540069006d00650100000000000003b60000025300fffffffb0000000800540069006d00650100000000000004500000000000000000000003b60000036500000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 762 | Selection: 763 | collapsed: false 764 | Time: 765 | collapsed: false 766 | Tool Properties: 767 | collapsed: false 768 | Views: 769 | collapsed: true 770 | Width: 950 771 | X: 0 772 | Y: 23 773 | --------------------------------------------------------------------------------