├── .gitignore ├── realsense_camera_msgs ├── msg │ ├── Extrinsics.msg │ └── IMUInfo.msg ├── CHANGELOG.rst ├── CMakeLists.txt └── package.xml ├── realsense_ros2_camera ├── rviz │ ├── ros2_rviz.png │ └── ros2.rviz ├── launch │ ├── rs.launch.py │ ├── ros2_intel_realsense.launch.py │ ├── realsense2_to_laserscan.py │ └── default.rviz ├── package.xml ├── include │ └── realsense_ros2_camera │ │ └── constants.hpp ├── CMakeLists.txt ├── CHANGELOG.rst ├── test │ └── test_api.cpp └── src │ └── realsense_camera_node.cpp ├── CHANGELOG.rst ├── README.md └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | *.bin 2 | *.ppm 3 | *.bag 4 | -------------------------------------------------------------------------------- /realsense_camera_msgs/msg/Extrinsics.msg: -------------------------------------------------------------------------------- 1 | std_msgs/Header header 2 | float64[9] rotation 3 | float64[3] translation 4 | -------------------------------------------------------------------------------- /realsense_ros2_camera/rviz/ros2_rviz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intel/ros2_intel_realsense/HEAD/realsense_ros2_camera/rviz/ros2_rviz.png -------------------------------------------------------------------------------- /realsense_camera_msgs/msg/IMUInfo.msg: -------------------------------------------------------------------------------- 1 | # header.frame_id is either set to "imu_accel" or "imu_gyro" 2 | # to distinguish between "accel" and "gyro" info. 3 | std_msgs/Header header 4 | float64[12] data 5 | float64[3] noise_variances 6 | float64[3] bias_variances 7 | -------------------------------------------------------------------------------- /realsense_camera_msgs/CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 2 | Changelog for package realsense_camera_msgs 3 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 4 | 5 | 2.0.3 (2018-12-11) 6 | ------------------ 7 | 8 | 2.0.4 (2019-05-30) 9 | ------------------ 10 | * update maintainer 11 | * 2.0.3 12 | * update changelog 13 | * Contributors: Chris Ye 14 | 15 | 2.0.2 (2018-12-07) 16 | ------------------ 17 | -------------------------------------------------------------------------------- /realsense_camera_msgs/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.5) 2 | 3 | project(realsense_camera_msgs) 4 | 5 | if(NOT CMAKE_CXX_STANDARD) 6 | set(CMAKE_CXX_STANDARD 14) 7 | endif() 8 | 9 | if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") 10 | add_compile_options(-Wall -Wextra -Wpedantic) 11 | endif() 12 | 13 | find_package(ament_cmake REQUIRED) 14 | find_package(rosidl_default_generators REQUIRED) 15 | find_package(builtin_interfaces REQUIRED) 16 | find_package(std_msgs REQUIRED) 17 | 18 | set(msg_files 19 | "msg/IMUInfo.msg" 20 | "msg/Extrinsics.msg" 21 | ) 22 | rosidl_generate_interfaces(${PROJECT_NAME} 23 | ${msg_files} 24 | DEPENDENCIES builtin_interfaces std_msgs 25 | ADD_LINTER_TESTS 26 | ) 27 | 28 | ament_export_dependencies(rosidl_default_runtime) 29 | 30 | ament_package() 31 | -------------------------------------------------------------------------------- /realsense_ros2_camera/launch/rs.launch.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Intel 
Corporation 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Launch realsense_ros2_camera node without rviz2.""" 16 | 17 | from launch import LaunchDescription 18 | import launch_ros.actions 19 | 20 | 21 | def generate_launch_description(): 22 | return LaunchDescription([ 23 | # Realsense 24 | launch_ros.actions.Node( 25 | package='realsense_ros2_camera', node_executable='realsense_ros2_camera', 26 | output='screen'), 27 | ]) 28 | -------------------------------------------------------------------------------- /realsense_camera_msgs/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | realsense_camera_msgs 5 | 2.0.4 6 | A package containing realsense camera messages definitions. 7 | Sharron LIU 8 | Chris Ye 9 | Apache License 2.0 10 | 11 | ament_cmake 12 | rosidl_default_generators 13 | builtin_interfaces 14 | std_msgs 15 | 16 | rosidl_default_runtime 17 | builtin_interfaces 18 | std_msgs 19 | 20 | ament_lint_common 21 | 22 | rosidl_interface_packages 23 | 24 | 25 | ament_cmake 26 | 27 | 28 | -------------------------------------------------------------------------------- /realsense_ros2_camera/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | realsense_ros2_camera 5 | 2.0.4 6 | The realsense_ros2_camera package 7 | Sharron LIU 8 | Chris Ye 9 | Apache License 2.0 10 | 11 | ament_cmake 12 | 13 | eigen 14 | 15 | builtin_interfaces 16 | cv_bridge 17 | image_transport 18 | librealsense2 19 | rclcpp 20 | realsense_camera_msgs 21 | sensor_msgs 22 | std_msgs 23 | tf2 24 | tf2_ros 25 | 26 | ament_cmake_gtest 27 | ament_lint_auto 28 | ament_lint_common 29 | libopencv-dev 30 | 31 | 32 | ament_cmake 33 | 34 | 35 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 2 | Changelog for ros2_intel_realsense 3 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 4 | 5 | 2.0.3 (2018-12-11) 6 | ------------------ 7 | * fix build dependence for ros2 crystal release 8 | * Add eigen as a build dependency. 
9 | 10 | 2.0.2 (2018-12-07) 11 | ------------------ 12 | * create for bloom release 13 | 14 | 15 | 2.0.1 (2018-05-31) 16 | -------------------------------------------------------------------------------- 17 | Sharron LIU (12): 18 | ros2_intel_realsense: added initial version 19 | README: updated for repo name "ros2_intel_realsense" 20 | Merge pull request #1 from dongx1x/master 21 | camera_msgs: added test dependent ament_lint_common 22 | realsense_ros2_camera: added test dependency ament_lint_common 23 | realsense_ros2_camera: copyright notes in ROS2 style 24 | ament test: pass ament test 25 | Merge pull request #3 from sharronliu/master 26 | cv_bridge: use the upstream ros2 cv_bridge 27 | Merge pull request #4 from sharronliu/master 28 | realsense_ros2_camera: added test dependent ros package 29 | README: clarified host OS supported 30 | 31 | Xiaocheng Dong (1): 32 | Add tests 33 | 34 | dongx1x (1): 35 | Update README.md 36 | 37 | -------------------------------------------------------------------------------- /realsense_ros2_camera/launch/ros2_intel_realsense.launch.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Intel Corporation 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Launch realsense_ros2_camera node and rviz.""" 16 | 17 | import os 18 | 19 | from ament_index_python.packages import get_package_share_directory 20 | from launch import LaunchDescription 21 | import launch_ros.actions 22 | 23 | 24 | def generate_launch_description(): 25 | default_rviz = os.path.join(get_package_share_directory('realsense_ros2_camera'), 26 | 'launch', 'default.rviz') 27 | return LaunchDescription([ 28 | # Realsense 29 | launch_ros.actions.Node( 30 | package='realsense_ros2_camera', node_executable='realsense_ros2_camera', 31 | output='screen'), 32 | 33 | # Rviz 34 | launch_ros.actions.Node( 35 | package='rviz2', node_executable='rviz2', output='screen', 36 | arguments=['--display-config', default_rviz]), 37 | ]) 38 | -------------------------------------------------------------------------------- /realsense_ros2_camera/launch/realsense2_to_laserscan.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Intel Corporation 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | """Launch face detection and rviz.""" 16 | 17 | from launch import LaunchDescription 18 | import launch.actions 19 | import launch_ros.actions 20 | 21 | 22 | def generate_launch_description(): 23 | enable_align_depth = launch.substitutions.LaunchConfiguration('enable_aligned_depth', 24 | default='false') 25 | output_frame = launch.substitutions.LaunchConfiguration('output_frame', default='base_scan') 26 | range_max = launch.substitutions.LaunchConfiguration('range_max', default='2.0') 27 | range_min = launch.substitutions.LaunchConfiguration('range_min', default='0.2') 28 | return LaunchDescription([ 29 | # Realsense 30 | launch_ros.actions.Node( 31 | package='realsense_ros2_camera', 32 | node_executable='realsense_ros2_camera', 33 | node_name='realsense_ros2_camera', 34 | parameters=[{'enable_aligned_depth': enable_align_depth}], 35 | output='screen'), 36 | launch_ros.actions.Node( 37 | package='depthimage_to_laserscan', 38 | node_executable='depthimage_to_laserscan_node', 39 | node_name='depthimage_to_laserscan_node', 40 | output='screen', 41 | parameters=[{'output_frame': output_frame}, 42 | {'range_min': range_min}, 43 | {'range_max': range_max}], 44 | arguments=['depth:=/camera/depth/image_rect_raw', 45 | 'depth_camera_info:=/camera/depth/camera_info', 46 | 'scan:=/scan']), 47 | ]) 48 | -------------------------------------------------------------------------------- /realsense_ros2_camera/include/realsense_ros2_camera/constants.hpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2018 Intel Corporation. All Rights Reserved 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | #include 16 | 17 | #pragma once 18 | #ifndef REALSENSE_ROS2_CAMERA__CONSTANTS_HPP_ 19 | #define REALSENSE_ROS2_CAMERA__CONSTANTS_HPP_ 20 | 21 | #define REALSENSE_ROS_MAJOR_VERSION 2 22 | #define REALSENSE_ROS_MINOR_VERSION 0 23 | #define REALSENSE_ROS_PATCH_VERSION 1 24 | 25 | #define STRINGIFY(arg) #arg 26 | #define VAR_ARG_STRING(arg) STRINGIFY(arg) 27 | /* Return version in "X.Y.Z" format */ 28 | #define REALSENSE_ROS_VERSION_STR (VAR_ARG_STRING(REALSENSE_ROS_MAJOR_VERSION. 
\ 29 | REALSENSE_ROS_MINOR_VERSION.REALSENSE_ROS_PATCH_VERSION)) 30 | 31 | namespace realsense_ros2_camera 32 | { 33 | const bool POINTCLOUD = false; 34 | const bool ALIGN_POINTCLOUD = true; 35 | const bool SYNC_FRAMES = true; 36 | 37 | const bool ALIGN_DEPTH = true; 38 | 39 | const int DEPTH_WIDTH = 640; 40 | const int DEPTH_HEIGHT = 480; 41 | 42 | const int INFRA1_WIDTH = 640; 43 | const int INFRA1_HEIGHT = 480; 44 | 45 | const int INFRA2_WIDTH = 640; 46 | const int INFRA2_HEIGHT = 480; 47 | 48 | const int COLOR_WIDTH = 640; 49 | const int COLOR_HEIGHT = 480; 50 | 51 | const int FISHEYE_WIDTH = 640; 52 | const int FISHEYE_HEIGHT = 480; 53 | 54 | 55 | const int DEPTH_FPS = 30; 56 | const int INFRA1_FPS = 30; 57 | const int INFRA2_FPS = 30; 58 | const int COLOR_FPS = 30; 59 | const int FISHEYE_FPS = 30; 60 | const int GYRO_FPS = 1000; 61 | const int ACCEL_FPS = 1000; 62 | 63 | 64 | const bool ENABLE_DEPTH = true; 65 | const bool ENABLE_INFRA1 = true; 66 | const bool ENABLE_INFRA2 = true; 67 | const bool ENABLE_COLOR = true; 68 | const bool ENABLE_FISHEYE = true; 69 | const bool ENABLE_IMU = true; 70 | 71 | 72 | const char DEFAULT_BASE_FRAME_ID[] = "camera_link"; 73 | const char DEFAULT_DEPTH_FRAME_ID[] = "camera_depth_frame"; 74 | const char DEFAULT_INFRA1_FRAME_ID[] = "camera_infra1_frame"; 75 | const char DEFAULT_INFRA2_FRAME_ID[] = "camera_infra2_frame"; 76 | const char DEFAULT_COLOR_FRAME_ID[] = "camera_color_frame"; 77 | const char DEFAULT_FISHEYE_FRAME_ID[] = "camera_fisheye_frame"; 78 | const char DEFAULT_IMU_FRAME_ID[] = "camera_imu_frame"; 79 | const char DEFAULT_DEPTH_OPTICAL_FRAME_ID[] = "camera_depth_optical_frame"; 80 | const char DEFAULT_INFRA1_OPTICAL_FRAME_ID[] = "camera_infra1_optical_frame"; 81 | const char DEFAULT_INFRA2_OPTICAL_FRAME_ID[] = "camera_infra2_optical_frame"; 82 | const char DEFAULT_COLOR_OPTICAL_FRAME_ID[] = "camera_color_optical_frame"; 83 | const char DEFAULT_FISHEYE_OPTICAL_FRAME_ID[] = "camera_fisheye_optical_frame"; 84 | const char DEFAULT_ACCEL_OPTICAL_FRAME_ID[] = "camera_accel_optical_frame"; 85 | const char DEFAULT_GYRO_OPTICAL_FRAME_ID[] = "camera_gyro_optical_frame"; 86 | const char DEFAULT_IMU_OPTICAL_FRAME_ID[] = "camera_imu_optical_frame"; 87 | } // namespace realsense_ros2_camera 88 | #endif // REALSENSE_ROS2_CAMERA__CONSTANTS_HPP_ 89 | -------------------------------------------------------------------------------- /realsense_ros2_camera/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.5) 2 | project(realsense_ros2_camera) 3 | 4 | # ROS2 Flags 5 | if(NOT CMAKE_CXX_STANDARD) 6 | set(CMAKE_CXX_STANDARD 14) 7 | endif() 8 | 9 | if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") 10 | add_compile_options(-Wall -Wextra -Wpedantic) 11 | endif() 12 | 13 | # Compiler Defense Flags 14 | if(UNIX OR APPLE) 15 | # Linker flags. 16 | if(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL "Intel") 17 | # GCC specific flags. ICC is compatible with them. 18 | set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -z noexecstack -z relro -z now") 19 | set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -z noexecstack -z relro -z now") 20 | elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang") 21 | # In Clang, -z flags are not compatible, they need to be passed to linker via -Wl. 
22 | set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now") 23 | set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now") 24 | endif() 25 | 26 | # Compiler flags. 27 | if(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") 28 | # GCC specific flags. 29 | if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9 OR CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 4.9) 30 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIE -fstack-protector-strong") 31 | else() 32 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIE -fstack-protector") 33 | endif() 34 | elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang") 35 | # Clang is compatbile with some of the flags. 36 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIE -fstack-protector") 37 | elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "Intel") 38 | # Same as above, with exception that ICC compilation crashes with -fPIE option, even 39 | # though it uses -pie linker option that require -fPIE during compilation. Checksec 40 | # shows that it generates correct PIE anyway if only -pie is provided. 41 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstack-protector") 42 | endif() 43 | 44 | # Generic flags. 45 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -fno-operator-names -Wformat -Wformat-security -Wall") 46 | # Dot not forward c++ flag to GPU beucause it is not supported 47 | set( CUDA_PROPAGATE_HOST_FLAGS OFF ) 48 | set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -D_FORTIFY_SOURCE=2") 49 | set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pie") 50 | endif() 51 | 52 | if(CMAKE_BUILD_TYPE EQUAL "RELEASE") 53 | message(STATUS "Create Release Build.") 54 | set(CMAKE_CXX_FLAGS "-O2 ${CMAKE_CXX_FLAGS}") 55 | else() 56 | message(STATUS "Create Debug Build.") 57 | endif() 58 | 59 | set(CMAKE_CXX_FLAGS "-fPIE -fPIC -D_FORTIFY_SOURCE=2 -fstack-protector -Wformat -Wformat-security -Wall ${CMAKE_CXX_FLAGS}") 60 | 61 | find_package(ament_cmake REQUIRED) 62 | find_package(builtin_interfaces REQUIRED) 63 | find_package(cv_bridge REQUIRED) 64 | find_package(image_transport REQUIRED) 65 | find_package(librealsense2 REQUIRED) 66 | find_package(rclcpp REQUIRED) 67 | find_package(realsense_camera_msgs REQUIRED) 68 | find_package(std_msgs REQUIRED) 69 | find_package(sensor_msgs REQUIRED) 70 | find_package(tf2_ros REQUIRED) 71 | find_package(tf2 REQUIRED) 72 | 73 | include_directories( 74 | include 75 | ) 76 | 77 | add_executable(${PROJECT_NAME} 78 | include/${PROJECT_NAME}/constants.hpp 79 | src/realsense_camera_node.cpp 80 | ) 81 | 82 | ament_target_dependencies(${PROJECT_NAME} 83 | cv_bridge 84 | image_transport 85 | librealsense2 86 | rclcpp 87 | realsense_camera_msgs 88 | std_msgs 89 | sensor_msgs 90 | tf2 91 | tf2_ros 92 | ) 93 | 94 | # Install binaries 95 | install(TARGETS ${PROJECT_NAME} 96 | RUNTIME DESTINATION bin 97 | ) 98 | 99 | install(TARGETS ${PROJECT_NAME} 100 | DESTINATION lib/${PROJECT_NAME} 101 | ) 102 | 103 | # Install header files 104 | install( 105 | DIRECTORY include/ 106 | DESTINATION include 107 | ) 108 | 109 | # Install launch files. 
110 | install(DIRECTORY 111 | launch 112 | DESTINATION share/${PROJECT_NAME}/ 113 | ) 114 | 115 | if(BUILD_TESTING) 116 | find_package(ament_lint_auto REQUIRED) 117 | find_package(OpenCV REQUIRED) 118 | 119 | ament_lint_auto_find_test_dependencies() 120 | 121 | set(REALSENSE_DEVICE_PLUGIN FALSE) 122 | if(${REALSENSE_DEVICE_PLUGIN}) 123 | ament_add_gtest(test_api test/test_api.cpp) 124 | endif() 125 | if(TARGET test_api) 126 | target_include_directories(test_api PUBLIC 127 | ${${PROJECT_NAME}_INCLUDE_DIRS} 128 | ) 129 | ament_target_dependencies(test_api 130 | OpenCV 131 | rclcpp 132 | sensor_msgs 133 | tf2 134 | tf2_ros) 135 | endif() 136 | endif() 137 | 138 | ament_package() 139 | -------------------------------------------------------------------------------- /realsense_ros2_camera/CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 2 | Changelog for package realsense_ros2_camera 3 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 4 | 5 | 2.0.3 (2018-12-11) 6 | ------------------ 7 | * Merge pull request `#18 `_ from nuclearsandwich/add-dependencies-to-package.xml 8 | Add eigen as a build dependency for ros2debian 9 | * Add eigen as a build dependency. 10 | * Contributors: Chris Ye, Steven! Ragnarök 11 | 12 | 2.0.4 (2019-05-30) 13 | ------------------ 14 | * Merge pull request `#50 `_ from ahuizxc/dashing 15 | fix compiling failure after ros2 core updated and remove some warnings. 16 | * fix compiling failure after ros2 core updated and remove some warnings. 17 | * Merge pull request `#44 `_ from ahuizxc/master 18 | add depthimage to laser scan launch file 19 | * add depthimage to laser scan 20 | * enable librealsensev2.17.1 21 | enable librealsensev2.17.1 22 | * update package.xml and readme 23 | * enable librealsensev2.17.1 24 | * Merge pull request `#39 `_ from challen-zhou/master 25 | Add options to enable/disable color-aligned point cloud 26 | * Change aligned pointcloud format from XYZ to XYZRGB 27 | * Add options to enable/disable color-aligned point cloud 28 | Since the default 2 point cloud process cost too much computation, 29 | add option to enable/disable(default) color-aligned point cloud. 30 | * Merge pull request `#36 `_ from challen-zhou/master 31 | Add pointcloud aligned with color 32 | * Add pointcloud aligned with color 33 | Add pointcloud generate from depth which aligned to color, 34 | no RGB channel since this pointcloud one-to-one matches with color image. 35 | * Merge pull request `#32 `_ from challen-zhou/master 36 | Update depth alignment function to align with librealsense 37 | * Merge pull request `#30 `_ from ahuizxc/master 38 | pass the format test and fix ctrl+c error 39 | * Update depth alignment function to align with librealsense 40 | Update depth alignment function to align with librealsense since 41 | librealsense already upstreamed to ROS2/Crystal, also we can benifit from 42 | the future librealsense optimization. 
43 | * fix ctrl+c error 44 | * pass the format test 45 | * Merge pull request `#29 `_ from ahuizxc/master 46 | fixed bug that camera not work when enable_depth=False 47 | * fixed bug that camera not work when enable_depth=False 48 | * Merge pull request `#23 `_ from intel/fix_ctest 49 | fix error when run CTest 50 | * fix error when run CTest 51 | * set Realsense device not exist by default, disable testapi when Realsense device not plugin 52 | * fix format error when run CTest 53 | * Merge pull request `#22 `_ from RachelRen05/update_readme 54 | update project to install debain package dependency 55 | * update readme.md 56 | * update project to install debain package dependency 57 | * add rviz default configuration and support ros2 launch 58 | * update maintainer 59 | * 2.0.3 60 | * update changelog 61 | * Merge pull request `#18 `_ from nuclearsandwich/add-dependencies-to-package.xml 62 | Add eigen as a build dependency for ros2debian 63 | * Add eigen as a build dependency. 64 | * Contributors: Chris Ye, Gary Liu, RachelRen05, Sharron LIU, Steven! Ragnarök, Xiaojun Huang, ahuizxc, challenzhou 65 | 66 | 2.0.2 (2018-12-07) 67 | ------------------ 68 | * Merge pull request `#15 `_ from intel/pointer_api 69 | create a Subscriber using a raw pointer 70 | * create a Subscriber using a raw pointer 71 | std::shared_ptr is not safe, replace with new pointer API from image_transport (https://github.com/ros-perception/image_common/pull/104) 72 | Signed-off-by: Chris Ye 73 | * Merge pull request `#16 `_ from intel/enable_realsense 74 | update librealsense dependence 75 | * update Readme to modify librealsense guide 76 | * added librealsense2 dependence 77 | Signed-off-by: Chris Ye 78 | * create wrapper class PipelineSyncer to work around librealsense 2.16 feature, removing operator() from class asynchronous_syncer 79 | merge from https://github.com/IntelRealSense/librealsense/commit/985f3b75539ec08a78db69c9f8f2505d3d0c2e40 80 | Fix no match for call to '(rs2::asynchronous_syncer) (rs2::frame)' issue: https://github.com/IntelRealSense/librealsense/issues/2314 81 | Signed-off-by: Chris Ye 82 | * Merge pull request `#13 `_ from yechun1/image_transport 83 | publish raw image by image_transport publisher 84 | * fix typo and add transport in Readme 85 | Signed-off-by: Chris Ye 86 | * publish raw image by image_transport publisher 87 | Signed-off-by: Chris Ye 88 | * Merge pull request `#14 `_ from yechun1/fix_build_issue 89 | add semicolon after RCLCPP_WARN to fix build issue 90 | * add semicolon after RCLCPP_WARN to fix build issue 91 | Signed-off-by: Chris Ye 92 | * Merge pull request `#12 `_ from sharronliu/master 93 | camera node: replaced RCUTILS with RCLCPP logger 94 | * camera node: replaced RCUTILS with RCLCPP logger 95 | Signed-off-by: Sharron LIU 96 | * Merge pull request `#11 `_ from ahuizxc/aligned_depth 97 | enable aligned depth image 98 | * corrected the xml version 99 | * keep the topics' name same as ros1, add option _align_depth to enable or disable aligned_depth_to_color 100 | * fix frame_id of aligned depth image 101 | * fixed format error 102 | * enable aligned depth image 103 | * Contributors: Chris Ye, Sharron LIU, ahuizxc 104 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DISCONTINUATION OF PROJECT 2 | 3 | This project will no longer be maintained by Intel. 
4 | 5 | Intel has ceased development and contributions including, but not limited to, maintenance, bug fixes, new releases, or updates, to this project. 6 | 7 | Intel no longer accepts patches to this project. 8 | 9 | If you have an ongoing need to use this project, are interested in independently developing it, or would like to maintain patches for the open source software community, please create your own fork of this project. 10 | 11 | Contact: webadmin@linux.intel.com 12 | # ROS2 Wrapper for Intel® RealSense™ Devices 13 | These are packages for using Intel RealSense cameras (D400 series) with ROS2. 14 | 15 | **Note: If you need T265 support, please check out [this branch](https://github.com/intel/ros2_intel_realsense/tree/refactor)** 16 | 17 | ## Installation Instructions 18 | 19 | The following instructions were verified with ROS2 Dashing on **Ubuntu 18.04**. 20 | 21 | ### Dependencies 22 | #### Install ROS2 packages [ros-dashing-desktop](https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians) 23 | 24 | #### Install ROS2 dependencies 25 | ```bash 26 | sudo apt-get install ros-dashing-cv-bridge ros-dashing-librealsense2 ros-dashing-message-filters ros-dashing-image-transport 27 | ``` 28 | Or you may install them from source: 29 | * [cv_bridge](https://github.com/ros-perception/vision_opencv/tree/ros2/cv_bridge) 30 | * [Intel® RealSense™ SDK 2.0](https://github.com/IntelRealSense/librealsense.git) Currently, we support the librealsense master branch. 31 | * [ros2_message_filters](https://github.com/ros2/message_filters) 32 | * [ros2 image_transport](https://github.com/ros-perception/image_common/tree/ros2) 33 | 34 | #### Install other non-ROS Debian packages 35 | ``` 36 | sudo apt-get install -y libssl-dev libusb-1.0-0-dev pkg-config libgtk-3-dev 37 | sudo apt-get install -y libglfw3-dev libgl1-mesa-dev libglu1-mesa-dev 38 | ``` 39 | * libssl-dev 40 | * libusb-1.0-0-dev 41 | * pkg-config 42 | * libgtk-3-dev 43 | * libglfw3-dev 44 | * libgl1-mesa-dev 45 | * libglu1-mesa-dev 46 | 47 | ### Install ros2_intel_realsense binary packages 48 | ``` 49 | sudo apt-get install ros-dashing-realsense-camera-msgs ros-dashing-realsense-ros2-camera 50 | ``` 51 | The ros2_intel_realsense packages are now installed. You can jump to [Usage Instructions](https://github.com/intel/ros2_intel_realsense#usage-instructions) to run them, or you can install ros2_intel_realsense from source for more features. 52 | 53 | ### Install ros2_intel_realsense from source 54 | ```bash 55 | #get code 56 | mkdir -p ~/ros2_ws/src 57 | cd ~/ros2_ws/src 58 | git clone https://github.com/intel/ros2_intel_realsense.git 59 | 60 | #build 61 | cd ~/ros2_ws 62 | source /opt/ros/dashing/setup.bash 63 | colcon build --base-paths src/ros2_intel_realsense 64 | ``` 65 | 66 | ## Usage Instructions 67 | 68 | ### Start the camera node 69 | To start the camera node in ROS2, plug in the camera, then run the following commands: 70 | 71 | ```bash 72 | source /opt/ros/dashing/setup.bash 73 | source ~/ros2_ws/install/local_setup.bash 74 | # To launch with "ros2 run" 75 | ros2 run realsense_ros2_camera realsense_ros2_camera 76 | # OR, to invoke the executable directly 77 | realsense_ros2_camera 78 | ``` 79 | 80 | This will stream data from all camera sensors and publish it on the appropriate ROS2 topics. PointCloud2 publishing is enabled by default until ROS2 Python launch options are provided. 
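To quickly verify that frames are arriving, you can subscribe to one of the published topics from a second terminal. The snippet below is a minimal illustrative sketch, not part of this package; it assumes a sourced ROS2 Dashing environment with `rclpy` and `sensor_msgs` available, and the node name `realsense_color_echo` and file name `check_color_stream.py` are arbitrary.

```python
# check_color_stream.py -- illustrative sketch, not shipped with this package.
# Logs the size and encoding of each color frame published by realsense_ros2_camera.
import rclpy
from rclpy.node import Node
from sensor_msgs.msg import Image


class ColorEcho(Node):
    def __init__(self):
        super().__init__('realsense_color_echo')
        # Topic name matches the "Published Topics" list below; QoS history depth of 10.
        self.create_subscription(Image, '/camera/color/image_raw', self.on_image, 10)

    def on_image(self, msg):
        self.get_logger().info(
            'color frame %dx%d (%s)' % (msg.width, msg.height, msg.encoding))


def main():
    rclpy.init()
    rclpy.spin(ColorEcho())


if __name__ == '__main__':
    main()
```

Run it with `python3 check_color_stream.py` while the camera node is running; `ros2 topic hz /camera/color/image_raw` provides a similar sanity check without writing any code.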
81 | 82 | ### Published Topics 83 | Rectified depth image: [/camera/depth/image_rect_raw](https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/Image.msg) 84 | 85 | Color image: [/camera/color/image_raw](https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/Image.msg) 86 | 87 | Rectified infra1 image: [/camera/infra1/image_rect_raw](https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/Image.msg) 88 | 89 | Rectified infra2 image: [/camera/infra2/image_rect_raw](https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/Image.msg) 90 | 91 | Depth registered point cloud: [/camera/aligned_depth_to_color/color/points](https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/PointCloud2.msg) 92 | 93 | ### Visualize Depth Aligned (i.e. Depth Registered) Point Cloud 94 | 95 | To start the camera node in ROS2 and view the depth-aligned point cloud in RViz: 96 | ```bash 97 | source /opt/ros/dashing/setup.bash 98 | source ~/ros2_ws/install/local_setup.bash 99 | # console #1 launch rviz2 100 | ros2 run rviz2 rviz2 -d realsense_ros2_camera/rviz/ros2.rviz 101 | # console #2 launch realsense_ros2_camera 102 | ros2 run realsense_ros2_camera realsense_ros2_camera 103 | ``` 104 | 105 | This will launch [RViz](http://wiki.ros.org/rviz) and display the five streams: color, depth, infra1, infra2, and point cloud. 106 | 107 | ![realsense_ros2_camera visualization results](https://github.com/intel/ros2_intel_realsense/raw/master/realsense_ros2_camera/rviz/ros2_rviz.png "realsense_ros2_camera visualization results") 108 | 109 | ### Run tests 110 | ```Shell 111 | colcon test --base-paths src/ros2_intel_realsense 112 | ``` 113 | 114 | ## Known Issues 115 | * This ROS2 node does not currently provide any dynamic reconfigure support for camera properties/presets. 116 | * We support Ubuntu Linux Bionic Beaver 18.04 on 64-bit; Mac OS X 10.12 (Sierra) and Windows 10 are not yet supported. 117 | 118 | ## Todo 119 | A few features remain to be ported from the latest ROS [realsense](https://github.com/intel-ros/realsense.git) wrapper: 120 | * Preset/Controls 121 | 122 | ## License 123 | Copyright 2018 Intel Corporation 124 | 125 | Licensed under the Apache License, Version 2.0 (the "License"); 126 | you may not use this project except in compliance with the License. 127 | You may obtain a copy of the License at 128 | 129 | http://www.apache.org/licenses/LICENSE-2.0 130 | 131 | Unless required by applicable law or agreed to in writing, software 132 | distributed under the License is distributed on an "AS IS" BASIS, 133 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 134 | See the License for the specific language governing permissions and 135 | limitations under the License. 
136 | 137 | **Other names and brands may be claimed as the property of others* 138 | 139 | Any security issue should be reported using process at https://01.org/security 140 | 141 | -------------------------------------------------------------------------------- /realsense_ros2_camera/launch/default.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz_common/Displays 3 | Help Height: 0 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /Grid1 10 | - /PointCloud21 11 | - /Image1 12 | Splitter Ratio: 0.5 13 | Tree Height: 222 14 | - Class: rviz_common/Selection 15 | Name: Selection 16 | - Class: rviz_common/Tool Properties 17 | Expanded: 18 | - /2D Nav Goal1 19 | - /Publish Point1 20 | Name: Tool Properties 21 | Splitter Ratio: 0.5886790156364441 22 | - Class: rviz_common/Views 23 | Expanded: 24 | - /Current View1 25 | Name: Views 26 | Splitter Ratio: 0.5 27 | Visualization Manager: 28 | Class: "" 29 | Displays: 30 | - Alpha: 0.5 31 | Cell Size: 1 32 | Class: rviz_default_plugins/Grid 33 | Color: 160; 160; 164 34 | Enabled: true 35 | Line Style: 36 | Line Width: 0.029999999329447746 37 | Value: Lines 38 | Name: Grid 39 | Normal Cell Count: 0 40 | Offset: 41 | X: 0 42 | Y: 0 43 | Z: 0 44 | Plane: XY 45 | Plane Cell Count: 10 46 | Reference Frame: 47 | Value: true 48 | - Alpha: 1 49 | Autocompute Intensity Bounds: true 50 | Autocompute Value Bounds: 51 | Max Value: 10 52 | Min Value: -10 53 | Value: true 54 | Axis: Z 55 | Channel Name: intensity 56 | Class: rviz_default_plugins/PointCloud2 57 | Color: 255; 255; 255 58 | Color Transformer: RGB8 59 | Decay Time: 0 60 | Enabled: true 61 | Invert Rainbow: false 62 | Max Color: 255; 255; 255 63 | Max Intensity: 4096 64 | Min Color: 0; 0; 0 65 | Min Intensity: 0 66 | Name: PointCloud2 67 | Position Transformer: XYZ 68 | Queue Size: 10 69 | Selectable: true 70 | Size (Pixels): 3 71 | Size (m): 0.009999999776482582 72 | Style: Flat Squares 73 | Topic: /camera/depth/color/points 74 | Unreliable: false 75 | Use Fixed Frame: true 76 | Use rainbow: true 77 | Value: true 78 | - Class: rviz_default_plugins/Image 79 | Enabled: true 80 | Max Value: 1 81 | Median window: 5 82 | Min Value: 0 83 | Name: Image 84 | Normalize Range: true 85 | Queue Size: 10 86 | Topic: /camera/color/image_raw 87 | Unreliable: false 88 | Value: true 89 | - Class: rviz_default_plugins/Image 90 | Enabled: true 91 | Max Value: 1 92 | Median window: 5 93 | Min Value: 0 94 | Name: Image 95 | Normalize Range: true 96 | Queue Size: 10 97 | Topic: /camera/depth/image_rect_raw 98 | Unreliable: false 99 | Value: true 100 | - Class: rviz_default_plugins/Image 101 | Enabled: true 102 | Max Value: 1 103 | Median window: 5 104 | Min Value: 0 105 | Name: Image 106 | Normalize Range: true 107 | Queue Size: 10 108 | Topic: /camera/infra1/image_rect_raw 109 | Unreliable: false 110 | Value: true 111 | - Class: rviz_default_plugins/Image 112 | Enabled: true 113 | Max Value: 1 114 | Median window: 5 115 | Min Value: 0 116 | Name: Image 117 | Normalize Range: true 118 | Queue Size: 10 119 | Topic: /camera/infra2/image_rect_raw 120 | Unreliable: false 121 | Value: true 122 | Enabled: true 123 | Global Options: 124 | Background Color: 48; 48; 48 125 | Fixed Frame: camera_depth_optical_frame 126 | Frame Rate: 30 127 | Name: root 128 | Tools: 129 | - Class: rviz_default_plugins/MoveCamera 130 | - Class: rviz_default_plugins/Select 131 | - Class: rviz_default_plugins/FocusCamera 132 | - Class: 
rviz_default_plugins/Measure 133 | Line color: 128; 128; 0 134 | - Class: rviz_default_plugins/SetInitialPose 135 | Topic: /initialpose 136 | - Class: rviz_default_plugins/SetGoal 137 | Topic: /move_base_simple/goal 138 | - Class: rviz_default_plugins/PublishPoint 139 | Single click: true 140 | Topic: /clicked_point 141 | Transformation: 142 | Current: 143 | Class: rviz_default_plugins/TF 144 | Value: true 145 | Views: 146 | Current: 147 | Class: rviz_default_plugins/Orbit 148 | Distance: 1.0510121583938599 149 | Enable Stereo Rendering: 150 | Stereo Eye Separation: 0.05999999865889549 151 | Stereo Focal Distance: 1 152 | Swap Stereo Eyes: false 153 | Value: false 154 | Focal Point: 155 | X: -0.18814913928508759 156 | Y: -0.17941315472126007 157 | Z: 0.14549313485622406 158 | Focal Shape Fixed Size: true 159 | Focal Shape Size: 0.05000000074505806 160 | Invert Z Axis: false 161 | Name: Current View 162 | Near Clip Distance: 0.009999999776482582 163 | Pitch: -1.5697963237762451 164 | Target Frame: 165 | Value: Orbit (rviz_default_plugins) 166 | Yaw: 4.730405330657959 167 | Saved: ~ 168 | Window Geometry: 169 | Displays: 170 | collapsed: false 171 | Height: 1025 172 | Hide Left Dock: false 173 | Hide Right Dock: true 174 | Image: 175 | collapsed: false 176 | QMainWindow State: 000000ff00000000fd000000040000000000000156000003a7fc0200000010fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d0000011b000000c900fffffffb0000000a0049006d006100670065010000015e0000009b0000002800fffffffb0000000a0049006d00610067006501000001ff000000b20000002800fffffffb0000000a0049006d00610067006501000002b70000009a0000002800fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000003570000008d0000002800fffffffb0000000a0049006d00610067006501000001940000005d0000000000000000fb0000000a0049006d00610067006501000001f70000007a0000000000000000fb0000000a0049006d00610067006501000002770000009d0000000000000000fb0000000a0049006d006100670065010000031a000000ca0000000000000000000000010000010f000001effc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d000001ef000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b20000000000000000000000020000073d000000a9fc0100000002fb0000000a0049006d00610067006503000001c5000000bb000001f8000001b0fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004420000003efc0100000002fb0000000800540069006d00650100000000000004420000000000000000fb0000000800540069006d00650100000000000004500000000000000000000005e1000003a700000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 177 | Selection: 178 | collapsed: false 179 | Tool Properties: 180 | collapsed: false 181 | Views: 182 | collapsed: true 183 | Width: 1853 184 | X: 67 185 | Y: 27 186 | 
-------------------------------------------------------------------------------- /realsense_ros2_camera/rviz/ros2.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz_common/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /TF1/Frames1 10 | - /TF1/Tree1 11 | Splitter Ratio: 0.3735294044017792 12 | Tree Height: 849 13 | - Class: rviz_common/Selection 14 | Name: Selection 15 | - Class: rviz_common/Tool Properties 16 | Expanded: 17 | - /2D Nav Goal1 18 | - /Publish Point1 19 | Name: Tool Properties 20 | Splitter Ratio: 0.5886790156364441 21 | - Class: rviz_common/Views 22 | Expanded: 23 | - /Current View1 24 | Name: Views 25 | Splitter Ratio: 0.5 26 | Visualization Manager: 27 | Class: "" 28 | Displays: 29 | - Alpha: 0.5 30 | Cell Size: 1 31 | Class: rviz_default_plugins/Grid 32 | Color: 160; 160; 164 33 | Enabled: true 34 | Line Style: 35 | Line Width: 0.029999999329447746 36 | Value: Lines 37 | Name: Grid 38 | Normal Cell Count: 0 39 | Offset: 40 | X: 0 41 | Y: 0 42 | Z: 0 43 | Plane: XY 44 | Plane Cell Count: 10 45 | Reference Frame: 46 | Value: true 47 | - Class: rviz_default_plugins/Image 48 | Enabled: true 49 | Max Value: 1 50 | Median window: 5 51 | Min Value: 0 52 | Name: Image 53 | Normalize Range: true 54 | Queue Size: 10 55 | Topic: /camera/color/image_raw 56 | Unreliable: false 57 | Value: true 58 | - Alpha: 1 59 | Autocompute Intensity Bounds: true 60 | Autocompute Value Bounds: 61 | Max Value: 0.5696823596954346 62 | Min Value: 0.07105058431625366 63 | Value: true 64 | Axis: Z 65 | Channel Name: intensity 66 | Class: rviz_default_plugins/PointCloud2 67 | Color: 255; 255; 255 68 | Color Transformer: RGB8 69 | Decay Time: 0 70 | Enabled: true 71 | Invert Rainbow: false 72 | Max Color: 255; 255; 255 73 | Max Intensity: 4096 74 | Min Color: 0; 0; 0 75 | Min Intensity: 0 76 | Name: PointCloud2 77 | Position Transformer: XYZ 78 | Queue Size: 10 79 | Selectable: true 80 | Size (Pixels): 3 81 | Size (m): 0.009999999776482582 82 | Style: Points 83 | Topic: /camera/aligned_depth_to_color/color/points 84 | Unreliable: true 85 | Use Fixed Frame: true 86 | Use rainbow: true 87 | Value: true 88 | - Class: rviz_default_plugins/TF 89 | Enabled: true 90 | Frame Timeout: 15 91 | Frames: 92 | All Enabled: false 93 | camera_color_frame: 94 | Value: false 95 | camera_color_optical_frame: 96 | Value: false 97 | camera_depth_frame: 98 | Value: false 99 | camera_depth_optical_frame: 100 | Value: false 101 | camera_infra1_frame: 102 | Value: false 103 | camera_infra1_optical_frame: 104 | Value: false 105 | camera_infra2_frame: 106 | Value: false 107 | camera_infra2_optical_frame: 108 | Value: false 109 | camera_link: 110 | Value: false 111 | Marker Scale: 1 112 | Name: TF 113 | Show Arrows: false 114 | Show Axes: true 115 | Show Names: false 116 | Tree: 117 | camera_link: 118 | camera_color_frame: 119 | camera_color_optical_frame: 120 | {} 121 | camera_depth_frame: 122 | camera_depth_optical_frame: 123 | {} 124 | camera_infra1_frame: 125 | camera_infra1_optical_frame: 126 | {} 127 | camera_infra2_frame: 128 | camera_infra2_optical_frame: 129 | {} 130 | Update Interval: 0 131 | Value: true 132 | - Class: rviz_default_plugins/Image 133 | Enabled: true 134 | Max Value: 1 135 | Median window: 5 136 | Min Value: 0 137 | Name: Image 138 | Normalize Range: true 139 | Queue Size: 10 140 | Topic: /camera/aligned_depth_to_color/image_raw 141 | Unreliable: false 142 | 
Value: true 143 | - Class: rviz_default_plugins/Image 144 | Enabled: true 145 | Max Value: 1 146 | Median window: 5 147 | Min Value: 0 148 | Name: Image 149 | Normalize Range: true 150 | Queue Size: 10 151 | Topic: /camera/infra1/image_rect_raw 152 | Unreliable: false 153 | Value: true 154 | - Class: rviz_default_plugins/Image 155 | Enabled: true 156 | Max Value: 1 157 | Median window: 5 158 | Min Value: 0 159 | Name: Image 160 | Normalize Range: true 161 | Queue Size: 10 162 | Topic: /camera/infra2/image_rect_raw 163 | Unreliable: false 164 | Value: true 165 | Enabled: true 166 | Global Options: 167 | Background Color: 48; 48; 48 168 | Fixed Frame: camera_link 169 | Frame Rate: 30 170 | Name: root 171 | Tools: 172 | - Class: rviz_default_plugins/MoveCamera 173 | - Class: rviz_default_plugins/Select 174 | - Class: rviz_default_plugins/FocusCamera 175 | - Class: rviz_default_plugins/Measure 176 | Line color: 128; 128; 0 177 | - Class: rviz_default_plugins/SetInitialPose 178 | Topic: /initialpose 179 | - Class: rviz_default_plugins/SetGoal 180 | Topic: /move_base_simple/goal 181 | - Class: rviz_default_plugins/PublishPoint 182 | Single click: true 183 | Topic: /clicked_point 184 | Transformation: 185 | Current: 186 | Class: rviz_default_plugins/TF 187 | Value: true 188 | Views: 189 | Current: 190 | Class: rviz_default_plugins/Orbit 191 | Distance: 0.18296696245670319 192 | Enable Stereo Rendering: 193 | Stereo Eye Separation: 0.05999999865889549 194 | Stereo Focal Distance: 1 195 | Swap Stereo Eyes: false 196 | Value: false 197 | Focal Point: 198 | X: 0.07538603246212006 199 | Y: 0.0022639664821326733 200 | Z: -0.07651539146900177 201 | Focal Shape Fixed Size: true 202 | Focal Shape Size: 0.05000000074505806 203 | Invert Z Axis: false 204 | Name: Current View 205 | Near Clip Distance: 0.009999999776482582 206 | Pitch: -0.22979623079299927 207 | Target Frame: 208 | Value: Orbit (rviz) 209 | Yaw: 3.1654090881347656 210 | Saved: ~ 211 | Window Geometry: 212 | Displays: 213 | collapsed: false 214 | Height: 1056 215 | Hide Left Dock: false 216 | Hide Right Dock: true 217 | Image: 218 | collapsed: false 219 | QMainWindow State: 
000000ff00000000fd000000040000000000000156000003dcfc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000027000003dc000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d006100670065030000010c000000a0000001c100000177fb0000000a0049006d006100670065030000058a000000a1000001d90000016bfb0000000a0049006d00610067006503000005890000021a000001d700000184fb0000000a0049006d006100670065030000010500000228000001c90000017afb0000000a0049006d0061006700650300000583000000a3000001e00000016ffb0000000a0049006d006100670065030000058300000224000001de00000177000000010000010f000003dcfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a005600690065007700730000000027000003dc000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004420000003efc0100000002fb0000000800540069006d00650100000000000004420000000000000000fb0000000800540069006d00650100000000000004500000000000000000000005e3000003dc00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 220 | Selection: 221 | collapsed: false 222 | Tool Properties: 223 | collapsed: false 224 | Views: 225 | collapsed: true 226 | Width: 1855 227 | X: 65 228 | Y: 24 229 | -------------------------------------------------------------------------------- /realsense_ros2_camera/test/test_api.cpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2018 Intel Corporation. All Rights Reserved 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
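// Integration test overview: main() starts the realsense_ros2_camera executable,
// subscribes to the depth, color, infra1/infra2, point cloud, and static TF topics,
// spins for roughly six seconds (300 iterations at 50 Hz), stops the camera process,
// and then runs the gtest cases below, which check that each enabled stream delivered
// plausible data (non-zero mean intensity, positive point-cloud depth, the expected
// TF frames, and positive FPS and latency measurements).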
14 | 15 | // cpplint: c system headers 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | // cpplint: c++ system headers 27 | #include 28 | #include 29 | #include 30 | 31 | bool g_enable_color = true; 32 | bool g_color_recv = false; 33 | float g_color_avg = 0.0f; 34 | 35 | bool g_enable_depth = true; 36 | bool g_depth_recv = false; 37 | 38 | bool g_enable_infrared1 = true; 39 | bool g_infrared1_recv = false; 40 | float g_infrared1_avg = 0.0f; 41 | 42 | bool g_enable_infrared2 = true; 43 | bool g_infrared2_recv = false; 44 | float g_infrared2_avg = 0.0f; 45 | 46 | bool g_enable_pc = true; 47 | bool g_pc_recv = false; 48 | float g_pc_depth_avg = 0.0f; 49 | 50 | bool g_tf_recv = false; 51 | bool g_tf_color = false; 52 | 53 | int g_fps = 0; 54 | float g_latency = 0.0f; 55 | 56 | int encoding2Mat(const std::string & encoding) 57 | { 58 | std::map map_encoding = 59 | { 60 | {"mono8", CV_8UC1}, 61 | {"8UC1", CV_8UC1}, 62 | {"bgr8", CV_8UC3}, 63 | {"mono16", CV_16SC1}, 64 | {"rgba8", CV_8UC4}, 65 | {"bgra8", CV_8UC4}, 66 | {"32FC1", CV_32FC1}, 67 | {"rgb8", CV_8UC3}, 68 | {"16UC1", CV_16UC1}, 69 | }; 70 | if (map_encoding.find(encoding) != map_encoding.end()) { 71 | return map_encoding[encoding]; 72 | } else { 73 | throw std::runtime_error("Unsupported encoding type"); 74 | } 75 | } 76 | 77 | void imageDepthCallback(const sensor_msgs::msg::Image::SharedPtr) 78 | { 79 | g_depth_recv = true; 80 | } 81 | 82 | void imageColorCallback(const sensor_msgs::msg::Image::SharedPtr msg) 83 | { 84 | static unsigned int fps = 0; 85 | rclcpp::Clock ros_clock(RCL_ROS_TIME); 86 | static rclcpp::Time start = ros_clock.now(); 87 | rclcpp::Time t(msg->header.stamp); 88 | g_latency = (ros_clock.now() - t).nanoseconds() / 1000000000.0f; 89 | cv::Mat frame(msg->height, msg->width, encoding2Mat(msg->encoding), 90 | const_cast(msg->data.data()), msg->step); 91 | 92 | double color_total = 0.0; 93 | int channel = 0; 94 | cv::Scalar mean = cv::mean(frame); 95 | for (auto val : mean.val) { 96 | color_total += val; 97 | channel += 1; 98 | } 99 | 100 | if (channel > 0) { 101 | g_color_avg = color_total / channel; 102 | } 103 | 104 | g_color_recv = true; 105 | 106 | ++fps; 107 | if ((ros_clock.now() - start).nanoseconds() / 1000000.0f >= 1000) { 108 | g_fps = fps; 109 | fps = 0; 110 | start = ros_clock.now(); 111 | } 112 | } 113 | 114 | void imageInfrared1Callback(const sensor_msgs::msg::Image::SharedPtr msg) 115 | { 116 | cv::Mat frame(msg->height, msg->width, encoding2Mat(msg->encoding), 117 | const_cast(msg->data.data()), msg->step); 118 | 119 | double infrared1_total = 0.0; 120 | int channel = 0; 121 | cv::Scalar mean = cv::mean(frame); 122 | for (auto val : mean.val) { 123 | infrared1_total += val; 124 | channel += 1; 125 | } 126 | 127 | if (channel > 0) { 128 | g_infrared1_avg = infrared1_total / channel; 129 | } 130 | 131 | g_infrared1_recv = true; 132 | } 133 | 134 | void imageInfrared2Callback(const sensor_msgs::msg::Image::SharedPtr msg) 135 | { 136 | cv::Mat frame(msg->height, msg->width, encoding2Mat(msg->encoding), 137 | const_cast(msg->data.data()), msg->step); 138 | 139 | uchar * infrared2_data = frame.data; 140 | double infrared2_total = 0.0; 141 | int infrared2_count = 1; 142 | 143 | for (unsigned int i = 0; i < msg->height * msg->width; i++) { 144 | if (*infrared2_data > 0 && *infrared2_data < 255) { 145 | infrared2_total += *infrared2_data; 146 | infrared2_count++; 147 | } 148 | infrared2_data++; 149 | } 150 | if 
(infrared2_count != 1) { 151 | g_infrared2_avg = static_cast(infrared2_total / infrared2_count); 152 | } 153 | 154 | g_infrared2_recv = true; 155 | } 156 | 157 | void pcCallback(const sensor_msgs::msg::PointCloud2::SharedPtr msg) 158 | { 159 | double pc_depth_total = 0.0; 160 | int pc_depth_count = 0; 161 | sensor_msgs::PointCloud2Iterator iter_z(*msg, "z"); 162 | for (unsigned int i = 0; i < msg->width * msg->height; ++i) { 163 | double pc_depth = std::ceil(*iter_z); 164 | if ((0.0 < pc_depth) && (pc_depth <= 10.0f * 100.0f)) { 165 | pc_depth_total += pc_depth; 166 | pc_depth_count++; 167 | } 168 | ++iter_z; 169 | } 170 | if (pc_depth_count != 0) { 171 | g_pc_depth_avg = static_cast(pc_depth_total / pc_depth_count); 172 | } 173 | 174 | g_pc_recv = true; 175 | } 176 | 177 | void tfCallback(const tf2_msgs::msg::TFMessage::SharedPtr msg) 178 | { 179 | for (auto tf : msg->transforms) { 180 | if (tf.header.frame_id == "camera_color_frame") { 181 | g_tf_color = true; 182 | } 183 | } 184 | g_tf_recv = true; 185 | } 186 | 187 | TEST(TestAPI, testDepthStream) { 188 | if (g_enable_depth) { 189 | EXPECT_TRUE(g_depth_recv); 190 | } else { 191 | EXPECT_FALSE(g_depth_recv); 192 | } 193 | } 194 | 195 | TEST(TestAPI, testColorStream) { 196 | if (g_enable_color) { 197 | EXPECT_GT(g_color_avg, 0); 198 | EXPECT_TRUE(g_color_recv); 199 | } else { 200 | EXPECT_FALSE(g_color_recv); 201 | } 202 | } 203 | 204 | TEST(TestAPI, testInfrared1Stream) { 205 | if (g_enable_infrared1) { 206 | EXPECT_GT(g_infrared1_avg, 0); 207 | EXPECT_TRUE(g_infrared1_recv); 208 | } else { 209 | EXPECT_FALSE(g_infrared1_recv); 210 | } 211 | } 212 | 213 | TEST(TestAPI, testInfrared2Stream) { 214 | if (g_enable_infrared2) { 215 | EXPECT_GT(g_infrared2_avg, 0); 216 | EXPECT_TRUE(g_infrared2_recv); 217 | } else { 218 | EXPECT_FALSE(g_infrared2_recv); 219 | } 220 | } 221 | 222 | TEST(TestAPI, testPointCloud) { 223 | if (g_enable_pc) { 224 | EXPECT_GT(g_pc_depth_avg, 0); 225 | EXPECT_TRUE(g_pc_recv); 226 | } else { 227 | EXPECT_FALSE(g_pc_recv); 228 | } 229 | } 230 | 231 | TEST(TestAPI, testTF) { 232 | EXPECT_TRUE(g_tf_recv); 233 | EXPECT_TRUE(g_tf_color); 234 | } 235 | 236 | TEST(TestAPI, testLatency) { 237 | EXPECT_TRUE(g_latency > 0.0f); 238 | } 239 | 240 | TEST(TestAPI, testFPS) { 241 | EXPECT_GT(g_fps, 0); 242 | } 243 | 244 | int main(int argc, char * argv[]) try 245 | { 246 | testing::InitGoogleTest(&argc, argv); 247 | rclcpp::init(argc, argv); 248 | auto node = rclcpp::Node::make_shared("realsense_test"); 249 | auto sub1 = node->create_subscription("camera/depth/image_rect_raw", 250 | imageDepthCallback, rmw_qos_profile_default); 251 | 252 | auto sub2 = node->create_subscription("camera/color/image_raw", 253 | imageColorCallback, rmw_qos_profile_default); 254 | 255 | auto sub3 = node->create_subscription("camera/infra1/image_rect_raw", 256 | imageInfrared1Callback, rmw_qos_profile_default); 257 | 258 | auto sub4 = node->create_subscription("camera/infra2/image_rect_raw", 259 | imageInfrared2Callback, rmw_qos_profile_default); 260 | 261 | auto sub5 = node->create_subscription("camera/depth/color/points", 262 | pcCallback, rmw_qos_profile_default); 263 | 264 | auto sub6 = node->create_subscription("tf_static", 265 | tfCallback, rmw_qos_profile_default); 266 | 267 | system("realsense_ros2_camera &"); 268 | 269 | rclcpp::WallRate loop_rate(50); 270 | for (int i = 0; i < 300; ++i) { 271 | if (!rclcpp::ok()) { 272 | break; // Break for ctrl-c 273 | } 274 | rclcpp::spin_some(node); 275 | loop_rate.sleep(); 276 | } 277 | 278 | system("killall 
realsense_ros2_camera &"); 279 | return RUN_ALL_TESTS(); 280 | } catch (...) { 281 | } 282 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2018 Intel Corporation 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /realsense_ros2_camera/src/realsense_camera_node.cpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2018 Intel Corporation. All Rights Reserved 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | // cpplint: c system headers 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | #include 34 | #include 35 | #include 36 | #include 37 | // cpplint: c++ system headers 38 | #include 39 | #include 40 | #include 41 | #include 42 | #include 43 | #include 44 | #include 45 | #include 46 | #include 47 | // cpplint: other headers 48 | #include "realsense_ros2_camera/constants.hpp" 49 | #include "realsense_camera_msgs/msg/imu_info.hpp" 50 | #include "realsense_camera_msgs/msg/extrinsics.hpp" 51 | 52 | 53 | #define REALSENSE_ROS_EMBEDDED_VERSION_STR (VAR_ARG_STRING(VERSION: REALSENSE_ROS_MAJOR_VERSION. 
\ 54 | REALSENSE_ROS_MINOR_VERSION.REALSENSE_ROS_PATCH_VERSION)) 55 | constexpr auto realsense_ros2_camera_version = REALSENSE_ROS_EMBEDDED_VERSION_STR; 56 | using realsense_camera_msgs::msg::Extrinsics; 57 | using realsense_camera_msgs::msg::IMUInfo; 58 | 59 | namespace realsense_ros2_camera 60 | { 61 | using stream_index_pair = std::pair; 62 | 63 | const stream_index_pair COLOR{RS2_STREAM_COLOR, 0}; 64 | const stream_index_pair DEPTH{RS2_STREAM_DEPTH, 0}; 65 | const stream_index_pair INFRA1{RS2_STREAM_INFRARED, 1}; 66 | const stream_index_pair INFRA2{RS2_STREAM_INFRARED, 2}; 67 | const stream_index_pair FISHEYE{RS2_STREAM_FISHEYE, 0}; 68 | const stream_index_pair GYRO{RS2_STREAM_GYRO, 0}; 69 | const stream_index_pair ACCEL{RS2_STREAM_ACCEL, 0}; 70 | 71 | const std::vector> IMAGE_STREAMS = {{{DEPTH, INFRA1, INFRA2}, 72 | {COLOR}, {FISHEYE}}}; 73 | 74 | const std::vector> HID_STREAMS = {{GYRO, ACCEL}}; 75 | 76 | rs2::device _dev; 77 | inline void signalHandler(int signum) 78 | { 79 | std::cout << strsignal(signum) << " Signal is received! Terminate RealSense Node...\n"; 80 | auto sens = _dev.query_sensors(); 81 | for (auto it = sens.begin(); it != sens.end(); it++) { 82 | it->stop(); 83 | it->close(); 84 | } 85 | rclcpp::shutdown(); 86 | exit(signum); 87 | } 88 | 89 | class PipelineSyncer : public rs2::asynchronous_syncer 90 | { 91 | public: 92 | void operator()(rs2::frame f) const 93 | { 94 | invoke(std::move(f)); 95 | } 96 | }; 97 | 98 | class RealSenseCameraNode : public rclcpp::Node 99 | { 100 | public: 101 | RealSenseCameraNode() 102 | : Node("RealSenseCameraNode", 103 | rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true)), 104 | _ros_clock(RCL_ROS_TIME), 105 | _serial_no(""), 106 | _base_frame_id(""), 107 | qos(rclcpp::QoSInitialization::from_rmw(rmw_qos_profile_default)), 108 | _intialize_time_base(false) 109 | { 110 | RCLCPP_INFO(logger_, "RealSense ROS v%s", REALSENSE_ROS_VERSION_STR); 111 | RCLCPP_INFO(logger_, "Running with LibRealSense v%s", RS2_API_VERSION_STR); 112 | 113 | signal(SIGINT, signalHandler); 114 | auto severity = rs2_log_severity::RS2_LOG_SEVERITY_ERROR; 115 | tryGetLogSeverity(severity); 116 | if (rs2_log_severity::RS2_LOG_SEVERITY_DEBUG == severity) { 117 | console_bridge::setLogLevel(console_bridge::CONSOLE_BRIDGE_LOG_DEBUG); 118 | } 119 | 120 | rs2::log_to_console(severity); 121 | 122 | // Types for depth stream 123 | _format[DEPTH] = RS2_FORMAT_Z16; // libRS type 124 | _image_format[DEPTH] = CV_16UC1; // CVBridge type 125 | _encoding[DEPTH] = sensor_msgs::image_encodings::TYPE_16UC1; // ROS message type 126 | _unit_step_size[DEPTH] = sizeof(uint16_t); // sensor_msgs::ImagePtr row step size 127 | _stream_name[DEPTH] = "depth"; 128 | 129 | // Infrared stream - Left 130 | _format[INFRA1] = RS2_FORMAT_Y8; // libRS type 131 | _image_format[INFRA1] = CV_8UC1; // CVBridge type 132 | _encoding[INFRA1] = sensor_msgs::image_encodings::TYPE_8UC1; // ROS message type 133 | _unit_step_size[INFRA1] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size 134 | _stream_name[INFRA1] = "infra1"; 135 | 136 | // Infrared stream - Right 137 | _format[INFRA2] = RS2_FORMAT_Y8; // libRS type 138 | _image_format[INFRA2] = CV_8UC1; // CVBridge type 139 | _encoding[INFRA2] = sensor_msgs::image_encodings::TYPE_8UC1; // ROS message type 140 | _unit_step_size[INFRA2] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size 141 | _stream_name[INFRA2] = "infra2"; 142 | 143 | // Types for color stream 144 | _format[COLOR] = RS2_FORMAT_RGB8; // libRS type 145 | 
_image_format[COLOR] = CV_8UC3; // CVBridge type 146 | _encoding[COLOR] = sensor_msgs::image_encodings::RGB8; // ROS message type 147 | _unit_step_size[COLOR] = 3; // sensor_msgs::ImagePtr row step size 148 | _stream_name[COLOR] = "color"; 149 | 150 | // Types for fisheye stream 151 | _format[FISHEYE] = RS2_FORMAT_RAW8; // libRS type 152 | _image_format[FISHEYE] = CV_8UC1; // CVBridge type 153 | _encoding[FISHEYE] = sensor_msgs::image_encodings::TYPE_8UC1; // ROS message type 154 | _unit_step_size[FISHEYE] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size 155 | _stream_name[FISHEYE] = "fisheye"; 156 | 157 | // Types for Motion-Module streams 158 | _format[GYRO] = RS2_FORMAT_MOTION_XYZ32F; // libRS type 159 | _image_format[GYRO] = CV_8UC1; // CVBridge type 160 | _encoding[GYRO] = sensor_msgs::image_encodings::TYPE_8UC1; // ROS message type 161 | _unit_step_size[GYRO] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size 162 | _stream_name[GYRO] = "gyro"; 163 | 164 | _format[ACCEL] = RS2_FORMAT_MOTION_XYZ32F; // libRS type 165 | _image_format[ACCEL] = CV_8UC1; // CVBridge type 166 | _encoding[ACCEL] = sensor_msgs::image_encodings::TYPE_8UC1; // ROS message type 167 | _unit_step_size[ACCEL] = sizeof(uint8_t); // sensor_msgs::ImagePtr row step size 168 | _stream_name[ACCEL] = "accel"; 169 | } 170 | 171 | virtual ~RealSenseCameraNode() 172 | {} 173 | 174 | virtual void onInit() 175 | { 176 | getParameters(); 177 | setupDevice(); 178 | setupPublishers(); 179 | setupStreams(); 180 | rclcpp::sleep_for(std::chrono::nanoseconds(2000000000)); 181 | timer_ = this->create_wall_timer(std::chrono::seconds(1), std::bind(&RealSenseCameraNode::publishStaticTransforms, this)); 182 | RCLCPP_INFO(logger_, "RealSense Node Is Up!"); 183 | } 184 | 185 | private: 186 | void getParameters() 187 | { 188 | RCLCPP_INFO(logger_, "getParameters..."); 189 | 190 | this->get_parameter_or("enable_pointcloud", _pointcloud, POINTCLOUD); 191 | this->get_parameter_or("enable_aligned_pointcloud", _align_pointcloud, ALIGN_POINTCLOUD); 192 | // this->get_parameter_or("enable_sync", _sync_frames, SYNC_FRAMES); 193 | this->get_parameter_or("enable_depth", _enable[DEPTH], ENABLE_DEPTH); 194 | this->get_parameter_or("enable_aligned_depth", _align_depth, ALIGN_DEPTH); 195 | this->get_parameter_or("enable_infra1", _enable[INFRA1], ENABLE_INFRA1); 196 | this->get_parameter_or("enable_infra2", _enable[INFRA2], ENABLE_INFRA2); 197 | if (!_enable[DEPTH]) { 198 | _pointcloud = false; 199 | _align_depth = false; 200 | _enable[INFRA1] = false; 201 | _enable[INFRA2] = false; 202 | } 203 | 204 | if (!_enable[DEPTH] || !_align_depth) { 205 | _align_pointcloud = false; 206 | } 207 | 208 | if (_pointcloud || _align_depth) { 209 | _sync_frames = true; 210 | } else { 211 | _sync_frames = false; 212 | } 213 | this->get_parameter("serial_no", _serial_no); 214 | 215 | this->get_parameter_or("depth_width", _width[DEPTH], DEPTH_WIDTH); 216 | this->get_parameter_or("depth_height", _height[DEPTH], DEPTH_HEIGHT); 217 | this->get_parameter_or("depth_fps", _fps[DEPTH], DEPTH_FPS); 218 | 219 | this->get_parameter_or("infra1_width", _width[INFRA1], INFRA1_WIDTH); 220 | this->get_parameter_or("infra1_height", _height[INFRA1], INFRA1_HEIGHT); 221 | this->get_parameter_or("infra1_fps", _fps[INFRA1], INFRA1_FPS); 222 | 223 | this->get_parameter_or("infra2_width", _width[INFRA2], INFRA2_WIDTH); 224 | this->get_parameter_or("infra2_height", _height[INFRA2], INFRA2_HEIGHT); 225 | this->get_parameter_or("infra2_fps", _fps[INFRA2], INFRA2_FPS); 226 | 227 | 
this->get_parameter_or("color_width", _width[COLOR], COLOR_WIDTH); 228 | this->get_parameter_or("color_height", _height[COLOR], COLOR_HEIGHT); 229 | this->get_parameter_or("color_fps", _fps[COLOR], COLOR_FPS); 230 | this->get_parameter_or("enable_color", _enable[COLOR], ENABLE_COLOR); 231 | 232 | this->get_parameter_or("fisheye_width", _width[FISHEYE], FISHEYE_WIDTH); 233 | this->get_parameter_or("fisheye_height", _height[FISHEYE], FISHEYE_HEIGHT); 234 | this->get_parameter_or("fisheye_fps", _fps[FISHEYE], FISHEYE_FPS); 235 | this->get_parameter_or("enable_fisheye", _enable[FISHEYE], ENABLE_FISHEYE); 236 | 237 | this->get_parameter_or("gyro_fps", _fps[GYRO], GYRO_FPS); 238 | this->get_parameter_or("accel_fps", _fps[ACCEL], ACCEL_FPS); 239 | this->get_parameter_or("enable_imu", _enable[GYRO], ENABLE_IMU); 240 | this->get_parameter_or("enable_imu", _enable[ACCEL], ENABLE_IMU); 241 | 242 | this->get_parameter_or("base_frame_id", _base_frame_id, 243 | std::string(DEFAULT_BASE_FRAME_ID)); 244 | this->get_parameter_or("depth_frame_id", _frame_id[DEPTH], 245 | std::string(DEFAULT_DEPTH_FRAME_ID)); 246 | this->get_parameter_or("infra1_frame_id", _frame_id[INFRA1], 247 | std::string(DEFAULT_INFRA1_FRAME_ID)); 248 | this->get_parameter_or("infra2_frame_id", _frame_id[INFRA2], 249 | std::string(DEFAULT_INFRA2_FRAME_ID)); 250 | this->get_parameter_or("color_frame_id", _frame_id[COLOR], 251 | std::string(DEFAULT_COLOR_FRAME_ID)); 252 | this->get_parameter_or("fisheye_frame_id", _frame_id[FISHEYE], 253 | std::string(DEFAULT_FISHEYE_FRAME_ID)); 254 | this->get_parameter_or("imu_gyro_frame_id", _frame_id[GYRO], 255 | std::string(DEFAULT_IMU_FRAME_ID)); 256 | this->get_parameter_or("imu_accel_frame_id", _frame_id[ACCEL], 257 | std::string(DEFAULT_IMU_FRAME_ID)); 258 | 259 | this->get_parameter_or("depth_optical_frame_id", _optical_frame_id[DEPTH], 260 | std::string(DEFAULT_DEPTH_OPTICAL_FRAME_ID)); 261 | this->get_parameter_or("infra1_optical_frame_id", _optical_frame_id[INFRA1], 262 | std::string(DEFAULT_INFRA1_OPTICAL_FRAME_ID)); 263 | this->get_parameter_or("infra2_optical_frame_id", _optical_frame_id[INFRA2], 264 | std::string(DEFAULT_INFRA2_OPTICAL_FRAME_ID)); 265 | this->get_parameter_or("color_optical_frame_id", _optical_frame_id[COLOR], 266 | std::string(DEFAULT_COLOR_OPTICAL_FRAME_ID)); 267 | this->get_parameter_or("fisheye_optical_frame_id", _optical_frame_id[FISHEYE], 268 | std::string(DEFAULT_FISHEYE_OPTICAL_FRAME_ID)); 269 | this->get_parameter_or("gyro_optical_frame_id", _optical_frame_id[GYRO], 270 | std::string(DEFAULT_GYRO_OPTICAL_FRAME_ID)); 271 | this->get_parameter_or("accel_optical_frame_id", _optical_frame_id[ACCEL], 272 | std::string(DEFAULT_ACCEL_OPTICAL_FRAME_ID)); 273 | } 274 | 275 | void setupDevice() 276 | { 277 | RCLCPP_INFO(logger_, "setupDevice..."); 278 | try { 279 | _ctx.reset(new rs2::context()); 280 | 281 | auto list = _ctx->query_devices(); 282 | if (0 == list.size()) { 283 | _ctx.reset(); 284 | RCLCPP_ERROR(logger_, "No RealSense devices were found! Terminate RealSense Node..."); 285 | rclcpp::shutdown(); 286 | exit(1); 287 | } 288 | 289 | // Take the first device in the list. 290 | // Add an ability to get the specific device to work with from outside. 291 | _dev = list.front(); 292 | _ctx->set_devices_changed_callback([this](rs2::event_information & info) 293 | { 294 | if (info.was_removed(_dev)) { 295 | RCLCPP_FATAL(logger_, "The device has been disconnected! 
Terminate RealSense Node..."); 296 | rclcpp::shutdown(); 297 | exit(1); 298 | } 299 | }); 300 | 301 | auto camera_name = _dev.get_info(RS2_CAMERA_INFO_NAME); 302 | RCLCPP_INFO(logger_, "Device Name: %s", camera_name); 303 | 304 | _serial_no = _dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER); 305 | RCLCPP_INFO(logger_, "Device Serial No: %s", _serial_no.c_str()); 306 | 307 | auto fw_ver = _dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION); 308 | RCLCPP_INFO(logger_, "Device FW version: %s", fw_ver); 309 | 310 | auto pid = _dev.get_info(RS2_CAMERA_INFO_PRODUCT_ID); 311 | RCLCPP_INFO(logger_, "Device Product ID: %s", pid); 312 | 313 | RCLCPP_INFO(logger_, "Sync Mode: %s", ((_sync_frames) ? "On" : "Off")); 314 | 315 | auto dev_sensors = _dev.query_sensors(); 316 | 317 | RCLCPP_INFO(logger_, "Device Sensors: "); 318 | for (auto && elem : dev_sensors) { 319 | std::string module_name = elem.get_info(RS2_CAMERA_INFO_NAME); 320 | if ("Stereo Module" == module_name || "Coded-Light Depth Sensor" == module_name) { 321 | auto sen = new rs2::sensor(elem); 322 | _sensors[DEPTH] = std::unique_ptr(sen); 323 | _sensors[INFRA1] = std::unique_ptr(sen); 324 | _sensors[INFRA2] = std::unique_ptr(sen); 325 | } else if ("RGB Camera" == module_name) { 326 | _sensors[COLOR] = std::unique_ptr(new rs2::sensor(elem)); 327 | } else if ("Wide FOV Camera" == module_name) { 328 | _sensors[FISHEYE] = std::unique_ptr(new rs2::sensor(elem)); 329 | } else if ("Motion Module" == module_name) { 330 | auto hid_sensor = new rs2::sensor(elem); 331 | _sensors[GYRO] = std::unique_ptr(hid_sensor); 332 | _sensors[ACCEL] = std::unique_ptr(hid_sensor); 333 | } else { 334 | RCLCPP_ERROR(logger_, 335 | "Module Name \"%s\" isn't supported by LibRealSense! Terminate RealSense Node...", 336 | module_name); 337 | rclcpp::shutdown(); 338 | exit(1); 339 | } 340 | RCLCPP_INFO(logger_, "%s was found.", std::string(elem.get_info( 341 | RS2_CAMERA_INFO_NAME)).c_str()); 342 | } 343 | 344 | // Update "enable" map 345 | std::vector> streams(IMAGE_STREAMS); 346 | streams.insert(streams.end(), HID_STREAMS.begin(), HID_STREAMS.end()); 347 | for (auto & elem : streams) { 348 | for (auto & stream_index : elem) { 349 | if (true == _enable[stream_index] && _sensors.find(stream_index) == _sensors.end()) { 350 | // check if device supports the enabled stream 351 | RCLCPP_INFO(logger_, "%s sensor isn't supported by current device! -- Skipping...", 352 | rs2_stream_to_string(stream_index.first)); 353 | _enable[stream_index] = false; 354 | } 355 | } 356 | } 357 | } catch (const std::exception & ex) { 358 | RCLCPP_ERROR(logger_, "An exception has been thrown: %s", ex.what()); 359 | throw; 360 | } catch (...) 
{ 361 | RCLCPP_ERROR(logger_, "Unknown exception has occured!"); 362 | throw; 363 | } 364 | } 365 | 366 | void setupPublishers() 367 | { 368 | RCLCPP_INFO(logger_, "setupPublishers..."); 369 | 370 | if (true == _enable[DEPTH]) { 371 | _image_publishers[DEPTH] = image_transport::create_publisher( 372 | this, "camera/depth/image_rect_raw"); 373 | _info_publisher[DEPTH] = this->create_publisher( 374 | "camera/depth/camera_info", 1); 375 | 376 | if (_pointcloud) { 377 | _pointcloud_publisher = this->create_publisher( 378 | "camera/depth/color/points", 1); 379 | } 380 | 381 | if (_align_depth) { 382 | _align_depth_publisher = image_transport::create_publisher( 383 | this, "camera/aligned_depth_to_color/image_raw"); 384 | _align_depth_camera_publisher = this->create_publisher( 385 | "camera/aligned_depth_to_color/camera_info", 1); 386 | } 387 | 388 | if (_align_pointcloud && _align_depth) { 389 | _align_pointcloud_publisher = this->create_publisher( 390 | "camera/aligned_depth_to_color/color/points", 1); 391 | } 392 | } 393 | 394 | if (true == _enable[INFRA1]) { 395 | _image_publishers[INFRA1] = image_transport::create_publisher( 396 | this, "camera/infra1/image_rect_raw"); 397 | _info_publisher[INFRA1] = this->create_publisher( 398 | "camera/infra1/camera_info", 1); 399 | } 400 | 401 | if (true == _enable[INFRA2]) { 402 | _image_publishers[INFRA2] = image_transport::create_publisher( 403 | this, "camera/infra2/image_rect_raw"); 404 | _info_publisher[INFRA2] = this->create_publisher( 405 | "camera/infra2/camera_info", 1); 406 | } 407 | 408 | if (true == _enable[COLOR]) { 409 | _image_publishers[COLOR] = image_transport::create_publisher( 410 | this, "camera/color/image_raw"); 411 | _info_publisher[COLOR] = this->create_publisher( 412 | "camera/color/camera_info", 1); 413 | } 414 | 415 | if (true == _enable[FISHEYE] && 416 | true == _enable[DEPTH]) 417 | { 418 | _image_publishers[FISHEYE] = image_transport::create_publisher( 419 | this, "camera/fisheye/image_raw"); 420 | _info_publisher[FISHEYE] = this->create_publisher( 421 | "camera/fisheye/camera_info", 1); 422 | _fe_to_depth_publisher = this->create_publisher( 423 | "camera/extrinsics/fisheye2depth", qos); 424 | } 425 | 426 | if (true == _enable[GYRO]) { 427 | _imu_publishers[GYRO] = this->create_publisher("camera/gyro/sample", 428 | 100); 429 | _imu_info_publisher[GYRO] = this->create_publisher( 430 | "camera/gyro/imu_info", qos); 431 | } 432 | 433 | if (true == _enable[ACCEL]) { 434 | _imu_publishers[ACCEL] = this->create_publisher("camera/accel/sample", 435 | 100); 436 | _imu_info_publisher[ACCEL] = this->create_publisher( 437 | "camera/accel/imu_info", qos); 438 | } 439 | 440 | if (true == _enable[FISHEYE] && 441 | (true == _enable[GYRO] || 442 | true == _enable[ACCEL])) 443 | { 444 | _fe_to_imu_publisher = this->create_publisher( 445 | "camera/extrinsics/fisheye2imu", qos); 446 | } 447 | _static_tf_broadcaster_ = 448 | std::make_shared(shared_from_this()); 449 | } 450 | 451 | void setupStreams() 452 | { 453 | RCLCPP_INFO(logger_, "setupStreams..."); 454 | try { 455 | for (auto & streams : IMAGE_STREAMS) { 456 | for (auto & elem : streams) { 457 | if (true == _enable[elem]) { 458 | auto & sens = _sensors[elem]; 459 | auto profiles = sens->get_stream_profiles(); 460 | for (auto & profile : profiles) { 461 | auto video_profile = profile.as(); 462 | if (video_profile.format() == _format[elem] && 463 | video_profile.width() == _width[elem] && 464 | video_profile.height() == _height[elem] && 465 | video_profile.fps() == _fps[elem] && 466 | 
video_profile.stream_index() == elem.second) 467 | { 468 | _enabled_profiles[elem].push_back(profile); 469 | 470 | _image[elem] = 471 | cv::Mat(_width[elem], _height[elem], _image_format[elem], cv::Scalar(0, 0, 0)); 472 | RCLCPP_INFO(logger_, "%s stream is enabled - width: %d, height: %d, fps: %d", 473 | _stream_name[elem].c_str(), _width[elem], _height[elem], _fps[elem]); 474 | break; 475 | } 476 | } 477 | if (_enabled_profiles.find(elem) == _enabled_profiles.end()) { 478 | RCLCPP_WARN(logger_, "Given stream configuration is not supported by the device!"); 479 | RCLCPP_WARN(logger_, "Stream: %s, Format: %d, Width: %d, Height: %d, FPS: %d", 480 | rs2_stream_to_string(elem.first), _format[elem], _width[elem], _height[elem], 481 | _fps[elem]); 482 | _enable[elem] = false; 483 | } 484 | } 485 | } 486 | } 487 | 488 | // Publish image stream info 489 | for (auto & profiles : _enabled_profiles) { 490 | for (auto & profile : profiles.second) { 491 | auto video_profile = profile.as(); 492 | updateStreamCalibData(video_profile); 493 | } 494 | } 495 | 496 | auto frame_callback = [this](rs2::frame frame) 497 | { 498 | // We compute a ROS timestamp which is based on an initial ROS time at point of first 499 | // frame, and the incremental timestamp from the camera. 500 | // In sync mode the timestamp is based on ROS time 501 | if (false == _intialize_time_base) { 502 | _intialize_time_base = true; 503 | _ros_time_base = _ros_clock.now(); 504 | _camera_time_base = frame.get_timestamp(); 505 | } 506 | 507 | rclcpp::Time t; 508 | if (_sync_frames) { 509 | t = _ros_clock.now(); 510 | } else { 511 | uint64_t elapsed_camera_ns = (/*ms*/ frame.get_timestamp() - /*ms*/ _camera_time_base) * 512 | 1000000; 513 | t = rclcpp::Time(_ros_time_base.nanoseconds() + elapsed_camera_ns, RCL_ROS_TIME); 514 | } 515 | auto is_color_frame_arrived = false; 516 | auto is_depth_frame_arrived = false; 517 | rs2::frame depth_frame; 518 | if (frame.is()) { 519 | RCLCPP_DEBUG(logger_, "Frameset arrived"); 520 | auto frameset = frame.as(); 521 | for (auto it = frameset.begin(); it != frameset.end(); ++it) { 522 | auto f = (*it); 523 | auto stream_type = f.get_profile().stream_type(); 524 | if (RS2_STREAM_COLOR == stream_type) { 525 | is_color_frame_arrived = true; 526 | } else if (RS2_STREAM_DEPTH == stream_type) { 527 | depth_frame = f; 528 | is_depth_frame_arrived = true; 529 | } 530 | 531 | RCLCPP_DEBUG(logger_, 532 | "Frameset contain %s frame. frame_number: %llu ; frame_TS: %f ; ros_TS(NSec): %lu", 533 | rs2_stream_to_string(stream_type), frame.get_frame_number(), 534 | frame.get_timestamp(), t.nanoseconds()); 535 | publishFrame(f, t); 536 | } 537 | 538 | if (_align_depth && is_depth_frame_arrived && is_color_frame_arrived) { 539 | RCLCPP_DEBUG(logger_, "publishAlignedDepthTopic(...)"); 540 | publishAlignedDepthImg(frame, t); 541 | } 542 | 543 | if (_pointcloud && is_depth_frame_arrived && is_color_frame_arrived) { 544 | RCLCPP_DEBUG(logger_, "publishPCTopic(...)"); 545 | publishPCTopic(t); 546 | } 547 | 548 | if (_align_depth && _align_pointcloud && is_depth_frame_arrived && 549 | is_color_frame_arrived) 550 | { 551 | RCLCPP_DEBUG(logger_, "publishAlignedPCTopic(...)"); 552 | publishAlignedPCTopic(t); 553 | } 554 | 555 | } else { 556 | auto stream_type = frame.get_profile().stream_type(); 557 | RCLCPP_DEBUG(logger_, 558 | "%s video frame arrived. 
frame_number: %llu ; frame_TS: %f ; ros_TS(NSec): %lu", 559 | rs2_stream_to_string(stream_type), frame.get_frame_number(), 560 | frame.get_timestamp(), t.nanoseconds()); 561 | publishFrame(frame, t); 562 | } 563 | }; 564 | 565 | // Streaming IMAGES 566 | for (auto & streams : IMAGE_STREAMS) { 567 | std::vector profiles; 568 | for (auto & elem : streams) { 569 | if (!_enabled_profiles[elem].empty()) { 570 | profiles.insert(profiles.begin(), 571 | _enabled_profiles[elem].begin(), 572 | _enabled_profiles[elem].end()); 573 | } 574 | } 575 | 576 | if (!profiles.empty()) { 577 | auto stream = streams.front(); 578 | auto & sens = _sensors[stream]; 579 | sens->open(profiles); 580 | 581 | if (DEPTH == stream) { 582 | auto depth_sensor = sens->as(); 583 | _depth_scale_meters = depth_sensor.get_depth_scale(); 584 | } 585 | 586 | if (_sync_frames) { 587 | sens->start(_syncer); 588 | } else { 589 | sens->start(frame_callback); 590 | } 591 | } 592 | } // end for 593 | 594 | if (_sync_frames) { 595 | _syncer.start(frame_callback); 596 | } 597 | 598 | // Streaming HID 599 | for (const auto streams : HID_STREAMS) { 600 | for (auto & elem : streams) { 601 | if (true == _enable[elem]) { 602 | auto & sens = _sensors[elem]; 603 | auto profiles = sens->get_stream_profiles(); 604 | for (rs2::stream_profile & profile : profiles) { 605 | if (profile.fps() == _fps[elem] && 606 | profile.format() == _format[elem]) 607 | { 608 | _enabled_profiles[elem].push_back(profile); 609 | break; 610 | } 611 | } 612 | } 613 | } 614 | } 615 | 616 | auto gyro_profile = _enabled_profiles.find(GYRO); 617 | auto accel_profile = _enabled_profiles.find(ACCEL); 618 | 619 | if (gyro_profile != _enabled_profiles.end() && 620 | accel_profile != _enabled_profiles.end()) 621 | { 622 | std::vector profiles; 623 | profiles.insert(profiles.begin(), gyro_profile->second.begin(), gyro_profile->second.end()); 624 | profiles.insert(profiles.begin(), accel_profile->second.begin(), 625 | accel_profile->second.end()); 626 | auto & sens = _sensors[GYRO]; 627 | sens->open(profiles); 628 | 629 | sens->start([this](rs2::frame frame) { 630 | auto stream = frame.get_profile().stream_type(); 631 | if (false == _intialize_time_base) { 632 | return; 633 | } 634 | 635 | RCLCPP_DEBUG(logger_, "Frame arrived: stream: %s ; index: %d ; Timestamp Domain: %s", 636 | rs2_stream_to_string(frame.get_profile().stream_type()), 637 | frame.get_profile().stream_index(), 638 | rs2_timestamp_domain_to_string(frame.get_frame_timestamp_domain())); 639 | 640 | auto stream_index = (stream == GYRO.first) ? 
GYRO : ACCEL; 641 | // if (0 != _info_publisher[stream_index].getNumSubscribers() || 642 | // 0 != _imu_publishers[stream_index].getNumSubscribers()) 643 | { 644 | uint64_t elapsed_camera_ns = (frame.get_timestamp() - _camera_time_base) * 1000000; 645 | rclcpp::Time t(_ros_time_base.nanoseconds() + elapsed_camera_ns, RCL_ROS_TIME); 646 | 647 | auto imu_msg = sensor_msgs::msg::Imu(); 648 | imu_msg.header.frame_id = _optical_frame_id[stream_index]; 649 | imu_msg.orientation.x = 0.0; 650 | imu_msg.orientation.y = 0.0; 651 | imu_msg.orientation.z = 0.0; 652 | imu_msg.orientation.w = 0.0; 653 | imu_msg.orientation_covariance = {-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}; 654 | 655 | auto axes = *(reinterpret_cast(frame.get_data())); 656 | if (GYRO == stream_index) { 657 | imu_msg.angular_velocity.x = axes.x; 658 | imu_msg.angular_velocity.y = axes.y; 659 | imu_msg.angular_velocity.z = axes.z; 660 | } else if (ACCEL == stream_index) { 661 | imu_msg.linear_acceleration.x = axes.x; 662 | imu_msg.linear_acceleration.y = axes.y; 663 | imu_msg.linear_acceleration.z = axes.z; 664 | } 665 | _seq[stream_index] += 1; 666 | imu_msg.header.stamp = t; 667 | _imu_publishers[stream_index]->publish(imu_msg); 668 | RCLCPP_DEBUG(logger_, "Publish %s stream", rs2_stream_to_string( 669 | frame.get_profile().stream_type())); 670 | } 671 | }); 672 | 673 | if (true == _enable[GYRO]) { 674 | RCLCPP_INFO(logger_, "%s stream is enabled - fps: %d", _stream_name[GYRO].c_str(), 675 | _fps[GYRO]); 676 | auto gyroInfo = getImuInfo(GYRO); 677 | _imu_info_publisher[GYRO]->publish(gyroInfo); 678 | } 679 | 680 | if (true == _enable[ACCEL]) { 681 | RCLCPP_INFO(logger_, "%s stream is enabled - fps: %d", 682 | _stream_name[ACCEL].c_str(), _fps[ACCEL]); 683 | auto accelInfo = getImuInfo(ACCEL); 684 | _imu_info_publisher[ACCEL]->publish(accelInfo); 685 | } 686 | } 687 | 688 | 689 | if (true == _enable[DEPTH] && 690 | true == _enable[FISHEYE]) 691 | { 692 | auto ex = getFisheye2DepthExtrinsicsMsg(); 693 | _fe_to_depth_publisher->publish(ex); 694 | } 695 | 696 | if (true == _enable[FISHEYE] && 697 | (_enable[GYRO] || _enable[ACCEL])) 698 | { 699 | auto ex = getFisheye2ImuExtrinsicsMsg(); 700 | _fe_to_imu_publisher->publish(ex); 701 | } 702 | } catch (const std::exception & ex) { 703 | RCLCPP_ERROR(logger_, "An exception has been thrown: %s", ex.what()); 704 | throw; 705 | } catch (...) 
{ 706 | RCLCPP_ERROR(logger_, "Unknown exception has occured!"); 707 | throw; 708 | } 709 | } 710 | 711 | void updateStreamCalibData(const rs2::video_stream_profile & video_profile) 712 | { 713 | stream_index_pair stream_index{video_profile.stream_type(), video_profile.stream_index()}; 714 | auto intrinsic = video_profile.get_intrinsics(); 715 | _stream_intrinsics[stream_index] = intrinsic; 716 | 717 | _camera_info[stream_index].width = intrinsic.width; 718 | _camera_info[stream_index].height = intrinsic.height; 719 | _camera_info[stream_index].header.frame_id = _optical_frame_id[stream_index]; 720 | 721 | _camera_info[stream_index].k.at(0) = intrinsic.fx; 722 | _camera_info[stream_index].k.at(2) = intrinsic.ppx; 723 | _camera_info[stream_index].k.at(4) = intrinsic.fy; 724 | _camera_info[stream_index].k.at(5) = intrinsic.ppy; 725 | _camera_info[stream_index].k.at(8) = 1; 726 | 727 | _camera_info[stream_index].p.at(0) = _camera_info[stream_index].k.at(0); 728 | _camera_info[stream_index].p.at(1) = 0; 729 | _camera_info[stream_index].p.at(2) = _camera_info[stream_index].k.at(2); 730 | _camera_info[stream_index].p.at(3) = 0; 731 | _camera_info[stream_index].p.at(4) = 0; 732 | _camera_info[stream_index].p.at(5) = _camera_info[stream_index].k.at(4); 733 | _camera_info[stream_index].p.at(6) = _camera_info[stream_index].k.at(5); 734 | _camera_info[stream_index].p.at(7) = 0; 735 | _camera_info[stream_index].p.at(8) = 0; 736 | _camera_info[stream_index].p.at(9) = 0; 737 | _camera_info[stream_index].p.at(10) = 1; 738 | _camera_info[stream_index].p.at(11) = 0; 739 | 740 | 741 | // Why Depth to Color? 742 | if (stream_index == DEPTH && _enable[DEPTH] && _enable[COLOR]) { 743 | rs2::stream_profile depth_profile; 744 | if (!getEnabledProfile(DEPTH, depth_profile)) { 745 | RCLCPP_ERROR(logger_, "Depth profile not found!"); 746 | rclcpp::shutdown(); 747 | exit(1); 748 | } 749 | _depth2color_extrinsics = depth_profile.get_extrinsics_to(_enabled_profiles[COLOR].front()); 750 | // set depth to color translation values in Projection matrix (P) 751 | _camera_info[stream_index].p.at(3) = _depth2color_extrinsics.translation[0]; // Tx 752 | _camera_info[stream_index].p.at(7) = _depth2color_extrinsics.translation[1]; // Ty 753 | _camera_info[stream_index].p.at(11) = _depth2color_extrinsics.translation[2]; // Tz 754 | } 755 | 756 | _camera_info[stream_index].distortion_model = "plumb_bob"; 757 | 758 | // set R (rotation matrix) values to identity matrix 759 | _camera_info[stream_index].r.at(0) = 1.0; 760 | _camera_info[stream_index].r.at(1) = 0.0; 761 | _camera_info[stream_index].r.at(2) = 0.0; 762 | _camera_info[stream_index].r.at(3) = 0.0; 763 | _camera_info[stream_index].r.at(4) = 1.0; 764 | _camera_info[stream_index].r.at(5) = 0.0; 765 | _camera_info[stream_index].r.at(6) = 0.0; 766 | _camera_info[stream_index].r.at(7) = 0.0; 767 | _camera_info[stream_index].r.at(8) = 1.0; 768 | 769 | for (int i = 0; i < 5; i++) { 770 | _camera_info[stream_index].d.push_back(intrinsic.coeffs[i]); 771 | } 772 | } 773 | 774 | Eigen::Quaternionf rotationMatrixToQuaternion(float rotation[9]) const 775 | { 776 | Eigen::Matrix3f m; 777 | m << rotation[0], rotation[1], rotation[2], 778 | rotation[3], rotation[4], rotation[5], 779 | rotation[6], rotation[7], rotation[8]; 780 | Eigen::Quaternionf q(m); 781 | return q; 782 | } 783 | 784 | void publishStaticTransforms() 785 | { 786 | RCLCPP_DEBUG(logger_, "publishStaticTransforms..."); 787 | // Publish transforms for the cameras 788 | tf2::Quaternion q_c2co; 789 | 
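// Each enabled stream gets two static transforms: base_frame -> <stream>_frame (identity for
// depth, otherwise derived from the depth-to-stream extrinsics) and <stream>_frame ->
// <stream>_optical_frame, a fixed RPY(-pi/2, 0, -pi/2) rotation into the z-forward optical
// convention used by the image and point cloud messages.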
geometry_msgs::msg::TransformStamped b2c_msg; // Base to Color 790 | geometry_msgs::msg::TransformStamped c2co_msg; // Color to Color_Optical 791 | 792 | tf2::Quaternion q_d2do; 793 | geometry_msgs::msg::TransformStamped b2d_msg; // Base to Depth 794 | geometry_msgs::msg::TransformStamped d2do_msg; // Depth to Depth_Optical 795 | 796 | tf2::Quaternion ir1_2_ir1o; 797 | geometry_msgs::msg::TransformStamped b2ir1_msg; // Base to IR1 798 | geometry_msgs::msg::TransformStamped ir1_2_ir1o_msg; // IR1 to IR1_Optical 799 | 800 | tf2::Quaternion ir2_2_ir2o; 801 | geometry_msgs::msg::TransformStamped b2ir2_msg; // Base to IR2 802 | geometry_msgs::msg::TransformStamped ir2_2_ir2o_msg; // IR2 to IR2_Optical 803 | 804 | // Get the current timestamp for all static transforms 805 | rclcpp::Time transform_ts_ = _ros_clock.now(); 806 | 807 | // The depth frame is used as the base frame. 808 | // Hence no additional transformation is done from base frame to depth frame. 809 | // Transform base frame to depth frame 810 | b2d_msg.header.stamp = transform_ts_; 811 | b2d_msg.header.frame_id = _base_frame_id; 812 | b2d_msg.child_frame_id = _frame_id[DEPTH]; 813 | b2d_msg.transform.translation.x = 0; 814 | b2d_msg.transform.translation.y = 0; 815 | b2d_msg.transform.translation.z = 0; 816 | b2d_msg.transform.rotation.x = 0; 817 | b2d_msg.transform.rotation.y = 0; 818 | b2d_msg.transform.rotation.z = 0; 819 | b2d_msg.transform.rotation.w = 1; 820 | _static_tf_broadcaster_->sendTransform(b2d_msg); 821 | 822 | // Transform depth frame to depth optical frame 823 | q_d2do.setRPY(-M_PI / 2, 0.0, -M_PI / 2); 824 | d2do_msg.header.stamp = transform_ts_; 825 | d2do_msg.header.frame_id = _frame_id[DEPTH]; 826 | d2do_msg.child_frame_id = _optical_frame_id[DEPTH]; 827 | d2do_msg.transform.translation.x = 0; 828 | d2do_msg.transform.translation.y = 0; 829 | d2do_msg.transform.translation.z = 0; 830 | d2do_msg.transform.rotation.x = q_d2do.getX(); 831 | d2do_msg.transform.rotation.y = q_d2do.getY(); 832 | d2do_msg.transform.rotation.z = q_d2do.getZ(); 833 | d2do_msg.transform.rotation.w = q_d2do.getW(); 834 | _static_tf_broadcaster_->sendTransform(d2do_msg); 835 | 836 | 837 | if (true == _enable[COLOR]) { 838 | // Transform base frame to color frame 839 | auto q = rotationMatrixToQuaternion(_depth2color_extrinsics.rotation); 840 | 841 | b2c_msg.header.stamp = transform_ts_; 842 | b2c_msg.header.frame_id = _base_frame_id; 843 | b2c_msg.child_frame_id = _frame_id[COLOR]; 844 | b2c_msg.transform.translation.x = _depth2color_extrinsics.translation[2]; 845 | b2c_msg.transform.translation.y = -_depth2color_extrinsics.translation[0]; 846 | b2c_msg.transform.translation.z = -_depth2color_extrinsics.translation[1]; 847 | b2c_msg.transform.rotation.x = q.x(); 848 | b2c_msg.transform.rotation.y = q.y(); 849 | b2c_msg.transform.rotation.z = q.z(); 850 | b2c_msg.transform.rotation.w = q.w(); 851 | _static_tf_broadcaster_->sendTransform(b2c_msg); 852 | 853 | // Transform color frame to color optical frame 854 | q_c2co.setRPY(-M_PI / 2, 0.0, -M_PI / 2); 855 | c2co_msg.header.stamp = transform_ts_; 856 | c2co_msg.header.frame_id = _frame_id[COLOR]; 857 | c2co_msg.child_frame_id = _optical_frame_id[COLOR]; 858 | c2co_msg.transform.translation.x = 0; 859 | c2co_msg.transform.translation.y = 0; 860 | c2co_msg.transform.translation.z = 0; 861 | c2co_msg.transform.rotation.x = q_c2co.getX(); 862 | c2co_msg.transform.rotation.y = q_c2co.getY(); 863 | c2co_msg.transform.rotation.z = q_c2co.getZ(); 864 | c2co_msg.transform.rotation.w = 
q_c2co.getW(); 865 | _static_tf_broadcaster_->sendTransform(c2co_msg); 866 | } 867 | 868 | if (_enable[DEPTH]) { 869 | rs2::stream_profile depth_profile; 870 | if (!getEnabledProfile(DEPTH, depth_profile)) { 871 | RCLCPP_ERROR(logger_, "Depth profile not found!"); 872 | rclcpp::shutdown(); 873 | exit(1); 874 | } 875 | if (true == _enable[INFRA1]) { 876 | auto d2ir1_extrinsics = depth_profile.get_extrinsics_to(_enabled_profiles[INFRA1].front()); 877 | auto q = rotationMatrixToQuaternion(d2ir1_extrinsics.rotation); 878 | 879 | // Transform base frame to infra1 880 | b2ir1_msg.header.stamp = transform_ts_; 881 | b2ir1_msg.header.frame_id = _base_frame_id; 882 | b2ir1_msg.child_frame_id = _frame_id[INFRA1]; 883 | b2ir1_msg.transform.translation.x = d2ir1_extrinsics.translation[2]; 884 | b2ir1_msg.transform.translation.y = -d2ir1_extrinsics.translation[0]; 885 | b2ir1_msg.transform.translation.z = -d2ir1_extrinsics.translation[1]; 886 | 887 | b2ir1_msg.transform.rotation.x = q.x(); 888 | b2ir1_msg.transform.rotation.y = q.y(); 889 | b2ir1_msg.transform.rotation.z = q.z(); 890 | b2ir1_msg.transform.rotation.w = q.w(); 891 | _static_tf_broadcaster_->sendTransform(b2ir1_msg); 892 | 893 | // Transform infra1 frame to infra1 optical frame 894 | ir1_2_ir1o.setRPY(-M_PI / 2, 0.0, -M_PI / 2); 895 | ir1_2_ir1o_msg.header.stamp = transform_ts_; 896 | ir1_2_ir1o_msg.header.frame_id = _frame_id[INFRA1]; 897 | ir1_2_ir1o_msg.child_frame_id = _optical_frame_id[INFRA1]; 898 | ir1_2_ir1o_msg.transform.translation.x = 0; 899 | ir1_2_ir1o_msg.transform.translation.y = 0; 900 | ir1_2_ir1o_msg.transform.translation.z = 0; 901 | ir1_2_ir1o_msg.transform.rotation.x = ir1_2_ir1o.getX(); 902 | ir1_2_ir1o_msg.transform.rotation.y = ir1_2_ir1o.getY(); 903 | ir1_2_ir1o_msg.transform.rotation.z = ir1_2_ir1o.getZ(); 904 | ir1_2_ir1o_msg.transform.rotation.w = ir1_2_ir1o.getW(); 905 | _static_tf_broadcaster_->sendTransform(ir1_2_ir1o_msg); 906 | } 907 | 908 | if (true == _enable[INFRA2]) { 909 | auto d2ir2_extrinsics = depth_profile.get_extrinsics_to(_enabled_profiles[INFRA2].front()); 910 | auto q = rotationMatrixToQuaternion(d2ir2_extrinsics.rotation); 911 | 912 | // Transform base frame to infra2 913 | b2ir2_msg.header.stamp = transform_ts_; 914 | b2ir2_msg.header.frame_id = _base_frame_id; 915 | b2ir2_msg.child_frame_id = _frame_id[INFRA2]; 916 | b2ir2_msg.transform.translation.x = d2ir2_extrinsics.translation[2]; 917 | b2ir2_msg.transform.translation.y = -d2ir2_extrinsics.translation[0]; 918 | b2ir2_msg.transform.translation.z = -d2ir2_extrinsics.translation[1]; 919 | b2ir2_msg.transform.rotation.x = q.x(); 920 | b2ir2_msg.transform.rotation.y = q.y(); 921 | b2ir2_msg.transform.rotation.z = q.z(); 922 | b2ir2_msg.transform.rotation.w = q.w(); 923 | _static_tf_broadcaster_->sendTransform(b2ir2_msg); 924 | 925 | // Transform infra2 frame to infra1 optical frame 926 | ir2_2_ir2o.setRPY(-M_PI / 2, 0.0, -M_PI / 2); 927 | ir2_2_ir2o_msg.header.stamp = transform_ts_; 928 | ir2_2_ir2o_msg.header.frame_id = _frame_id[INFRA2]; 929 | ir2_2_ir2o_msg.child_frame_id = _optical_frame_id[INFRA2]; 930 | ir2_2_ir2o_msg.transform.translation.x = 0; 931 | ir2_2_ir2o_msg.transform.translation.y = 0; 932 | ir2_2_ir2o_msg.transform.translation.z = 0; 933 | ir2_2_ir2o_msg.transform.rotation.x = ir2_2_ir2o.getX(); 934 | ir2_2_ir2o_msg.transform.rotation.y = ir2_2_ir2o.getY(); 935 | ir2_2_ir2o_msg.transform.rotation.z = ir2_2_ir2o.getZ(); 936 | ir2_2_ir2o_msg.transform.rotation.w = ir2_2_ir2o.getW(); 937 | 
_static_tf_broadcaster_->sendTransform(ir2_2_ir2o_msg); 938 | } 939 | } 940 | // Publish Fisheye TF 941 | } 942 | 943 | void alignFrame( 944 | const rs2_intrinsics & from_intrin, 945 | const rs2_intrinsics & other_intrin, 946 | rs2::frame from_image, 947 | uint32_t output_image_bytes_per_pixel, 948 | const rs2_extrinsics & from_to_other, 949 | std::vector & out_vec) 950 | { 951 | static const auto meter_to_mm = 0.001f; 952 | uint8_t * p_out_frame = out_vec.data(); 953 | 954 | static const auto blank_color = 0x00; 955 | memset(p_out_frame, blank_color, 956 | other_intrin.height * other_intrin.width * output_image_bytes_per_pixel); 957 | 958 | auto p_from_frame = reinterpret_cast(from_image.get_data()); 959 | auto from_stream_type = from_image.get_profile().stream_type(); 960 | float depth_units = ((from_stream_type == RS2_STREAM_DEPTH) ? _depth_scale_meters : 1.f); 961 | for (int from_y = 0; from_y < from_intrin.height; ++from_y) { 962 | int from_pixel_index = from_y * from_intrin.width; 963 | for (int from_x = 0; from_x < from_intrin.width; ++from_x, ++from_pixel_index) { 964 | // Skip over depth pixels with the value of zero 965 | float depth = 966 | (from_stream_type == 967 | RS2_STREAM_DEPTH) ? (depth_units * 968 | ((const uint16_t *)p_from_frame)[from_pixel_index]) : 1.f; 969 | if (depth) { 970 | // Map the top-left corner of the depth pixel onto the other image 971 | float from_pixel[2] = {from_x - 0.5f, from_y - 0.5f}, from_point[3], other_point[3], 972 | other_pixel[2]; 973 | rs2_deproject_pixel_to_point(from_point, &from_intrin, from_pixel, depth); 974 | rs2_transform_point_to_point(other_point, &from_to_other, from_point); 975 | rs2_project_point_to_pixel(other_pixel, &other_intrin, other_point); 976 | const int other_x0 = static_cast(other_pixel[0] + 0.5f); 977 | const int other_y0 = static_cast(other_pixel[1] + 0.5f); 978 | 979 | // Map the bottom-right corner of the depth pixel onto the other image 980 | from_pixel[0] = from_x + 0.5f; from_pixel[1] = from_y + 0.5f; 981 | rs2_deproject_pixel_to_point(from_point, &from_intrin, from_pixel, depth); 982 | rs2_transform_point_to_point(other_point, &from_to_other, from_point); 983 | rs2_project_point_to_pixel(other_pixel, &other_intrin, other_point); 984 | const int other_x1 = static_cast(other_pixel[0] + 0.5f); 985 | const int other_y1 = static_cast(other_pixel[1] + 0.5f); 986 | 987 | if (other_x0 < 0 || other_y0 < 0 || other_x1 >= other_intrin.width || 988 | other_y1 >= other_intrin.height) 989 | { 990 | continue; 991 | } 992 | 993 | for (int y = other_y0; y <= other_y1; ++y) { 994 | for (int x = other_x0; x <= other_x1; ++x) { 995 | int out_pixel_index = y * other_intrin.width + x; 996 | auto p_from_depth_frame = reinterpret_cast(p_from_frame); 997 | auto p_out_depth_frame = reinterpret_cast(p_out_frame); 998 | p_out_depth_frame[out_pixel_index] = p_from_depth_frame[from_pixel_index] * 999 | ( depth_units / meter_to_mm); 1000 | } 1001 | } 1002 | } 1003 | } 1004 | } 1005 | } 1006 | 1007 | rs2_extrinsics getRsExtrinsics( 1008 | const stream_index_pair & from_stream, 1009 | const stream_index_pair & to_stream) 1010 | { 1011 | auto & from = _enabled_profiles[from_stream].front(); 1012 | auto & to = _enabled_profiles[to_stream].front(); 1013 | return from.get_extrinsics_to(to); 1014 | } 1015 | 1016 | void publishAlignedDepthImg(rs2::frame depth_frame, const rclcpp::Time & t) 1017 | { 1018 | rs2::align align(RS2_STREAM_COLOR); 1019 | _aligned_frameset = depth_frame.apply_filter(align); 1020 | rs2::depth_frame aligned_depth = 
_aligned_frameset.get_depth_frame(); 1021 | RCLCPP_DEBUG(logger_, "aligned done!"); 1022 | 1023 | auto vf = aligned_depth.as(); 1024 | auto depth_image = cv::Mat(cv::Size(vf.get_width(), vf.get_height()), _image_format[DEPTH], 1025 | const_cast(vf.get_data()), cv::Mat::AUTO_STEP); 1026 | 1027 | sensor_msgs::msg::Image::SharedPtr img; 1028 | auto info_msg = _camera_info[DEPTH]; 1029 | img = cv_bridge::CvImage( 1030 | std_msgs::msg::Header(), sensor_msgs::image_encodings::TYPE_16UC1, depth_image).toImageMsg(); 1031 | auto image = aligned_depth.as(); 1032 | auto bpp = image.get_bytes_per_pixel(); 1033 | auto height = image.get_height(); 1034 | auto width = image.get_width(); 1035 | img->width = width; 1036 | img->height = height; 1037 | img->is_bigendian = false; 1038 | img->step = width * bpp; 1039 | img->header.frame_id = _optical_frame_id[COLOR]; 1040 | img->header.stamp = t; 1041 | _align_depth_publisher.publish(img); 1042 | _align_depth_camera_publisher->publish(info_msg); 1043 | } 1044 | 1045 | void publishPCTopic(const rclcpp::Time & t) 1046 | { 1047 | auto color_intrinsics = _stream_intrinsics[COLOR]; 1048 | auto image_depth16 = reinterpret_cast(_image[DEPTH].data); 1049 | auto depth_intrinsics = _stream_intrinsics[DEPTH]; 1050 | sensor_msgs::msg::PointCloud2 msg_pointcloud; 1051 | msg_pointcloud.header.stamp = t; 1052 | msg_pointcloud.header.frame_id = _optical_frame_id[DEPTH]; 1053 | msg_pointcloud.width = depth_intrinsics.width; 1054 | msg_pointcloud.height = depth_intrinsics.height; 1055 | msg_pointcloud.is_dense = true; 1056 | 1057 | sensor_msgs::PointCloud2Modifier modifier(msg_pointcloud); 1058 | 1059 | modifier.setPointCloud2Fields(4, 1060 | "x", 1, sensor_msgs::msg::PointField::FLOAT32, 1061 | "y", 1, sensor_msgs::msg::PointField::FLOAT32, 1062 | "z", 1, sensor_msgs::msg::PointField::FLOAT32, 1063 | "rgb", 1, sensor_msgs::msg::PointField::FLOAT32); 1064 | modifier.setPointCloud2FieldsByString(2, "xyz", "rgb"); 1065 | 1066 | sensor_msgs::PointCloud2Iterator iter_x(msg_pointcloud, "x"); 1067 | sensor_msgs::PointCloud2Iterator iter_y(msg_pointcloud, "y"); 1068 | sensor_msgs::PointCloud2Iterator iter_z(msg_pointcloud, "z"); 1069 | 1070 | sensor_msgs::PointCloud2Iterator iter_r(msg_pointcloud, "r"); 1071 | sensor_msgs::PointCloud2Iterator iter_g(msg_pointcloud, "g"); 1072 | sensor_msgs::PointCloud2Iterator iter_b(msg_pointcloud, "b"); 1073 | 1074 | float depth_point[3], color_point[3], color_pixel[2], scaled_depth; 1075 | unsigned char * color_data = _image[COLOR].data; 1076 | 1077 | // Fill the PointCloud2 fields 1078 | for (int y = 0; y < depth_intrinsics.height; ++y) { 1079 | for (int x = 0; x < depth_intrinsics.width; ++x) { 1080 | scaled_depth = static_cast(*image_depth16) * _depth_scale_meters; 1081 | float depth_pixel[2] = {static_cast(x), static_cast(y)}; 1082 | rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, depth_pixel, scaled_depth); 1083 | 1084 | if (depth_point[2] <= 0.f || depth_point[2] > 5.f) { 1085 | depth_point[0] = 0.f; 1086 | depth_point[1] = 0.f; 1087 | depth_point[2] = 0.f; 1088 | } 1089 | 1090 | *iter_x = depth_point[0]; 1091 | *iter_y = depth_point[1]; 1092 | *iter_z = depth_point[2]; 1093 | 1094 | rs2_transform_point_to_point(color_point, &_depth2color_extrinsics, depth_point); 1095 | rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point); 1096 | 1097 | if (color_pixel[1] < 0.f || color_pixel[1] > color_intrinsics.height || 1098 | color_pixel[0] < 0.f || color_pixel[0] > color_intrinsics.width) 1099 | { 1100 | // For out of 
bounds color data, default to a shade of blue in order to visually 1101 | // distinguish holes. This color value is same as the librealsense out of bounds color 1102 | // value. 1103 | *iter_r = static_cast(96); 1104 | *iter_g = static_cast(157); 1105 | *iter_b = static_cast(198); 1106 | } else { 1107 | auto i = static_cast(color_pixel[0]); 1108 | auto j = static_cast(color_pixel[1]); 1109 | 1110 | auto offset = i * 3 + j * color_intrinsics.width * 3; 1111 | *iter_r = static_cast(color_data[offset]); 1112 | *iter_g = static_cast(color_data[offset + 1]); 1113 | *iter_b = static_cast(color_data[offset + 2]); 1114 | } 1115 | 1116 | ++image_depth16; 1117 | ++iter_x; ++iter_y; ++iter_z; 1118 | ++iter_r; ++iter_g; ++iter_b; 1119 | } 1120 | } 1121 | _pointcloud_publisher->publish(msg_pointcloud); 1122 | } 1123 | 1124 | void publishAlignedPCTopic(const rclcpp::Time & t) 1125 | { 1126 | rs2::depth_frame aligned_depth = _aligned_frameset.get_depth_frame(); 1127 | 1128 | auto image_depth16 = reinterpret_cast(aligned_depth.get_data()); 1129 | auto depth_intrinsics = _stream_intrinsics[COLOR]; 1130 | unsigned char * color_data = _image[COLOR].data; 1131 | sensor_msgs::msg::PointCloud2 msg_pointcloud; 1132 | msg_pointcloud.header.stamp = t; 1133 | msg_pointcloud.header.frame_id = _optical_frame_id[COLOR]; 1134 | msg_pointcloud.width = depth_intrinsics.width; 1135 | msg_pointcloud.height = depth_intrinsics.height; 1136 | msg_pointcloud.is_dense = true; 1137 | 1138 | sensor_msgs::PointCloud2Modifier modifier(msg_pointcloud); 1139 | 1140 | modifier.setPointCloud2Fields(3, 1141 | "x", 1, sensor_msgs::msg::PointField::FLOAT32, 1142 | "y", 1, sensor_msgs::msg::PointField::FLOAT32, 1143 | "z", 1, sensor_msgs::msg::PointField::FLOAT32, 1144 | "rgb", 1, sensor_msgs::msg::PointField::FLOAT32); 1145 | modifier.setPointCloud2FieldsByString(2, "xyz", "rgb"); 1146 | 1147 | sensor_msgs::PointCloud2Iterator iter_x(msg_pointcloud, "x"); 1148 | sensor_msgs::PointCloud2Iterator iter_y(msg_pointcloud, "y"); 1149 | sensor_msgs::PointCloud2Iterator iter_z(msg_pointcloud, "z"); 1150 | 1151 | sensor_msgs::PointCloud2Iterator iter_r(msg_pointcloud, "r"); 1152 | sensor_msgs::PointCloud2Iterator iter_g(msg_pointcloud, "g"); 1153 | sensor_msgs::PointCloud2Iterator iter_b(msg_pointcloud, "b"); 1154 | 1155 | float std_nan = std::numeric_limits::quiet_NaN(); 1156 | float depth_point[3], scaled_depth; 1157 | 1158 | // Fill the PointCloud2 fields 1159 | for (int y = 0; y < depth_intrinsics.height; ++y) { 1160 | for (int x = 0; x < depth_intrinsics.width; ++x) { 1161 | scaled_depth = static_cast(*image_depth16) * _depth_scale_meters; 1162 | float depth_pixel[2] = {static_cast(x), static_cast(y)}; 1163 | rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, depth_pixel, scaled_depth); 1164 | auto iter_offset = x + y * depth_intrinsics.width; 1165 | 1166 | if (depth_point[2] <= 0.f || depth_point[2] > 5.f) { 1167 | *(iter_x + iter_offset) = std_nan; 1168 | *(iter_y + iter_offset) = std_nan; 1169 | *(iter_z + iter_offset) = std_nan; 1170 | *(iter_r + iter_offset) = static_cast(96); 1171 | *(iter_g + iter_offset) = static_cast(157); 1172 | *(iter_b + iter_offset) = static_cast(198); 1173 | } else { 1174 | *(iter_x + iter_offset) = depth_point[0]; 1175 | *(iter_y + iter_offset) = depth_point[1]; 1176 | *(iter_z + iter_offset) = depth_point[2]; 1177 | *(iter_r + iter_offset) = color_data[iter_offset * 3]; 1178 | *(iter_g + iter_offset) = color_data[iter_offset * 3 + 1]; 1179 | *(iter_b + iter_offset) = color_data[iter_offset * 3 + 2]; 
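// In-range points (0 m < z <= 5 m) reuse the RGB pixel at the same index: the aligned depth
// image shares the color stream's intrinsics, so the depth and color buffers are indexed
// identically. Out-of-range points were written above as NaN coordinates with the
// librealsense "hole" color (96, 157, 198).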
1180 | } 1181 | 1182 | ++image_depth16; 1183 | } 1184 | } 1185 | 1186 | _align_pointcloud_publisher->publish(msg_pointcloud); 1187 | } 1188 | 1189 | 1190 | Extrinsics rsExtrinsicsToMsg(const rs2_extrinsics & extrinsics) const 1191 | { 1192 | Extrinsics extrinsicsMsg; 1193 | for (int i = 0; i < 9; ++i) { 1194 | extrinsicsMsg.rotation[i] = extrinsics.rotation[i]; 1195 | if (i < 3) { 1196 | extrinsicsMsg.translation[i] = extrinsics.translation[i]; 1197 | } 1198 | } 1199 | 1200 | return extrinsicsMsg; 1201 | } 1202 | 1203 | Extrinsics getFisheye2ImuExtrinsicsMsg() 1204 | { 1205 | auto & fisheye = _enabled_profiles[FISHEYE].front(); 1206 | auto & hid = _enabled_profiles[GYRO].front(); 1207 | Extrinsics extrinsicsMsg = rsExtrinsicsToMsg(fisheye.get_extrinsics_to(hid)); 1208 | extrinsicsMsg.header.frame_id = "fisheye2imu_extrinsics"; 1209 | return extrinsicsMsg; 1210 | } 1211 | 1212 | Extrinsics getFisheye2DepthExtrinsicsMsg() 1213 | { 1214 | auto & fisheye = _enabled_profiles[FISHEYE].front(); 1215 | auto & depth = _enabled_profiles[DEPTH].front(); 1216 | Extrinsics extrinsicsMsg = rsExtrinsicsToMsg(fisheye.get_extrinsics_to(depth)); 1217 | extrinsicsMsg.header.frame_id = "fisheye2depth_extrinsics"; 1218 | return extrinsicsMsg; 1219 | } 1220 | 1221 | struct float3 1222 | { 1223 | float x, y, z; 1224 | }; 1225 | 1226 | IMUInfo getImuInfo(const stream_index_pair & stream_index) 1227 | { 1228 | IMUInfo info; 1229 | #if (RS2_API_VERSION >= 20901) 1230 | auto sp = _enabled_profiles[stream_index].front().as(); 1231 | auto imuIntrinsics = sp.get_motion_intrinsics(); 1232 | #else 1233 | auto imuIntrinsics = _sensors[stream_index]->get_motion_intrinsics(stream_index.first); 1234 | #endif 1235 | if (GYRO == stream_index) { 1236 | info.header.frame_id = "imu_gyro"; 1237 | } else if (ACCEL == stream_index) { 1238 | info.header.frame_id = "imu_accel"; 1239 | } 1240 | 1241 | auto index = 0; 1242 | for (int i = 0; i < 3; ++i) { 1243 | for (int j = 0; j < 4; ++j) { 1244 | info.data[index] = imuIntrinsics.data[i][j]; 1245 | ++index; 1246 | } 1247 | info.noise_variances[i] = imuIntrinsics.noise_variances[i]; 1248 | info.bias_variances[i] = imuIntrinsics.bias_variances[i]; 1249 | } 1250 | return info; 1251 | } 1252 | 1253 | void tryGetLogSeverity(rs2_log_severity & severity) const 1254 | { 1255 | static const char * severity_var_name = "LRS_LOG_LEVEL"; 1256 | auto content = getenv(severity_var_name); 1257 | 1258 | if (content) { 1259 | std::string content_str(content); 1260 | std::transform(content_str.begin(), content_str.end(), content_str.begin(), ::toupper); 1261 | 1262 | for (uint32_t i = 0; i < RS2_LOG_SEVERITY_COUNT; i++) { 1263 | auto current = std::string(rs2_log_severity_to_string((rs2_log_severity)i)); 1264 | std::transform(current.begin(), current.end(), current.begin(), ::toupper); 1265 | if (content_str == current) { 1266 | severity = (rs2_log_severity)i; 1267 | break; 1268 | } 1269 | } 1270 | } 1271 | } 1272 | 1273 | void publishFrame(rs2::frame f, const rclcpp::Time & t) 1274 | { 1275 | RCLCPP_DEBUG(logger_, "publishFrame(...)"); 1276 | stream_index_pair stream{f.get_profile().stream_type(), f.get_profile().stream_index()}; 1277 | auto & image = _image[stream]; 1278 | image.data = const_cast(reinterpret_cast(f.get_data())); 1279 | ++(_seq[stream]); 1280 | auto & info_publisher = _info_publisher[stream]; 1281 | auto & image_publisher = _image_publishers[stream]; 1282 | // if (0 != info_publisher.getNumSubscribers() || 1283 | // 0 != image_publisher.getNumSubscribers()) 1284 | { 1285 | auto width = 
0; 1286 | auto height = 0; 1287 | auto bpp = 1; 1288 | if (f.is()) { 1289 | auto image = f.as(); 1290 | width = image.get_width(); 1291 | height = image.get_height(); 1292 | bpp = image.get_bytes_per_pixel(); 1293 | } 1294 | 1295 | sensor_msgs::msg::Image::SharedPtr img; 1296 | img = cv_bridge::CvImage(std_msgs::msg::Header(), _encoding[stream], image).toImageMsg(); 1297 | img->width = width; 1298 | img->height = height; 1299 | img->is_bigendian = false; 1300 | img->step = width * bpp; 1301 | img->header.frame_id = _optical_frame_id[stream]; 1302 | img->header.stamp = t; 1303 | 1304 | auto & cam_info = _camera_info[stream]; 1305 | cam_info.header.stamp = t; 1306 | info_publisher->publish(cam_info); 1307 | 1308 | image_publisher.publish(img); 1309 | RCLCPP_DEBUG(logger_, "%s stream published", 1310 | rs2_stream_to_string(f.get_profile().stream_type())); 1311 | } 1312 | } 1313 | 1314 | bool getEnabledProfile(const stream_index_pair & stream_index, rs2::stream_profile & profile) 1315 | { 1316 | // Assuming that all D400 SKUs have depth sensor 1317 | auto profiles = _enabled_profiles[stream_index]; 1318 | auto it = std::find_if(profiles.begin(), profiles.end(), 1319 | [&](const rs2::stream_profile & profile) 1320 | {return profile.stream_type() == stream_index.first;}); 1321 | if (it == profiles.end()) { 1322 | return false; 1323 | } 1324 | 1325 | profile = *it; 1326 | return true; 1327 | } 1328 | 1329 | rclcpp::Clock _ros_clock; 1330 | std::unique_ptr _ctx; 1331 | 1332 | std::map> _sensors; 1333 | 1334 | std::string _serial_no; 1335 | float _depth_scale_meters; 1336 | 1337 | std::map _stream_intrinsics; 1338 | std::map _width; 1339 | std::map _height; 1340 | std::map _fps; 1341 | std::map _enable; 1342 | std::map _stream_name; 1343 | 1344 | std::shared_ptr _static_tf_broadcaster_; 1345 | 1346 | std::map _image_publishers; 1347 | std::map::SharedPtr> _imu_publishers; 1348 | std::map _image_format; 1349 | std::map _format; 1350 | std::map::SharedPtr> _info_publisher; 1352 | std::map::SharedPtr> _imu_info_publisher; 1354 | std::map _image; 1355 | std::map _encoding; 1356 | 1357 | std::string _base_frame_id; 1358 | std::map _frame_id; 1359 | std::map _optical_frame_id; 1360 | std::map _seq; 1361 | std::map _unit_step_size; 1362 | std::map _camera_info; 1363 | rclcpp::Publisher::SharedPtr _fe_to_depth_publisher, 1364 | _fe_to_imu_publisher; 1365 | 1366 | rclcpp::QoS qos; 1367 | bool _intialize_time_base; 1368 | double _camera_time_base; 1369 | std::map> _enabled_profiles; 1370 | 1371 | image_transport::Publisher _align_depth_publisher; 1372 | rclcpp::Publisher::SharedPtr _align_depth_camera_publisher; 1373 | 1374 | rclcpp::Publisher::SharedPtr _pointcloud_publisher; 1375 | rclcpp::Publisher::SharedPtr _align_pointcloud_publisher; 1376 | 1377 | rclcpp::Time _ros_time_base; 1378 | rclcpp::Logger logger_ = rclcpp::get_logger("RealSenseCameraNode"); 1379 | rclcpp::TimerBase::SharedPtr timer_; 1380 | bool _sync_frames; 1381 | bool _pointcloud; 1382 | bool _align_pointcloud; 1383 | bool _align_depth; 1384 | PipelineSyncer _syncer; 1385 | rs2_extrinsics _depth2color_extrinsics; 1386 | 1387 | rs2::frameset _aligned_frameset; 1388 | }; // end class 1389 | } // namespace realsense_ros2_camera 1390 | 1391 | int main(int argc, char * argv[]) 1392 | { 1393 | rclcpp::init(argc, argv); 1394 | auto node = std::make_shared(); 1395 | node->onInit(); 1396 | rclcpp::spin(node); 1397 | rclcpp::shutdown(); 1398 | return 0; 1399 | } 1400 | --------------------------------------------------------------------------------