├── .clang-format ├── .clang_complete ├── CMakeLists.txt ├── README.md ├── include └── disparity_to_point_cloud │ ├── depth_map_fusion.hpp │ └── disparity_to_point_cloud.hpp ├── launch ├── d2pcloud.launch └── depth_map_fusion.launch ├── package.xml ├── rviz └── d2pc.rviz └── src ├── depth_map_fusion.cpp ├── depth_map_fusion_node.cpp ├── disparity_to_point_cloud.cpp └── disparity_to_point_cloud_node.cpp /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | Language: Cpp 3 | BasedOnStyle: Google 4 | -------------------------------------------------------------------------------- /.clang_complete: -------------------------------------------------------------------------------- 1 | -Iinclude 2 | -I/opt/ros/indigo/include 3 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(disparity_to_point_cloud) 3 | 4 | # definition for c++11 5 | add_definitions(-std=c++11) 6 | ## Find catkin macros and libraries 7 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) 8 | ## is used, also find other catkin packages 9 | find_package(catkin REQUIRED COMPONENTS 10 | roscpp 11 | sensor_msgs 12 | std_msgs 13 | pcl_ros 14 | cv_bridge 15 | ) 16 | 17 | ## System dependencies are found with CMake's conventions 18 | find_package(PCL REQUIRED) 19 | find_package(OpenCV REQUIRED) 20 | 21 | ## Uncomment this if the package has a setup.py. This macro ensures 22 | ## modules and global scripts declared therein get installed 23 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html 24 | # catkin_python_setup() 25 | 26 | ################################################ 27 | ## Declare ROS messages, services and actions ## 28 | ################################################ 29 | 30 | ## To declare and build messages, services or actions from within this 31 | ## package, follow these steps: 32 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in 33 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). 34 | ## * In the file package.xml: 35 | ## * add a build_depend tag for "message_generation" 36 | ## * add a build_depend and a run_depend tag for each package in MSG_DEP_SET 37 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in 38 | ## but can be declared for certainty nonetheless: 39 | ## * add a run_depend tag for "message_runtime" 40 | ## * In this file (CMakeLists.txt): 41 | ## * add "message_generation" and every package in MSG_DEP_SET to 42 | ## find_package(catkin REQUIRED COMPONENTS ...) 43 | ## * add "message_runtime" and every package in MSG_DEP_SET to 44 | ## catkin_package(CATKIN_DEPENDS ...) 45 | ## * uncomment the add_*_files sections below as needed 46 | ## and list every .msg/.srv/.action file to be processed 47 | ## * uncomment the generate_messages entry below 48 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) 
49 | 50 | ## Generate messages in the 'msg' folder 51 | # add_message_files( 52 | # FILES 53 | # Message1.msg 54 | # Message2.msg 55 | # ) 56 | 57 | ## Generate services in the 'srv' folder 58 | # add_service_files( 59 | # FILES 60 | # Service1.srv 61 | # Service2.srv 62 | # ) 63 | 64 | ## Generate actions in the 'action' folder 65 | # add_action_files( 66 | # FILES 67 | # Action1.action 68 | # Action2.action 69 | # ) 70 | 71 | ## Generate added messages and services with any dependencies listed here 72 | # generate_messages( 73 | # DEPENDENCIES 74 | # sensor_msgs# std_msgs 75 | # ) 76 | 77 | ################################################ 78 | ## Declare ROS dynamic reconfigure parameters ## 79 | ################################################ 80 | 81 | ## To declare and build dynamic reconfigure parameters within this 82 | ## package, follow these steps: 83 | ## * In the file package.xml: 84 | ## * add a build_depend and a run_depend tag for "dynamic_reconfigure" 85 | ## * In this file (CMakeLists.txt): 86 | ## * add "dynamic_reconfigure" to 87 | ## find_package(catkin REQUIRED COMPONENTS ...) 88 | ## * uncomment the "generate_dynamic_reconfigure_options" section below 89 | ## and list every .cfg file to be processed 90 | 91 | ## Generate dynamic reconfigure parameters in the 'cfg' folder 92 | # generate_dynamic_reconfigure_options( 93 | # cfg/DynReconf1.cfg 94 | # cfg/DynReconf2.cfg 95 | # ) 96 | 97 | ################################### 98 | ## catkin specific configuration ## 99 | ################################### 100 | ## The catkin_package macro generates cmake config files for your package 101 | ## Declare things to be passed to dependent projects 102 | ## INCLUDE_DIRS: uncomment this if you package contains header files 103 | ## LIBRARIES: libraries you create in this project that dependent projects also need 104 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need 105 | ## DEPENDS: system dependencies of this project that dependent projects also need 106 | catkin_package( 107 | INCLUDE_DIRS include 108 | LIBRARIES disparity_to_point_cloud depth_map_fusion 109 | CATKIN_DEPENDS roscpp sensor_msgs std_msgs 110 | # DEPENDS system_lib 111 | ) 112 | 113 | ########### 114 | ## Build ## 115 | ########### 116 | 117 | ## Specify additional locations of header files 118 | ## Your package locations should be listed before other locations 119 | # include_directories(include) 120 | include_directories( 121 | include 122 | ${catkin_INCLUDE_DIRS} 123 | ${PCL_INCLUDE_DIRS} 124 | ${OpenCV_INCLUDE_DIRS} 125 | ) 126 | 127 | ## Declare a C++ library 128 | add_library(disparity_to_point_cloud 129 | src/disparity_to_point_cloud.cpp 130 | ) 131 | add_library(depth_map_fusion 132 | src/depth_map_fusion.cpp 133 | ) 134 | target_link_libraries(disparity_to_point_cloud 135 | ${PCL_LIBRARIES} 136 | ${OpenCV_LIBS} 137 | ) 138 | target_link_libraries(depth_map_fusion 139 | ${PCL_LIBRARIES} 140 | ${OpenCV_LIBS} 141 | ) 142 | ## Add cmake target dependencies of the library 143 | ## as an example, code may need to be generated before libraries 144 | ## either from message generation or dynamic reconfigure 145 | add_dependencies(disparity_to_point_cloud ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 146 | add_dependencies(depth_map_fusion ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 147 | 148 | ## Declare a C++ executable 149 | add_executable(disparity_to_point_cloud_node src/disparity_to_point_cloud_node.cpp) 150 | add_executable(depth_map_fusion_node 
src/depth_map_fusion_node.cpp)
151 |
152 | ## Add cmake target dependencies of the executable
153 | ## same as for the library above
154 | # add_dependencies(disparity_to_point_cloud_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
155 |
156 | ## Specify libraries to link a library or executable target against
157 | target_link_libraries(disparity_to_point_cloud_node
158 | disparity_to_point_cloud
159 | ${catkin_LIBRARIES}
160 | ${PCL_LIBRARIES}
161 | ${OpenCV_LIBS}
162 | )
163 |
164 | target_link_libraries(depth_map_fusion_node
165 | depth_map_fusion
166 | ${catkin_LIBRARIES}
167 | ${PCL_LIBRARIES}
168 | ${OpenCV_LIBS}
169 | )
170 |
171 | #############
172 | ## Install ##
173 | #############
174 |
175 | # all install targets should use catkin DESTINATION variables
176 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
177 |
178 | ## Mark executable scripts (Python etc.) for installation
179 | ## in contrast to setup.py, you can choose the destination
180 | # install(PROGRAMS
181 | # scripts/my_python_script
182 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
183 | # )
184 |
185 | ## Mark executables and/or libraries for installation
186 | # install(TARGETS disparity_to_point_cloud disparity_to_point_cloud_node
187 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
188 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
189 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
190 | # )
191 |
192 | ## Mark cpp header files for installation
193 | # install(DIRECTORY include/${PROJECT_NAME}/
194 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
195 | # FILES_MATCHING PATTERN "*.h"
196 | # PATTERN ".svn" EXCLUDE
197 | # )
198 |
199 | ## Mark other files for installation (e.g. launch and bag files, etc.)
200 | # install(FILES
201 | # # myfile1
202 | # # myfile2
203 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
204 | # )
205 |
206 | #############
207 | ## Testing ##
208 | #############
209 |
210 | ## Add gtest based cpp test target and link libraries
211 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_disparity_to_point_cloud.cpp)
212 | # if(TARGET ${PROJECT_NAME}-test)
213 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
214 | # endif()
215 |
216 | ## Add folders to be run by python nosetests
217 | # catkin_add_nosetests(test)
218 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # disparity_to_point_cloud
2 |
3 | A ROS node to compute a point cloud from a disparity map.
4 |
5 | ## Installation
6 |
7 | The disparity_to_point_cloud package depends on the following packages:
8 | - cv_bridge
9 | ```bash
10 | sudo apt-get install ros-$ROS_DISTRO-cv-bridge
11 | ```
12 | - pcl: follow the instructions [here](http://pointclouds.org/downloads/linux.html)
13 | - catkin build: follow the instructions [here](http://catkin-tools.readthedocs.io/en/latest/installing.html)
14 |
15 | Clone [this](https://github.com/simonegu/disparity_to_point_cloud) git repository into your `catkin_ws/src` directory.
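For example, assuming your workspace folder is named `catkin_ws`:
```bash
cd catkin_ws/src
git clone https://github.com/simonegu/disparity_to_point_cloud
```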
16 |
17 | You are now ready to build the package with:
18 | ```bash
19 | cd catkin_ws
20 | catkin build
21 | ```
22 |
23 | ## Configuration
24 | You need to change the input remappings in the launch file to match your image and point cloud topics:
25 | - disparity image: https://github.com/PX4/disparity_to_point_cloud/blob/master/launch/d2pcloud.launch#L3
26 | - point cloud: https://github.com/PX4/disparity_to_point_cloud/blob/master/launch/d2pcloud.launch#L4
27 |
28 | ## Use
29 | After starting your `roscore`, use this to launch disparity_to_point_cloud:
30 | ```bash
31 | roslaunch disparity_to_point_cloud d2pcloud.launch
32 | ```
33 |
--------------------------------------------------------------------------------
/include/disparity_to_point_cloud/depth_map_fusion.hpp:
--------------------------------------------------------------------------------
1 | /****************************************************************************
2 | *
3 | * Copyright (c) 2016 PX4 Development Team. All rights reserved.
4 | *
5 | * Redistribution and use in source and binary forms, with or without
6 | * modification, are permitted provided that the following conditions
7 | * are met:
8 | *
9 | * 1. Redistributions of source code must retain the above copyright
10 | * notice, this list of conditions and the following disclaimer.
11 | * 2. Redistributions in binary form must reproduce the above copyright
12 | * notice, this list of conditions and the following disclaimer in
13 | * the documentation and/or other materials provided with the
14 | * distribution.
15 | * 3. Neither the name PX4 nor the names of its contributors may be
16 | * used to endorse or promote products derived from this software
17 | * without specific prior written permission.
18 | *
19 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
22 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
23 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
24 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
25 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
26 | * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
27 | * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
28 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
29 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 | * POSSIBILITY OF SUCH DAMAGE.
31 | * 32 | ****************************************************************************/ 33 | /** 34 | * @file depth_map_fusion.hpp 35 | * 36 | * Class to perform a depth map fusion 37 | * 38 | * @author Vilhjálmur Vilhjálmsson 39 | */ 40 | 41 | #ifndef __DEPTH_MAP_FUSION_HPP__ 42 | #define __DEPTH_MAP_FUSION_HPP__ 43 | 44 | #include 45 | #include 46 | 47 | #include 48 | #include 49 | #include 50 | // #include 51 | #include 52 | #include 53 | #include 54 | #include 55 | #include 56 | #include 57 | #include 58 | #include 59 | #include 60 | 61 | namespace depth_map_fusion { 62 | 63 | class DepthMapFusion { 64 | private: 65 | ros::NodeHandle nh_; 66 | ros::Subscriber disparity1_sub_; 67 | ros::Subscriber disparity2_sub_; 68 | ros::Subscriber matching_score_1_sub_; 69 | ros::Subscriber matching_score_2_sub_; 70 | 71 | ros::Publisher fused_pub_; 72 | ros::Publisher cropped_depth_1_pub_; 73 | ros::Publisher cropped_depth_2_pub_; 74 | ros::Publisher cropped_score_1_pub_; 75 | ros::Publisher cropped_score_2_pub_; 76 | ros::Publisher cropped_score_combined_pub_; 77 | ros::Publisher grad_pub_; 78 | 79 | cv::Mat cropped_depth_1_; 80 | cv::Mat cropped_depth_2_; 81 | cv::Mat cropped_score_1_; 82 | cv::Mat cropped_score_2_; 83 | cv::Mat cropped_score_combined_; 84 | cv::Mat cropped_score_1_grad_; 85 | cv::Mat cropped_score_2_grad_; 86 | 87 | std::deque previous_errors_; 88 | int RAINBOW_WITH_BLACK = -1; 89 | int GRAY_SCALE = -2; 90 | 91 | public: 92 | int offset_x_ = 0; 93 | int offset_y_ = 0; 94 | double scaling_factor_ = 1.0; 95 | 96 | DepthMapFusion() : nh_("~") { 97 | disparity1_sub_ = 98 | nh_.subscribe("/disparity_1", 1, &DepthMapFusion::DisparityCb1, this); 99 | disparity2_sub_ = 100 | nh_.subscribe("/disparity_2", 1, &DepthMapFusion::DisparityCb2, this); 101 | matching_score_1_sub_ = nh_.subscribe( 102 | "/matching_score_1", 1, &DepthMapFusion::MatchingScoreCb1, this); 103 | matching_score_2_sub_ = nh_.subscribe( 104 | "/matching_score_2", 1, &DepthMapFusion::MatchingScoreCb2, this); 105 | 106 | cropped_depth_1_pub_ = 107 | nh_.advertise("/cropped_depth_1", 5); 108 | cropped_depth_2_pub_ = 109 | nh_.advertise("/cropped_depth_2", 5); 110 | cropped_score_1_pub_ = 111 | nh_.advertise("/cropped_score_1", 5); 112 | cropped_score_2_pub_ = 113 | nh_.advertise("/cropped_score_2", 5); 114 | fused_pub_ = nh_.advertise("/fused_depth_map", 5); 115 | cropped_score_combined_pub_ = 116 | nh_.advertise("/combined_score", 5); 117 | grad_pub_ = nh_.advertise("/gradient", 5); 118 | 119 | if (!nh_.getParam("offset_x", offset_x_)) { 120 | ROS_WARN("Failed to load parameter offset_x"); 121 | } 122 | if (!nh_.getParam("offset_y", offset_y_)) { 123 | ROS_WARN("Failed to load parameter offset_y"); 124 | } 125 | }; 126 | 127 | void DisparityCb1(const sensor_msgs::ImageConstPtr &msg); 128 | void DisparityCb2(const sensor_msgs::ImageConstPtr &msg); 129 | void MatchingScoreCb1(const sensor_msgs::ImageConstPtr &msg); 130 | void MatchingScoreCb2(const sensor_msgs::ImageConstPtr &msg); 131 | 132 | void publishFusedDepthMap(const sensor_msgs::ImageConstPtr &msg); 133 | 134 | int getFusedDistance(int i, int j); 135 | int weightedAverage(int dist1, int dist2, int score1, int score2); 136 | int maxDist(int dist1, int dist2, int score1, int score2); 137 | int maxDistUnlessBlack(int dist1, int dist2, int score1, int score2); 138 | int betterScore(int dist1, int dist2, int score1, int score2); 139 | int onlyGood1(int dist1, int dist2, int score1, int score2); 140 | int onlyGoodAvg(int dist1, int dist2, int score1, int score2); 141 | 
int overlap(int dist1, int dist2, int score1, int score2); 142 | int blackToWhite(int dist1, int dist2, int score1, int score2); 143 | int gradFilter(int dist1, int dist2, int score1, int score2, int grad1, 144 | int grad2); 145 | 146 | cv::Mat cropMat(const cv::Mat &mat, int left, int right, int top, int bottom); 147 | cv::Mat cropToSquare(const cv::Mat &mat, int offset_x = 0, int offset_y = 0); 148 | cv::Mat rotateMat(const cv::Mat &mat); 149 | 150 | void publishWithColor(const sensor_msgs::ImageConstPtr &msg, 151 | const cv::Mat &mat, const ros::Publisher &publisher, 152 | int colormap); 153 | 154 | void colorizeDepth(const cv::Mat &gray, cv::Mat &rgb); 155 | }; 156 | 157 | } // depth_map_fusion 158 | 159 | #endif // __DEPTH_MAP_FUSION_HPP__ 160 | -------------------------------------------------------------------------------- /include/disparity_to_point_cloud/disparity_to_point_cloud.hpp: -------------------------------------------------------------------------------- 1 | /**************************************************************************** 2 | * 3 | * Copyright (c) 2016 PX4 Development Team. All rights reserved. 4 | * 5 | * Redistribution and use in source and binary forms, with or without 6 | * modification, are permitted provided that the following conditions 7 | * are met: 8 | * 9 | * 1. Redistributions of source code must retain the above copyright 10 | * notice, this list of conditions and the following disclaimer. 11 | * 2. Redistributions in binary form must reproduce the above copyright 12 | * notice, this list of conditions and the following disclaimer in 13 | * the documentation and/or other materials provided with the 14 | * distribution. 15 | * 3. Neither the name PX4 nor the names of its contributors may be 16 | * used to endorse or promote products derived from this software 17 | * without specific prior written permission. 18 | * 19 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 22 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 23 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 24 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 25 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS 26 | * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED 27 | * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 28 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 29 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 30 | * POSSIBILITY OF SUCH DAMAGE. 
31 | * 32 | ****************************************************************************/ 33 | /** 34 | * @file disparity_to_point_cloud.hpp 35 | * 36 | * Class to convert a disparity map in to a point cloud 37 | * 38 | * @author Simone Guscetti 39 | * @author Vilhjálmur Vilhjálmsson 40 | */ 41 | 42 | #ifndef __DISPARITY_TO_POINT_CLOUD_HPP__ 43 | #define __DISPARITY_TO_POINT_CLOUD_HPP__ 44 | 45 | #include 46 | #include 47 | #include 48 | #include 49 | #include 50 | #include 51 | // PCL specific includes 52 | #include 53 | #include 54 | #include 55 | #include 56 | #include 57 | #include 58 | 59 | namespace d2pc { 60 | class Disparity2PCloud { 61 | private: 62 | ros::NodeHandle nh_; 63 | ros::Publisher p_cloud_pub_; 64 | ros::Subscriber disparity_sub_; 65 | // TODO import this coefficeint with the calibration file or camera info topic 66 | double fx_ = 714.24; 67 | double fy_ = 713.5; 68 | double cx_ = 376; 69 | double cy_ = 240; 70 | // double base_line_ = 0.043; // odroid stereo 71 | double base_line_ = 0.09; // Omni-stereo 72 | cv::Mat Q_; 73 | 74 | public: 75 | Disparity2PCloud() : nh_("~") { 76 | printf("Constructor start\n"); 77 | disparity_sub_ = 78 | nh_.subscribe("/disparity", 1, &Disparity2PCloud::DisparityCb, this); 79 | 80 | p_cloud_pub_ = 81 | nh_.advertise("/point_cloud", 1, this); 82 | 83 | // Get Ros parameters 84 | nh_.param("fx_", fx_, 714.24); 85 | nh_.param("fy_", fy_, 713.5); 86 | nh_.param("cx_", cx_, 376); 87 | nh_.param("cy_", cy_, 240); 88 | nh_.param("base_line_", base_line_, 0.09); 89 | 90 | cv::Mat K = (cv::Mat_(3, 3) << fx_, 0, cx_, 0, fy_, cy_, 0, 0, 1); 91 | cv::Mat distCoeff1 = (cv::Mat_(5, 1) << 0.0, 0.0, 0.0, 0.0, 0.0); 92 | cv::Mat R = cv::Mat::eye(3, 3, CV_64FC1); 93 | cv::Mat t = (cv::Mat_(3, 1) << -base_line_, 0, 0); 94 | Q_ = cv::Mat::eye(4, 4, CV_64FC1); 95 | 96 | cv::Mat R1 = cv::Mat::eye(3, 3, CV_64FC1); 97 | cv::Mat R2 = cv::Mat::eye(3, 3, CV_64FC1); 98 | cv::Mat P1 = cv::Mat::eye(3, 4, CV_64FC1); 99 | cv::Mat P2 = cv::Mat::eye(3, 4, CV_64FC1); 100 | printf("Defining matrices\n"); 101 | cv::Size s; 102 | s.height = 480; 103 | s.width = 752; 104 | cv::stereoRectify(K, distCoeff1, K, distCoeff1, s, R, t, R1, R2, P1, P2, Q_); 105 | printf("stereoRectify\n"); 106 | }; 107 | // virtual ~Disparity2PCloud(); 108 | void DisparityCb(const sensor_msgs::ImageConstPtr &msg); 109 | }; 110 | } /* d2pc */ 111 | 112 | #endif /* end of include guard: __DISPARITY_TO_POINT_CLOUD_HPP__ */ 113 | -------------------------------------------------------------------------------- /launch/d2pcloud.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /launch/depth_map_fusion.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | disparity_to_point_cloud 4 | 0.0.0 5 | The disparity_to_point_cloud package 6 | 7 | 8 | Simone Guscetti 9 | Vilhjálmur Vilhjálmsson 10 | 11 | 12 | 13 | 14 | 15 | BSD 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | Simone Guscetti 27 | Vilhjálmur Vilhjálmsson 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | catkin 42 | roscpp 43 | sensor_msgs 44 | std_msgs 45 | libpcl-all-dev 46 | pcl_ros 
47 | cv_bridge 48 | tf 49 | libpcl-all 50 | roscpp 51 | sensor_msgs 52 | std_msgs 53 | cv_bridge 54 | pcl_ros 55 | tf 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /rviz/d2pc.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /TF1 10 | - /TF1/Frames1 11 | - /Image1 12 | - /depthmap1 13 | Splitter Ratio: 0.787791 14 | Tree Height: 279 15 | - Class: rviz/Selection 16 | Name: Selection 17 | - Class: rviz/Tool Properties 18 | Expanded: 19 | - /2D Pose Estimate1 20 | - /2D Nav Goal1 21 | - /Publish Point1 22 | Name: Tool Properties 23 | Splitter Ratio: 0.588679 24 | - Class: rviz/Views 25 | Expanded: 26 | - /Current View1 27 | Name: Views 28 | Splitter Ratio: 0.5 29 | - Class: rviz/Time 30 | Experimental: false 31 | Name: Time 32 | SyncMode: 0 33 | SyncSource: PointCloud2 34 | Visualization Manager: 35 | Class: "" 36 | Displays: 37 | - Alpha: 0.5 38 | Cell Size: 1 39 | Class: rviz/Grid 40 | Color: 160; 160; 164 41 | Enabled: true 42 | Line Style: 43 | Line Width: 0.03 44 | Value: Lines 45 | Name: Grid 46 | Normal Cell Count: 0 47 | Offset: 48 | X: 0 49 | Y: 0 50 | Z: 0 51 | Plane: XY 52 | Plane Cell Count: 10 53 | Reference Frame: 54 | Value: true 55 | - Class: rviz/TF 56 | Enabled: true 57 | Frame Timeout: 15 58 | Frames: 59 | All Enabled: false 60 | camera_optical_frame: 61 | Value: true 62 | fcu: 63 | Value: false 64 | fcu_utm: 65 | Value: false 66 | stereo_cam::stereo_camera_link: 67 | Value: true 68 | world: 69 | Value: true 70 | Marker Scale: 1 71 | Name: TF 72 | Show Arrows: true 73 | Show Axes: true 74 | Show Names: true 75 | Tree: 76 | world: 77 | fcu: 78 | {} 79 | fcu_utm: 80 | {} 81 | stereo_cam::stereo_camera_link: 82 | camera_optical_frame: 83 | {} 84 | Update Interval: 0 85 | Value: true 86 | - Class: rviz/Image 87 | Enabled: true 88 | Image Topic: /gazebo/omni_rig/cam0_image 89 | Max Value: 1 90 | Median window: 5 91 | Min Value: 0 92 | Name: Image 93 | Normalize Range: true 94 | Queue Size: 2 95 | Transport Hint: raw 96 | Unreliable: false 97 | Value: true 98 | - Class: rviz/Image 99 | Enabled: true 100 | Image Topic: /cam_1/image_raw 101 | Max Value: 1 102 | Median window: 5 103 | Min Value: 0 104 | Name: depthmap 105 | Normalize Range: true 106 | Queue Size: 2 107 | Transport Hint: raw 108 | Unreliable: false 109 | Value: true 110 | - Alpha: 1 111 | Autocompute Intensity Bounds: true 112 | Autocompute Value Bounds: 113 | Max Value: 10 114 | Min Value: -10 115 | Value: true 116 | Axis: Z 117 | Channel Name: intensity 118 | Class: rviz/PointCloud2 119 | Color: 255; 255; 255 120 | Color Transformer: Intensity 121 | Decay Time: 0 122 | Enabled: true 123 | Invert Rainbow: false 124 | Max Color: 255; 255; 255 125 | Max Intensity: 4096 126 | Min Color: 0; 0; 0 127 | Min Intensity: 0 128 | Name: PointCloud2 129 | Position Transformer: XYZ 130 | Queue Size: 10 131 | Selectable: true 132 | Size (Pixels): 3 133 | Size (m): 0.01 134 | Style: Flat Squares 135 | Topic: /omi_cam/point_cloud 136 | Unreliable: false 137 | Use Fixed Frame: true 138 | Use rainbow: true 139 | Value: true 140 | - Class: rviz/MarkerArray 141 | Enabled: true 142 | Marker Topic: /occupied_cells_vis_array 143 | Name: MarkerArray 144 | Namespaces: 145 | {} 146 | Queue Size: 100 147 | Value: true 148 | - Alpha: 1 149 | Autocompute Intensity 
Bounds: true 150 | Autocompute Value Bounds: 151 | Max Value: 10 152 | Min Value: -10 153 | Value: true 154 | Axis: Z 155 | Channel Name: intensity 156 | Class: rviz/PointCloud2 157 | Color: 255; 255; 255 158 | Color Transformer: "" 159 | Decay Time: 0 160 | Enabled: true 161 | Invert Rainbow: false 162 | Max Color: 255; 255; 255 163 | Max Intensity: 4096 164 | Min Color: 0; 0; 0 165 | Min Intensity: 0 166 | Name: PointCloud2 167 | Position Transformer: "" 168 | Queue Size: 10 169 | Selectable: true 170 | Size (Pixels): 3 171 | Size (m): 0.01 172 | Style: Flat Squares 173 | Topic: /omi_cam/plane_cloud 174 | Unreliable: false 175 | Use Fixed Frame: true 176 | Use rainbow: true 177 | Value: true 178 | Enabled: true 179 | Global Options: 180 | Background Color: 48; 48; 48 181 | Fixed Frame: world 182 | Frame Rate: 30 183 | Name: root 184 | Tools: 185 | - Class: rviz/Interact 186 | Hide Inactive Objects: true 187 | - Class: rviz/MoveCamera 188 | - Class: rviz/Select 189 | - Class: rviz/FocusCamera 190 | - Class: rviz/Measure 191 | - Class: rviz/SetInitialPose 192 | Topic: /initialpose 193 | - Class: rviz/SetGoal 194 | Topic: /move_base_simple/goal 195 | - Class: rviz/PublishPoint 196 | Single click: true 197 | Topic: /clicked_point 198 | Value: true 199 | Views: 200 | Current: 201 | Class: rviz/Orbit 202 | Distance: 2.62492 203 | Enable Stereo Rendering: 204 | Stereo Eye Separation: 0.06 205 | Stereo Focal Distance: 1 206 | Swap Stereo Eyes: false 207 | Value: false 208 | Focal Point: 209 | X: 0 210 | Y: 0 211 | Z: 0 212 | Name: Current View 213 | Near Clip Distance: 0.01 214 | Pitch: 0.5202 215 | Target Frame: camera_optical_frame 216 | Value: Orbit (rviz) 217 | Yaw: 6.23993 218 | Saved: ~ 219 | Window Geometry: 220 | Displays: 221 | collapsed: false 222 | Height: 1029 223 | Hide Left Dock: false 224 | Hide Right Dock: false 225 | Image: 226 | collapsed: false 227 | QMainWindow State: 
000000ff00000000fd00000004000000000000016a00000360fc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000028000001a6000000dd00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001d4000000d60000001600fffffffb0000001000640065007000740068006d0061007001000002b0000000d80000001600ffffff000000010000010f00000360fc0200000004fc0000002800000360000000b000fffffffa000000010100000002fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb0000000a0056006900650077007301000005200000010f0000010f00fffffffb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a0049006d00610067006503000003fc0000029d000001dd0000017afb0000001200530065006c0065006300740069006f006e010000025a000000b20000000000000000000000020000062f000000b0fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000062f00000059fc0100000002fc000000000000062f000002f600fffffffa000000010200000002fb0000001000640065007000740068006d006100700000000000ffffffff0000000000000000fb0000000800540069006d006501000003a90000003e0000003b00fffffffb0000000800540069006d00650100000000000004500000000000000000000003aa0000036000000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 228 | Selection: 229 | collapsed: false 230 | Time: 231 | collapsed: false 232 | Tool Properties: 233 | collapsed: false 234 | Views: 235 | collapsed: false 236 | Width: 1583 237 | X: 55 238 | Y: 14 239 | depthmap: 240 | collapsed: false 241 | -------------------------------------------------------------------------------- /src/depth_map_fusion.cpp: -------------------------------------------------------------------------------- 1 | /**************************************************************************** 2 | * 3 | * Copyright (c) 2016 PX4 Development Team. All rights reserved. 4 | * 5 | * Redistribution and use in source and binary forms, with or without 6 | * modification, are permitted provided that the following conditions 7 | * are met: 8 | * 9 | * 1. Redistributions of source code must retain the above copyright 10 | * notice, this list of conditions and the following disclaimer. 11 | * 2. Redistributions in binary form must reproduce the above copyright 12 | * notice, this list of conditions and the following disclaimer in 13 | * the documentation and/or other materials provided with the 14 | * distribution. 15 | * 3. Neither the name PX4 nor the names of its contributors may be 16 | * used to endorse or promote products derived from this software 17 | * without specific prior written permission. 18 | * 19 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 22 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE 23 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 24 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 25 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS 26 | * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED 27 | * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 28 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 29 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 30 | * POSSIBILITY OF SUCH DAMAGE. 31 | * 32 | ****************************************************************************/ 33 | /** 34 | * @file depth_map_fusion.cpp 35 | * 36 | * Class to perform a depth map fusion 37 | * 38 | * @author Vilhjálmur Vilhjálmsson 39 | */ 40 | 41 | #include "disparity_to_point_cloud/depth_map_fusion.hpp" 42 | 43 | namespace depth_map_fusion { 44 | 45 | // Callbacks 46 | void DepthMapFusion::DisparityCb1(const sensor_msgs::ImageConstPtr &msg) { 47 | cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8"); 48 | cropped_depth_1_ = cropToSquare(disparity->image, offset_x_, offset_y_); 49 | 50 | publishWithColor(msg, cropped_depth_1_, cropped_depth_1_pub_, 51 | RAINBOW_WITH_BLACK); 52 | } 53 | 54 | void DepthMapFusion::DisparityCb2(const sensor_msgs::ImageConstPtr &msg) { 55 | cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8"); 56 | cv::Mat rot_mat = rotateMat(disparity->image); 57 | cropped_depth_2_ = cropToSquare(rot_mat, -offset_x_, -offset_y_); 58 | 59 | publishWithColor(msg, cropped_depth_2_, cropped_depth_2_pub_, 60 | RAINBOW_WITH_BLACK); 61 | publishFusedDepthMap(msg); 62 | } 63 | 64 | void DepthMapFusion::MatchingScoreCb1(const sensor_msgs::ImageConstPtr &msg) { 65 | cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8"); 66 | cropped_score_1_ = cropToSquare(disparity->image, offset_x_, offset_y_); 67 | 68 | // cropped_score_1_grad_ should eventually have high values around horizontal 69 | // lines 70 | cv::GaussianBlur(cropped_score_1_, cropped_score_1_grad_, cv::Size(13, 13), 71 | 3.0); 72 | cv::Sobel(cropped_score_1_grad_, cropped_score_1_grad_, -1, 0, 2, 7, 0.03); 73 | threshold(cropped_score_1_grad_, cropped_score_1_grad_, 30, 255, 0); 74 | cv::GaussianBlur(cropped_score_1_grad_, cropped_score_1_grad_, 75 | cv::Size(21, 21), 10.0); 76 | cropped_score_1_grad_ = cropped_score_1_ + 2 * cropped_score_1_grad_; 77 | cropped_score_1_ = cropped_score_1_grad_; 78 | 79 | publishWithColor(msg, cropped_score_1_, cropped_score_1_pub_, GRAY_SCALE); 80 | } 81 | 82 | void DepthMapFusion::MatchingScoreCb2(const sensor_msgs::ImageConstPtr &msg) { 83 | cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8"); 84 | cv::Mat rot_mat = rotateMat(disparity->image); 85 | cropped_score_2_ = cropToSquare(rot_mat, -offset_x_, -offset_y_); 86 | 87 | // cropped_score_2_grad_ should eventually have high values around vertical 88 | // lines 89 | cv::GaussianBlur(cropped_score_2_, cropped_score_2_grad_, cv::Size(13, 13), 90 | 3.0); 91 | cv::Sobel(cropped_score_2_grad_, cropped_score_2_grad_, -1, 2, 0, 7, 0.03); 92 | threshold(cropped_score_2_grad_, cropped_score_2_grad_, 30, 255, 0); 93 | cv::GaussianBlur(cropped_score_2_grad_, cropped_score_2_grad_, 94 | cv::Size(21, 21), 10.0); 95 | cropped_score_2_grad_ = cropped_score_2_ + 2 * cropped_score_2_grad_; 96 | cropped_score_2_ = cropped_score_2_grad_; 97 | 98 | publishWithColor(msg, cropped_score_2_, cropped_score_2_pub_, GRAY_SCALE); 99 | } 100 | 101 | 
// Fuses together the last depthmaps and publishes
102 | // If messages don't arrive sequentially they may have different time stamps
103 | void DepthMapFusion::publishFusedDepthMap(
104 | const sensor_msgs::ImageConstPtr &msg) {
105 | cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8");
106 | disparity->image = cropToSquare(disparity->image);
107 |
108 | if (cropped_depth_1_.empty() || cropped_depth_2_.empty() ||
109 | cropped_score_1_.empty() || cropped_score_2_.empty()) {
110 | return; // Have not received all depth maps and scores
111 | }
112 |
113 | cropped_score_combined_ = cropped_score_1_;
114 | // For each pixel, calculate a new distance
115 | for (int i = 0; i < cropped_depth_2_.rows; ++i) {
116 | for (int j = 0; j < cropped_depth_2_.cols; ++j) {
117 | disparity->image.at<unsigned char>(i, j) = getFusedDistance(i, j);
118 | unsigned char combined_confidence =
119 | std::min(cropped_score_1_grad_.at<unsigned char>(i, j),
120 | cropped_score_2_grad_.at<unsigned char>(i, j));
121 | cropped_score_combined_.at<unsigned char>(i, j) = combined_confidence;
122 | }
123 | }
124 | cv::medianBlur(disparity->image, disparity->image, 3);
125 |
126 | publishWithColor(msg, cropped_score_combined_, cropped_score_combined_pub_,
127 | GRAY_SCALE);
128 |
129 | // Just remove the borders, probably overkill
130 | disparity->image = cropMat(disparity->image, 0, 40, 30, 10);
131 |
132 | publishWithColor(msg, disparity->image, grad_pub_, RAINBOW_WITH_BLACK);
133 |
134 | sensor_msgs::Image fused_image;
135 | disparity->toImageMsg(fused_image);
136 | fused_pub_.publish(fused_image);
137 |
138 | // Print out the error between the two score images
139 | // int curr_error = cv::norm(cropped_score_1_, cropped_depth_2_);
140 | // previous_errors_.push_front(curr_error);
141 | // if (previous_errors_.size() > 10) {
142 | // previous_errors_.pop_back();
143 | // double sum_error = std::accumulate(previous_errors_.begin(),
144 | // previous_errors_.end(), 0);
145 | // std::cout << sum_error / 10.0 << std::endl;
146 | // }
147 | }
148 |
149 | // Returns the fused depth for pixel (i,j)
150 | int DepthMapFusion::getFusedDistance(int i, int j) {
151 | int dist1 = cropped_depth_1_.at<unsigned char>(i, j);
152 | int dist2 = cropped_depth_2_.at<unsigned char>(i, j);
153 | int score1 = cropped_score_1_.at<unsigned char>(i, j);
154 | int score2 = cropped_score_2_.at<unsigned char>(i, j);
155 | int grad1 = cropped_score_1_grad_.at<unsigned char>(i, j);
156 | int grad2 = cropped_score_2_grad_.at<unsigned char>(i, j);
157 |
158 | // Choose which fusion function to use
159 | return gradFilter(dist1, dist2, score1, score2, grad1, grad2);
160 | }
161 |
162 | int DepthMapFusion::weightedAverage(int dist1, int dist2, int score1,
163 | int score2) {
164 | double weight1 = std::max(0.01, 1.0 - (0.5 * score1));
165 | double weight2 = std::max(0.01, 1.0 - (0.5 * score2));
166 | return (weight1 * dist1 + weight2 * dist2) / (weight1 + weight2);
167 | }
168 |
169 | int DepthMapFusion::maxDist(int dist1, int dist2, int score1, int score2) {
170 | // Lower value means larger distance
171 | return std::min(dist1, dist2);
172 | }
173 |
174 | int DepthMapFusion::maxDistUnlessBlack(int dist1, int dist2, int score1,
175 | int score2) {
176 | if (dist1 == 0 || dist2 == 0) {
177 | return std::max(dist1, dist2);
178 | }
179 | return std::min(dist1, dist2);
180 | }
181 |
182 | int DepthMapFusion::betterScore(int dist1, int dist2, int score1, int score2) {
183 | if (score1 < score2) {
184 | // A small score is better
185 | return dist1;
186 | }
187 | return dist2;
188 | }
189 |
190 | int DepthMapFusion::onlyGood1(int dist1, int dist2, int score1, int score2) {
191 | if (score2 < 50) {
192 | // A small score is better 193 | return dist2; 194 | } 195 | return 0; 196 | } 197 | 198 | int DepthMapFusion::onlyGoodAvg(int dist1, int dist2, int score1, int score2) { 199 | if (score1 < 100 && score2 < 100) { 200 | return (dist1 + dist2) / 2; 201 | } 202 | return 0; 203 | } 204 | 205 | int DepthMapFusion::overlap(int dist1, int dist2, int score1, int score2) { 206 | if (score1 < score2 && score1 < 20) { 207 | // A small score is better 208 | return 150; 209 | } else if (score2 < score1 && score2 < 20) { 210 | return 255; 211 | } 212 | return 0; 213 | } 214 | 215 | int DepthMapFusion::blackToWhite(int dist1, int dist2, int score1, int score2) { 216 | return 255 - score1; 217 | } 218 | 219 | int DepthMapFusion::gradFilter(int dist1, int dist2, int score1, int score2, 220 | int grad1, int grad2) { 221 | int thres = 100; 222 | int tooClose = 230; 223 | 224 | float relative_diff = float(dist1) / float(dist2); 225 | 226 | if (score1 < score2 && score1 < thres && dist1 < tooClose) { 227 | return dist1; 228 | } else if (score2 < score1 && score2 < thres && dist2 < tooClose) { 229 | return dist2; 230 | } else if (0.8 < relative_diff && relative_diff < 1.25 && 231 | score1 < 1.25 * thres && score2 < 1.25 * thres) { 232 | return float(dist1 + dist2) / 2.0; 233 | } 234 | return 0; 235 | } 236 | 237 | cv::Mat DepthMapFusion::cropMat(const cv::Mat &mat, int left, int right, 238 | int top, int bottom) { 239 | int num_cols = mat.cols - left - right; 240 | int num_rows = mat.rows - top - bottom; 241 | cv::Rect region = cv::Rect(left, top, num_cols, num_rows); 242 | return mat(region); 243 | } 244 | 245 | // Returns a new square matrix, which is the center of mat 246 | // Shrinks the larger dimension of mat until it is square 247 | cv::Mat DepthMapFusion::cropToSquare(const cv::Mat &mat, int offset_x, 248 | int offset_y) { 249 | cv::Rect region; 250 | int num_cols = mat.cols - std::abs(offset_x); 251 | int num_rows = mat.rows - std::abs(offset_y); 252 | int n = std::min(mat.cols, mat.rows) - 253 | std::max(std::abs(offset_x), std::abs(offset_y_)); 254 | 255 | if (num_cols < num_rows) { 256 | int start_col = std::max(0, offset_x); 257 | int start_row = std::max(0, offset_y + (num_rows - num_cols) / 2); 258 | region = cv::Rect(start_col, start_row, n, n); 259 | } else { 260 | int start_col = std::max(0, offset_x + (num_cols - num_rows) / 2); 261 | int start_row = std::max(0, offset_y); 262 | region = cv::Rect(start_col, start_row, n, n); 263 | } 264 | return mat(region); 265 | } 266 | 267 | // Rotate mat 90 deg clockwise 268 | cv::Mat DepthMapFusion::rotateMat(const cv::Mat &mat) { 269 | cv::Mat rot_mat; 270 | cv::transpose(mat, rot_mat); 271 | cv::flip(rot_mat, rot_mat, 1); 272 | return rot_mat; 273 | } 274 | 275 | void DepthMapFusion::publishWithColor(const sensor_msgs::ImageConstPtr &msg, 276 | const cv::Mat &mat, 277 | const ros::Publisher &publisher, 278 | int colormap) { 279 | if (colormap == GRAY_SCALE) { 280 | // publish original mat 281 | cv_bridge::CvImage out_msg; 282 | out_msg.header = msg->header; 283 | out_msg.encoding = "mono8"; 284 | out_msg.image = mat; 285 | publisher.publish(out_msg.toImageMsg()); 286 | return; 287 | } 288 | 289 | cv::Mat colored_mat; 290 | if (colormap == RAINBOW_WITH_BLACK) { 291 | colorizeDepth(mat, colored_mat); 292 | } else { 293 | cv::applyColorMap(mat, colored_mat, colormap); 294 | } 295 | 296 | cv_bridge::CvImage out_msg; 297 | out_msg.header = msg->header; 298 | out_msg.encoding = "rgb8"; 299 | out_msg.image = colored_mat; 300 | 301 | 
publisher.publish(out_msg.toImageMsg()); 302 | } 303 | 304 | // Colorizes gray scale from blue to red, whithout changing black pixels 305 | void DepthMapFusion::colorizeDepth(const cv::Mat &gray, cv::Mat &rgb) { 306 | double maxDisp = 255; 307 | float S = 1.f; 308 | float V = 1.f; 309 | 310 | rgb.create(gray.size(), CV_8UC3); 311 | rgb = cv::Scalar::all(0); 312 | 313 | if (maxDisp < 1) return; 314 | 315 | for (int y = 0; y < gray.rows; y++) { 316 | for (int x = 0; x < gray.cols; x++) { 317 | unsigned char d = 40 + 0.8 * gray.at(y, x); 318 | unsigned int H = 319 | 255 - ((unsigned char)maxDisp - d) * 280 / (unsigned char)maxDisp; 320 | unsigned int hi = (H / 60) % 6; 321 | 322 | float f = H / 60.f - H / 60; 323 | float p = V * (1 - S); 324 | float q = V * (1 - f * S); 325 | float t = V * (1 - (1 - f) * S); 326 | 327 | cv::Point3f res; 328 | 329 | if (hi == 0) // R = V, G = t, B = p 330 | res = cv::Point3f(p, t, V); 331 | if (hi == 1) // R = q, G = V, B = p 332 | res = cv::Point3f(p, V, q); 333 | if (hi == 2) // R = p, G = V, B = t 334 | res = cv::Point3f(t, V, p); 335 | if (hi == 3) // R = p, G = q, B = V 336 | res = cv::Point3f(V, q, p); 337 | if (hi == 4) // R = t, G = p, B = V 338 | res = cv::Point3f(V, p, t); 339 | if (hi == 5) // R = V, G = p, B = q 340 | res = cv::Point3f(q, p, V); 341 | 342 | unsigned char b = 343 | (unsigned char)(std::max(0.f, std::min(res.x, 1.f)) * 255.f); 344 | unsigned char g = 345 | (unsigned char)(std::max(0.f, std::min(res.y, 1.f)) * 255.f); 346 | unsigned char r = 347 | (unsigned char)(std::max(0.f, std::min(res.z, 1.f)) * 255.f); 348 | 349 | rgb.at >(y, x) = 350 | cv::Point3_(b, g, r); 351 | 352 | if (d == 40) { 353 | rgb.at >(y, x) = 354 | cv::Point3_(0, 0, 0); 355 | } 356 | } 357 | } 358 | } 359 | 360 | // void erode() { 361 | // cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8"); 362 | // cropped_score_1_ = cropToSquare(disparity->image, offset_x_, offset_y_); 363 | // cv::GaussianBlur(cropped_score_1_, cropped_score_1_grad_, cv::Size(5,5), 364 | // 10.0); 365 | // threshold(cropped_score_1_grad_, cropped_score_1_grad_, 50, 255, 1); 366 | // cv::GaussianBlur(cropped_score_1_grad_, cropped_score_1_grad_, 367 | // cv::Size(11,11), 10.0); 368 | // cv::Sobel(cropped_score_1_grad_, cropped_score_1_grad_, -1, 0, 2, 7, 0.03); 369 | // threshold(cropped_score_1_grad_, cropped_score_1_grad_, 50, 255, 0); 370 | 371 | // cv::Mat horizontalStructure = cv::getStructuringElement(cv::MORPH_RECT, 372 | // cv::Size(40,1)); 373 | // // Apply morphology operations 374 | // cv::erode(cropped_score_1_grad_, cropped_score_1_grad_, 375 | // horizontalStructure, cv::Point(-1, -1)); 376 | // cv::dilate(cropped_score_1_grad_, cropped_score_1_grad_, 377 | // horizontalStructure, cv::Point(-1, -1)); 378 | // cv::GaussianBlur(cropped_score_1_grad_, cropped_score_1_grad_, 379 | // cv::Size(21,21), 10.0); 380 | // cropped_score_1_grad_ = cropped_score_1_ + cropped_score_1_grad_; 381 | // } 382 | 383 | // cv::GaussianBlur(cropped_score_1_, cropped_score_1_grad_, cv::Size(5,5), 384 | // 10.0); 385 | // threshold(cropped_score_1_grad_, cropped_score_1_grad_, 50, 255, 1); 386 | // cv::Mat horizontalStructure = cv::getStructuringElement(cv::MORPH_RECT, 387 | // cv::Size(1,20)); 388 | // // Apply morphology operations 389 | // cv::erode(cropped_score_1_grad_, cropped_score_1_grad_, horizontalStructure, 390 | // cv::Point(-1, -1)); 391 | 392 | // cv::Mat kernelH(5, 5, CV_32F, float(0)); 393 | // kernelH.at(0,2) = 0.5f; 394 | // kernelH.at(1,2) = 0.5f; 395 | // 
kernelH.at(3,2) = 0.5f; 396 | // kernelH.at(4,2) = 0.5f; 397 | // kernelH.at(2,0) = -0.5f; 398 | // kernelH.at(2,1) = -0.5f; 399 | // kernelH.at(2,2) = 1.0f; 400 | // kernelH.at(2,3) = -0.5f; 401 | // kernelH.at(2,4) = -0.5f; 402 | // cv::filter2D(disparity->image, disparity->image, -1, kernelH); 403 | 404 | } // depth_map_fusion 405 | -------------------------------------------------------------------------------- /src/depth_map_fusion_node.cpp: -------------------------------------------------------------------------------- 1 | /**************************************************************************** 2 | * 3 | * Copyright (c) 2016 PX4 Development Team. All rights reserved. 4 | * 5 | * Redistribution and use in source and binary forms, with or without 6 | * modification, are permitted provided that the following conditions 7 | * are met: 8 | * 9 | * 1. Redistributions of source code must retain the above copyright 10 | * notice, this list of conditions and the following disclaimer. 11 | * 2. Redistributions in binary form must reproduce the above copyright 12 | * notice, this list of conditions and the following disclaimer in 13 | * the documentation and/or other materials provided with the 14 | * distribution. 15 | * 3. Neither the name PX4 nor the names of its contributors may be 16 | * used to endorse or promote products derived from this software 17 | * without specific prior written permission. 18 | * 19 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 22 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 23 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 24 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 25 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS 26 | * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED 27 | * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 28 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 29 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 30 | * POSSIBILITY OF SUCH DAMAGE. 31 | * 32 | ****************************************************************************/ 33 | /** 34 | * @file depth_map_fusion_node.cpp 35 | * 36 | * Node which start the fusion class 37 | * 38 | * @author Vilhjálmur Vilhjálmsson 39 | */ 40 | 41 | #include "disparity_to_point_cloud/depth_map_fusion.hpp" 42 | 43 | #include 44 | 45 | int main(int argc, char *argv[]) { 46 | ros::init(argc, argv, "depth_map_fusion"); 47 | depth_map_fusion::DepthMapFusion depth_map_fusion; 48 | // Endless loop 49 | ros::spin(); 50 | return 0; 51 | } 52 | -------------------------------------------------------------------------------- /src/disparity_to_point_cloud.cpp: -------------------------------------------------------------------------------- 1 | /**************************************************************************** 2 | * 3 | * Copyright (c) 2016 PX4 Development Team. All rights reserved. 4 | * 5 | * Redistribution and use in source and binary forms, with or without 6 | * modification, are permitted provided that the following conditions 7 | * are met: 8 | * 9 | * 1. Redistributions of source code must retain the above copyright 10 | * notice, this list of conditions and the following disclaimer. 11 | * 2. 
Redistributions in binary form must reproduce the above copyright
12 | * notice, this list of conditions and the following disclaimer in
13 | * the documentation and/or other materials provided with the
14 | * distribution.
15 | * 3. Neither the name PX4 nor the names of its contributors may be
16 | * used to endorse or promote products derived from this software
17 | * without specific prior written permission.
18 | *
19 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
22 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
23 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
24 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
25 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
26 | * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
27 | * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
28 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
29 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 | * POSSIBILITY OF SUCH DAMAGE.
31 | *
32 | ****************************************************************************/
33 | /**
34 | * @file disparity_to_point_cloud.cpp
35 | *
36 | * Class to convert a disparity map into a point cloud
37 | *
38 | * @author Simone Guscetti
39 | * @author Vilhjálmur Vilhjálmsson
40 | */
41 |
42 | #include "disparity_to_point_cloud/disparity_to_point_cloud.hpp"
43 |
44 | namespace d2pc {
45 |
46 | void Disparity2PCloud::DisparityCb(const sensor_msgs::ImageConstPtr &msg) {
47 | printf("start \n");
48 | pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
49 | printf("new point cloud \n");
50 | cv_bridge::CvImagePtr disparity = cv_bridge::toCvCopy(*msg, "mono8");
51 | printf("toCvCopy \n");
52 | cv::Size s = disparity->image.size();
53 | cv::Vec3f *pv;
54 |
55 | cv::Mat median_filtered(s, CV_8U);
56 | // cv::Mat median_filtered = disparity->image;
57 | cv::medianBlur(disparity->image, median_filtered, 11);
58 | printf("medianBlur \n");
59 |
60 | cv::Mat real_disparity(s, CV_32FC1);
61 | median_filtered.convertTo(real_disparity, CV_32FC1, 1.0 / 8.0);
62 |
63 | cv::Mat image3D(s, CV_32FC3);
64 | cv::reprojectImageTo3D(real_disparity, image3D, Q_);
65 | printf("reprojectImageTo3D \n");
66 |
67 | // cv::Mat medianFilterd = disparity->image;
68 |
69 | // from 40 to width-40: to remove outliers on the border
70 | for (int v = 40; v < s.height - 40; v++) {
71 | pv = image3D.ptr<cv::Vec3f>(v);
72 | for (int u = 40; u < s.width - 40; u++) {
73 | cv::Vec3f value = pv[u];
74 | cloud->points.push_back(pcl::PointXYZ(value[0], value[1], value[2]));
75 | }
76 | }
77 | printf("cloud push_back \n");
78 |
79 | cloud->width = cloud->points.size();
80 | cloud->height = 1;
81 | cloud->is_dense = false;
82 | printf("Cloud size: %zu\n", cloud->points.size());
83 | // send point cloud
84 | sensor_msgs::PointCloud2 output;
85 | pcl::toROSMsg(*cloud, output);
86 | // does not work on the rosbag with the tf broadcaster for the camera position
87 | output.header.stamp = disparity->header.stamp;
88 | // TODO: create ros param for this
89 | output.header.frame_id = "/camera_optical_frame";
90 | p_cloud_pub_.publish(output);
91 | printf("publish\n");
92 | }
93 |
94 | } /* d2pc */
95 |
--------------------------------------------------------------------------------
/src/disparity_to_point_cloud_node.cpp:
--------------------------------------------------------------------------------
1 | /****************************************************************************
2 | *
3 | * Copyright (c) 2016 PX4 Development Team. All rights reserved.
4 | *
5 | * Redistribution and use in source and binary forms, with or without
6 | * modification, are permitted provided that the following conditions
7 | * are met:
8 | *
9 | * 1. Redistributions of source code must retain the above copyright
10 | * notice, this list of conditions and the following disclaimer.
11 | * 2. Redistributions in binary form must reproduce the above copyright
12 | * notice, this list of conditions and the following disclaimer in
13 | * the documentation and/or other materials provided with the
14 | * distribution.
15 | * 3. Neither the name PX4 nor the names of its contributors may be
16 | * used to endorse or promote products derived from this software
17 | * without specific prior written permission.
18 | *
19 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
22 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
23 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
24 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
25 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
26 | * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
27 | * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
28 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
29 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 | * POSSIBILITY OF SUCH DAMAGE.
31 | *
32 | ****************************************************************************/
33 | /**
34 | * @file disparity_to_point_cloud_node.cpp
35 | *
36 | * Node which starts the conversion class
37 | *
38 | * @author Simone Guscetti
39 | * @author Vilhjálmur Vilhjálmsson
40 | */
41 |
42 | #include "disparity_to_point_cloud/disparity_to_point_cloud.hpp"
43 |
44 | #include
45 |
46 | int main(int argc, char *argv[]) {
47 | ros::init(argc, argv, "disparity_to_point_cloud");
48 | d2pc::Disparity2PCloud d2pcloud;
49 | // Endless loop
50 | ros::spin();
51 | return 0;
52 | }
53 |
--------------------------------------------------------------------------------