├── .gitignore ├── CMakeLists.txt ├── LICENSE ├── README.md ├── config ├── config_aracati2017.yaml ├── config_bruce.yaml └── config_sim.yaml ├── docker ├── compose.yaml └── diso.DockerFile ├── img ├── 1.gif └── 2.png ├── include └── cv_bridge_slam │ ├── cv_bridge.h │ └── rgb_colors.h ├── launch ├── aracati2017.launch ├── bruce.launch ├── marker.rviz ├── odom.rviz ├── sim.launch ├── sonar_odometry.rviz └── sonar_pointcloud.rviz ├── package.xml └── src ├── Frame.cpp ├── Frame.h ├── LocalMapping.cpp ├── LocalMapping.h ├── MapPoint.cpp ├── MapPoint.h ├── OptimizationType.cpp ├── OptimizationType.h ├── RepubGT.cpp ├── System.cpp ├── System.h ├── Track.cpp ├── Track.h ├── cv_bridge.cpp ├── direct_semidense.cpp ├── direct_semidense_ros.cpp ├── main.cpp ├── nanoflann.hpp ├── pyramid_test.cpp ├── rgb_colors.cpp ├── run_aracati2017.cpp └── traj_se3_align.cpp /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(direct_sonar_odometry) 3 | 4 | ## Compile as C++17, supported in ROS Kinetic and newer 5 | set(CMAKE_CXX_STANDARD 17) 6 | SET(ROS_BUILD_TYPE RelWithDebInfo) 7 | SET(CMAKE_BUILD_TYPE RelWithDebInfo) 8 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fopenmp") 9 | 10 | ## Find catkin macros and libraries 11 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) 12 | ## is used, also find other catkin packages 13 | find_package(catkin REQUIRED COMPONENTS 14 | geometry_msgs 15 | image_transport 16 | nav_msgs 17 | roscpp 18 | rospy 19 | sensor_msgs 20 | std_msgs 21 | tf 22 | image_transport 23 | ) 24 | #set(Eigen3_DIR ${PROJECT_SOURCE_DIR}/Thirdparty/eigen3.3.4) 25 | find_package(Eigen3 3.3 REQUIRED) 26 | #set(OpenCV_DIR /home/da/project/Thirdparty/opencv3.2/share/OpenCV) 27 | find_package(OpenCV 3.2 REQUIRED) 28 | find_package(PCL REQUIRED) 29 | find_package(fmt REQUIRED) 30 | ## System dependencies are found with CMake's conventions 31 | # find_package(Boost REQUIRED COMPONENTS system) 32 | 33 | 34 | ## Uncomment this if the package has a setup.py. This macro ensures 35 | ## modules and global scripts declared therein get installed 36 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html 37 | # catkin_python_setup() 38 | 39 | ################################################ 40 | ## Declare ROS messages, services and actions ## 41 | ################################################ 42 | 43 | ## To declare and build messages, services or actions from within this 44 | ## package, follow these steps: 45 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in 46 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). 47 | ## * In the file package.xml: 48 | ## * add a build_depend tag for "message_generation" 49 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET 50 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in 51 | ## but can be declared for certainty nonetheless: 52 | ## * add a exec_depend tag for "message_runtime" 53 | ## * In this file (CMakeLists.txt): 54 | ## * add "message_generation" and every package in MSG_DEP_SET to 55 | ## find_package(catkin REQUIRED COMPONENTS ...) 56 | ## * add "message_runtime" and every package in MSG_DEP_SET to 57 | ## catkin_package(CATKIN_DEPENDS ...) 
58 | ## * uncomment the add_*_files sections below as needed 59 | ## and list every .msg/.srv/.action file to be processed 60 | ## * uncomment the generate_messages entry below 61 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) 62 | 63 | ## Generate messages in the 'msg' folder 64 | # add_message_files( 65 | # FILES 66 | # Message1.msg 67 | # Message2.msg 68 | # ) 69 | 70 | ## Generate services in the 'srv' folder 71 | # add_service_files( 72 | # FILES 73 | # Service1.srv 74 | # Service2.srv 75 | # ) 76 | 77 | ## Generate actions in the 'action' folder 78 | # add_action_files( 79 | # FILES 80 | # Action1.action 81 | # Action2.action 82 | # ) 83 | 84 | ## Generate added messages and services with any dependencies listed here 85 | # generate_messages( 86 | # DEPENDENCIES 87 | # geometry_msgs# nav_msgs# sensor_msgs# std_msgs 88 | # ) 89 | 90 | ################################################ 91 | ## Declare ROS dynamic reconfigure parameters ## 92 | ################################################ 93 | 94 | ## To declare and build dynamic reconfigure parameters within this 95 | ## package, follow these steps: 96 | ## * In the file package.xml: 97 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" 98 | ## * In this file (CMakeLists.txt): 99 | ## * add "dynamic_reconfigure" to 100 | ## find_package(catkin REQUIRED COMPONENTS ...) 101 | ## * uncomment the "generate_dynamic_reconfigure_options" section below 102 | ## and list every .cfg file to be processed 103 | 104 | ## Generate dynamic reconfigure parameters in the 'cfg' folder 105 | # generate_dynamic_reconfigure_options( 106 | # cfg/DynReconf1.cfg 107 | # cfg/DynReconf2.cfg 108 | # ) 109 | 110 | ################################### 111 | ## catkin specific configuration ## 112 | ################################### 113 | ## The catkin_package macro generates cmake config files for your package 114 | ## Declare things to be passed to dependent projects 115 | ## INCLUDE_DIRS: uncomment this if your package contains header files 116 | ## LIBRARIES: libraries you create in this project that dependent projects also need 117 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need 118 | ## DEPENDS: system dependencies of this project that dependent projects also need 119 | catkin_package( 120 | # INCLUDE_DIRS include 121 | # LIBRARIES direct_sonar_odometry 122 | # CATKIN_DEPENDS geometry_msgs image_transport nav_msgs roscpp rospy sensor_msgs std_msgs 123 | # DEPENDS system_lib 124 | ) 125 | 126 | ########### 127 | ## Build ## 128 | ########### 129 | 130 | ## Specify additional locations of header files 131 | ## Your package locations should be listed before other locations 132 | include_directories( 133 | # include 134 | ${PROJECT_SOURCE_DIR}/Thirdparty/g2o/include 135 | ${PROJECT_SOURCE_DIR}/include/cv_bridge_slam 136 | ${catkin_INCLUDE_DIRS} 137 | ${PCL_INCLUDE_DIRS} 138 | /home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/Thirdparty/eigen3.3.4/include/eigen3 139 | ) 140 | link_directories( 141 | # ${PROJECT_SOURCE_DIR}/Thirdparty/g2o/lib 142 | ${PROJECT_SOURCE_DIR}/src/Thirdparty/g2o/lib 143 | ${PCL_LIBRARY_DIRS}) 144 | add_definitions(${PCL_DEFINITIONS}) 145 | set(G2O_LIBS g2o_core g2o_types_sba g2o_types_slam3d g2o_opengl_helper g2o_solver_csparse g2o_stuff g2o_csparse_extension) 146 | 147 | 148 | ## Declare a C++ library 149 | # add_library(${PROJECT_NAME} 150 | # src/${PROJECT_NAME}/direct_sonar_odometry.cpp 151 | # ) 152 | 153 | ## Add cmake target dependencies of the 
library 154 | ## as an example, code may need to be generated before libraries 155 | ## either from message generation or dynamic reconfigure 156 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 157 | 158 | ## Declare a C++ executable 159 | ## With catkin_make all packages are built within a single CMake context 160 | ## The recommended prefix ensures that target names across packages don't collide 161 | # add_executable(${PROJECT_NAME}_node src/direct_sonar_odometry_node.cpp) 162 | #add_executable(direct_sonar_odometry_node src/cv_bridge.cpp src/rgb_colors.cpp src/direct_semidense_ros.cpp 163 | # ) 164 | #target_link_libraries(direct_sonar_odometry_node ${catkin_LIBRARIES} ${OpenCV_LIBS} ${G2O_LIBS}) 165 | 166 | add_library(direct_sonar_odometry_lib src/cv_bridge.cpp src/rgb_colors.cpp 167 | src/System.cpp 168 | src/System.h 169 | src/Frame.cpp 170 | src/Frame.h 171 | src/MapPoint.cpp 172 | src/MapPoint.h 173 | src/Track.cpp 174 | src/Track.h 175 | src/OptimizationType.cpp 176 | src/OptimizationType.h 177 | src/nanoflann.hpp 178 | src/LocalMapping.cpp 179 | src/LocalMapping.h) 180 | target_link_libraries(direct_sonar_odometry_lib ${catkin_LIBRARIES} ${OpenCV_LIBS} ${G2O_LIBS} fmt::fmt-header-only) 181 | 182 | add_executable(aracati2017_node src/run_aracati2017.cpp) 183 | target_link_libraries(aracati2017_node direct_sonar_odometry_lib) 184 | 185 | #add_executable(sim_node src/run_sim.cpp) 186 | #target_link_libraries(sim_node direct_sonar_odometry_lib) 187 | 188 | 189 | add_executable(traj_align src/traj_se3_align.cpp) 190 | target_link_libraries(traj_align ${catkin_LIBRARIES} ${OpenCV_LIBS} ${G2O_LIBS} fmt::fmt-header-only) 191 | 192 | add_executable(repub_gt src/RepubGT.cpp) 193 | target_link_libraries(repub_gt ${catkin_LIBRARIES}) 194 | 195 | #add_executable(direct_sonar_odometry_test src/direct_semidense.cpp) 196 | #target_link_libraries(direct_sonar_odometry_test ${catkin_LIBRARIES} ${OpenCV_LIBS} ${G2O_LIBS}) 197 | # 198 | #add_executable(pyramid_test src/pyramid_test.cpp src/nanoflann.hpp) 199 | #target_link_libraries(pyramid_test ${catkin_LIBRARIES} ${OpenCV_LIBS} ${G2O_LIBS}) 200 | 201 | ## Rename C++ executable without prefix 202 | ## The above recommended prefix causes long target names, the following renames the 203 | ## target back to the shorter version for ease of user use 204 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" 205 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") 206 | 207 | ## Add cmake target dependencies of the executable 208 | ## same as for the library above 209 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 210 | 211 | ## Specify libraries to link a library or executable target against 212 | # target_link_libraries(${PROJECT_NAME}_node 213 | # ${catkin_LIBRARIES} 214 | # ) 215 | 216 | ############# 217 | ## Install ## 218 | ############# 219 | 220 | # all install targets should use catkin DESTINATION variables 221 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html 222 | 223 | ## Mark executable scripts (Python etc.) 
for installation
224 | ## in contrast to setup.py, you can choose the destination
225 | # catkin_install_python(PROGRAMS
226 | # scripts/my_python_script
227 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
228 | # )
229 | 
230 | ## Mark executables for installation
231 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
232 | # install(TARGETS ${PROJECT_NAME}_node
233 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
234 | # )
235 | 
236 | ## Mark libraries for installation
237 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
238 | # install(TARGETS ${PROJECT_NAME}
239 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
240 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
241 | # RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
242 | # )
243 | 
244 | ## Mark cpp header files for installation
245 | # install(DIRECTORY include/${PROJECT_NAME}/
246 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
247 | # FILES_MATCHING PATTERN "*.h"
248 | # PATTERN ".svn" EXCLUDE
249 | # )
250 | 
251 | ## Mark other files for installation (e.g. launch and bag files, etc.)
252 | # install(FILES
253 | # # myfile1
254 | # # myfile2
255 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
256 | # )
257 | 
258 | #############
259 | ## Testing ##
260 | #############
261 | 
262 | ## Add gtest based cpp test target and link libraries
263 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_direct_sonar_odometry.cpp)
264 | # if(TARGET ${PROJECT_NAME}-test)
265 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
266 | # endif()
267 | 
268 | ## Add folders to be run by python nosetests
269 | # catkin_add_nosetests(test)
270 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DISO: Direct Imaging Sonar Odometry
2 | ![](img/1.gif)
3 | 
4 | Welcome to the **Direct Imaging Sonar Odometry (DISO)** system repository!
5 | 
6 | DISO is an advanced sonar odometry framework developed to enhance the accuracy and reliability of underwater navigation. It estimates the relative transformation between two sonar frames by minimizing aggregated sonar intensity errors at points with high intensity gradients. Key features include:
7 | - **Direct Sonar Optimization**: Optimizes the transformation between two sonar frames by minimizing the overall acoustic intensity error.
8 | - **Multi-sensor Window Optimization**: Optimizes transformations across multiple frames for robust trajectory estimation.
9 | - **Data Association Strategy**: Efficiently matches corresponding sonar points to enhance accuracy.
10 | - **Acoustic Intensity Outlier Rejection**: Filters out anomalous sonar readings for improved reliability.
11 | 
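To make the direct objective concrete, the sketch below shows the kind of cost the description above refers to: pixels with strong intensity gradients in a reference sonar image are back-projected to 2D points using the fan geometry (range and bearing), re-projected into the current image under a candidate relative pose, and the squared intensity differences are accumulated. This is only an illustration with assumed conventions (image rows spanning range, columns spanning the horizontal FOV, nearest-neighbour intensity lookup, planar motion); it is not the repository's implementation, which presumably lives in `src/` (e.g. `Track.cpp`, `OptimizationType.cpp`).

```cpp
// Illustrative sketch only -- simplified and NOT the DISO source code.
// Assumed conventions: a fan-shaped sonar image whose columns span the
// horizontal field of view (bearing) and whose rows span range, matching the
// "FOV" and "Range" parameters in config/*.yaml.
#include <opencv2/core.hpp>
#include <Eigen/Dense>
#include <cmath>
#include <vector>

struct SonarModel {
  double range_max;  // metres ("Range" in the config)
  double fov_rad;    // radians ("FOV" in the config, converted from degrees)
  int rows, cols;    // sonar image size
};

// Back-project a pixel to a 2D point (x forward, y to port) in the sonar frame.
inline Eigen::Vector2d pixelToPoint(const SonarModel& m, const cv::Point& px) {
  const double bearing = (double(px.x) / m.cols - 0.5) * m.fov_rad;
  const double range = (1.0 - double(px.y) / m.rows) * m.range_max;
  return {range * std::cos(bearing), range * std::sin(bearing)};
}

// Project a 2D point in the sonar frame back to pixel coordinates.
inline cv::Point pointToPixel(const SonarModel& m, const Eigen::Vector2d& p) {
  const double range = p.norm();
  const double bearing = std::atan2(p.y(), p.x());
  const int u = int((bearing / m.fov_rad + 0.5) * m.cols);
  const int v = int((1.0 - range / m.range_max) * m.rows);
  return {u, v};
}

// Aggregated intensity error of high-gradient reference pixels re-projected
// into the current frame under a candidate 2D rigid transform (R, t).
double intensityError(const SonarModel& m, const cv::Mat& ref, const cv::Mat& cur,
                      const std::vector<cv::Point>& high_gradient_pixels,
                      const Eigen::Rotation2Dd& R, const Eigen::Vector2d& t) {
  double error = 0.0;
  for (const cv::Point& px : high_gradient_pixels) {
    const Eigen::Vector2d p_cur = R * pixelToPoint(m, px) + t;  // point in current frame
    const cv::Point q = pointToPixel(m, p_cur);
    if (q.x < 0 || q.y < 0 || q.x >= cur.cols || q.y >= cur.rows) continue;
    const double r = double(ref.at<uchar>(px)) - double(cur.at<uchar>(q));
    error += r * r;  // a robust loss / outlier rejection test would go here
  }
  return error;
}
```

In the system described above, a residual of this kind is presumably evaluated over an image pyramid and minimized jointly across a window of frames, with an outlier test on the intensity error; the `PyramidLayer`, `GradientThreshold` and `LossThreshold` parameters in the config files below correspond to those steps.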
12 | ## Linux Installation
13 | We recommend using our Docker Compose file to build DISO and reproduce the experimental results.
14 | 
15 | Installation has been tested on **Ubuntu 20.04** and **ROS Noetic**.
16 | 
17 | ### DISO Installation
18 | ```bash
19 | mkdir -p diso_ws/src
20 | cd diso_ws/src
21 | git clone https://github.com/SenseRoboticsLab/DISO.git
22 | cd ./DISO/docker
23 | xhost +
24 | docker compose up
25 | ```
26 | It may take a few minutes to build the Docker image. When it is done, an RViz window should appear as follows:
27 | ![](img/2.png)
28 | 
29 | ### Dataset Preparation
30 | Thanks to the authors of the [Aracati2017](https://github.com/matheusbg8/aracati2017) dataset for releasing their data. We updated the plot function to visualize our results on the aerial images.
31 | 
32 | Open a new terminal and follow the instructions below to play the data and visualize DISO:
33 | ```bash
34 | # go to the src folder of your catkin workspace
35 | cd diso_ws/src
36 | git clone https://github.com/SenseRoboticsLab/Aracati2017_DISO.git
37 | ```
38 | Then download the bag file from the [Aracati2017 Google Drive](https://drive.google.com/file/d/1dbpfd3jElTdHmnceKE5RL8hzU-BDYaW-/view?usp=sharing) and place it in the **diso_ws/src/Aracati2017_DISO/bags** folder.
39 | ```bash
40 | cd ./Aracati2017_DISO/docker
41 | docker compose up
42 | ```
43 | ## Paper
44 | For more information, please read our [paper](https://ieeexplore.ieee.org/document/10611064).
45 | 
46 | ## Citation
47 | ```
48 | @INPROCEEDINGS{10611064,
49 | author={Xu, Shida and Zhang, Kaicheng and Hong, Ziyang and Liu, Yuanchang and Wang, Sen},
50 | booktitle={2024 IEEE International Conference on Robotics and Automation (ICRA)},
51 | title={DISO: Direct Imaging Sonar Odometry},
52 | year={2024},
53 | pages={8573-8579},
54 | doi={10.1109/ICRA57147.2024.10611064}}
55 | 
56 | ```
57 | 
58 | ## License
59 | 
60 | DISO is released under the GPLv3 license. For commercial use of DISO, please contact: sen.wang@imperial.ac.uk
--------------------------------------------------------------------------------
/config/config_aracati2017.yaml:
--------------------------------------------------------------------------------
1 | %YAML:1.0
2 | 
3 | SonarTopic: "/son"
4 | #OdomTopic: "/pose_gt"
5 | OdomTopic: "/odom_pose"
6 | Range: 48.2896 # range was scaled by SIM3 alignment with GT because of the scale issue mentioned in the paper
7 | GradientThreshold: 170.0 # 0~255
8 | PyramidLayer: 3
9 | FOV: 130.0
10 | GradientInlierThreshold: 50.0
11 | LossThreshold: 100
12 | #transformation matrix from sonar to body frame
13 | Tbs: !!opencv-matrix
14 | rows: 4
15 | cols: 4
16 | dt: f
17 | # data: [ 0.996087228, -0.088375529, 0.000000000, 0.692565226,
18 | # 0.088375529, 0.996087228, -0.000000000, -0.961352574,
19 | # 0.000000000, 0.000000000, 1.000000000, -0.000000000,
20 | # 0.000000000, 0.000000000, 0.000000000, 1.000000000 ]
21 | data: [ 1.000000000, 0.000000000, 0.000000000, 0.0,
22 | 0.000000000, 1.000000000, 0.000000000, 0.0,
23 | 0.000000000, 0.000000000, 1.000000000, 0.0,
24 | 0.000000000, 0.000000000, 0.000000000, 1.0 ]
25 | # data: [ 1.000000000, 0.000000000, 0.000000000, 0.0, #bruce
26 | # 0.000000000, 0.000000000, -1.000000000, 0.0,
27 | # 0.000000000, 1.000000000, 0.000000000, 0.0,
28 | # 0.000000000, 0.000000000, 0.000000000, 1.0 ]
29 | #SonarTopic: "/rexrov/blueview_p900/sonar_image"
30 | #OdomTopic: "/rexrov/pose_gt"
31 | #Range: 10.0
32 | #GradientThreshold: 300.0
33 | #PyramidLayer: 3
34 | #FOV: 90.0
--------------------------------------------------------------------------------
/config/config_bruce.yaml:
--------------------------------------------------------------------------------
1 | %YAML:1.0
2 | 
3 | SonarTopic: "/bruce/slam/feature_extraction/feature_img"
4 | #OdomTopic: "/pose_gt"
5 | OdomTopic: "/bruce/slam/localization/odom"
6 | Range: 29.94
7 | GradientThreshold: 170.0 # 0~255
8 | PyramidLayer: 3
9 | FOV: 130.0
10 | GradientInlierThreshold: 50.0
11 | LossThreshold: 100
12 | #transformation matrix from sonar to body frame
13 | Tbs: !!opencv-matrix
14 | rows: 4
15 | cols: 4
16 | dt: f
17 | # data: [ 0.996087228, -0.088375529, 0.000000000, 0.692565226,
18 | # 0.088375529, 0.996087228, -0.000000000, -0.961352574,
19 | # 0.000000000, 0.000000000, 1.000000000, -0.000000000,
20 | # 0.000000000, 0.000000000, 0.000000000, 1.000000000 ]
21 | data: [ 1.000000000, 0.000000000, 0.000000000, 0.0,
22 | 0.000000000, 0.000000000, 1.000000000, 0.0,
23 | 0.000000000, -1.000000000, 0.000000000, 0.0,
24 | 0.000000000, 0.000000000, 0.000000000, 1.0 ]
25 | # data: [ 1.000000000, 0.000000000, 0.000000000, 0.0, #bruce
26 | # 0.000000000, 0.000000000, -1.000000000, 0.0,
27 | # 0.000000000, 1.000000000, 0.000000000, 0.0,
28 | # 0.000000000, 0.000000000, 0.000000000, 1.0 ]
29 | #SonarTopic: "/rexrov/blueview_p900/sonar_image"
30 | #OdomTopic: "/rexrov/pose_gt"
31 | #Range: 10.0
32 | #GradientThreshold: 300.0
33 | #PyramidLayer: 3
34 | #FOV: 90.0
--------------------------------------------------------------------------------
/config/config_sim.yaml:
--------------------------------------------------------------------------------
1 | %YAML:1.0
2 | 
3 | SonarTopic: "/rexrov/blueview_p900/sonar_image"
4 | OdomTopic: "/odometry/filtered"
5 | #OdomTopic: "/rexrov/pose_gt"
6 | Range: 10.0
7 | GradientThreshold: 100.0 # 0~255
8 | PyramidLayer: 3
9 | FOV: 90.0
10 | GradientInlierThreshold: 50.0
11 | LossThreshold: 50
12 | #transformation matrix from sonar to body frame
13 | Tbs: !!opencv-matrix
14 | rows: 4
15 | cols: 4
16 | dt: f
17 | # data: [ 0.996087228, -0.088375529, 0.000000000, 0.692565226,
18 | # 0.088375529, 0.996087228, -0.000000000, -0.961352574,
19 | # 0.000000000, 0.000000000, 1.000000000, -0.000000000,
20 | # 0.000000000, 0.000000000, 0.000000000, 1.000000000 ]
21 | data: [ 1.000000000, 0.000000000, 0.000000000, 1.15,
22 | 0.000000000, -1.000000000, 0.000000000, 0.0,
23 | 0.000000000, 0.000000000, -1.000000000, 0.3,
24 | 0.000000000, 0.000000000, 0.000000000, 1.0 ]
25 | # data: [ 1.000000000, 0.000000000, 0.000000000, 0.0, #bruce
26 | # 0.000000000, 0.000000000, -1.000000000, 0.0,
27 | # 0.000000000, 1.000000000, 0.000000000, 0.0,
28 | # 0.000000000, 0.000000000, 0.000000000, 1.0 ]
29 | #SonarTopic: "/rexrov/blueview_p900/sonar_image"
30 | #OdomTopic: "/rexrov/pose_gt"
31 | #Range: 10.0
32 | #GradientThreshold: 300.0
33 | #PyramidLayer: 3
34 | #FOV: 90.0
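The files in `config/` above use OpenCV's YAML conventions (the `%YAML:1.0` header and the `!!opencv-matrix` tag), so they can be read directly with `cv::FileStorage` rather than a generic YAML parser. The sketch below shows how such a file might be loaded; the field names match the configs above, but the loader itself is an illustration and not the repository's own code.

```cpp
// Minimal sketch: read one of the config/*.yaml files with cv::FileStorage.
// Illustrative only -- DISO's own configuration loader may differ.
#include <opencv2/core.hpp>
#include <iostream>
#include <string>

int main(int argc, char** argv) {
  const std::string path = argc > 1 ? argv[1] : "config/config_aracati2017.yaml";
  cv::FileStorage fs(path, cv::FileStorage::READ);
  if (!fs.isOpened()) {
    std::cerr << "cannot open " << path << std::endl;
    return 1;
  }

  std::string sonar_topic, odom_topic;
  double range = 0.0, gradient_threshold = 0.0, fov_deg = 0.0;
  int pyramid_layer = 0;
  cv::Mat Tbs;                       // 4x4 float, sonar-to-body extrinsics

  fs["SonarTopic"] >> sonar_topic;
  fs["OdomTopic"] >> odom_topic;
  fs["Range"] >> range;
  fs["GradientThreshold"] >> gradient_threshold;
  fs["PyramidLayer"] >> pyramid_layer;
  fs["FOV"] >> fov_deg;
  fs["Tbs"] >> Tbs;                  // !!opencv-matrix entries map to cv::Mat

  std::cout << "sonar topic: " << sonar_topic << ", range: " << range
            << " m, FOV: " << fov_deg << " deg\nTbs =\n" << Tbs << std::endl;
  return 0;
}
```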
--------------------------------------------------------------------------------
/docker/compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | diso_srv:
4 | image: diso # Specify your image name and tag here
5 | build:
6 | context: ./
7 | dockerfile: ./diso.DockerFile # Use the actual name of your Dockerfile
8 | volumes:
9 | - /tmp/.X11-unix:/tmp/.X11-unix
10 | - ~/.ssh:/home/da/.ssh
11 | environment:
12 | - DISPLAY=$DISPLAY
13 | - XAUTHORITY=$XAUTHORITY
14 | - NVIDIA_DRIVER_CAPABILITIES=all
15 | # ports:
16 | # - "2222:22"
17 | network_mode: host
18 | stdin_open: true
19 | tty: true
20 | command: bash -i -c "source /home/da/diso_ws/devel/setup.bash && roslaunch direct_sonar_odometry aracati2017.launch"
21 | 
22 | 
--------------------------------------------------------------------------------
/docker/diso.DockerFile:
--------------------------------------------------------------------------------
1 | FROM osrf/ros:noetic-desktop-full
2 | 
3 | # RUN
4 | RUN apt update && apt install vim -y
5 | SHELL ["/bin/bash", "-c"]
6 | 
7 | # setup clion remote development
8 | RUN apt-get update \
9 | && apt-get 
install -y ssh \ 10 | build-essential \ 11 | gcc \ 12 | g++ \ 13 | gdb \ 14 | clang \ 15 | make \ 16 | ninja-build \ 17 | cmake \ 18 | autoconf \ 19 | automake \ 20 | locales-all \ 21 | dos2unix \ 22 | rsync \ 23 | tar \ 24 | python \ 25 | && apt-get clean 26 | 27 | RUN useradd -m da \ 28 | && yes 123 | passwd da 29 | 30 | RUN usermod -s /bin/bash da 31 | 32 | # install ceres Dependencies 33 | 34 | # CMake 35 | RUN apt-get install cmake -y 36 | # google-glog + gflags 37 | RUN apt-get install libgoogle-glog-dev libgflags-dev -y 38 | # BLAS & LAPACK 39 | RUN apt-get install libatlas-base-dev -y 40 | # SuiteSparse and CXSparse (optional) 41 | RUN apt-get install libsuitesparse-dev -y 42 | # opencv boost eigen3 43 | RUN sudo apt install libopencv-dev libboost-dev libboost-filesystem-dev libeigen3-dev -y 44 | # ros dependencies 45 | RUN sudo apt-get update && sudo apt install ros-noetic-pcl-ros ros-noetic-tf2-sensor-msgs ros-noetic-image-geometry ros-noetic-cv-bridge -y 46 | # Clang ninja git 47 | RUN sudo apt install ninja-build clang-10 lldb-10 git -y 48 | 49 | # Build dependencies 50 | RUN sudo apt-get install -y --no-install-recommends apt-utils 51 | # RUN sudo apt-get install -y \ 52 | # build-essential cmake git libgtk2.0-dev pkg-config libavcodec-dev\ 53 | # libavformat-dev libswscale-dev python-dev python-numpy libtbb2 libtbb-dev\ 54 | # libjpeg-dev libpng-dev libtiff-dev libdc1394-22-dev \ 55 | # libboost-all-dev \ 56 | # libvtk6-dev libgtk-3-dev \ 57 | # libatlas-base-dev gfortran \ 58 | # libparmetis-dev \ 59 | # libtool 60 | 61 | RUN sudo apt install -y \ 62 | build-essential cmake git pkg-config libgtk-3-dev \ 63 | libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \ 64 | libxvidcore-dev libx264-dev libjpeg-dev libpng-dev libtiff-dev \ 65 | gfortran openexr libatlas-base-dev python3-dev python3-numpy \ 66 | libtbb2 libtbb-dev libdc1394-22-dev 67 | 68 | 69 | 70 | 71 | 72 | # RUN yes 123 | passwd root 73 | # RUN useradd -m da && yes 123 | passwd da 74 | 75 | RUN sudo apt install ninja-build clang-10 lldb-10 -y 76 | 77 | # install opencv 78 | WORKDIR /third_party 79 | RUN git clone https://github.com/opencv/opencv.git 80 | RUN git clone https://github.com/opencv/opencv_contrib.git 81 | WORKDIR /third_party/opencv 82 | RUN git checkout 1ac7baceff84768d583887fe7c7f1883e9f8e0cb 83 | WORKDIR /third_party/opencv_contrib 84 | RUN git checkout b8eca4161439f361ae4d5a398aab1495136c704b 85 | WORKDIR /third_party/opencv/build 86 | RUN cmake .. -GNinja\ 87 | -DCMAKE_CXX_COMPILER=/usr/bin/clang++-10\ 88 | -DCMAKE_C_COMPILER=/usr/bin/clang-10\ 89 | -DOPENCV_EXTRA_MODULES_PATH=/third_party/opencv_contrib/modules 90 | # -DCMAKE_INSTALL_PREFIX=/home/da/project/Thirdparty/opencv3.2 91 | RUN ninja && ninja install 92 | 93 | 94 | #install eigen 95 | WORKDIR /third_party 96 | RUN git clone https://gitlab.com/libeigen/eigen.git 97 | WORKDIR /third_party/eigen 98 | RUN git checkout 3dc3a0ea2d0773af4c0ffd7bbcb21c608e28fcef 99 | WORKDIR /third_party/eigen/build 100 | RUN cmake .. && make -j8 && make install 101 | 102 | #install fmt 103 | WORKDIR /third_party 104 | RUN git clone https://github.com/fmtlib/fmt.git 105 | WORKDIR /third_party/fmt 106 | RUN git checkout c240d98ffd37b65ee7c27185c62a2cbc093ff185 107 | WORKDIR /third_party/fmt/build 108 | RUN cmake .. 
&& make -j8 && make install 109 | 110 | #install g2o 111 | WORKDIR /third_party 112 | RUN git clone https://github.com/RainerKuemmerle/g2o.git 113 | WORKDIR /third_party/g2o 114 | RUN git checkout 21b7ce45c9c129879f800a7047d3e4be7251444f 115 | WORKDIR /third_party/g2o/build 116 | RUN cmake .. -DCMAKE_BUILD_TYPE=Release && make -j8 && make install 117 | 118 | #install pcl 119 | WORKDIR /third_party 120 | RUN git clone https://github.com/PointCloudLibrary/pcl.git 121 | WORKDIR /third_party/pcl 122 | RUN git checkout af3ce2530b7ae8ed083a3515168626c587a5bbcd 123 | WORKDIR /third_party/pcl/build 124 | RUN cmake .. -DCMAKE_BUILD_TYPE=Release && make -j8 && make install 125 | 126 | #install sophus 127 | WORKDIR /third_party 128 | RUN git clone https://github.com/strasdat/Sophus.git 129 | WORKDIR /third_party/Sophus 130 | RUN git checkout 49a7e1286910019f74fb4f0bb3e213c909f8e1b7 131 | WORKDIR /third_party/Sophus/build 132 | RUN cmake .. && make -j8 && make install 133 | 134 | # Install sudo 135 | RUN apt-get update && apt-get install -y sudo ros-noetic-pcl-conversions 136 | 137 | # # Create the user 'da' and set password '123' 138 | # RUN useradd -m -s /bin/bash da && echo 'da:123' | chpasswd 139 | 140 | # Give 'da' sudo privileges 141 | RUN usermod -aG sudo da 142 | 143 | # To avoid password prompts for sudo commands, you can set 'da' to have passwordless sudo: 144 | RUN echo "da ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers 145 | 146 | USER da 147 | WORKDIR /home/da/diso_ws/src 148 | RUN git clone https://github.com/SenseRoboticsLab/DISO.git 149 | WORKDIR /home/da/diso_ws/src/DISO 150 | RUN git checkout main 151 | WORKDIR /home/da/diso_ws 152 | RUN /bin/bash -c "source /opt/ros/noetic/setup.bash && catkin_make" 153 | # 154 | RUN echo "source /opt/ros/noetic/setup.bash" >> /home/da/.bashrc 155 | RUN echo "source /home/da/diso_ws/devel/setup.bash" >> /home/da/.bashrc 156 | 157 | CMD ["/bin/bash"] -------------------------------------------------------------------------------- /img/1.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SenseRoboticsLab/DISO/64d75b84c61f7295dc5f5b05d11fa1fb77af7003/img/1.gif -------------------------------------------------------------------------------- /img/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SenseRoboticsLab/DISO/64d75b84c61f7295dc5f5b05d11fa1fb77af7003/img/2.png -------------------------------------------------------------------------------- /include/cv_bridge_slam/cv_bridge.h: -------------------------------------------------------------------------------- 1 | /********************************************************************* 2 | * Software License Agreement (BSD License) 3 | * 4 | * Copyright (c) 2011, Willow Garage, Inc, 5 | * Copyright (c) 2015, Tal Regev. 6 | * All rights reserved. 7 | * 8 | * Redistribution and use in source and binary forms, with or without 9 | * modification, are permitted provided that the following conditions 10 | * are met: 11 | * 12 | * * Redistributions of source code must retain the above copyright 13 | * notice, this list of conditions and the following disclaimer. 14 | * * Redistributions in binary form must reproduce the above 15 | * copyright notice, this list of conditions and the following 16 | * disclaimer in the documentation and/or other materials provided 17 | * with the distribution. 
18 | * * Neither the name of the Willow Garage nor the names of its 19 | * contributors may be used to endorse or promote products derived 20 | * from this software without specific prior written permission. 21 | * 22 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 23 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 24 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 25 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 26 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 27 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 28 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 29 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 30 | * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 31 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 32 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 33 | * POSSIBILITY OF SUCH DAMAGE. 34 | *********************************************************************/ 35 | 36 | #ifndef CV_BRIDGE_CV_BRIDGE_H 37 | #define CV_BRIDGE_CV_BRIDGE_H 38 | 39 | #include 40 | #include 41 | #include 42 | #include 43 | #include 44 | #include 45 | #include 46 | #include 47 | 48 | namespace cv_bridge { 49 | 50 | class Exception : public std::runtime_error 51 | { 52 | public: 53 | Exception(const std::string& description) : std::runtime_error(description) {} 54 | }; 55 | 56 | class CvImage; 57 | 58 | typedef boost::shared_ptr CvImagePtr; 59 | typedef boost::shared_ptr CvImageConstPtr; 60 | 61 | //from: http://docs.opencv.org/modules/highgui/doc/reading_and_writing_images_and_video.html#Mat imread(const string& filename, int flags) 62 | typedef enum { 63 | BMP, DIB, 64 | JPG, JPEG, JPE, 65 | JP2, 66 | PNG, 67 | PBM, PGM, PPM, 68 | SR, RAS, 69 | TIFF, TIF, 70 | } Format; 71 | 72 | /** 73 | * \brief Image message class that is interoperable with sensor_msgs/Image but uses a 74 | * more convenient cv::Mat representation for the image data. 75 | */ 76 | class CvImage 77 | { 78 | public: 79 | std_msgs::Header header; //!< ROS header 80 | std::string encoding; //!< Image encoding ("mono8", "bgr8", etc.) 81 | cv::Mat image; //!< Image data for use with OpenCV 82 | 83 | /** 84 | * \brief Empty constructor. 85 | */ 86 | CvImage() {} 87 | 88 | /** 89 | * \brief Constructor. 90 | */ 91 | CvImage(const std_msgs::Header& header, const std::string& encoding, 92 | const cv::Mat& image = cv::Mat()) 93 | : header(header), encoding(encoding), image(image) 94 | { 95 | } 96 | 97 | /** 98 | * \brief Convert this message to a ROS sensor_msgs::Image message. 99 | * 100 | * The returned sensor_msgs::Image message contains a copy of the image data. 101 | */ 102 | sensor_msgs::ImagePtr toImageMsg() const; 103 | 104 | /** 105 | * dst_format is compress the image to desire format. 106 | * Default value is empty string that will convert to jpg format. 107 | * can be: jpg, jp2, bmp, png, tif at the moment 108 | * support this format from opencv: 109 | * http://docs.opencv.org/modules/highgui/doc/reading_and_writing_images_and_video.html#Mat imread(const string& filename, int flags) 110 | */ 111 | sensor_msgs::CompressedImagePtr toCompressedImageMsg(const Format dst_format = JPG) const; 112 | 113 | /** 114 | * \brief Copy the message data to a ROS sensor_msgs::Image message. 
115 | * 116 | * This overload is intended mainly for aggregate messages such as stereo_msgs::DisparityImage, 117 | * which contains a sensor_msgs::Image as a data member. 118 | */ 119 | void toImageMsg(sensor_msgs::Image& ros_image) const; 120 | 121 | /** 122 | * dst_format is compress the image to desire format. 123 | * Default value is empty string that will convert to jpg format. 124 | * can be: jpg, jp2, bmp, png, tif at the moment 125 | * support this format from opencv: 126 | * http://docs.opencv.org/modules/highgui/doc/reading_and_writing_images_and_video.html#Mat imread(const string& filename, int flags) 127 | */ 128 | void toCompressedImageMsg(sensor_msgs::CompressedImage& ros_image, const Format dst_format = JPG) const; 129 | 130 | 131 | typedef boost::shared_ptr Ptr; 132 | typedef boost::shared_ptr ConstPtr; 133 | 134 | protected: 135 | boost::shared_ptr tracked_object_; // for sharing ownership 136 | 137 | /// @cond DOXYGEN_IGNORE 138 | friend 139 | CvImageConstPtr toCvShare(const sensor_msgs::Image& source, 140 | const boost::shared_ptr& tracked_object, 141 | const std::string& encoding); 142 | /// @endcond 143 | }; 144 | 145 | 146 | /** 147 | * \brief Convert a sensor_msgs::Image message to an OpenCV-compatible CvImage, copying the 148 | * image data. 149 | * 150 | * \param source A shared_ptr to a sensor_msgs::Image message 151 | * \param encoding The desired encoding of the image data, one of the following strings: 152 | * - \c "mono8" 153 | * - \c "bgr8" 154 | * - \c "bgra8" 155 | * - \c "rgb8" 156 | * - \c "rgba8" 157 | * - \c "mono16" 158 | * 159 | * If \a encoding is the empty string (the default), the returned CvImage has the same encoding 160 | * as \a source. 161 | */ 162 | CvImagePtr toCvCopy(const sensor_msgs::ImageConstPtr& source, 163 | const std::string& encoding = std::string()); 164 | 165 | CvImagePtr toCvCopy(const sensor_msgs::CompressedImageConstPtr& source, 166 | const std::string& encoding = std::string()); 167 | 168 | /** 169 | * \brief Convert a sensor_msgs::Image message to an OpenCV-compatible CvImage, copying the 170 | * image data. 171 | * 172 | * \param source A sensor_msgs::Image message 173 | * \param encoding The desired encoding of the image data, one of the following strings: 174 | * - \c "mono8" 175 | * - \c "bgr8" 176 | * - \c "bgra8" 177 | * - \c "rgb8" 178 | * - \c "rgba8" 179 | * - \c "mono16" 180 | * 181 | * If \a encoding is the empty string (the default), the returned CvImage has the same encoding 182 | * as \a source. 183 | * If the source is 8bit and the encoding 16 or vice-versa, a scaling is applied (65535/255 and 184 | * 255/65535 respectively). Otherwise, no scaling is applied and the rules from the convertTo OpenCV 185 | * function are applied (capping): http://docs.opencv.org/modules/core/doc/basic_structures.html#mat-convertto 186 | */ 187 | CvImagePtr toCvCopy(const sensor_msgs::Image& source, 188 | const std::string& encoding = std::string()); 189 | 190 | CvImagePtr toCvCopy(const sensor_msgs::CompressedImage& source, 191 | const std::string& encoding = std::string()); 192 | 193 | /** 194 | * \brief Convert an immutable sensor_msgs::Image message to an OpenCV-compatible CvImage, sharing 195 | * the image data if possible. 196 | * 197 | * If the source encoding and desired encoding are the same, the returned CvImage will share 198 | * the image data with \a source without copying it. The returned CvImage cannot be modified, as that 199 | * could modify the \a source data. 
200 | * 201 | * \param source A shared_ptr to a sensor_msgs::Image message 202 | * \param encoding The desired encoding of the image data, one of the following strings: 203 | * - \c "mono8" 204 | * - \c "bgr8" 205 | * - \c "bgra8" 206 | * - \c "rgb8" 207 | * - \c "rgba8" 208 | * - \c "mono16" 209 | * 210 | * If \a encoding is the empty string (the default), the returned CvImage has the same encoding 211 | * as \a source. 212 | */ 213 | CvImageConstPtr toCvShare(const sensor_msgs::ImageConstPtr& source, 214 | const std::string& encoding = std::string()); 215 | 216 | /** 217 | * \brief Convert an immutable sensor_msgs::Image message to an OpenCV-compatible CvImage, sharing 218 | * the image data if possible. 219 | * 220 | * If the source encoding and desired encoding are the same, the returned CvImage will share 221 | * the image data with \a source without copying it. The returned CvImage cannot be modified, as that 222 | * could modify the \a source data. 223 | * 224 | * This overload is useful when you have a shared_ptr to a message that contains a 225 | * sensor_msgs::Image, and wish to share ownership with the containing message. 226 | * 227 | * \param source The sensor_msgs::Image message 228 | * \param tracked_object A shared_ptr to an object owning the sensor_msgs::Image 229 | * \param encoding The desired encoding of the image data, one of the following strings: 230 | * - \c "mono8" 231 | * - \c "bgr8" 232 | * - \c "bgra8" 233 | * - \c "rgb8" 234 | * - \c "rgba8" 235 | * - \c "mono16" 236 | * 237 | * If \a encoding is the empty string (the default), the returned CvImage has the same encoding 238 | * as \a source. 239 | */ 240 | CvImageConstPtr toCvShare(const sensor_msgs::Image& source, 241 | const boost::shared_ptr& tracked_object, 242 | const std::string& encoding = std::string()); 243 | 244 | /** 245 | * \brief Convert a CvImage to another encoding using the same rules as toCvCopy 246 | */ 247 | CvImagePtr cvtColor(const CvImageConstPtr& source, 248 | const std::string& encoding); 249 | 250 | struct CvtColorForDisplayOptions { 251 | CvtColorForDisplayOptions() : 252 | do_dynamic_scaling(false), 253 | min_image_value(0.0), 254 | max_image_value(0.0), 255 | colormap(-1), 256 | bg_label(-1) {} 257 | bool do_dynamic_scaling; 258 | double min_image_value; 259 | double max_image_value; 260 | int colormap; 261 | int bg_label; 262 | }; 263 | 264 | 265 | /** 266 | * \brief Converts an immutable sensor_msgs::Image message to another CvImage for display purposes, 267 | * using practical conversion rules if needed. 268 | * 269 | * Data will be shared between input and output if possible. 270 | * 271 | * Recall: sensor_msgs::image_encodings::isColor and isMono tell whether an image contains R,G,B,A, mono 272 | * (or any combination/subset) with 8 or 16 bit depth. 273 | * 274 | * The following rules apply: 275 | * - if the output encoding is empty, the fact that the input image is mono or multiple-channel is 276 | * preserved in the ouput image. The bit depth will be 8. it tries to convert to BGR no matter what 277 | * encoding image is passed. 278 | * - if the output encoding is not empty, it must have sensor_msgs::image_encodings::isColor and 279 | * isMono return true. It must also be 8 bit in depth 280 | * - if the input encoding is an OpenCV format (e.g. 8UC1), and if we have 1,3 or 4 channels, it is 281 | * respectively converted to mono, BGR or BGRA. 
282 | * - if the input encoding is 32SC1, this estimate that image as label image and will convert it as 283 | * bgr image with different colors for each label. 284 | * 285 | * \param source A shared_ptr to a sensor_msgs::Image message 286 | * \param encoding Either an encoding string that returns true in sensor_msgs::image_encodings::isColor 287 | * isMono or the empty string as explained above. 288 | * \param options (cv_bridge::CvtColorForDisplayOptions) Options to convert the source image with. 289 | * - do_dynamic_scaling If true, the image is dynamically scaled between its minimum and maximum value 290 | * before being converted to its final encoding. 291 | * - min_image_value Independently from do_dynamic_scaling, if min_image_value and max_image_value are 292 | * different, the image is scaled between these two values before being converted to its final encoding. 293 | * - max_image_value Maximum image value 294 | * - colormap Colormap which the source image converted with. 295 | */ 296 | CvImageConstPtr cvtColorForDisplay(const CvImageConstPtr& source, 297 | const std::string& encoding = std::string(), 298 | const CvtColorForDisplayOptions options = CvtColorForDisplayOptions()); 299 | 300 | /** 301 | * \brief Get the OpenCV type enum corresponding to the encoding. 302 | * 303 | * For example, "bgr8" -> CV_8UC3, "32FC1" -> CV_32FC1, and "32FC10" -> CV_32FC10. 304 | */ 305 | int getCvType(const std::string& encoding); 306 | 307 | } // namespace cv_bridge 308 | 309 | 310 | // CvImage as a first class message type 311 | 312 | // The rest of this file hooks into the roscpp serialization API to make CvImage 313 | // a first-class message type you can publish and subscribe to directly. 314 | // Unfortunately this doesn't yet work with image_transport, so don't rewrite all 315 | // your callbacks to use CvImage! It might be useful for specific tasks, like 316 | // processing bag files. 317 | 318 | /// @cond DOXYGEN_IGNORE 319 | namespace ros { 320 | 321 | namespace message_traits { 322 | 323 | template<> struct MD5Sum 324 | { 325 | static const char* value() { return MD5Sum::value(); } 326 | static const char* value(const cv_bridge::CvImage&) { return value(); } 327 | 328 | static const uint64_t static_value1 = MD5Sum::static_value1; 329 | static const uint64_t static_value2 = MD5Sum::static_value2; 330 | 331 | // If the definition of sensor_msgs/Image changes, we'll get a compile error here. 332 | ROS_STATIC_ASSERT(MD5Sum::static_value1 == 0x060021388200f6f0ULL); 333 | ROS_STATIC_ASSERT(MD5Sum::static_value2 == 0xf447d0fcd9c64743ULL); 334 | }; 335 | 336 | template<> struct DataType 337 | { 338 | static const char* value() { return DataType::value(); } 339 | static const char* value(const cv_bridge::CvImage&) { return value(); } 340 | }; 341 | 342 | template<> struct Definition 343 | { 344 | static const char* value() { return Definition::value(); } 345 | static const char* value(const cv_bridge::CvImage&) { return value(); } 346 | }; 347 | 348 | template<> struct HasHeader : TrueType {}; 349 | 350 | } // namespace ros::message_traits 351 | 352 | namespace serialization { 353 | 354 | template<> struct Serializer 355 | { 356 | /// @todo Still ignoring endianness... 
357 | 358 | template 359 | inline static void write(Stream& stream, const cv_bridge::CvImage& m) 360 | { 361 | stream.next(m.header); 362 | stream.next((uint32_t)m.image.rows); // height 363 | stream.next((uint32_t)m.image.cols); // width 364 | stream.next(m.encoding); 365 | uint8_t is_bigendian = 0; 366 | stream.next(is_bigendian); 367 | stream.next((uint32_t)m.image.step); 368 | size_t data_size = m.image.step*m.image.rows; 369 | stream.next((uint32_t)data_size); 370 | if (data_size > 0) 371 | memcpy(stream.advance(data_size), m.image.data, data_size); 372 | } 373 | 374 | template 375 | inline static void read(Stream& stream, cv_bridge::CvImage& m) 376 | { 377 | stream.next(m.header); 378 | uint32_t height, width; 379 | stream.next(height); 380 | stream.next(width); 381 | stream.next(m.encoding); 382 | uint8_t is_bigendian; 383 | stream.next(is_bigendian); 384 | uint32_t step, data_size; 385 | stream.next(step); 386 | stream.next(data_size); 387 | int type = cv_bridge::getCvType(m.encoding); 388 | // Construct matrix pointing to the stream data, then copy it to m.image 389 | cv::Mat tmp((int)height, (int)width, type, stream.advance(data_size), (size_t)step); 390 | tmp.copyTo(m.image); 391 | } 392 | 393 | inline static uint32_t serializedLength(const cv_bridge::CvImage& m) 394 | { 395 | size_t data_size = m.image.step*m.image.rows; 396 | return serializationLength(m.header) + serializationLength(m.encoding) + 17 + data_size; 397 | } 398 | }; 399 | 400 | } // namespace ros::serialization 401 | 402 | namespace message_operations { 403 | 404 | template<> struct Printer 405 | { 406 | template 407 | static void stream(Stream&, const std::string&, const cv_bridge::CvImage&) 408 | { 409 | /// @todo Replicate printing for sensor_msgs::Image 410 | } 411 | }; 412 | 413 | } // namespace ros::message_operations 414 | 415 | } // namespace ros 416 | 417 | namespace cv_bridge { 418 | 419 | inline std::ostream& operator<<(std::ostream& s, const CvImage& m) 420 | { 421 | ros::message_operations::Printer::stream(s, "", m); 422 | return s; 423 | } 424 | 425 | } // namespace cv_bridge 426 | 427 | /// @endcond 428 | 429 | #endif 430 | -------------------------------------------------------------------------------- /include/cv_bridge_slam/rgb_colors.h: -------------------------------------------------------------------------------- 1 | // -*- mode: c++ -*- 2 | /********************************************************************* 3 | * Original color definition is at scikit-image distributed with 4 | * following license disclaimer: 5 | * 6 | * Copyright (C) 2011, the scikit-image team 7 | * All rights reserved. 8 | * 9 | * Redistribution and use in source and binary forms, with or without 10 | * modification, are permitted provided that the following conditions are 11 | * met: 12 | * 13 | * 1. Redistributions of source code must retain the above copyright 14 | * notice, this list of conditions and the following disclaimer. 15 | * 2. Redistributions in binary form must reproduce the above copyright 16 | * notice, this list of conditions and the following disclaimer in 17 | * the documentation and/or other materials provided with the 18 | * distribution. 19 | * 3. Neither the name of skimage nor the names of its contributors may be 20 | * used to endorse or promote products derived from this software without 21 | * specific prior written permission. 
22 | * 23 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 24 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 25 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 26 | * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 27 | * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 28 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 29 | * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 30 | * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 31 | * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 32 | * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 33 | * POSSIBILITY OF SUCH DAMAGE. 34 | *********************************************************************/ 35 | 36 | #ifndef CV_BRIDGE_RGB_COLORS_H_ 37 | #define CV_BRIDGE_RGB_COLORS_H_ 38 | 39 | #include 40 | 41 | 42 | namespace cv_bridge 43 | { 44 | 45 | namespace rgb_colors 46 | { 47 | 48 | /** 49 | * @brief 50 | * 146 rgb colors 51 | */ 52 | enum Colors { 53 | ALICEBLUE, 54 | ANTIQUEWHITE, 55 | AQUA, 56 | AQUAMARINE, 57 | AZURE, 58 | BEIGE, 59 | BISQUE, 60 | BLACK, 61 | BLANCHEDALMOND, 62 | BLUE, 63 | BLUEVIOLET, 64 | BROWN, 65 | BURLYWOOD, 66 | CADETBLUE, 67 | CHARTREUSE, 68 | CHOCOLATE, 69 | CORAL, 70 | CORNFLOWERBLUE, 71 | CORNSILK, 72 | CRIMSON, 73 | CYAN, 74 | DARKBLUE, 75 | DARKCYAN, 76 | DARKGOLDENROD, 77 | DARKGRAY, 78 | DARKGREEN, 79 | DARKGREY, 80 | DARKKHAKI, 81 | DARKMAGENTA, 82 | DARKOLIVEGREEN, 83 | DARKORANGE, 84 | DARKORCHID, 85 | DARKRED, 86 | DARKSALMON, 87 | DARKSEAGREEN, 88 | DARKSLATEBLUE, 89 | DARKSLATEGRAY, 90 | DARKSLATEGREY, 91 | DARKTURQUOISE, 92 | DARKVIOLET, 93 | DEEPPINK, 94 | DEEPSKYBLUE, 95 | DIMGRAY, 96 | DIMGREY, 97 | DODGERBLUE, 98 | FIREBRICK, 99 | FLORALWHITE, 100 | FORESTGREEN, 101 | FUCHSIA, 102 | GAINSBORO, 103 | GHOSTWHITE, 104 | GOLD, 105 | GOLDENROD, 106 | GRAY, 107 | GREEN, 108 | GREENYELLOW, 109 | GREY, 110 | HONEYDEW, 111 | HOTPINK, 112 | INDIANRED, 113 | INDIGO, 114 | IVORY, 115 | KHAKI, 116 | LAVENDER, 117 | LAVENDERBLUSH, 118 | LAWNGREEN, 119 | LEMONCHIFFON, 120 | LIGHTBLUE, 121 | LIGHTCORAL, 122 | LIGHTCYAN, 123 | LIGHTGOLDENRODYELLOW, 124 | LIGHTGRAY, 125 | LIGHTGREEN, 126 | LIGHTGREY, 127 | LIGHTPINK, 128 | LIGHTSALMON, 129 | LIGHTSEAGREEN, 130 | LIGHTSKYBLUE, 131 | LIGHTSLATEGRAY, 132 | LIGHTSLATEGREY, 133 | LIGHTSTEELBLUE, 134 | LIGHTYELLOW, 135 | LIME, 136 | LIMEGREEN, 137 | LINEN, 138 | MAGENTA, 139 | MAROON, 140 | MEDIUMAQUAMARINE, 141 | MEDIUMBLUE, 142 | MEDIUMORCHID, 143 | MEDIUMPURPLE, 144 | MEDIUMSEAGREEN, 145 | MEDIUMSLATEBLUE, 146 | MEDIUMSPRINGGREEN, 147 | MEDIUMTURQUOISE, 148 | MEDIUMVIOLETRED, 149 | MIDNIGHTBLUE, 150 | MINTCREAM, 151 | MISTYROSE, 152 | MOCCASIN, 153 | NAVAJOWHITE, 154 | NAVY, 155 | OLDLACE, 156 | OLIVE, 157 | OLIVEDRAB, 158 | ORANGE, 159 | ORANGERED, 160 | ORCHID, 161 | PALEGOLDENROD, 162 | PALEGREEN, 163 | PALEVIOLETRED, 164 | PAPAYAWHIP, 165 | PEACHPUFF, 166 | PERU, 167 | PINK, 168 | PLUM, 169 | POWDERBLUE, 170 | PURPLE, 171 | RED, 172 | ROSYBROWN, 173 | ROYALBLUE, 174 | SADDLEBROWN, 175 | SALMON, 176 | SANDYBROWN, 177 | SEAGREEN, 178 | SEASHELL, 179 | SIENNA, 180 | SILVER, 181 | SKYBLUE, 182 | SLATEBLUE, 183 | SLATEGRAY, 184 | SLATEGREY, 185 | SNOW, 186 | SPRINGGREEN, 187 | STEELBLUE, 188 | TAN, 189 | TEAL, 190 | THISTLE, 191 | TOMATO, 192 | TURQUOISE, 193 | VIOLET, 194 | WHEAT, 195 | WHITE, 196 | WHITESMOKE, 197 | YELLOW, 198 | YELLOWGREEN, 199 | }; 200 | 201 
| /** 202 | * @brief 203 | * get rgb color with enum. 204 | */ 205 | cv::Vec3d getRGBColor(const int color); 206 | 207 | } // namespace rgb_colors 208 | 209 | } // namespace cv_bridge 210 | 211 | #endif 212 | -------------------------------------------------------------------------------- /launch/aracati2017.launch: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /launch/bruce.launch: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /launch/marker.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /Grid1 10 | - /MarkerArray2 11 | - /Pose1 12 | - /Pose2 13 | Splitter Ratio: 0.5 14 | Tree Height: 746 15 | - Class: rviz/Selection 16 | Name: Selection 17 | - Class: rviz/Tool Properties 18 | Expanded: 19 | - /2D Pose Estimate1 20 | - /2D Nav Goal1 21 | - /Publish Point1 22 | Name: Tool Properties 23 | Splitter Ratio: 0.5886790156364441 24 | - Class: rviz/Views 25 | Expanded: 26 | - /Current View1 27 | Name: Views 28 | Splitter Ratio: 0.5 29 | - Class: rviz/Time 30 | Name: Time 31 | SyncMode: 0 32 | SyncSource: "" 33 | Preferences: 34 | PromptSaveOnExit: true 35 | Toolbars: 36 | toolButtonStyle: 2 37 | Visualization Manager: 38 | Class: "" 39 | Displays: 40 | - Alpha: 0.5 41 | Cell Size: 1 42 | Class: rviz/Grid 43 | Color: 160; 160; 164 44 | Enabled: true 45 | Line Style: 46 | Line Width: 0.029999999329447746 47 | Value: Lines 48 | Name: Grid 49 | Normal Cell Count: 0 50 | Offset: 51 | X: 0 52 | Y: 0 53 | Z: 0 54 | Plane: XY 55 | Plane Cell Count: 100 56 | Reference Frame: 57 | Value: true 58 | - Class: rviz/MarkerArray 59 | Enabled: true 60 | Marker Topic: /odom_pose_marker 61 | Name: MarkerArray 62 | Namespaces: 63 | odom_pose: true 64 | Queue Size: 100 65 | Value: true 66 | - Class: rviz/MarkerArray 67 | Enabled: true 68 | Marker Topic: /sonar_pose_marker 69 | Name: MarkerArray 70 | Namespaces: 71 | sonar_pose: true 72 | Queue Size: 100 73 | Value: true 74 | - Class: rviz/MarkerArray 75 | Enabled: true 76 | Marker Topic: /sonar_pose_opt_marker 77 | Name: MarkerArray 78 | Namespaces: 79 | sonar_pose: true 80 | Queue Size: 100 81 | Value: true 82 | - Alpha: 1 83 | Class: rviz/Axes 84 | Enabled: false 85 | Length: 1 86 | Name: Axes 87 | Radius: 0.10000000149011612 88 | Reference Frame: 89 | Show Trail: false 90 | Value: false 91 | - Class: rviz/TF 92 | Enabled: false 93 | Filter (blacklist): "" 94 | Filter (whitelist): "" 95 | Frame Timeout: 500 96 | Frames: 97 | All Enabled: true 98 | Marker Alpha: 1 99 | Marker Scale: 10 100 | Name: TF 101 | Show Arrows: true 102 | Show Axes: true 103 | Show Names: true 104 | Tree: 105 | {} 106 | Update Interval: 0 107 | Value: false 108 | - Alpha: 1 109 | Axes Length: 1 110 | Axes Radius: 0.10000000149011612 111 | Class: rviz/Pose 112 | Color: 255; 25; 0 113 | Enabled: false 114 | Head Length: 0.30000001192092896 115 | Head Radius: 0.10000000149011612 116 | Name: Pose 117 | Queue Size: 10 118 | Shaft Length: 1 119 | Shaft Radius: 0.05000000074505806 120 | Shape: Arrow 121 | Topic: /pose_gt 122 | Unreliable: false 123 | Value: false 124 | - Alpha: 1 125 | 
Axes Length: 1 126 | Axes Radius: 0.10000000149011612 127 | Class: rviz/Pose 128 | Color: 255; 25; 0 129 | Enabled: false 130 | Head Length: 0.30000001192092896 131 | Head Radius: 0.10000000149011612 132 | Name: Pose 133 | Queue Size: 10 134 | Shaft Length: 1 135 | Shaft Radius: 0.05000000074505806 136 | Shape: Axes 137 | Topic: /odom_pose 138 | Unreliable: false 139 | Value: false 140 | Enabled: true 141 | Global Options: 142 | Background Color: 48; 48; 48 143 | Default Light: true 144 | Fixed Frame: map 145 | Frame Rate: 30 146 | Name: root 147 | Tools: 148 | - Class: rviz/Interact 149 | Hide Inactive Objects: true 150 | - Class: rviz/MoveCamera 151 | - Class: rviz/Select 152 | - Class: rviz/FocusCamera 153 | - Class: rviz/Measure 154 | - Class: rviz/SetInitialPose 155 | Theta std deviation: 0.2617993950843811 156 | Topic: /initialpose 157 | X std deviation: 0.5 158 | Y std deviation: 0.5 159 | - Class: rviz/SetGoal 160 | Topic: /move_base_simple/goal 161 | - Class: rviz/PublishPoint 162 | Single click: true 163 | Topic: /clicked_point 164 | Value: true 165 | Views: 166 | Current: 167 | Class: rviz/Orbit 168 | Distance: 83.1622314453125 169 | Enable Stereo Rendering: 170 | Stereo Eye Separation: 0.05999999865889549 171 | Stereo Focal Distance: 1 172 | Swap Stereo Eyes: false 173 | Value: false 174 | Field of View: 0.7853981852531433 175 | Focal Point: 176 | X: -1.4861589670181274 177 | Y: 2.433211088180542 178 | Z: -0.6217615604400635 179 | Focal Shape Fixed Size: true 180 | Focal Shape Size: 0.05000000074505806 181 | Invert Z Axis: false 182 | Name: Current View 183 | Near Clip Distance: 0.009999999776482582 184 | Pitch: 1.5697963237762451 185 | Target Frame: 186 | Yaw: 0.3823881149291992 187 | Saved: ~ 188 | Window Geometry: 189 | Displays: 190 | collapsed: false 191 | Height: 1043 192 | Hide Left Dock: false 193 | Hide Right Dock: false 194 | QMainWindow State: 000000ff00000000fd00000004000000000000027b00000375fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000375000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000375fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003d00000375000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007800000003efc0100000002fb0000000800540069006d0065010000000000000780000003bc00fffffffb0000000800540069006d00650100000000000004500000000000000000000003ea0000037500000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 195 | Selection: 196 | collapsed: false 197 | Time: 198 | collapsed: false 199 | Tool Properties: 200 | collapsed: false 201 | Views: 202 | collapsed: false 203 | Width: 1920 204 | X: 1920 205 | Y: 0 206 | 
-------------------------------------------------------------------------------- /launch/odom.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /GT2 10 | - /Odom1 11 | - /Odom2 12 | Splitter Ratio: 0.5 13 | Tree Height: 717 14 | - Class: rviz/Selection 15 | Name: Selection 16 | - Class: rviz/Tool Properties 17 | Expanded: 18 | - /2D Pose Estimate1 19 | - /2D Nav Goal1 20 | - /Publish Point1 21 | Name: Tool Properties 22 | Splitter Ratio: 0.5886790156364441 23 | - Class: rviz/Views 24 | Expanded: 25 | - /Current View1 26 | Name: Views 27 | Splitter Ratio: 0.5 28 | - Class: rviz/Time 29 | Name: Time 30 | SyncMode: 0 31 | SyncSource: Image 32 | Preferences: 33 | PromptSaveOnExit: true 34 | Toolbars: 35 | toolButtonStyle: 2 36 | Visualization Manager: 37 | Class: "" 38 | Displays: 39 | - Alpha: 0.5 40 | Cell Size: 1 41 | Class: rviz/Grid 42 | Color: 160; 160; 164 43 | Enabled: false 44 | Line Style: 45 | Line Width: 0.029999999329447746 46 | Value: Lines 47 | Name: Grid 48 | Normal Cell Count: 0 49 | Offset: 50 | X: 0 51 | Y: 0 52 | Z: 0 53 | Plane: XY 54 | Plane Cell Count: 10 55 | Reference Frame: 56 | Value: false 57 | - Alpha: 1 58 | Axes Length: 2 59 | Axes Radius: 0.30000001192092896 60 | Class: rviz/Pose 61 | Color: 255; 25; 0 62 | Enabled: true 63 | Head Length: 0.30000001192092896 64 | Head Radius: 0.10000000149011612 65 | Name: GT 66 | Queue Size: 10 67 | Shaft Length: 1 68 | Shaft Radius: 0.05000000074505806 69 | Shape: Axes 70 | Topic: /gt_repub/gt_pose 71 | Unreliable: false 72 | Value: true 73 | - Alpha: 1 74 | Buffer Length: 1 75 | Class: rviz/Path 76 | Color: 238; 11; 251 77 | Enabled: true 78 | Head Diameter: 0.30000001192092896 79 | Head Length: 0.20000000298023224 80 | Length: 0.30000001192092896 81 | Line Style: Billboards 82 | Line Width: 0.30000001192092896 83 | Name: GT 84 | Offset: 85 | X: 0 86 | Y: 0 87 | Z: 0 88 | Pose Color: 255; 85; 255 89 | Pose Style: None 90 | Queue Size: 10 91 | Radius: 0.029999999329447746 92 | Shaft Diameter: 0.10000000149011612 93 | Shaft Length: 0.10000000149011612 94 | Topic: /gt_repub/gt_path 95 | Unreliable: false 96 | Value: true 97 | - Alpha: 1 98 | Axes Length: 3 99 | Axes Radius: 0.30000001192092896 100 | Class: rviz/Pose 101 | Color: 255; 25; 0 102 | Enabled: true 103 | Head Length: 0.30000001192092896 104 | Head Radius: 0.10000000149011612 105 | Name: Odom 106 | Queue Size: 10 107 | Shaft Length: 1 108 | Shaft Radius: 0.05000000074505806 109 | Shape: Axes 110 | Topic: /gt_repub/odom_pose 111 | Unreliable: false 112 | Value: true 113 | - Alpha: 1 114 | Buffer Length: 1 115 | Class: rviz/Path 116 | Color: 204; 0; 0 117 | Enabled: true 118 | Head Diameter: 0.30000001192092896 119 | Head Length: 0.20000000298023224 120 | Length: 0.30000001192092896 121 | Line Style: Billboards 122 | Line Width: 0.30000001192092896 123 | Name: Odom 124 | Offset: 125 | X: 0 126 | Y: 0 127 | Z: 0 128 | Pose Color: 255; 85; 255 129 | Pose Style: None 130 | Queue Size: 10 131 | Radius: 0.029999999329447746 132 | Shaft Diameter: 0.10000000149011612 133 | Shaft Length: 0.10000000149011612 134 | Topic: /gt_repub/odom_path 135 | Unreliable: false 136 | Value: true 137 | - Class: rviz/Image 138 | Enabled: true 139 | Image Topic: /son 140 | Max Value: 1 141 | Median window: 5 142 | Min Value: 0 143 | Name: Image 144 | Normalize Range: true 145 | Queue Size: 2 146 | 
Transport Hint: compressed 147 | Unreliable: false 148 | Value: true 149 | Enabled: true 150 | Global Options: 151 | Background Color: 255; 255; 255 152 | Default Light: true 153 | Fixed Frame: map 154 | Frame Rate: 30 155 | Name: root 156 | Tools: 157 | - Class: rviz/Interact 158 | Hide Inactive Objects: true 159 | - Class: rviz/MoveCamera 160 | - Class: rviz/Select 161 | - Class: rviz/FocusCamera 162 | - Class: rviz/Measure 163 | - Class: rviz/SetInitialPose 164 | Theta std deviation: 0.2617993950843811 165 | Topic: /initialpose 166 | X std deviation: 0.5 167 | Y std deviation: 0.5 168 | - Class: rviz/SetGoal 169 | Topic: /move_base_simple/goal 170 | - Class: rviz/PublishPoint 171 | Single click: true 172 | Topic: /clicked_point 173 | Value: true 174 | Views: 175 | Current: 176 | Class: rviz/Orbit 177 | Distance: 85.57176971435547 178 | Enable Stereo Rendering: 179 | Stereo Eye Separation: 0.05999999865889549 180 | Stereo Focal Distance: 1 181 | Swap Stereo Eyes: false 182 | Value: false 183 | Field of View: 0.7853981852531433 184 | Focal Point: 185 | X: 10.54726505279541 186 | Y: 0.6224119663238525 187 | Z: -2.173110246658325 188 | Focal Shape Fixed Size: true 189 | Focal Shape Size: 0.05000000074505806 190 | Invert Z Axis: false 191 | Name: Current View 192 | Near Clip Distance: 0.009999999776482582 193 | Pitch: -1.45979642868042 194 | Target Frame: 195 | Yaw: 4.097246170043945 196 | Saved: ~ 197 | Window Geometry: 198 | Displays: 199 | collapsed: false 200 | Height: 1043 201 | Hide Left Dock: false 202 | Hide Right Dock: true 203 | Image: 204 | collapsed: false 205 | QMainWindow State: 000000ff00000000fd00000004000000000000039100000375fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afc0000003d00000375000000e60100001cfa000000000100000003fb0000000a0049006d0061006700650100000000ffffffff0000005e00fffffffb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb000000100044006900730070006c0061007900730100000000000001560000015600fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000375fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d00000375000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007800000003efc0100000002fb0000000800540069006d0065010000000000000780000003bc00fffffffb0000000800540069006d00650100000000000004500000000000000000000003e90000037500000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 206 | Selection: 207 | collapsed: false 208 | Time: 209 | collapsed: false 210 | Tool Properties: 211 | collapsed: false 212 | Views: 213 | collapsed: true 214 | Width: 1920 215 | X: 0 216 | Y: 0 217 | -------------------------------------------------------------------------------- /launch/sim.launch: 
-------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /launch/sonar_odometry.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /MarkerArray1 10 | - /Pose1 11 | - /Odometry1 12 | - /direct_path1 13 | - /GT_path1 14 | - /GT_Pose1 15 | Splitter Ratio: 0.5 16 | Tree Height: 363 17 | - Class: rviz/Selection 18 | Name: Selection 19 | - Class: rviz/Tool Properties 20 | Expanded: 21 | - /2D Pose Estimate1 22 | - /2D Nav Goal1 23 | - /Publish Point1 24 | Name: Tool Properties 25 | Splitter Ratio: 0.5886790156364441 26 | - Class: rviz/Views 27 | Expanded: 28 | - /Current View1 29 | Name: Views 30 | Splitter Ratio: 0.5 31 | - Class: rviz/Time 32 | Name: Time 33 | SyncMode: 0 34 | SyncSource: Image 35 | Preferences: 36 | PromptSaveOnExit: true 37 | Toolbars: 38 | toolButtonStyle: 1 39 | Visualization Manager: 40 | Class: "" 41 | Displays: 42 | - Alpha: 0.5 43 | Cell Size: 1 44 | Class: rviz/Grid 45 | Color: 160; 160; 164 46 | Enabled: false 47 | Line Style: 48 | Line Width: 0.029999999329447746 49 | Value: Lines 50 | Name: Grid 51 | Normal Cell Count: 0 52 | Offset: 53 | X: 0 54 | Y: 0 55 | Z: 0 56 | Plane: XY 57 | Plane Cell Count: 10 58 | Reference Frame: 59 | Value: false 60 | - Class: rviz/MarkerArray 61 | Enabled: true 62 | Marker Topic: /direct_sonar/visualization_marker 63 | Name: MarkerArray 64 | Namespaces: 65 | frame: true 66 | landmark: true 67 | line: true 68 | Queue Size: 100 69 | Value: true 70 | - Class: rviz/Image 71 | Enabled: true 72 | Image Topic: /direct_sonar/image 73 | Max Value: 1 74 | Median window: 5 75 | Min Value: 0 76 | Name: Image 77 | Normalize Range: true 78 | Queue Size: 2 79 | Transport Hint: raw 80 | Unreliable: false 81 | Value: true 82 | - Alpha: 1 83 | Axes Length: 3 84 | Axes Radius: 0.5 85 | Class: rviz/Pose 86 | Color: 255; 25; 0 87 | Enabled: true 88 | Head Length: 0.30000001192092896 89 | Head Radius: 0.10000000149011612 90 | Name: Pose 91 | Queue Size: 10 92 | Shaft Length: 1 93 | Shaft Radius: 0.05000000074505806 94 | Shape: Axes 95 | Topic: /direct_sonar/pose 96 | Unreliable: false 97 | Value: true 98 | - Angle Tolerance: 0.009999999776482582 99 | Class: rviz/Odometry 100 | Covariance: 101 | Orientation: 102 | Alpha: 0.5 103 | Color: 255; 255; 127 104 | Color Style: Unique 105 | Frame: Local 106 | Offset: 1 107 | Scale: 1 108 | Value: true 109 | Position: 110 | Alpha: 0.30000001192092896 111 | Color: 204; 51; 204 112 | Scale: 1 113 | Value: true 114 | Value: false 115 | Enabled: false 116 | Keep: 99999999 117 | Name: Odometry 118 | Position Tolerance: 0.009999999776482582 119 | Queue Size: 10 120 | Shape: 121 | Alpha: 1 122 | Axes Length: 1 123 | Axes Radius: 0.10000000149011612 124 | Color: 255; 25; 0 125 | Head Length: 0.30000001192092896 126 | Head Radius: 0.10000000149011612 127 | Shaft Length: 1 128 | Shaft Radius: 0.05000000074505806 129 | Value: Axes 130 | Topic: /odometry/filtered 131 | Unreliable: false 132 | Value: false 133 | - Alpha: 1 134 | Buffer Length: 1 135 | Class: rviz/Path 136 | Color: 9; 8; 150 137 | Enabled: true 138 | Head Diameter: 0.30000001192092896 139 | Head Length: 0.20000000298023224 140 | Length: 0.30000001192092896 141 | Line Style: Billboards 142 | Line Width: 
0.4000000059604645 143 | Name: direct_path 144 | Offset: 145 | X: 0 146 | Y: 0 147 | Z: 0 148 | Pose Color: 255; 85; 255 149 | Pose Style: None 150 | Queue Size: 10 151 | Radius: 0.029999999329447746 152 | Shaft Diameter: 0.10000000149011612 153 | Shaft Length: 0.10000000149011612 154 | Topic: /direct_sonar/path 155 | Unreliable: false 156 | Value: true 157 | - Alpha: 1 158 | Buffer Length: 1 159 | Class: rviz/Path 160 | Color: 238; 11; 251 161 | Enabled: true 162 | Head Diameter: 0.30000001192092896 163 | Head Length: 0.20000000298023224 164 | Length: 0.30000001192092896 165 | Line Style: Billboards 166 | Line Width: 0.4000000059604645 167 | Name: GT_path 168 | Offset: 169 | X: 0 170 | Y: 0 171 | Z: 0 172 | Pose Color: 255; 85; 255 173 | Pose Style: None 174 | Queue Size: 10 175 | Radius: 0.029999999329447746 176 | Shaft Diameter: 0.10000000149011612 177 | Shaft Length: 0.10000000149011612 178 | Topic: /gt_repub/gt_path 179 | Unreliable: false 180 | Value: true 181 | - Alpha: 1 182 | Axes Length: 2 183 | Axes Radius: 0.5 184 | Class: rviz/Pose 185 | Color: 255; 25; 0 186 | Enabled: true 187 | Head Length: 0.30000001192092896 188 | Head Radius: 0.10000000149011612 189 | Name: GT_Pose 190 | Queue Size: 10 191 | Shaft Length: 1 192 | Shaft Radius: 0.05000000074505806 193 | Shape: Axes 194 | Topic: /gt_repub/gt_pose 195 | Unreliable: false 196 | Value: true 197 | Enabled: true 198 | Global Options: 199 | Background Color: 255; 255; 255 200 | Default Light: true 201 | Fixed Frame: map 202 | Frame Rate: 30 203 | Name: root 204 | Tools: 205 | - Class: rviz/Interact 206 | Hide Inactive Objects: true 207 | - Class: rviz/MoveCamera 208 | - Class: rviz/Select 209 | - Class: rviz/FocusCamera 210 | - Class: rviz/Measure 211 | - Class: rviz/SetInitialPose 212 | Theta std deviation: 0.2617993950843811 213 | Topic: /initialpose 214 | X std deviation: 0.5 215 | Y std deviation: 0.5 216 | - Class: rviz/SetGoal 217 | Topic: /move_base_simple/goal 218 | - Class: rviz/PublishPoint 219 | Single click: true 220 | Topic: /clicked_point 221 | Value: true 222 | Views: 223 | Current: 224 | Class: rviz/Orbit 225 | Distance: 60.05583572387695 226 | Enable Stereo Rendering: 227 | Stereo Eye Separation: 0.05999999865889549 228 | Stereo Focal Distance: 1 229 | Swap Stereo Eyes: false 230 | Value: false 231 | Field of View: 0.7853981852531433 232 | Focal Point: 233 | X: 27.760169982910156 234 | Y: -0.23651236295700073 235 | Z: 4.9050469398498535 236 | Focal Shape Fixed Size: false 237 | Focal Shape Size: 0.05000000074505806 238 | Invert Z Axis: false 239 | Name: Current View 240 | Near Clip Distance: 0.009999999776482582 241 | Pitch: -1.5697963237762451 242 | Target Frame: 243 | Yaw: 0.6322262287139893 244 | Saved: 245 | - Class: rviz/Orbit 246 | Distance: 36.340858459472656 247 | Enable Stereo Rendering: 248 | Stereo Eye Separation: 0.05999999865889549 249 | Stereo Focal Distance: 1 250 | Swap Stereo Eyes: false 251 | Value: false 252 | Field of View: 0.7853981852531433 253 | Focal Point: 254 | X: 26.07555389404297 255 | Y: -1.3463335037231445 256 | Z: 0.13411489129066467 257 | Focal Shape Fixed Size: false 258 | Focal Shape Size: 0.05000000074505806 259 | Invert Z Axis: false 260 | Name: Orbit 261 | Near Clip Distance: 0.009999999776482582 262 | Pitch: -1.5697963237762451 263 | Target Frame: 264 | Yaw: 4.735418319702148 265 | Window Geometry: 266 | Displays: 267 | collapsed: true 268 | Height: 1043 269 | Hide Left Dock: true 270 | Hide Right Dock: false 271 | Image: 272 | collapsed: false 273 | QMainWindow State: 
000000ff00000000fd000000040000000000000156000003e0fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f00700065007200740069006500730200000780000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730000000016000003e0000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000039500000385fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a0056006900650077007300000001cd000001ce000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000780000001e9fc0100000002fb0000000a0049006d0061006700650100000000000007800000005e00fffffffb0000000a00560069006500770073030000004e00000080000002e10000019700000003000003c00000003efc0100000002fb0000000800540069006d00650000000000000003c00000041800fffffffb0000000800540069006d0065010000000000000450000000000000000000000780000001f100000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000 274 | Selection: 275 | collapsed: false 276 | Time: 277 | collapsed: false 278 | Tool Properties: 279 | collapsed: false 280 | Views: 281 | collapsed: false 282 | Width: 1920 283 | X: 3840 284 | Y: 0 285 | -------------------------------------------------------------------------------- /launch/sonar_pointcloud.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /PointCloud21 10 | Splitter Ratio: 0.5 11 | Tree Height: 746 12 | - Class: rviz/Selection 13 | Name: Selection 14 | - Class: rviz/Tool Properties 15 | Expanded: 16 | - /2D Pose Estimate1 17 | - /2D Nav Goal1 18 | - /Publish Point1 19 | Name: Tool Properties 20 | Splitter Ratio: 0.5886790156364441 21 | - Class: rviz/Views 22 | Expanded: 23 | - /Current View1 24 | Name: Views 25 | Splitter Ratio: 0.5 26 | - Class: rviz/Time 27 | Name: Time 28 | SyncMode: 0 29 | SyncSource: PointCloud2 30 | Preferences: 31 | PromptSaveOnExit: true 32 | Toolbars: 33 | toolButtonStyle: 2 34 | Visualization Manager: 35 | Class: "" 36 | Displays: 37 | - Alpha: 0.5 38 | Cell Size: 1 39 | Class: rviz/Grid 40 | Color: 160; 160; 164 41 | Enabled: true 42 | Line Style: 43 | Line Width: 0.029999999329447746 44 | Value: Lines 45 | Name: Grid 46 | Normal Cell Count: 0 47 | Offset: 48 | X: 0 49 | Y: 0 50 | Z: 0 51 | Plane: XY 52 | Plane Cell Count: 10 53 | Reference Frame: 54 | Value: true 55 | - Alpha: 1 56 | Autocompute Intensity Bounds: true 57 | Autocompute Value Bounds: 58 | Max Value: 10 59 | Min Value: -10 60 | Value: true 61 | Axis: Z 62 | Channel Name: intensity 63 | Class: rviz/PointCloud2 64 | Color: 255; 255; 255 65 | Color Transformer: Intensity 66 | Decay Time: 0 67 | Enabled: true 68 | Invert Rainbow: false 69 | Max Color: 255; 255; 255 70 | Min Color: 0; 0; 0 71 | Name: PointCloud2 72 | Position Transformer: XYZ 73 | Queue Size: 10 74 | Selectable: true 75 | Size (Pixels): 3 
76 | Size (m): 0.10000000149011612 77 | Style: Flat Squares 78 | Topic: /direct_sonar/point_cloud 79 | Unreliable: false 80 | Use Fixed Frame: true 81 | Use rainbow: true 82 | Value: true 83 | - Class: rviz/Image 84 | Enabled: true 85 | Image Topic: /direct_sonar/image 86 | Max Value: 1 87 | Median window: 5 88 | Min Value: 0 89 | Name: Image 90 | Normalize Range: true 91 | Queue Size: 2 92 | Transport Hint: raw 93 | Unreliable: false 94 | Value: true 95 | - Class: rviz/TF 96 | Enabled: true 97 | Filter (blacklist): "" 98 | Filter (whitelist): "" 99 | Frame Timeout: 15 100 | Frames: 101 | All Enabled: true 102 | base_link: 103 | Value: true 104 | odom: 105 | Value: true 106 | son: 107 | Value: true 108 | Marker Alpha: 1 109 | Marker Scale: 1 110 | Name: TF 111 | Show Arrows: true 112 | Show Axes: true 113 | Show Names: true 114 | Tree: 115 | base_link: 116 | odom: 117 | son: 118 | {} 119 | Update Interval: 0 120 | Value: true 121 | Enabled: true 122 | Global Options: 123 | Background Color: 48; 48; 48 124 | Default Light: true 125 | Fixed Frame: odom 126 | Frame Rate: 30 127 | Name: root 128 | Tools: 129 | - Class: rviz/Interact 130 | Hide Inactive Objects: true 131 | - Class: rviz/MoveCamera 132 | - Class: rviz/Select 133 | - Class: rviz/FocusCamera 134 | - Class: rviz/Measure 135 | - Class: rviz/SetInitialPose 136 | Theta std deviation: 0.2617993950843811 137 | Topic: /initialpose 138 | X std deviation: 0.5 139 | Y std deviation: 0.5 140 | - Class: rviz/SetGoal 141 | Topic: /move_base_simple/goal 142 | - Class: rviz/PublishPoint 143 | Single click: true 144 | Topic: /clicked_point 145 | Value: true 146 | Views: 147 | Current: 148 | Class: rviz/Orbit 149 | Distance: 67.86705017089844 150 | Enable Stereo Rendering: 151 | Stereo Eye Separation: 0.05999999865889549 152 | Stereo Focal Distance: 1 153 | Swap Stereo Eyes: false 154 | Value: false 155 | Field of View: 0.7853981852531433 156 | Focal Point: 157 | X: 5.985866546630859 158 | Y: 3.3165905475616455 159 | Z: 10.671085357666016 160 | Focal Shape Fixed Size: true 161 | Focal Shape Size: 0.05000000074505806 162 | Invert Z Axis: false 163 | Name: Current View 164 | Near Clip Distance: 0.009999999776482582 165 | Pitch: 0.9897968769073486 166 | Target Frame: 167 | Yaw: 3.175398588180542 168 | Saved: ~ 169 | Window Geometry: 170 | Displays: 171 | collapsed: false 172 | Height: 1043 173 | Hide Left Dock: false 174 | Hide Right Dock: false 175 | Image: 176 | collapsed: false 177 | QMainWindow State: 
000000ff00000000fd00000004000000000000015600000375fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000375000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000039200000375fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fc0000003d00000375000000c10100001cfa000000000100000002fb0000000a0049006d0061006700650100000000ffffffff0000005e00fffffffb0000000a0056006900650077007301000006710000010f0000010000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007800000003efc0100000002fb0000000800540069006d0065010000000000000780000003bc00fffffffb0000000800540069006d006501000000000000045000000000000000000000028c0000037500000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 178 | Selection: 179 | collapsed: false 180 | Time: 181 | collapsed: false 182 | Tool Properties: 183 | collapsed: false 184 | Views: 185 | collapsed: false 186 | Width: 1920 187 | X: 0 188 | Y: 0 189 | -------------------------------------------------------------------------------- /package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | direct_sonar_odometry 4 | 0.0.0 5 | The direct_sonar_odometry package 6 | 7 | 8 | 9 | 10 | da 11 | 12 | 13 | 14 | 15 | 16 | TODO 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | catkin 52 | geometry_msgs 53 | image_transport 54 | nav_msgs 55 | roscpp 56 | rospy 57 | sensor_msgs 58 | std_msgs 59 | geometry_msgs 60 | image_transport 61 | nav_msgs 62 | roscpp 63 | rospy 64 | sensor_msgs 65 | std_msgs 66 | geometry_msgs 67 | image_transport 68 | nav_msgs 69 | roscpp 70 | rospy 71 | sensor_msgs 72 | std_msgs 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /src/Frame.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 
3 | // 4 | 5 | #include "Frame.h" 6 | #include "MapPoint.h" 7 | #include 8 | #include 9 | #include "opencv2/imgproc.hpp" 10 | #include "opencv2/imgcodecs.hpp" 11 | #include 12 | 13 | int Frame::mFrameNum = 0; 14 | 15 | Eigen::Vector3d 16 | sonar2Dto3D(const Eigen::Vector2d &p_pixel, double theta, double tx, double ty, double scale) 17 | { 18 | Eigen::Affine2d T_pixel_sonar = Eigen::Affine2d::Identity(); 19 | Eigen::Vector2d t(tx, ty); 20 | Eigen::Rotation2Dd R(theta); 21 | T_pixel_sonar.pretranslate(t).rotate(R).scale(scale); 22 | Eigen::Affine2d T_sonar_pixel = T_pixel_sonar.inverse(); 23 | Eigen::Vector2d p_sonar2D = T_sonar_pixel * p_pixel; 24 | Eigen::Vector3d p_sonar3D(p_sonar2D.x(), p_sonar2D.y(), 0); 25 | return p_sonar3D; 26 | } 27 | 28 | Eigen::Vector2d 29 | sonar3Dto2D(const Eigen::Vector3d &p_sonar3D, double theta, double tx, double ty, double scale) 30 | { 31 | Eigen::Affine2d T_pixel_sonar = Eigen::Affine2d::Identity(); 32 | Eigen::Vector2d t(tx, ty); 33 | Eigen::Rotation2Dd R(theta); 34 | T_pixel_sonar.pretranslate(t).rotate(R).scale(scale); 35 | Eigen::Vector2d p_sonar2D(p_sonar3D.x(), p_sonar3D.y()); 36 | Eigen::Vector2d p_pixel = T_pixel_sonar * p_sonar2D; 37 | 38 | return p_pixel; 39 | } 40 | 41 | double getPixelValue(const cv::Mat &img, double x, double y) 42 | { 43 | if (x < 0 || x >= img.cols - 1 || y < 0 || y >= img.rows - 1) { 44 | // Handle the out-of-bounds case. Here, we simply return 0, but you can adjust as needed. 45 | return 0.0; 46 | } 47 | uchar* data = &img.data[int(y) * img.step + int(x)]; 48 | double xx = x - floor(x); 49 | double yy = y - floor(y); 50 | return double( 51 | (1 - xx) * (1 - yy) * data[0] + xx * (1 - yy) * data[1] + (1 - xx) * yy * data[img.step] + 52 | xx * yy * data[img.step + 1]); 53 | } 54 | 55 | double getPatternPixelValue(const cv::Mat &img, double x, double y) 56 | { 57 | // major sample 58 | // {0,0} 59 | double x1 = x + 0; 60 | double y1 = y + 0; 61 | //minor sample 62 | //{0,-2} 63 | double x2 = x + 0; 64 | double y2 = y - 2; 65 | //{-1,-1} 66 | double x3 = x - 1; 67 | double y3 = y - 1; 68 | //{1,-1} 69 | double x4 = x + 1; 70 | double y4 = y - 1; 71 | //{-2,0} 72 | double x5 = x - 2; 73 | double y5 = y + 0; 74 | //{2,0} 75 | double x6 = x + 2; 76 | double y6 = y + 0; 77 | //{-1,1} 78 | double x7 = x - 1; 79 | double y7 = y + 1; 80 | //{1,1} 81 | double x8 = x + 1; 82 | double y8 = y + 1; 83 | //{0,2} 84 | double x9 = x + 0; 85 | double y9 = y + 2; 86 | // get the value of the pixel 87 | double v1 = getPixelValue(img, x1, y1); 88 | double v2 = getPixelValue(img, x2, y2); 89 | double v3 = getPixelValue(img, x3, y3); 90 | double v4 = getPixelValue(img, x4, y4); 91 | double v5 = getPixelValue(img, x5, y5); 92 | double v6 = getPixelValue(img, x6, y6); 93 | double v7 = getPixelValue(img, x7, y7); 94 | double v8 = getPixelValue(img, x8, y8); 95 | double v9 = getPixelValue(img, x9, y9); 96 | // weighted bilinear interpolation 97 | double value = v1 * (0.5) + (v2 + v3 + v4 + v5 + v6 + v7 + v8 + v9) * (0.5 / 8); 98 | 99 | return value; 100 | } 101 | 102 | Frame::Frame(const cv::Mat &img, double timestamp, double range, double FOV, int layer, 103 | double gradient_threshold) : mTimestamp(timestamp), mPyramidLayer(layer), 104 | mGradientThreshold(gradient_threshold), mRange(range), 105 | mFOV(FOV) 106 | { 107 | mT_s0_si.setIdentity(); 108 | mID = mFrameNum++; 109 | mImg = img.clone(); 110 | mScale = (mImg.rows) / mRange; 111 | mTheta = -0.5 * M_PI; 112 | mTx = 0.5 * mImg.cols; 113 | mTy = mImg.rows; 114 | ComputePyramid(mImg, mPyramid, 
mPyramidLayer); 115 | DetectKeyPoints(); 116 | } 117 | 118 | Frame::Frame(const cv::Mat &img, double timestamp, double range, double FOV, Eigen::Isometry3d T_b0_bi, 119 | int layer, double gradient_threshold) : mTimestamp(timestamp), mPyramidLayer(layer), 120 | mGradientThreshold(gradient_threshold), 121 | mRange(range), mFOV(FOV), mT_b0_bi(T_b0_bi) 122 | { 123 | mT_s0_si.setIdentity(); 124 | mID = mFrameNum++; 125 | mImg = img.clone(); 126 | mScale = (mImg.rows) / mRange; 127 | mTheta = -0.5 * M_PI; 128 | mTx = 0.5 * mImg.cols; 129 | mTy = mImg.rows; 130 | ComputePyramid(mImg, mPyramid, mPyramidLayer); 131 | DetectKeyPoints(); 132 | } 133 | 134 | void Frame::ComputePyramid(const cv::Mat &img, vector &img_pyramid_out, int layer) 135 | { 136 | img_pyramid_out.clear(); 137 | img_pyramid_out.push_back(img.clone()); 138 | cv::Mat src = img.clone(); 139 | for (int i = 1; i < layer; i++) { 140 | cv::Mat down; 141 | cv::pyrDown(src, down, cv::Size(src.cols / 2, src.rows / 2)); 142 | img_pyramid_out.push_back(down); 143 | src = down; 144 | } 145 | } 146 | 147 | 148 | void Frame::DetectKeyPoints() 149 | { 150 | // Load the image 151 | 152 | cv::Mat gray, results; 153 | cv::cvtColor(mImg, gray, cv::COLOR_BGR2GRAY); 154 | results = mImg.clone(); 155 | cv::Mat grad_x, grad_y; 156 | cv::Mat abs_grad_x, abs_grad_y, grad; 157 | 158 | // Compute gradients (you can adjust the kernel size depending on your needs) 159 | cv::Sobel(gray, grad_x, CV_32F, 1, 0, 7); 160 | cv::Sobel(gray, grad_y, CV_32F, 0, 1, 7); 161 | 162 | // Computing the magnitude of the gradients 163 | cv::magnitude(grad_x, grad_y, grad); 164 | 165 | cv::Mat floatImage; // Assuming this is your CV_32F image. 166 | 167 | // Find the minimum and maximum values in your floatImage. 168 | double minVal, maxVal; 169 | cv::minMaxLoc(grad, &minVal, &maxVal); 170 | // ROS_INFO_STREAM("grad max:" << maxVal << " min:" << minVal); 171 | 172 | // Normalize the float image to the range [-128, 127]. 173 | cv::Mat normalized_grad; 174 | cv::normalize(grad, normalized_grad, 0, 255, cv::NORM_MINMAX, CV_32F); 175 | 176 | // Convert the datatype from CV_32F to CV_8S. 
177 | normalized_grad.convertTo(grad, CV_8U); 178 | // VisualizeHist(grad); 179 | 180 | 181 | // Apply threshold 182 | cv::Mat mask; 183 | cv::threshold(grad, mask, mGradientThreshold, 255, cv::THRESH_BINARY); 184 | 185 | 186 | // Convert to CV_8U 187 | mask.convertTo(mask, CV_8U); 188 | 189 | 190 | // Reshape to 1D array and sort indices in decreasing order 191 | cv::Mat grad1d = grad.reshape(0, 1); 192 | cv::Mat sortedIndices; 193 | cv::sortIdx(grad1d, sortedIndices, cv::SORT_EVERY_ROW + cv::SORT_DESCENDING); 194 | 195 | // Number of points you want, make sure it's less than or equal to total number of pixels 196 | int N = std::min(1500, grad1d.cols); 197 | 198 | // Get the top N gradient points 199 | cv::Mat topNPoints = sortedIndices(cv::Range::all(), cv::Range(0, N)); 200 | 201 | // // Draw green circles on original image at the points with top gradients 202 | // for (int i = 0; i < N; ++i) { 203 | // int idx = topNPoints.at(0, i); 204 | // int x = idx / grad.cols; // Convert index back to 2D coordinates 205 | // int y = idx % grad.cols; 206 | // 207 | // if (mask.at(x, y) == 255) { 208 | // if (!IsMarginalPoint(y, x)) { 209 | // cv::circle(results, cv::Point(y, x), 1, cv::Scalar(0, 255, 0), -1); 210 | // mKeyPoints.insert(make_pair(double(y), double(x))); 211 | // } 212 | // } 213 | // } 214 | double x = 4.0; 215 | // Define the grid size based on image size and distance x 216 | int gridRows = grad.rows / x; 217 | int gridCols = grad.cols / x; 218 | 219 | // Create a grid of points initialized with (-1,-1) indicating no point and their gradient values 220 | std::vector>> grid(gridRows, 221 | std::vector>( 222 | gridCols, {{-1, -1}, -1.0f})); 223 | 224 | 225 | for (int i = 0; i < N; ++i) { 226 | int idx = topNPoints.at(0, i); 227 | int row = idx / grad.cols; // Convert index back to 2D coordinates 228 | int col = idx % grad.cols; 229 | 230 | int gridRow = row / x; 231 | int gridCol = col / x; 232 | 233 | // float gradientValue = grad1d.at(0, i); 234 | 235 | // Ensure grid indices are valid 236 | if (gridRow >= 0 && gridRow < gridRows && gridCol >= 0 && gridCol < gridCols) { 237 | 238 | float gradientValue = grad1d.at(0, i); 239 | 240 | // Ensure mask indices are valid 241 | if (row >= 0 && row < mask.rows && col >= 0 && col < mask.cols) { 242 | if (gradientValue > grid[gridRow][gridCol].second && 243 | mask.at(row, col) == 255 && !IsMarginalPoint(col, row)) { 244 | grid[gridRow][gridCol] = {{col, row}, gradientValue}; 245 | } 246 | } 247 | } 248 | 249 | } 250 | 251 | // Extract points from grid and draw them 252 | for (int i = 0; i < gridRows; ++i) { 253 | for (int j = 0; j < gridCols; ++j) { 254 | cv::Point pt = grid[i][j].first; 255 | if (pt.x != -1 && pt.y != -1) { 256 | cv::circle(results, pt, 1, cv::Scalar(0, 255, 0), -1); 257 | mKeyPoints.insert(make_pair(double(pt.x), double(pt.y))); 258 | } 259 | } 260 | } 261 | ROS_INFO_STREAM("KeyPoints: " << mKeyPoints.size() << endl); 262 | 263 | } 264 | 265 | bool Frame::IsMarginalPoint(double x, double y) 266 | { 267 | Eigen::Vector3d p_3d = sonar2Dto3D(Eigen::Vector2d(x, y), mTheta, mTx, mTy, mScale); 268 | //cartesian to polar 269 | double r = sqrt(p_3d.x() * p_3d.x() + p_3d.y() * p_3d.y()); 270 | double theta_ = atan2(p_3d.y(), p_3d.x()); 271 | if ((r < 0.3) || (r > mRange - 0.3)) { 272 | return true; 273 | } 274 | else if ((theta_ < -0.5 * mFOV + 0.05) || (theta_ > 0.5 * mFOV - 0.05)) { 275 | return true; 276 | } 277 | else { 278 | return false; 279 | } 280 | } 281 | 282 | Frame::Frame(const Frame &f) 283 | { 284 | mT_s0_si = 
f.mT_s0_si; 285 | mT_b0_bi = f.mT_b0_bi; 286 | mID = f.mID; 287 | mImg = f.mImg.clone(); 288 | mKeyPoints = f.mKeyPoints; 289 | mInliers = f.mInliers; 290 | mTimestamp = f.mTimestamp; 291 | mPyramidLayer = f.mPyramidLayer; 292 | mGradientThreshold = f.mGradientThreshold; 293 | mRange = f.mRange; 294 | mFOV = f.mFOV; 295 | mScale = f.mScale; 296 | mTheta = f.mTheta; 297 | mTx = f.mTx; 298 | mTy = f.mTy; 299 | for (int i = 0; i < mPyramidLayer; i++) { 300 | cv::Mat p_copy = f.mPyramid[i].clone(); 301 | mPyramid.push_back(p_copy.clone()); 302 | } 303 | mObservations_F2L = f.mObservations_F2L; 304 | mObservations_L2F = f.mObservations_L2F; 305 | } 306 | 307 | const Eigen::Isometry3d &Frame::GetPose() const 308 | { 309 | shared_lock lock(mFrameMutex); 310 | return mT_s0_si; 311 | } 312 | 313 | void Frame::SetPose(const Eigen::Isometry3d &T_w_sj) 314 | { 315 | unique_lock lock(mFrameMutex); 316 | mT_s0_si = T_w_sj; 317 | } 318 | 319 | const map, shared_ptr> &Frame::GetObservationsF2L() const 320 | { 321 | shared_lock lock(mFrameMutex); 322 | return mObservations_F2L; 323 | } 324 | 325 | void Frame::SetObservationsF2L(const map, shared_ptr> &ObservationsF2L) 326 | { 327 | unique_lock lock(mFrameMutex); 328 | mObservations_F2L = ObservationsF2L; 329 | } 330 | 331 | const map> &Frame::GetObservationsL2F() const 332 | { 333 | shared_lock lock(mFrameMutex); 334 | return mObservations_L2F; 335 | } 336 | 337 | void Frame::SetObservationsL2F(const map> &ObservationsL2F) 338 | { 339 | unique_lock lock(mFrameMutex); 340 | mObservations_L2F = ObservationsL2F; 341 | } 342 | 343 | void Frame::AddObservation(shared_ptr p_mp, const pair &key) 344 | { 345 | if (mObservations_L2F.count(p_mp->mID)) { 346 | // ROS_WARN_STREAM("Observation already exists. MapPoint" << p_mp->mID << " frame" << mID); 347 | // RemoveObservation(p_mp->mID); 348 | // unique_lock lock(mFrameMutex); 349 | // mObservations_L2F.insert(make_pair(p_mp->mID, key)); 350 | // mObservations_F2L.insert(make_pair(key, p_mp)); 351 | return; 352 | } 353 | unique_lock lock(mFrameMutex); 354 | mObservations_L2F.insert(make_pair(p_mp->mID, key)); 355 | mObservations_F2L.insert(make_pair(key, p_mp)); 356 | } 357 | 358 | void Frame::RemoveObservation(int mp_id) 359 | { 360 | unique_lock lock(mFrameMutex); 361 | if (mObservations_L2F.count(mp_id)) { 362 | if (mObservations_F2L.count(mObservations_L2F[mp_id])) { 363 | mObservations_F2L.erase(mObservations_L2F[mp_id]); 364 | } 365 | mObservations_L2F.erase(mp_id); 366 | } 367 | } 368 | 369 | const Eigen::Isometry3d &Frame::GetOdomPose() const 370 | { 371 | std::shared_lock lock(mFrameMutex); 372 | return mT_b0_bi; 373 | } 374 | 375 | void Frame::VisualizeHist(const cv::Mat mat) 376 | { 377 | // Assuming mat is a single-channel image. 378 | int histSize = 256; // For 8-bit image, you can adjust for other types 379 | float range[] = {0, 256}; // For 8-bit image 380 | const float* histRange = {range}; 381 | cv::Mat hist; 382 | cv::calcHist(&mat, 1, 0, cv::Mat(), hist, 1, &histSize, &histRange); 383 | 384 | int hist_w = 512; 385 | int hist_h = 400; 386 | int bin_w = cvRound((double) hist_w / histSize); 387 | cv::Mat histImage(hist_h, hist_w, CV_8UC3, cv::Scalar(0, 0, 0)); 388 | 389 | // Normalize the histogram to fit in the histImage. 
390 | cv::normalize(hist, hist, 0, histImage.rows, cv::NORM_MINMAX, -1, cv::Mat()); 391 | 392 | for (int i = 1; i < histSize; i++) { 393 | cv::line(histImage, cv::Point(bin_w * (i - 1), hist_h - cvRound(hist.at(i - 1))), 394 | cv::Point(bin_w * i, hist_h - cvRound(hist.at(i))), cv::Scalar(255, 0, 0), 2, 8, 395 | 0); 396 | } 397 | 398 | int tickLength = 5; // Length of each tick, can be adjusted 399 | int tickInterval = 25; // Interval between ticks, can be adjusted 400 | for (int i = 0; i <= histSize; i += tickInterval) { 401 | cv::line(histImage, cv::Point(bin_w * i, hist_h), cv::Point(bin_w * i, hist_h - tickLength), 402 | cv::Scalar(255, 255, 255), 1, 8, 0); 403 | cv::putText(histImage, std::to_string(i), cv::Point(bin_w * i, hist_h - tickLength - 5), 404 | cv::FONT_HERSHEY_SIMPLEX, 0.4, cv::Scalar(255, 255, 255), 1, cv::LINE_AA); 405 | } 406 | 407 | 408 | cv::imshow("Histogram", histImage); 409 | cv::waitKey(1); 410 | 411 | 412 | } 413 | -------------------------------------------------------------------------------- /src/Frame.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 3 | // 4 | 5 | #ifndef SRC_FRAME_H 6 | #define SRC_FRAME_H 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | using namespace std; 18 | class MapPoint; 19 | class Frame 20 | { 21 | public: 22 | static int mFrameNum; 23 | int mID; 24 | void ComputePyramid(const cv::Mat &img, vector &img_pyramid_out, int layer = 5); 25 | void DetectKeyPoints(); 26 | bool IsMarginalPoint(double x, double y); 27 | void AddObservation(shared_ptr p_mp, const pair& key); 28 | void RemoveObservation(int mp_id); 29 | void VisualizeHist(const cv::Mat mat); 30 | //init from sonar image 31 | Frame(const cv::Mat &img, double timestamp, double range, double FOV, int layer = 5, double gradient_threshold = 0.1); 32 | Frame(const cv::Mat &img, double timestamp, double range, double FOV, Eigen::Isometry3d T_b0_bi, int layer = 5, double gradient_threshold = 0.1); 33 | Frame(const Frame& f); 34 | public: 35 | // sonar img 36 | cv::Mat mImg; 37 | //image pyramid 38 | vector mPyramid; 39 | int mPyramidLayer; 40 | //timestamp 41 | double mTimestamp; 42 | 43 | //key points 44 | set> mKeyPoints; 45 | set> mInliers; 46 | double mGradientThreshold; 47 | 48 | //Sonar Parameter 49 | double mRange,mTx,mTy,mTheta,mScale,mFOV; 50 | 51 | private: 52 | // frame pose 53 | // protected by mFrameMutex 54 | Eigen::Isometry3d mT_s0_si; 55 | Eigen::Isometry3d mT_b0_bi; 56 | public: 57 | const Eigen::Isometry3d &GetPose() const; 58 | const Eigen::Isometry3d &GetOdomPose() const; 59 | 60 | void SetPose(const Eigen::Isometry3d &T_w_sj); 61 | 62 | const map, shared_ptr> &GetObservationsF2L() const; 63 | 64 | void SetObservationsF2L(const map, shared_ptr> &ObservationsF2L); 65 | 66 | const map> &GetObservationsL2F() const; 67 | 68 | void SetObservationsL2F(const map> &ObservationsL2F); 69 | 70 | private: 71 | //observation Pixel_Pos -> MapPoint pointer, Frame to Landmark 72 | // protected by mFrameMutex 73 | map, shared_ptr> mObservations_F2L; 74 | //observation landmark ID -> Pixel_Pos, Landmark to Frame 75 | // protected by mFrameMutex 76 | map> mObservations_L2F; 77 | 78 | //mutex 79 | mutable shared_mutex mFrameMutex; 80 | 81 | 82 | }; 83 | Eigen::Vector3d 84 | sonar2Dto3D(const Eigen::Vector2d &p_pixel, double theta, double tx, double ty, double scale); 85 | Eigen::Vector2d 86 | sonar3Dto2D(const 
Eigen::Vector3d &p_sonar3D, double theta, double tx, double ty, double scale); 87 | double getPixelValue(const cv::Mat &img, double x, double y); 88 | double getPatternPixelValue(const cv::Mat &img, double x, double y); 89 | 90 | #endif //SRC_FRAME_H 91 | -------------------------------------------------------------------------------- /src/LocalMapping.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 09/08/23. 3 | // 4 | 5 | #include "LocalMapping.h" 6 | #include "nanoflann.hpp" 7 | #include "Track.h" 8 | #include "Frame.h" 9 | #include "MapPoint.h" 10 | #include "OptimizationType.h" 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | 23 | using namespace std; 24 | using namespace nanoflann; 25 | 26 | LocalMapping::LocalMapping(shared_ptr pTracker) : mpTracker(pTracker) 27 | { 28 | ros::NodeHandle nh; 29 | mMarkerPub = nh.advertise("/direct_sonar/visualization_marker", 10); 30 | mPointCloudPub = nh.advertise("/direct_sonar/point_cloud", 10); 31 | mSaveService = nh.advertiseService("/direct_sonar/save_map", &LocalMapping::SaveMapCallback, this); 32 | mT_bw_b0.setIdentity(); 33 | }; 34 | 35 | 36 | LocalMapping::LocalMapping(shared_ptr pTracker, Eigen::Isometry3d T_bw_b0):mT_bw_b0(T_bw_b0) 37 | { 38 | ros::NodeHandle nh; 39 | mPointCloudPub = nh.advertise("/direct_sonar/point_cloud", 10); 40 | mMarkerPub = nh.advertise("/direct_sonar/visualization_marker", 10); 41 | mSaveService = nh.advertiseService("/direct_sonar/save_map", &LocalMapping::SaveMapCallback, this); 42 | }; 43 | 44 | 45 | 46 | void LocalMapping::Run() 47 | { 48 | while (1) { 49 | if (!mProcessingQueue.empty()) { 50 | ProcessKeyFrame(); 51 | if (mActiveFrameWindow.size() > 1) { 52 | OptimizeWindow(); 53 | Visualize(); 54 | NotifyTracker(); 55 | } 56 | } 57 | else { 58 | this_thread::sleep_for(chrono::milliseconds(10)); 59 | } 60 | } 61 | } 62 | 63 | void LocalMapping::NotifyTracker() 64 | { 65 | shared_ptr state = make_shared(mpTracker); 66 | mpTracker->SetState(state); 67 | } 68 | 69 | void LocalMapping::InsertKeyFrame(shared_ptr pF) 70 | { 71 | unique_lock lock(mProcessingQueueMutex); 72 | mProcessingQueue.push_back(pF); 73 | } 74 | 75 | void LocalMapping::ProcessKeyFrame() 76 | { 77 | unique_lock lock(mProcessingQueueMutex); 78 | if (mProcessingQueue.empty()) { 79 | return; 80 | } 81 | unique_lock lock2(mWindowMutex); 82 | auto pF_new = mProcessingQueue.front(); 83 | // build more data association 84 | set> inliers_last, inliers_cur; 85 | map, pair> association; 86 | for (auto rit = mActiveFrameWindow.rbegin(); rit != mActiveFrameWindow.rend(); ++rit) { 87 | shared_ptr pF = *rit; 88 | // PredictCurrentPose(pF, mpCurrentFrame); 89 | PoseEstimationWindow2Frame(pF, pF_new, inliers_last, inliers_cur, association, mpTracker->mRange, 90 | mpTracker->mFOV); 91 | // PoseEstimationWindow2FramePyramid(pF, mpCurrentFrame, inliers_last, inliers_cur, association); 92 | pF->mInliers = inliers_last; 93 | pF_new->mInliers = inliers_cur; 94 | BuildAssociation(pF, pF_new, association, mpTracker->mGradientInlierThreshold); 95 | } 96 | 97 | 98 | mActiveFrameWindow.push_back(pF_new); 99 | if (mActiveFrameWindow.size() > mWindowSize) { 100 | mActiveFrameWindow.pop_front(); 101 | } 102 | IncreaseMapPoints(pF_new); 103 | mProcessingQueue.pop_front(); 104 | } 105 | 106 | void LocalMapping::OptimizeWindow() 107 | { 108 | // unique_lock lock2(mpTracker->mTrackMutex); 109 | int max_frame_id 
= Frame::mFrameNum; 110 | g2o::SparseOptimizer optimizer; 111 | g2o::BlockSolver_6_3::LinearSolverType* linearSolver; 112 | 113 | linearSolver = new g2o::LinearSolverEigen(); 114 | 115 | g2o::BlockSolver_6_3* solver_ptr = new g2o::BlockSolver_6_3(linearSolver); 116 | g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg(solver_ptr); 117 | optimizer.setAlgorithm(solver); 118 | // optimizer.setVerbose(true); 119 | 120 | //add frame vertex 121 | map, VertexSonarPose*> all_frame_vertex; 122 | map, VertexSonarPoint*> all_landmark_vertex; 123 | vector odom_edges; 124 | vector sonar_edges; 125 | { 126 | unique_lock lock(mWindowMutex); 127 | for (auto rit = mActiveFrameWindow.rbegin(); rit != mActiveFrameWindow.rend(); ++rit) { 128 | // for (auto pF: mActiveFrameWindow) { 129 | auto pF = *rit; 130 | Eigen::Isometry3d T_w_si = pF->GetPose(); 131 | Eigen::Isometry3d T_si_w = T_w_si.inverse(); 132 | g2o::SE3Quat est(T_si_w.rotation(), T_si_w.translation()); 133 | VertexSonarPose* vSE3 = new VertexSonarPose(); 134 | vSE3->setId(pF->mID); 135 | vSE3->setEstimate(T_w_si); 136 | optimizer.addVertex(vSE3); 137 | if (rit == mActiveFrameWindow.rend() - 1) { 138 | vSE3->setFixed(true); 139 | ROS_INFO_STREAM("Fix ID:" << pF->mID); 140 | } 141 | all_frame_vertex.insert(make_pair(pF, vSE3)); 142 | 143 | for (auto obs: pF->GetObservationsF2L()) { 144 | //add landmark vertex 145 | shared_ptr pMP = obs.second; 146 | if (all_landmark_vertex.count(pMP) == 0) { 147 | VertexSonarPoint* vPoint = new VertexSonarPoint(); 148 | vPoint->setId(max_frame_id + 1 + pMP->mID); 149 | vPoint->setEstimate(pMP->getPosition()); 150 | vPoint->setMarginalized(true); 151 | if (rit == mActiveFrameWindow.rend() - 1) { 152 | vPoint->setFixed(true); 153 | // ROS_INFO_STREAM("Fix ID:" << pF->mID); 154 | } 155 | optimizer.addVertex(vPoint); 156 | all_landmark_vertex.insert(make_pair(pMP, vPoint)); 157 | } 158 | 159 | //add frame2landmark edge 160 | Eigen::Vector2d p_pixel_meas(obs.first.first, obs.first.second);//pixel measurement 161 | EdgeSE3Sonar* e = new EdgeSE3Sonar(p_pixel_meas, pF->mTheta, pF->mTx, pF->mTy, 162 | pF->mScale); 163 | e->setVertex(0, vSE3); 164 | e->setVertex(1, all_landmark_vertex[pMP]); 165 | e->setInformation(Eigen::Matrix2d::Identity()); 166 | g2o::RobustKernelHuber* rk = new g2o::RobustKernelHuber; 167 | rk->setDelta(5.0); 168 | // e->setRobustKernel(rk); 169 | optimizer.addEdge(e); 170 | sonar_edges.push_back(e); 171 | } 172 | } 173 | Eigen::Isometry3d T_b_s = mpTracker->mT_b_s; 174 | for (auto rit = mActiveFrameWindow.rbegin(); rit != mActiveFrameWindow.rend() - 1; ++rit) { 175 | auto pF = *rit; 176 | auto pF_pre = *(rit + 1); 177 | if (pF && pF_pre) { 178 | Eigen::Isometry3d T_b0_bj = pF->GetOdomPose(); 179 | // Eigen::Isometry3d T_bj_b0 = T_b0_bj.inverse(); 180 | Eigen::Isometry3d T_b0_bi = pF_pre->GetOdomPose(); 181 | Eigen::Isometry3d T_bi_b0 = T_b0_bi.inverse(); 182 | Eigen::Isometry3d T_si_sj = T_b_s.inverse() * T_bi_b0 * T_b0_bj * T_b_s; 183 | EdgeSE3Odom* e_odom = new EdgeSE3Odom(T_si_sj); 184 | e_odom->setVertex(0, dynamic_cast(optimizer.vertex( 185 | pF_pre->mID))); 186 | e_odom->setVertex(1, dynamic_cast(optimizer.vertex( 187 | pF->mID))); 188 | Eigen::Matrix info = Eigen::Matrix::Identity(); 189 | info(0,0) = 1; 190 | info(1,1) = 1; 191 | info(2,2) = 200000; 192 | info(3,3) = 100; 193 | info(4,4) = 100; 194 | info(5,5) = 1; 195 | e_odom->setInformation(info); 196 | e_odom->computeError(); 197 | optimizer.addEdge(e_odom); 198 | // ROS_INFO_STREAM("add odom edge: " << 
pF_pre->mID << " -> " << pF->mID << " error: " 199 | // << e_odom->error().transpose() 200 | // << "relative odom pose:\n" << T_si_sj.matrix()); 201 | odom_edges.push_back(e_odom); 202 | 203 | } 204 | 205 | // Eigen::Isometry3d T_b_s = mpTracker->mT_b_s; 206 | // Eigen::Isometry3d T_bi_bj = T_b_s * T_si_sj * T_b_s.inverse(); 207 | } 208 | } 209 | 210 | double sonar_chi2 =0, odom_chi2 = 0; 211 | for(auto e:sonar_edges){ 212 | e->computeError(); 213 | sonar_chi2+=e->chi2(); 214 | } 215 | for(auto e:odom_edges){ 216 | e->computeError(); 217 | odom_chi2+=e->chi2(); 218 | } 219 | ROS_INFO_STREAM("sonar chi2: " << sonar_chi2 << " odom chi2: " << odom_chi2); 220 | 221 | 222 | optimizer.initializeOptimization(0); 223 | optimizer.optimize(50); 224 | 225 | //recover optimized data 226 | { 227 | unique_lock lock(mWindowMutex); 228 | for (auto pF_pVF: all_frame_vertex) { 229 | auto pF = pF_pVF.first; 230 | Eigen::Isometry3d T_s0_si = pF_pVF.second->estimate(); 231 | // Eigen::Isometry3d T_s0_si = T_si_w.inverse(); 232 | pF->SetPose(T_s0_si); 233 | // ROS_INFO_STREAM("Frame: " << pF->mID << " pose:\n" << T_s0_si.matrix()); 234 | 235 | } 236 | for (auto pMP_pVP: all_landmark_vertex) { 237 | auto pMP = pMP_pVP.first; 238 | Eigen::Vector3d pos = pMP_pVP.second->estimate(); 239 | // pos(2) = 0; 240 | pMP->setPosition(pos); 241 | } 242 | } 243 | 244 | } 245 | 246 | shared_ptr LocalMapping::GetLastFrameInWindow() 247 | { 248 | shared_lock lock(mWindowMutex); 249 | return mActiveFrameWindow.back(); 250 | } 251 | 252 | deque> LocalMapping::GetWindow() 253 | { 254 | shared_lock lock(mWindowMutex); 255 | return mActiveFrameWindow; 256 | } 257 | 258 | void LocalMapping::Visualize() 259 | { 260 | visualization_msgs::MarkerArray all_markers; 261 | 262 | 263 | //clear marker 264 | // visualization_msgs::Marker frame_marker; 265 | // frame_marker.header.frame_id = "map"; 266 | // frame_marker.header.stamp = ros::Time::now(); 267 | // frame_marker.ns = "frame"; 268 | // frame_marker.id = 0; 269 | // frame_marker.type = visualization_msgs::Marker::CUBE; 270 | // frame_marker.action = visualization_msgs::Marker::DELETEALL; 271 | // frame_marker.pose.position.x = 0; 272 | // frame_marker.pose.position.y = 0; 273 | // frame_marker.pose.position.z = 0; 274 | // frame_marker.pose.orientation.x = 0; 275 | // frame_marker.pose.orientation.y = 0; 276 | // frame_marker.pose.orientation.z = 0; 277 | // frame_marker.pose.orientation.w = 1; 278 | // frame_marker.scale.x = 0.2; 279 | // frame_marker.scale.y = 2; 280 | // frame_marker.scale.z = 1; 281 | // frame_marker.color.a = 1.0; 282 | // frame_marker.color.r = 0.0; 283 | // frame_marker.color.g = 1.0; 284 | // frame_marker.color.b = 0.0; 285 | // all_markers.markers.push_back(frame_marker); 286 | 287 | 288 | 289 | //visualize landmark and data association 290 | visualization_msgs::Marker line_marker; 291 | line_marker.header.frame_id = "map"; 292 | line_marker.header.stamp = ros::Time::now(); 293 | line_marker.ns = "line"; 294 | line_marker.id = 0; 295 | line_marker.type = visualization_msgs::Marker::LINE_LIST; 296 | line_marker.action = visualization_msgs::Marker::ADD; 297 | line_marker.scale.x = 0.01; 298 | line_marker.scale.y = 0.01; 299 | line_marker.scale.z = 0.01; 300 | line_marker.color.r = 0.0; 301 | line_marker.color.g = 0.0; 302 | line_marker.color.b = 1.0; 303 | line_marker.color.a = 0.1; 304 | 305 | 306 | std::set point_id; 307 | 308 | pcl::PointCloud cloud; 309 | 310 | // visualize frame in the window 311 | Eigen::Isometry3d T_b_s = mpTracker->mT_b_s; 312 | auto 
pF_first = *(mActiveFrameWindow.rend()-1); 313 | for (auto pF: mActiveFrameWindow) { 314 | Eigen::Isometry3d T_s0_si = pF->GetPose(); 315 | Eigen::Isometry3d T_b0_bi = T_b_s * T_s0_si * T_b_s.inverse(); 316 | Eigen::Quaterniond q_b0_bi(T_b0_bi.rotation()); 317 | visualization_msgs::Marker frame_marker; 318 | frame_marker.header.frame_id = "map"; 319 | frame_marker.header.stamp = ros::Time::now(); 320 | frame_marker.ns = "frame"; 321 | frame_marker.id = pF->mID; 322 | frame_marker.type = visualization_msgs::Marker::CUBE; 323 | frame_marker.action = visualization_msgs::Marker::ADD; 324 | frame_marker.pose.position.x = T_b0_bi.translation().x(); 325 | frame_marker.pose.position.y = T_b0_bi.translation().y(); 326 | frame_marker.pose.position.z = T_b0_bi.translation().z(); 327 | frame_marker.pose.orientation.x = q_b0_bi.x(); 328 | frame_marker.pose.orientation.y = q_b0_bi.y(); 329 | frame_marker.pose.orientation.z = q_b0_bi.z(); 330 | frame_marker.pose.orientation.w = q_b0_bi.w(); 331 | frame_marker.scale.x = 0.4; 332 | frame_marker.scale.y = 0.4; 333 | frame_marker.scale.z = 0.4; 334 | frame_marker.color.a = 0.7; 335 | if(pF->mID==pF_first->mID){ 336 | frame_marker.color.r = 1.0; 337 | frame_marker.color.g = 0.1; 338 | frame_marker.color.b = 0.1; 339 | } 340 | else{ 341 | frame_marker.color.r = 0.0; 342 | frame_marker.color.g = 0.0; 343 | frame_marker.color.b = 1.0; 344 | } 345 | // frame_marker.color.r = 0.0; 346 | // frame_marker.color.g = 1.0; 347 | // frame_marker.color.b = 0.0; 348 | all_markers.markers.push_back(frame_marker); 349 | 350 | 351 | auto obs = pF->GetObservationsF2L(); 352 | for (auto ob: obs) { 353 | shared_ptr pMP = ob.second; 354 | int obs_num = pMP->getObservationNum(); 355 | Eigen::Vector3d pos = pMP->getPosition(); 356 | visualization_msgs::Marker landmark_marker; 357 | landmark_marker.header.frame_id = "map"; 358 | landmark_marker.header.stamp = ros::Time::now(); 359 | landmark_marker.ns = "landmark"; 360 | landmark_marker.id = pMP->mID; 361 | landmark_marker.type = visualization_msgs::Marker::SPHERE; 362 | landmark_marker.action = visualization_msgs::Marker::ADD; 363 | landmark_marker.pose.position.x = pos.x(); 364 | landmark_marker.pose.position.y = pos.y(); 365 | landmark_marker.pose.position.z = pos.z(); 366 | landmark_marker.scale.x = 0.3; 367 | landmark_marker.scale.y = 0.3; 368 | landmark_marker.scale.z = 0.3; 369 | landmark_marker.color.a = 1.0; 370 | if (obs_num > 5) { 371 | landmark_marker.color.r = 0.0; 372 | landmark_marker.color.g = 1.0; 373 | landmark_marker.color.b = 0.0; 374 | 375 | all_markers.markers.push_back(landmark_marker); 376 | 377 | line_marker.points.push_back(frame_marker.pose.position); 378 | line_marker.points.push_back(landmark_marker.pose.position); 379 | if(point_id.count(pMP->mID)==0){ 380 | pcl::PointXYZ point; 381 | point.x = pos(0); 382 | point.y = pos(1); 383 | point.z = pos(2); 384 | cloud.points.push_back(point); 385 | point_id.insert(pMP->mID); 386 | } 387 | if(mActiveMapPoints.count(pMP->mID)==0){ 388 | mActiveMapPoints.insert(make_pair(pMP->mID,pMP)); 389 | } 390 | } 391 | // else { 392 | // landmark_marker.color.r = 1.0; 393 | // landmark_marker.color.g = 0.0; 394 | // landmark_marker.color.b = 0.0; 395 | // } 396 | 397 | 398 | } 399 | } 400 | all_markers.markers.push_back(line_marker); 401 | 402 | mMarkerPub.publish(all_markers); 403 | 404 | ROS_INFO_STREAM("PointCloud Size: "<::Ptr transformed_cloud (new pcl::PointCloud); 407 | Eigen::Isometry3d T_bw_s0 = mT_bw_b0 * mpTracker->mT_b_s; 408 | 
pcl::transformPointCloud(cloud,*transformed_cloud,T_bw_s0.matrix().cast()); 409 | 410 | 411 | sensor_msgs::PointCloud2 pc_msg; 412 | pcl::toROSMsg(*transformed_cloud, pc_msg); 413 | pc_msg.header.frame_id = "odom"; 414 | 415 | pc_msg.header.stamp = line_marker.header.stamp; 416 | mPointCloudPub.publish(pc_msg); 417 | 418 | 419 | } 420 | 421 | void LocalMapping::Reset() 422 | { 423 | unique_lock lock(mWindowMutex); 424 | unique_lock lock2(mProcessingQueueMutex); 425 | mActiveFrameWindow.clear(); 426 | mProcessingQueue.clear(); 427 | } 428 | 429 | void LocalMapping::PoseEstimationWindow2Frame(shared_ptr pF_pre, shared_ptr pF, 430 | set> &inliers_last, 431 | set> &inliers_current, 432 | map, pair> &association, 433 | double range, double fov) 434 | { 435 | //convert rgb to gray 436 | cv::Mat pre_gray; 437 | cv::cvtColor(pF_pre->mImg, pre_gray, cv::COLOR_BGR2GRAY); 438 | cv::Mat cur_gray; 439 | cv::cvtColor(pF->mImg, cur_gray, cv::COLOR_BGR2GRAY); 440 | //init pose 441 | Eigen::Isometry3d T_w_si = pF_pre->GetPose(); 442 | Eigen::Isometry3d T_w_sj = pF->GetPose(); 443 | Eigen::Isometry3d T_sj_si = T_w_sj.inverse() * T_w_si; 444 | 445 | double scale = (cur_gray.rows) / range; 446 | double theta = -0.5 * M_PI; 447 | double tx = 0.5 * cur_gray.cols; 448 | double ty = cur_gray.rows; 449 | 450 | // setup g2o 451 | typedef g2o::BlockSolver> DirectBlock; 452 | DirectBlock::LinearSolverType* linearSolver = new g2o::LinearSolverDense(); 453 | DirectBlock* solver_ptr = new DirectBlock(linearSolver); 454 | // g2o::OptimizationAlgorithmGaussNewton* solver = new g2o::OptimizationAlgorithmGaussNewton( solver_ptr ); // G-N 455 | g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg( 456 | solver_ptr); // L-M 457 | g2o::SparseOptimizer optimizer; 458 | optimizer.setAlgorithm(solver); 459 | optimizer.setVerbose(false); 460 | 461 | g2o::VertexSE3Expmap* pose = new g2o::VertexSE3Expmap(); 462 | pose->setEstimate(g2o::SE3Quat(T_sj_si.rotation(), T_sj_si.translation())); 463 | pose->setId(0); 464 | optimizer.addVertex(pose); 465 | vector all_edges; 466 | int edge_id = 0; 467 | 468 | map> id_pixel; 469 | auto obs = pF_pre->GetObservationsF2L(); 470 | for (auto it: obs) { 471 | Eigen::Vector2d p_pixel(it.first.first, it.first.second); 472 | Eigen::Vector3d p3d = sonar2Dto3D(p_pixel, theta, tx, ty, scale); 473 | // double grayscale = getPixelValue(pre_gray, p_pixel.x(), p_pixel.y()); 474 | double grayscale = getPatternPixelValue(pre_gray, p_pixel.x(), p_pixel.y()); 475 | 476 | id_pixel.insert(make_pair(edge_id, it.first)); 477 | 478 | EdgeSE3SonarDirect* edge = new EdgeSE3SonarDirect(p3d, theta, tx, ty, scale, range, fov, 479 | cur_gray); 480 | edge->setVertex(0, pose); 481 | edge->setMeasurement(grayscale); 482 | edge->setInformation(Eigen::Matrix::Identity()); 483 | edge->setId(edge_id++); 484 | //set robust kernel 485 | g2o::RobustKernelHuber* rk = new g2o::RobustKernelHuber; 486 | rk->setDelta(10.0); 487 | // edge->setRobustKernel(rk); 488 | optimizer.addEdge(edge); 489 | all_edges.push_back(edge); 490 | } 491 | // cout << "edges in graph: " << optimizer.edges().size() << endl; 492 | // ROS_INFO_STREAM("edges in graph: " << optimizer.edges().size()); 493 | 494 | optimizer.initializeOptimization(); 495 | optimizer.optimize(100); 496 | 497 | 498 | Eigen::Isometry3d T = pose->estimate(); 499 | T_sj_si = T; 500 | 501 | 502 | 503 | 504 | // cout << "Tcw=\n" << T sj_si.matrix() << endl; 505 | //convert to angle axis 506 | // Eigen::AngleAxisd a(T sj_si.rotation()); 507 | // cout << 
"rotation axis: " << a.axis().transpose() << " angle: " << a.angle() << endl; 508 | 509 | 510 | //save edge chi2 to file 511 | ofstream f_chi2; 512 | f_chi2.open("/home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/debug/opt.txt", 513 | ios::out); 514 | 515 | for (auto e: all_edges) { 516 | // e->computeError(); 517 | f_chi2 << "edge[" << e->id() << "]: " << e->error().norm() << endl; 518 | if (e->error().norm() > mpTracker->mGradientInlierThreshold * 0.5 || (e->error().norm() == 0)) { 519 | //remove from id_pixel 520 | id_pixel.erase(e->id()); 521 | } 522 | } 523 | // ROS_INFO_STREAM("inlier number: " << id_pixel.size()); 524 | f_chi2.close(); 525 | 526 | inliers_last.clear(); 527 | for (auto it: id_pixel) { 528 | inliers_last.insert(it.second); 529 | } 530 | 531 | inliers_current.clear(); 532 | association.clear(); 533 | for (auto it: inliers_last) { 534 | Eigen::Vector2d p_pixel(it.first, it.second); 535 | Eigen::Vector3d p = sonar2Dto3D(p_pixel, theta, tx, ty, scale); 536 | Eigen::Vector3d p2 = T_sj_si * p; 537 | Eigen::Vector2d pixel_now = sonar3Dto2D(p2, theta, tx, ty, scale); 538 | inliers_current.insert(make_pair(pixel_now.x(), pixel_now.y())); 539 | association.insert(make_pair(it, make_pair(pixel_now.x(), pixel_now.y()))); 540 | } 541 | 542 | //set pose 543 | if(association.size()>mpTracker->mLossThreshold){ 544 | T_w_si = pF_pre->GetPose(); 545 | T_w_sj = T_w_si * T_sj_si.inverse(); 546 | pF->SetPose(T_w_sj); 547 | } 548 | 549 | 550 | // plot the feature points 551 | // cv::Mat img_show(pF->mImg.rows * 2, pF->mImg.cols, CV_8UC3); 552 | // pF_pre->mImg.copyTo(img_show(cv::Rect(0, 0, pF->mImg.cols, pF->mImg.rows))); 553 | // pF->mImg.copyTo(img_show(cv::Rect(0, pF->mImg.rows, pF->mImg.cols, pF->mImg.rows))); 554 | // 555 | // // pF_pre->mImg.copyTo(img_show(cv::Rect(pF->mImg.cols, 0, pF->mImg.cols, pF->mImg.rows))); 556 | // // pF->mImg.copyTo(img_show(cv::Rect(pF->mImg.cols, pF->mImg.rows, pF->mImg.cols, pF->mImg.rows))); 557 | // 558 | // for (auto it: inliers_last) { 559 | // Eigen::Vector2d p_pixel(it.first, it.second); 560 | // Eigen::Vector3d p = sonar2Dto3D(p_pixel, theta, tx, ty, scale); 561 | // // Eigen::Vector3d p = m.pos_world; 562 | // Eigen::Vector2d pixel_prev = sonar3Dto2D(p, theta, tx, ty, scale); 563 | // Eigen::Vector3d p2 = T_sj_si * p; 564 | // Eigen::Vector2d pixel_now = sonar3Dto2D(p2, theta, tx, ty, scale); 565 | // if (pixel_now(0, 0) < 0 || pixel_now(0, 0) >= pF->mImg.cols || pixel_now(1, 0) < 0 || 566 | // pixel_now(1, 0) >= pF->mImg.rows) { 567 | // continue; 568 | // } 569 | // 570 | // float b = 0; 571 | // float g = 250; 572 | // float r = 0; 573 | // img_show.ptr(pixel_prev(1, 0))[int(pixel_prev(0, 0)) * 3] = b; 574 | // img_show.ptr(pixel_prev(1, 0))[int(pixel_prev(0, 0)) * 3 + 1] = g; 575 | // img_show.ptr(pixel_prev(1, 0))[int(pixel_prev(0, 0)) * 3 + 2] = r; 576 | // 577 | // img_show.ptr(pixel_now(1, 0) + pF->mImg.rows)[int(pixel_now(0, 0)) * 3] = b; 578 | // img_show.ptr(pixel_now(1, 0) + pF->mImg.rows)[int(pixel_now(0, 0)) * 3 + 1] = g; 579 | // img_show.ptr(pixel_now(1, 0) + pF->mImg.rows)[int(pixel_now(0, 0)) * 3 + 2] = r; 580 | // cv::circle(img_show, cv::Point2d(pixel_prev(0, 0), pixel_prev(1, 0)), 2, cv::Scalar(b, g, r), 1); 581 | // cv::circle(img_show, cv::Point2d(pixel_now(0, 0), pixel_now(1, 0) + pF->mImg.rows), 2, 582 | // cv::Scalar(b, g, r), 1); 583 | // 584 | // } 585 | // //get ros time 586 | // auto t = ros::Time::now(); 587 | // stringstream ss; 588 | // ss << 
"/home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/results/WINDOW_" << pF->mID 589 | // << "_" << pF_pre->mID << ".png"; 590 | // // cv::imshow("result", img_show); 591 | // // cv::waitKey(0); 592 | // cv::imwrite(ss.str(), img_show); 593 | 594 | 595 | } 596 | 597 | bool LocalMapping::SaveMapCallback(std_srvs::EmptyRequest &req, std_srvs::EmptyResponse &res) 598 | { 599 | pcl::PointCloud cloud; 600 | for(auto it: mActiveMapPoints){ 601 | auto pos = it.second->getPosition(); 602 | pcl::PointXYZ p(pos.x(),pos.y(),pos.z()); 603 | cloud.points.push_back(p); 604 | } 605 | ROS_INFO_STREAM("PointCloud Size: "<::Ptr transformed_cloud (new pcl::PointCloud); 608 | Eigen::Isometry3d T_bw_s0 = mT_bw_b0 * mpTracker->mT_b_s; 609 | pcl::transformPointCloud(cloud,*transformed_cloud,T_bw_s0.matrix().cast()); 610 | 611 | 612 | sensor_msgs::PointCloud2 pc_msg; 613 | pcl::toROSMsg(*transformed_cloud, pc_msg); 614 | pc_msg.header.frame_id = "map"; 615 | 616 | pc_msg.header.stamp = ros::Time::now(); 617 | mPointCloudPub.publish(pc_msg); 618 | return true; 619 | } 620 | 621 | void LocalMapping::PubMap() 622 | { 623 | pcl::PointCloud cloud; 624 | for(auto it: mActiveMapPoints){ 625 | auto pos = it.second->getPosition(); 626 | pcl::PointXYZ p(pos.x(),pos.y(),pos.z()); 627 | cloud.points.push_back(p); 628 | } 629 | ROS_INFO_STREAM("PointCloud Size: "<::Ptr transformed_cloud (new pcl::PointCloud); 632 | Eigen::Isometry3d T_bw_s0 = mT_bw_b0 * mpTracker->mT_b_s; 633 | pcl::transformPointCloud(cloud,*transformed_cloud,T_bw_s0.matrix().cast()); 634 | 635 | 636 | sensor_msgs::PointCloud2 pc_msg; 637 | pcl::toROSMsg(*transformed_cloud, pc_msg); 638 | pc_msg.header.frame_id = "map"; 639 | 640 | pc_msg.header.stamp = ros::Time::now(); 641 | mPointCloudPub.publish(pc_msg); 642 | // return true; 643 | } 644 | -------------------------------------------------------------------------------- /src/LocalMapping.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 09/08/23. 
3 | // 4 | 5 | #ifndef SRC_LOCALMAPPING_H 6 | #define SRC_LOCALMAPPING_H 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | 18 | using namespace std; 19 | class Frame; 20 | 21 | class MapPoint; 22 | 23 | class Track; 24 | 25 | class LocalMapping; 26 | 27 | // class LocalMapState{ 28 | // protected: 29 | // shared_ptr mpLocalMaper; 30 | // public: 31 | // LocalMapState(shared_ptr pLocalMaper):mpLocalMaper(pLocalMaper){}; 32 | // virtual void Update() = 0; 33 | // }; 34 | // 35 | // class LocalMapToUpdate : public LocalMapState{ 36 | // public: 37 | // LocalMapToUpdate(shared_ptr pLocalMaper):LocalMapState(pLocalMaper){}; 38 | // void Update() override; 39 | // }; 40 | // class LocalMapUpToDate : public LocalMapState{ 41 | // public: 42 | // LocalMapUpToDate(shared_ptr pLocalMaper):LocalMapState(pLocalMaper){}; 43 | // void Update() override; 44 | // }; 45 | 46 | class LocalMapping : public enable_shared_from_this 47 | { 48 | public: 49 | LocalMapping(shared_ptr pTracker); 50 | LocalMapping(shared_ptr pTracker, Eigen::Isometry3d T_bw_b0); 51 | void Run(); 52 | void NotifyTracker(); 53 | // void SetState(shared_ptr pState); 54 | void InsertKeyFrame(shared_ptr pF); 55 | void ProcessKeyFrame(); 56 | void OptimizeWindow(); 57 | void PoseEstimationWindow2Frame(shared_ptr pF_pre, shared_ptr pF, 58 | set> &inliers_last, 59 | set> &inliers_current, 60 | map, pair> &association, double range, double fov); 61 | void Visualize(); 62 | shared_ptr GetLastFrameInWindow(); 63 | deque> GetWindow(); 64 | void Reset(); 65 | bool SaveMapCallback(std_srvs::EmptyRequest &req, std_srvs::EmptyResponse &res); 66 | void PubMap(); 67 | 68 | 69 | protected: 70 | shared_ptr mpTracker; 71 | // shared_ptr mState; 72 | 73 | // protected by mWindowMutex 74 | shared_mutex mWindowMutex; 75 | int mWindowSize = 10; 76 | deque> mActiveFrameWindow; 77 | 78 | //Frame to process, protected by mProcessingQueueMutex 79 | shared_mutex mProcessingQueueMutex; 80 | deque> mProcessingQueue; 81 | 82 | ros::Publisher mMarkerPub; 83 | ros::Publisher mPointCloudPub; 84 | std::map> mActiveMapPoints; 85 | //ros save service 86 | ros::ServiceServer mSaveService; 87 | public: 88 | Eigen::Isometry3d mT_bw_b0; 89 | 90 | }; 91 | 92 | 93 | #endif //SRC_LOCALMAPPING_H 94 | -------------------------------------------------------------------------------- /src/MapPoint.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 
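//
// MapPoint stores a landmark position in the world frame together with the frames that
// observe it; reads take a shared_lock and writes a unique_lock on mMapPointMutex.
// AddObservation() ignores a frame ID that is already registered, so mObservationNum
// stays equal to mObservations.size(). Illustrative use (pFrame is a hypothetical
// shared_ptr<Frame>, the coordinates are arbitrary):
//
//   auto mp = make_shared<MapPoint>(Eigen::Vector3d(1.0, 2.0, 0.0));
//   mp->AddObservation(pFrame);              // a duplicate frame ID is a no-op
//   mp->setPosition(Eigen::Vector3d(1.1, 2.0, 0.0));
//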
3 | // 4 | 5 | #include "MapPoint.h" 6 | #include "Frame.h" 7 | #include 8 | 9 | int MapPoint::mMapPointNumber = 0; 10 | 11 | MapPoint::MapPoint(const Eigen::Vector3d &p_w) 12 | { 13 | mPw = p_w; 14 | mID = mMapPointNumber++; 15 | mObservationNum = 0; 16 | } 17 | 18 | MapPoint::MapPoint(const MapPoint &p) 19 | { 20 | mPw = p.mPw; 21 | mID = p.mID; 22 | mObservations = p.mObservations; 23 | mObservationNum = p.mObservationNum; 24 | } 25 | 26 | void MapPoint::AddObservation(shared_ptr pF) 27 | { 28 | if (mObservations.count(pF->mID)) { 29 | // RemoveObservation(pF->mID); 30 | // unique_lock lock(mMapPointMutex); 31 | // mObservations.insert(make_pair(pF->mID, pF)); 32 | // mObservationNum++; 33 | return; 34 | } 35 | unique_lock lock(mMapPointMutex); 36 | mObservations.insert(make_pair(pF->mID, pF)); 37 | mObservationNum++; 38 | } 39 | 40 | void MapPoint::RemoveObservation(int frame_id) 41 | { 42 | unique_lock lock(mMapPointMutex); 43 | if (mObservations.count(frame_id)) { 44 | mObservations.erase(frame_id); 45 | mObservationNum--; 46 | } 47 | } 48 | 49 | const Eigen::Vector3d &MapPoint::getPosition() const 50 | { 51 | shared_lock lock(mMapPointMutex); 52 | return mPw; 53 | } 54 | 55 | void MapPoint::setPosition(const Eigen::Vector3d &mPw) 56 | { 57 | unique_lock lock(mMapPointMutex); 58 | MapPoint::mPw = mPw; 59 | } 60 | 61 | const map> &MapPoint::getObservations() const 62 | { 63 | shared_lock lock(mMapPointMutex); 64 | return mObservations; 65 | } 66 | 67 | int MapPoint::getObservationNum() const 68 | { 69 | shared_lock lock(mMapPointMutex); 70 | return mObservationNum; 71 | } 72 | 73 | -------------------------------------------------------------------------------- /src/MapPoint.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 3 | // 4 | 5 | #ifndef SRC_MAPPOINT_H 6 | #define SRC_MAPPOINT_H 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | using namespace std; 18 | class Frame; 19 | class MapPoint 20 | { 21 | public: 22 | MapPoint(const Eigen::Vector3d& p_w); 23 | MapPoint(const MapPoint& p); 24 | void AddObservation(shared_ptr pF); 25 | void RemoveObservation(int frame_id); 26 | static int mMapPointNumber; 27 | int mID; 28 | 29 | const Eigen::Vector3d &getPosition() const; 30 | 31 | void setPosition(const Eigen::Vector3d &mPw); 32 | 33 | const map> &getObservations() const; 34 | 35 | int getObservationNum() const; 36 | 37 | 38 | private: 39 | // map point position under world frame 40 | Eigen::Vector3d mPw; 41 | // observation frame_ID -> pixel_position 42 | map> mObservations; 43 | // how many time this landmark has been observed from frames 44 | int mObservationNum; 45 | 46 | mutable shared_mutex mMapPointMutex; 47 | 48 | }; 49 | 50 | 51 | #endif //SRC_MAPPOINT_H 52 | -------------------------------------------------------------------------------- /src/OptimizationType.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 
3 | // 4 | 5 | #include "OptimizationType.h" 6 | #include 7 | 8 | void EdgeSE3SonarDirect::computeError() 9 | { 10 | const VertexSE3Expmap* v = static_cast ( _vertices[0] ); 11 | Eigen::Vector3d x_local = v->estimate().map(x_world_); 12 | Eigen::Vector2d x_pixel = sonar3Dto2D(x_local, theta_, tx_, ty_, scale_); 13 | 14 | double x = x_pixel.x(); 15 | double y = x_pixel.y(); 16 | // if (IsMarginalPoint(x, y)) { 17 | // _error(0, 0) = 0.0; 18 | // this->setLevel(1); 19 | // return; 20 | // } 21 | 22 | Eigen::Vector2d delta(getPixelValue(x + 1, y) - getPixelValue(x - 1, y), 23 | getPixelValue(x, y + 1) - getPixelValue(x, y - 1)); 24 | 25 | if (delta.norm() < 20.0) { 26 | _error(0, 0) = 0.0; 27 | // this->setLevel(1); 28 | } 29 | else { 30 | double est = getPixelValue(x, y); 31 | _error(0, 0) = est - _measurement; 32 | } 33 | // check x,y is in the image 34 | // if (IsMarginalPoint(x, y, theta_, tx_, ty_, scale_)) { 35 | // _error(0, 0) = 0.0; 36 | // this->setLevel(1); 37 | // } 38 | // else if (delta.norm() < GRADIENT_THRESHOLD) { 39 | // _error(0, 0) = 0.0; 40 | // // this->setLevel(1); 41 | // } 42 | // else { 43 | // _measurement is the pixel intensity on the reference frame 44 | // getPixelValue(x, y) is the pixel intensity on the current frame, it will update 45 | // with current sonar pose 46 | 47 | // } 48 | 49 | } 50 | 51 | void EdgeSE3SonarDirect::linearizeOplus() 52 | { 53 | if (level() == 1) { 54 | _jacobianOplusXi = Eigen::Matrix::Zero(); 55 | return; 56 | } 57 | VertexSE3Expmap* vtx = static_cast ( _vertices[0] ); 58 | Eigen::Isometry3d T_si_w = vtx->estimate(); 59 | Eigen::Vector3d xyz_trans = vtx->estimate().map(x_world_); // q in book 60 | 61 | Eigen::Vector2d p_pixel = sonar3Dto2D(xyz_trans, theta_, tx_, ty_, scale_); 62 | double u = p_pixel.x(); 63 | double v = p_pixel.y(); 64 | 65 | // jacobian from se3 to u,v 66 | // NOTE that in g2o the Lie algebra is (\omega, \epsilon), where \omega is so(3) and \epsilon the translation 67 | 68 | // so(3) jacobian 69 | Eigen::Matrix jacobian_uv_pi; 70 | Eigen::Rotation2Dd R_pixel_sonar(theta_); 71 | jacobian_uv_pi = scale_ * R_pixel_sonar.matrix(); 72 | Eigen::Matrix jacobian_pi_tlocal; 73 | jacobian_pi_tlocal << 1, 0, 0, 0, 1, 0; 74 | Eigen::Matrix jacobian_tlocal_R_si_w; 75 | jacobian_tlocal_R_si_w = -skew_symetric_matrix(T_si_w.rotation() * x_world_); 76 | Eigen::Matrix jacobian_uv_R_si_w = 77 | jacobian_uv_pi * jacobian_pi_tlocal * jacobian_tlocal_R_si_w; 78 | 79 | Eigen::Matrix jacobian_tlocal_t_si_si_w = Eigen::Matrix::Identity(); 80 | Eigen::Matrix jacobian_uv_t_si_si_w = 81 | jacobian_uv_pi * jacobian_pi_tlocal * jacobian_tlocal_t_si_si_w; 82 | 83 | Eigen::Matrix jacobian_uv_ksai = Eigen::Matrix::Zero(); 84 | jacobian_uv_ksai.block<2, 3>(0, 0) = jacobian_uv_R_si_w; 85 | jacobian_uv_ksai.block<2, 3>(0, 3) = jacobian_uv_t_si_si_w; 86 | 87 | 88 | Eigen::Matrix jacobian_pixel_uv; 89 | 90 | jacobian_pixel_uv(0, 0) = (getPixelValue(u + 1, v) - getPixelValue(u - 1, v)) / 2; 91 | jacobian_pixel_uv(0, 1) = (getPixelValue(u, v + 1) - getPixelValue(u, v - 1)) / 2; 92 | 93 | Eigen::Matrix jacobian_intensity_xi = jacobian_pixel_uv * jacobian_uv_ksai; 94 | _jacobianOplusXi = jacobian_pixel_uv * jacobian_uv_ksai; 95 | } 96 | 97 | void EdgeSE3Sonar::computeError() 98 | { 99 | const VertexSonarPose* v0 = static_cast ( _vertices[0] ); 100 | const VertexSonarPoint* v1 = static_cast ( _vertices[1] ); 101 | Eigen::Vector3d p_world = v1->estimate(); 102 | Eigen::Isometry3d T_si_w = v0->estimate().inverse(); 103 | Eigen::Vector3d x_local = T_si_w * 
p_world; 104 | Eigen::Vector2d x_pixel_est = sonar3Dto2D(x_local, theta_, tx_, ty_, scale_); 105 | 106 | double x = x_pixel_est.x(); 107 | double y = x_pixel_est.y(); 108 | // Eigen::Vector2d delta(getPixelValue(x+1,y)-getPixelValue(x-1,y), 109 | // getPixelValue(x,y+1)-getPixelValue(x,y-1)); 110 | // check x,y is in the image 111 | // if (IsMarginalPoint(x, y, theta_, tx_, ty_, scale_)) { 112 | // _error(0, 0) = 0.0; 113 | // this->setLevel(1); 114 | // } 115 | // else if (delta.norm() < GRADIENT_THRESHOLD) { 116 | // _error(0, 0) = 0.0; 117 | // // this->setLevel(1); 118 | // } 119 | // else { 120 | // _measurement is the pixel intensity on the reference frame 121 | // getPixelValue(x, y) is the pixel intensity on the current frame, it will update 122 | // with current sonar pose 123 | 124 | // } 125 | Eigen::Vector2d err = x_pixel_ - x_pixel_est; 126 | _error << err; 127 | } 128 | 129 | // void EdgeSE3Sonar::linearizeOplus() 130 | // { 131 | // if (level() == 1) { 132 | // _jacobianOplusXi = Eigen::Matrix::Zero(); 133 | // return; 134 | // } 135 | // VertexSE3Expmap* v0 = static_cast ( _vertices[0] ); 136 | // VertexSBAPointXYZ* v1 = static_cast ( _vertices[1] ); 137 | // Eigen::Vector3d x_world = v1->estimate(); 138 | // Eigen::Isometry3d T_si_w = v0->estimate(); 139 | // Eigen::Vector3d xyz_trans = T_si_w * v1->estimate(); // q in book 140 | // 141 | // 142 | // // jacobian from se3 to u,v 143 | // // NOTE that in g2o the Lie algebra is (\omega, \epsilon), where \omega is so(3) and \epsilon the translation 144 | // 145 | // // so(3) jacobian 146 | // Eigen::Matrix jacobian_uv_pi; 147 | // Eigen::Rotation2Dd R_pixel_sonar(theta_); 148 | // jacobian_uv_pi = scale_ * R_pixel_sonar.matrix(); 149 | // Eigen::Matrix jacobian_pi_tlocal; 150 | // jacobian_pi_tlocal << 1, 0, 0, 0, 1, 0; 151 | // Eigen::Matrix jacobian_tlocal_R_si_w; 152 | // jacobian_tlocal_R_si_w = -skew_symetric_matrix(T_si_w.rotation() * x_world); 153 | // Eigen::Matrix jacobian_uv_R_si_w = 154 | // jacobian_uv_pi * jacobian_pi_tlocal * jacobian_tlocal_R_si_w; 155 | // 156 | // Eigen::Matrix jacobian_tlocal_t_si_si_w = Eigen::Matrix::Identity(); 157 | // Eigen::Matrix jacobian_uv_t_si_si_w = 158 | // jacobian_uv_pi * jacobian_pi_tlocal * jacobian_tlocal_t_si_si_w; 159 | // 160 | // Eigen::Matrix jacobian_uv_ksai = Eigen::Matrix::Zero(); 161 | // jacobian_uv_ksai.block<2, 3>(0, 0) = jacobian_uv_R_si_w; 162 | // jacobian_uv_ksai.block<2, 3>(0, 3) = jacobian_uv_t_si_si_w; 163 | // 164 | // 165 | // 166 | // _jacobianOplusXi = jacobian_uv_ksai; 167 | // } 168 | void EdgeSE3Odom::computeError() 169 | { 170 | const VertexSonarPose* v0 = static_cast ( _vertices[0] ); 171 | const VertexSonarPose* v1 = static_cast ( _vertices[1] ); 172 | 173 | Eigen::Isometry3d T_si_s0 = v0->estimate().inverse(); 174 | Eigen::Isometry3d T_sj_s0 = v1->estimate().inverse(); 175 | Eigen::Isometry3d T_s0_si = T_si_s0.inverse(); 176 | Eigen::Isometry3d T_s0_sj = T_sj_s0.inverse(); 177 | Eigen::Isometry3d T_si_sj_est = T_si_s0 * T_s0_sj; 178 | Eigen::Isometry3d T_err = T_si_sj_est * mT_si_sj.inverse(); 179 | Sophus::SO3d SO3_err(T_err.rotation()); 180 | Eigen::Vector3d so3_err = SO3_err.log(); 181 | // so3_err(0) = 0; 182 | // so3_err(1) = 0; 183 | Eigen::Vector3d t_err = T_err.translation(); 184 | // t_err(1) = 0; 185 | // t_err(2) = 0; 186 | _error << so3_err, t_err; 187 | 188 | } 189 | 190 | VertexSonarPose::VertexSonarPose(const Eigen::Isometry3d &T_si_s0) 191 | { 192 | _estimate = T_si_s0; 193 | } 194 | 195 | void VertexSonarPose::oplusImpl(const 
double* update_) 196 | { 197 | // update_ 3 rotation 3 translation 198 | Eigen::Matrix update_r; 199 | Eigen::Matrix update_t; 200 | update_r.setZero(); 201 | update_t.setZero(); 202 | update_r << 0, 0, update_[2]; 203 | update_t << update_[3], update_[4], 0; 204 | Sophus::SO3d update_SO3 = Sophus::SO3d::exp(update_r); 205 | 206 | Eigen::Isometry3d T_s0_si = _estimate; 207 | Eigen::Vector3d t_s0_si = T_s0_si.translation(); 208 | Eigen::Matrix3d R_s0_si = T_s0_si.rotation(); 209 | t_s0_si += R_s0_si*update_t; 210 | R_s0_si = R_s0_si * update_SO3.matrix(); 211 | 212 | T_s0_si.setIdentity(); 213 | T_s0_si.rotate(R_s0_si); 214 | T_s0_si.pretranslate(t_s0_si); 215 | _estimate = T_s0_si; 216 | 217 | 218 | } 219 | 220 | VertexSonarPoint::VertexSonarPoint(const Eigen::Vector3d &p) 221 | { 222 | _estimate = p; 223 | } 224 | 225 | void VertexSonarPoint::oplusImpl(const double* update_) 226 | { 227 | double x = update_[0], y = update_[1], z = update_[2]; 228 | _estimate(0) += x; 229 | _estimate(1) += y; 230 | // _estimate(2) += z; 231 | } 232 | -------------------------------------------------------------------------------- /src/OptimizationType.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 3 | // 4 | 5 | #ifndef SRC_OPTIMIZATIONTYPE_H 6 | #define SRC_OPTIMIZATIONTYPE_H 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | #include 18 | #include 19 | #include "opencv2/imgproc.hpp" 20 | #include "opencv2/imgcodecs.hpp" 21 | #include 22 | #include 23 | // #include 24 | #include 25 | 26 | 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | #include 34 | #include "Frame.h" 35 | 36 | using namespace std; 37 | using namespace g2o; 38 | 39 | 40 | class VertexSonarPose : public BaseVertex<6, Eigen::Isometry3d> 41 | { 42 | public: 43 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 44 | VertexSonarPose(){} 45 | VertexSonarPose(const Eigen::Isometry3d& T_si_s0); 46 | virtual bool read(std::istream &is){} 47 | virtual bool write(std::ostream &os) const{} 48 | virtual void setToOriginImpl() 49 | { 50 | _estimate.setIdentity(); 51 | } 52 | virtual void oplusImpl(const double *update_); 53 | }; 54 | class VertexSonarPoint : public BaseVertex<3,Eigen::Vector3d> 55 | { 56 | public: 57 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 58 | VertexSonarPoint(){} 59 | VertexSonarPoint(const Eigen::Vector3d& p); 60 | virtual bool read(std::istream &is){} 61 | virtual bool write(std::ostream &os) const{} 62 | virtual void setToOriginImpl() 63 | { 64 | _estimate.setZero(); 65 | } 66 | virtual void oplusImpl(const double *update_); 67 | }; 68 | 69 | 70 | class EdgeSE3Sonar : public BaseBinaryEdge<2, double, VertexSonarPose, VertexSonarPoint> 71 | { 72 | public: 73 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 74 | 75 | EdgeSE3Sonar() {} 76 | 77 | EdgeSE3Sonar(const Eigen::Vector2d &p_pixel, double theta, double tx, double ty, double scale) 78 | : x_pixel_(p_pixel), theta_(theta), tx_(tx), ty_(ty), scale_(scale) {} 79 | 80 | virtual void computeError(); 81 | 82 | // plus in manifold 83 | // virtual void linearizeOplus(); 84 | 85 | // dummy read and write functions because we don't care... 
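// EdgeSE3Sonar is a binary edge between a VertexSonarPose (the observing frame) and a
// VertexSonarPoint (the landmark): computeError() projects the landmark into the frame
// with sonar3Dto2D and returns the 2-vector x_pixel_ - x_pixel_est. The read/write
// overrides below are serialization hooks that g2o requires; they are deliberately left
// as empty stubs (note they return no value, so a strictly conforming stub would end
// with "return false;").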
86 | virtual bool read(std::istream &in) {} 87 | 88 | virtual bool write(std::ostream &out) const {} 89 | 90 | public: 91 | Eigen::Vector2d x_pixel_; 92 | double theta_; 93 | double tx_; 94 | double ty_; 95 | double scale_; 96 | 97 | protected: 98 | inline Eigen::Matrix3d skew_symetric_matrix(const Eigen::Vector3d &v) 99 | { 100 | Eigen::Matrix3d S; // Skew-symmetric matrix 101 | S << 0, -v(2), v(1), v(2), 0, -v(0), -v(1), v(0), 0; 102 | return S; 103 | } 104 | }; 105 | 106 | class EdgeSE3Odom : public BaseBinaryEdge<6, double, VertexSonarPose, VertexSonarPose> 107 | { 108 | public: 109 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 110 | 111 | EdgeSE3Odom() {} 112 | 113 | EdgeSE3Odom(Eigen::Isometry3d &T_si_sj) : mT_si_sj(T_si_sj) {} 114 | 115 | virtual void computeError(); 116 | 117 | // plus in manifold 118 | // virtual void linearizeOplus(); 119 | 120 | // dummy read and write functions because we don't care... 121 | virtual bool read(std::istream &in) {} 122 | 123 | virtual bool write(std::ostream &out) const {} 124 | 125 | public: 126 | Eigen::Isometry3d mT_si_sj; 127 | }; 128 | 129 | class EdgeSE3SonarDirect : public BaseUnaryEdge<1, double, VertexSE3Expmap> 130 | { 131 | public: 132 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 133 | 134 | EdgeSE3SonarDirect() {} 135 | 136 | EdgeSE3SonarDirect(const Eigen::Vector3d &point, double theta, double tx, double ty, double scale, 137 | double range, double fov, cv::Mat image) : x_world_(point), theta_(theta), 138 | tx_(tx), ty_(ty), scale_(scale), 139 | range_(range), fov_(fov), 140 | image_(image.clone()) {} 141 | 142 | virtual void computeError(); 143 | 144 | // plus in manifold 145 | virtual void linearizeOplus(); 146 | 147 | // dummy read and write functions because we don't care... 148 | virtual bool read(std::istream &in) {} 149 | 150 | virtual bool write(std::ostream &out) const {} 151 | 152 | protected: 153 | bool IsMarginalPoint(double x, double y) 154 | { 155 | Eigen::Vector3d p_3d = sonar2Dto3D(Eigen::Vector2d(x, y), theta_, tx_, ty_, scale_); 156 | //cartesian to polar 157 | double r = sqrt(p_3d.x() * p_3d.x() + p_3d.y() * p_3d.y()); 158 | if ((r < 0.3) || (r > range_ - 0.3)) { 159 | return true; 160 | } 161 | else if ((theta_ < -0.5 * fov_ + 0.05) || (theta_ > 0.5 * fov_ - 0.05)) { 162 | return true; 163 | } 164 | else { 165 | return false; 166 | } 167 | } 168 | 169 | inline double getSinlePixelValue(double x, double y) 170 | { 171 | if (x < 0 || x >= image_.cols - 1 || y < 0 || y >= image_.rows - 1) { 172 | // Handle the out-of-bounds case. Here, we simply return 0, but you can adjust as needed. 
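// For samples inside the image (the guard keeps a one-pixel border at the right and
// bottom so the +1 neighbours stay valid) the value computed after this early return
// is the standard bilinear blend of the four surrounding pixels, with
// dx = x - floor(x), dy = y - floor(y):
//
//   I(x, y) ~= (1 - dx)(1 - dy) I(x0, y0)     + dx (1 - dy) I(x0 + 1, y0)
//            + (1 - dx) dy      I(x0, y0 + 1) + dx dy       I(x0 + 1, y0 + 1)
//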
173 | return 0.0; 174 | } 175 | 176 | uchar* data = &image_.data[int(y) * image_.step + int(x)]; 177 | double xx = x - floor(x); 178 | double yy = y - floor(y); 179 | return double((1 - xx) * (1 - yy) * data[0] + xx * (1 - yy) * data[1] + 180 | (1 - xx) * yy * data[image_.step] + xx * yy * data[image_.step + 1]); 181 | } 182 | 183 | inline double getPatternPixelValue(double x, double y) 184 | { 185 | // major sample 186 | // {0,0} 187 | double x1 = x + 0; 188 | double y1 = y + 0; 189 | //minor sample 190 | //{0,-2} 191 | double x2 = x + 0; 192 | double y2 = y - 2; 193 | //{-1,-1} 194 | double x3 = x - 1; 195 | double y3 = y - 1; 196 | //{1,-1} 197 | double x4 = x + 1; 198 | double y4 = y - 1; 199 | //{-2,0} 200 | double x5 = x - 2; 201 | double y5 = y + 0; 202 | //{2,0} 203 | double x6 = x + 2; 204 | double y6 = y + 0; 205 | //{-1,1} 206 | double x7 = x - 1; 207 | double y7 = y + 1; 208 | //{1,1} 209 | double x8 = x + 1; 210 | double y8 = y + 1; 211 | //{0,2} 212 | double x9 = x + 0; 213 | double y9 = y + 2; 214 | // get the value of the pixel 215 | double v1 = getSinlePixelValue(x1, y1); 216 | double v2 = getSinlePixelValue(x2, y2); 217 | double v3 = getSinlePixelValue(x3, y3); 218 | double v4 = getSinlePixelValue(x4, y4); 219 | double v5 = getSinlePixelValue(x5, y5); 220 | double v6 = getSinlePixelValue(x6, y6); 221 | double v7 = getSinlePixelValue(x7, y7); 222 | double v8 = getSinlePixelValue(x8, y8); 223 | double v9 = getSinlePixelValue(x9, y9); 224 | // weighted bilinear interpolation 225 | double value = v1 * (0.5) + (v2 + v3 + v4 + v5 + v6 + v7 + v8 + v9) * (0.5 / 8); 226 | 227 | return value; 228 | } 229 | 230 | inline double getPixelValue(double x, double y) 231 | { 232 | // return getSinlePixelValue(x, y); 233 | return getPatternPixelValue(x, y); 234 | } 235 | 236 | inline Eigen::Matrix3d skew_symetric_matrix(const Eigen::Vector3d &v) 237 | { 238 | Eigen::Matrix3d S; // Skew-symmetric matrix 239 | S << 0, -v(2), v(1), v(2), 0, -v(0), -v(1), v(0), 0; 240 | return S; 241 | } 242 | 243 | public: 244 | Eigen::Vector3d x_world_; // 3D point in world frame 245 | double theta_, tx_, ty_, scale_, range_, fov_; // Camera intrinsics 246 | cv::Mat image_; // reference image 247 | }; 248 | 249 | #endif //SRC_OPTIMIZATIONTYPE_H 250 | -------------------------------------------------------------------------------- /src/RepubGT.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | Eigen::Isometry3d T_bw_b0_GT; 9 | bool isGTInit = false; 10 | nav_msgs::Path mPath; 11 | ros::Publisher mGTPub; 12 | ros::Publisher mGTPosePub; 13 | ros::Publisher mGTPathPub; 14 | 15 | Eigen::Isometry3d T_bw_b0_odom; 16 | bool isOdomInit = false; 17 | nav_msgs::Path mPathOdom; 18 | ros::Publisher mOdomPub; 19 | ros::Publisher mOdomPosePub; 20 | ros::Publisher mOdomPathPub; 21 | 22 | void GTCallback(const nav_msgs::Odometry::ConstPtr &msg) 23 | { 24 | Eigen::Vector3d t(msg->pose.pose.position.x, msg->pose.pose.position.y, msg->pose.pose.position.z); 25 | Eigen::Quaterniond q(msg->pose.pose.orientation.w, msg->pose.pose.orientation.x, 26 | msg->pose.pose.orientation.y, msg->pose.pose.orientation.z); 27 | Eigen::Isometry3d T_bw_bj; 28 | T_bw_bj.setIdentity(); 29 | T_bw_bj.rotate(q); 30 | T_bw_bj.pretranslate(t); 31 | if (!isGTInit) { 32 | T_bw_b0_GT = T_bw_bj; 33 | isGTInit = true; 34 | return; 35 | } 36 | Eigen::Isometry3d T_b0_bj = T_bw_b0_GT.inverse() * T_bw_bj; 37 | t = T_b0_bj.translation(); 38 | q 
= Eigen::Quaterniond(T_b0_bj.rotation()); 39 | nav_msgs::Odometry gt_odom_msg; 40 | gt_odom_msg.header = msg->header; 41 | gt_odom_msg.header.frame_id = "map"; 42 | gt_odom_msg.child_frame_id = "base_link"; 43 | gt_odom_msg.pose.pose.position.x = t.x(); 44 | gt_odom_msg.pose.pose.position.y = t.y(); 45 | gt_odom_msg.pose.pose.position.z = t.z(); 46 | gt_odom_msg.pose.pose.orientation.x = q.x(); 47 | gt_odom_msg.pose.pose.orientation.y = q.y(); 48 | gt_odom_msg.pose.pose.orientation.z = q.z(); 49 | gt_odom_msg.pose.pose.orientation.w = q.w(); 50 | mGTPub.publish(gt_odom_msg); 51 | 52 | geometry_msgs::PoseStamped pose; 53 | pose.header = gt_odom_msg.header; 54 | pose.pose = gt_odom_msg.pose.pose; 55 | mGTPosePub.publish(pose); 56 | 57 | mPath.poses.push_back(pose); 58 | mGTPathPub.publish(mPath); 59 | } 60 | 61 | void GTPoseCallback(const geometry_msgs::PoseStamped::ConstPtr &msg) 62 | { 63 | Eigen::Vector3d t(msg->pose.position.y, msg->pose.position.x, msg->pose.position.z); 64 | Eigen::Quaterniond q(msg->pose.orientation.w, msg->pose.orientation.x, 65 | msg->pose.orientation.y, msg->pose.orientation.z); 66 | Eigen::Isometry3d T_bw_bj; 67 | T_bw_bj.setIdentity(); 68 | T_bw_bj.rotate(q); 69 | T_bw_bj.pretranslate(t); 70 | if (!isGTInit) { 71 | T_bw_b0_GT = T_bw_bj; 72 | isGTInit = true; 73 | return; 74 | } 75 | Eigen::Isometry3d T_b0_bj = T_bw_b0_GT.inverse() * T_bw_bj; 76 | t = T_b0_bj.translation(); 77 | q = Eigen::Quaterniond(T_b0_bj.rotation()); 78 | nav_msgs::Odometry gt_odom_msg; 79 | gt_odom_msg.header = msg->header; 80 | gt_odom_msg.header.frame_id = "map"; 81 | gt_odom_msg.child_frame_id = "base_link"; 82 | gt_odom_msg.pose.pose.position.x = t.x(); 83 | gt_odom_msg.pose.pose.position.y = t.y(); 84 | gt_odom_msg.pose.pose.position.z = t.z(); 85 | gt_odom_msg.pose.pose.orientation.x = q.x(); 86 | gt_odom_msg.pose.pose.orientation.y = q.y(); 87 | gt_odom_msg.pose.pose.orientation.z = q.z(); 88 | gt_odom_msg.pose.pose.orientation.w = q.w(); 89 | mGTPub.publish(gt_odom_msg); 90 | 91 | geometry_msgs::PoseStamped pose; 92 | pose.header = gt_odom_msg.header; 93 | pose.pose = gt_odom_msg.pose.pose; 94 | mGTPosePub.publish(pose); 95 | 96 | mPath.poses.push_back(pose); 97 | mGTPathPub.publish(mPath); 98 | } 99 | 100 | void OdomCallback(const nav_msgs::Odometry::ConstPtr &msg) 101 | { 102 | Eigen::Vector3d t(msg->pose.pose.position.x, msg->pose.pose.position.y, msg->pose.pose.position.z); 103 | Eigen::Quaterniond q(msg->pose.pose.orientation.w, msg->pose.pose.orientation.x, 104 | msg->pose.pose.orientation.y, msg->pose.pose.orientation.z); 105 | Eigen::Isometry3d T_bw_bj; 106 | T_bw_bj.setIdentity(); 107 | T_bw_bj.rotate(q); 108 | T_bw_bj.pretranslate(t); 109 | if (!isOdomInit) { 110 | T_bw_b0_odom = T_bw_bj; 111 | isOdomInit = true; 112 | return; 113 | } 114 | Eigen::Isometry3d T_b0_bj = T_bw_b0_odom.inverse() * T_bw_bj; 115 | t = T_b0_bj.translation(); 116 | q = Eigen::Quaterniond(T_b0_bj.rotation()); 117 | nav_msgs::Odometry odom_msg; 118 | odom_msg.header = msg->header; 119 | odom_msg.header.frame_id = "map"; 120 | odom_msg.child_frame_id = "base_link"; 121 | odom_msg.pose.pose.position.x = t.x(); 122 | odom_msg.pose.pose.position.y = t.y(); 123 | odom_msg.pose.pose.position.z = t.z(); 124 | odom_msg.pose.pose.orientation.x = q.x(); 125 | odom_msg.pose.pose.orientation.y = q.y(); 126 | odom_msg.pose.pose.orientation.z = q.z(); 127 | odom_msg.pose.pose.orientation.w = q.w(); 128 | mOdomPub.publish(odom_msg); 129 | 130 | geometry_msgs::PoseStamped pose; 131 | pose.header = odom_msg.header; 
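// Republish the rebased pose (expressed relative to the first received odometry sample)
// as a PoseStamped and append it to the accumulated nav_msgs::Path so both can be drawn
// in the "map" frame in RViz.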
132 | pose.pose = odom_msg.pose.pose; 133 | mOdomPosePub.publish(pose); 134 | 135 | mPathOdom.poses.push_back(pose); 136 | mOdomPathPub.publish(mPathOdom); 137 | } 138 | 139 | void Odom2Callback(const geometry_msgs::PoseStamped::ConstPtr &msg) 140 | { 141 | Eigen::Vector3d t(msg->pose.position.y, msg->pose.position.x, msg->pose.position.z); 142 | Eigen::Quaterniond q(msg->pose.orientation.w, msg->pose.orientation.x, 143 | msg->pose.orientation.y, msg->pose.orientation.z); 144 | Eigen::Isometry3d T_bw_bj; 145 | T_bw_bj.setIdentity(); 146 | T_bw_bj.rotate(q); 147 | T_bw_bj.pretranslate(t); 148 | if (!isOdomInit) { 149 | T_bw_b0_odom = T_bw_bj; 150 | isOdomInit = true; 151 | return; 152 | } 153 | Eigen::Isometry3d T_b0_bj = T_bw_b0_odom.inverse() * T_bw_bj; 154 | t = T_b0_bj.translation(); 155 | q = Eigen::Quaterniond(T_b0_bj.rotation()); 156 | nav_msgs::Odometry odom_msg; 157 | odom_msg.header = msg->header; 158 | odom_msg.header.frame_id = "map"; 159 | odom_msg.child_frame_id = "base_link"; 160 | odom_msg.pose.pose.position.x = t.x(); 161 | odom_msg.pose.pose.position.y = t.y(); 162 | odom_msg.pose.pose.position.z = t.z(); 163 | odom_msg.pose.pose.orientation.x = q.x(); 164 | odom_msg.pose.pose.orientation.y = q.y(); 165 | odom_msg.pose.pose.orientation.z = q.z(); 166 | odom_msg.pose.pose.orientation.w = q.w(); 167 | mOdomPub.publish(odom_msg); 168 | 169 | geometry_msgs::PoseStamped pose; 170 | pose.header = odom_msg.header; 171 | pose.pose = odom_msg.pose.pose; 172 | mOdomPosePub.publish(pose); 173 | 174 | mPathOdom.poses.push_back(pose); 175 | mOdomPathPub.publish(mPathOdom); 176 | } 177 | 178 | int main(int argc, char** argv) 179 | { 180 | // Initialize the ROS node 181 | ros::init(argc, argv, "odom_subscriber_node"); 182 | ros::NodeHandle nh; 183 | 184 | // Create a subscriber for the odometry topic 185 | ros::Subscriber odom_sub = nh.subscribe("/rexrov/pose_gt", 10, GTCallback); 186 | ros::Subscriber gt_pose_sub = nh.subscribe("/pose_gt", 10, GTPoseCallback); 187 | 188 | mGTPub = nh.advertise("/gt_repub/gt_odom", 10); 189 | mGTPathPub = nh.advertise("/gt_repub/gt_path", 10); 190 | mGTPosePub = nh.advertise("/gt_repub/gt_pose", 10); 191 | 192 | // Create a subscriber for the odometry topic 193 | ros::Subscriber odom_sub_odom = nh.subscribe("/odometry/filtered", 10, OdomCallback); 194 | ros::Subscriber odom_sub_odom2 = nh.subscribe("/odom_pose", 10, Odom2Callback); 195 | 196 | mOdomPub = nh.advertise("/gt_repub/odom", 10); 197 | mOdomPathPub = nh.advertise("/gt_repub/odom_path", 10); 198 | mOdomPosePub = nh.advertise("/gt_repub/odom_pose", 10); 199 | 200 | mPath.header.stamp = ros::Time::now(); 201 | mPath.header.frame_id = "map"; 202 | mPathOdom.header = mPath.header; 203 | //init pose 204 | T_bw_b0_GT.setIdentity(); 205 | T_bw_b0_odom.setIdentity(); 206 | 207 | // Spin to allow the callback function to run when new messages arrive 208 | ros::spin(); 209 | 210 | return 0; 211 | } 212 | 213 | -------------------------------------------------------------------------------- /src/System.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 
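//
// System wires the pipeline together: the constructor parses the YAML settings
// (SonarTopic, OdomTopic, Range, GradientThreshold, PyramidLayer, FOV, LossThreshold,
// GradientInlierThreshold and the 4x4 body-to-sonar extrinsic Tbs), builds the Track
// front end and the LocalMapping back end (the latter on its own thread), and
// advertises the pose, odometry and point-cloud publishers. A minimal settings sketch
// with purely illustrative values (the shipped files are config/config_sim.yaml,
// config_bruce.yaml and config_aracati2017.yaml):
//
//   SonarTopic: "/sonar_image"
//   OdomTopic: "/odom"
//   Range: 10.0                 # metres
//   FOV: 90.0                   # degrees, converted to radians below
//   PyramidLayer: 3
//   GradientThreshold: 100.0
//   GradientInlierThreshold: 20.0
//   LossThreshold: 100
//   Tbs: !!opencv-matrix        # read with cv::FileNode::mat(), must be 4x4
//     rows: 4
//     cols: 4
//     dt: d
//     data: [1.,0.,0.,0., 0.,1.,0.,0., 0.,0.,1.,0., 0.,0.,0.,1.]
//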
3 | // 4 | 5 | #include "System.h" 6 | #include "Frame.h" 7 | #include "Track.h" 8 | #include "LocalMapping.h" 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | System::System(const string &strSettingFile) 20 | { 21 | cv::FileStorage fsSettings(strSettingFile.c_str(), cv::FileStorage::READ); 22 | if (!fsSettings.isOpened()) { 23 | ROS_ERROR_STREAM("Failed to open settings file at: " << strSettingFile << endl); 24 | exit(-1); 25 | } 26 | 27 | mSonarTopic = (string) fsSettings["SonarTopic"]; 28 | mOdomTopic = (string) fsSettings["OdomTopic"]; 29 | mRange = (double) fsSettings["Range"]; 30 | mGradientThreshold = (double) fsSettings["GradientThreshold"]; 31 | mPyramidLayer = (int) fsSettings["PyramidLayer"]; 32 | mFOV = ((double) fsSettings["FOV"] / 180) * M_PI; 33 | int loss_threshold = (int) fsSettings["LossThreshold"]; 34 | double gradient_inlier_threshold = (double) fsSettings["GradientInlierThreshold"]; 35 | cv::FileNode node = fsSettings["Tbs"]; 36 | cv::Mat Tbs; 37 | mT_b_s = Eigen::Isometry3d::Identity(); 38 | if (!node.empty()) { 39 | Tbs = node.mat(); 40 | if (Tbs.rows != 4 || Tbs.cols != 4) { 41 | ROS_ERROR_STREAM("Tbs matrix have to be a 4x4 transformation matrix"); 42 | exit(-1); 43 | } 44 | } 45 | else { 46 | ROS_ERROR_STREAM("Tbs matrix doesn't exist"); 47 | exit(-1); 48 | } 49 | cv::cv2eigen(Tbs, mT_b_s.matrix()); 50 | 51 | ROS_INFO_STREAM("SonarTopic: " << mSonarTopic << endl); 52 | ROS_INFO_STREAM("OdomTopic: " << mOdomTopic << endl); 53 | ROS_INFO_STREAM("Range: " << mRange << endl); 54 | ROS_INFO_STREAM("GradientThreshold: " << mGradientThreshold << endl); 55 | ROS_INFO_STREAM("PyramidLayer: " << mPyramidLayer << endl); 56 | ROS_INFO_STREAM("FOV: " << 180 * mFOV / M_PI << endl); 57 | ROS_INFO_STREAM("LossThreshold: " << loss_threshold << endl); 58 | ROS_INFO_STREAM("GradientInlierThreshold: " << gradient_inlier_threshold << endl); 59 | ROS_INFO_STREAM("Tbs: \n" << fixed << setprecision(9) << Tbs << endl); 60 | 61 | 62 | mpTracker = make_shared(mRange, mFOV, mPyramidLayer, loss_threshold, 63 | gradient_inlier_threshold, mT_b_s); 64 | shared_ptr track_state = make_shared(mpTracker); 65 | mpTracker->SetState(track_state); 66 | 67 | mpLocalMaper = make_shared(mpTracker); 68 | mpTracker->SetLocalMaper(mpLocalMaper); 69 | 70 | //start new thread 71 | mptLocalMaper = make_shared(&LocalMapping::Run, mpLocalMaper); 72 | 73 | mT_bw_b0 = Eigen::Isometry3d::Identity(); 74 | // mOdom_Path.reserve(10000); 75 | 76 | ros::NodeHandle nh; 77 | mSonarPosePub = nh.advertise("/direct_sonar/pose_draw", 10); 78 | mOdomPub = nh.advertise("/direct_sonar/odom_test", 1000); 79 | 80 | 81 | mPointCloudPub = nh.advertise("/direct_sonar/point_cloud", 1); 82 | 83 | 84 | } 85 | 86 | void System::frameLoad(const sensor_msgs::ImageConstPtr &image_msg, 87 | const geometry_msgs::PoseStamped::ConstPtr &odom_msg) 88 | { 89 | Eigen::Vector3d t(odom_msg->pose.position.y, odom_msg->pose.position.x, odom_msg->pose.position.z); 90 | Eigen::Quaterniond q(odom_msg->pose.orientation.w, odom_msg->pose.orientation.x, 91 | odom_msg->pose.orientation.y, odom_msg->pose.orientation.z); 92 | double time = image_msg->header.stamp.toSec(); 93 | Eigen::Isometry3d I = Eigen::Isometry3d::Identity(); 94 | if (mT_bw_b0.matrix() == I.matrix()) { 95 | mT_bw_b0.setIdentity(); 96 | mT_bw_b0.rotate(q); 97 | mT_bw_b0.pretranslate(t); 98 | mpLocalMaper->mT_bw_b0 = mT_bw_b0; 99 | // return; 100 | } 101 | Eigen::Isometry3d T_bw_bi = 
Eigen::Isometry3d::Identity(); 102 | Eigen::AngleAxisd a(q); 103 | T_bw_bi.rotate(q); 104 | T_bw_bi.pretranslate(t); 105 | Eigen::Isometry3d T_b0_bi = mT_bw_b0.inverse() * T_bw_bi; 106 | mOdom_Path.insert(make_pair(time, T_b0_bi)); 107 | Save(); 108 | 109 | cv::Mat img = cv_bridge::toCvShare(image_msg, "bgr8")->image; 110 | cv::Mat down; 111 | cv::pyrDown(img, down, cv::Size(img.cols / 2, img.rows / 2)); 112 | // Frame f(down, image_msg->header.stamp.toSec(), mRange, mFOV, mPyramidLayer, mGradientThreshold); 113 | Frame f(down, image_msg->header.stamp.toSec(), mRange, mFOV, T_b0_bi, mPyramidLayer, 114 | mGradientThreshold); 115 | ExtracPointCloud(down, time, f.mTheta, f.mTx, f.mTy, f.mScale); 116 | nav_msgs::Odometry odom_test; 117 | q = Eigen::Quaterniond(T_b0_bi.rotation()); 118 | t = T_b0_bi.translation(); 119 | odom_test.header.stamp = image_msg->header.stamp; 120 | odom_test.header.frame_id = "odom"; 121 | odom_test.child_frame_id = "base_link"; 122 | odom_test.pose.pose.position.x = t.x(); 123 | odom_test.pose.pose.position.y = t.y(); 124 | odom_test.pose.pose.position.z = t.z(); 125 | odom_test.pose.pose.orientation.x = q.x(); 126 | odom_test.pose.pose.orientation.y = q.y(); 127 | odom_test.pose.pose.orientation.z = q.z(); 128 | odom_test.pose.pose.orientation.w = q.w(); 129 | mOdomPub.publish(odom_test); 130 | BroadcastTF(T_b0_bi,image_msg->header.stamp,"odom","base_link"); 131 | // mTracker->TrackFrame2Frame(f); 132 | Eigen::Isometry3d T_s0_si = mpTracker->TrackFrame(f); 133 | 134 | Eigen::Isometry3d T_bw_bi_sonar = mT_bw_b0 * mT_b_s * T_s0_si * mT_b_s.inverse(); 135 | 136 | geometry_msgs::PoseStamped pose; 137 | pose.header.stamp = ros::Time::now(); 138 | pose.header.frame_id = "map"; 139 | pose.pose.position.x = T_bw_bi_sonar.translation().x(); 140 | pose.pose.position.y = T_bw_bi_sonar.translation().y(); 141 | pose.pose.position.z = T_bw_bi_sonar.translation().z(); 142 | q= Eigen::Quaterniond(T_bw_bi_sonar.rotation()); 143 | pose.pose.orientation.x = q.x(); 144 | pose.pose.orientation.y = q.y(); 145 | pose.pose.orientation.z = q.z(); 146 | pose.pose.orientation.w = q.w(); 147 | mSonarPosePub.publish(pose); 148 | 149 | } 150 | 151 | void System::runRos() 152 | { 153 | ros::NodeHandle nh; 154 | image_transport::ImageTransport it(nh); 155 | image_transport::SubscriberFilter image_sub(it, mSonarTopic, 100, 156 | image_transport::TransportHints("compressed")); 157 | message_filters::Subscriber odom_sub(nh, mOdomTopic, 100); 158 | typedef message_filters::sync_policies::ApproximateTime MySyncPolicy; 159 | message_filters::Synchronizer sync(MySyncPolicy(10), image_sub, odom_sub); 160 | sync.registerCallback(boost::bind(&System::frameLoad, this, _1, _2)); 161 | 162 | 163 | 164 | // image_transport::SubscriberFilter image_sub2(it, mSonarTopic, 100); 165 | // message_filters::Subscriber odom_sub2(nh, mOdomTopic, 100); 166 | // typedef message_filters::sync_policies::ApproximateTime MySyncPolicy2; 167 | // message_filters::Synchronizer sync2(MySyncPolicy2(10), image_sub2, odom_sub2); 168 | // sync2.registerCallback(boost::bind(&System::frameLoad2, this, _1, _2)); 169 | ros::spin(); 170 | } 171 | 172 | void System::frameLoad2(const sensor_msgs::ImageConstPtr &image_msg, 173 | const nav_msgs::Odometry_>::ConstPtr &odom_msg) 174 | { 175 | cv::Mat img = cv_bridge::toCvShare(image_msg, "bgr8")->image; 176 | Eigen::Vector3d t(odom_msg->pose.pose.position.x, odom_msg->pose.pose.position.y, 177 | odom_msg->pose.pose.position.z); 178 | Eigen::Quaterniond q(odom_msg->pose.pose.orientation.w, 
odom_msg->pose.pose.orientation.x, 179 | odom_msg->pose.pose.orientation.y, odom_msg->pose.pose.orientation.z); 180 | double time = image_msg->header.stamp.toSec(); 181 | Eigen::Isometry3d I = Eigen::Isometry3d::Identity(); 182 | if (mT_bw_b0.matrix() == I.matrix()) { 183 | mT_bw_b0.setIdentity(); 184 | mT_bw_b0.rotate(q); 185 | mT_bw_b0.pretranslate(t); 186 | // return; 187 | } 188 | Eigen::Isometry3d T_bw_bi = Eigen::Isometry3d::Identity(); 189 | Eigen::AngleAxisd a(q); 190 | T_bw_bi.rotate(q); 191 | T_bw_bi.pretranslate(t); 192 | Eigen::Isometry3d T_b0_bi = mT_bw_b0.inverse() * T_bw_bi; 193 | mOdom_Path.insert(make_pair(time, T_b0_bi)); 194 | Save(); 195 | Frame f(img, image_msg->header.stamp.toSec(), mRange, mFOV, mPyramidLayer, mGradientThreshold); 196 | // mTracker->TrackFrame2Frame(f); 197 | mpTracker->TrackFrame(f); 198 | } 199 | 200 | void System::Save() 201 | { 202 | std::ofstream outfile; 203 | outfile.open( 204 | "/home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/evaluation/stamped_groundtruth_gt.txt", 205 | std::ios_base::out); 206 | outfile << "#timestamp tx ty tz qx qy qz qw" << endl; 207 | for (auto time_pose: mOdom_Path) { 208 | Eigen::Isometry3d T_b0_bi = time_pose.second; 209 | Eigen::Isometry3d T_s0_si = mT_b_s.inverse() * T_b0_bi * mT_b_s; 210 | // Eigen::Vector3d t = T_s0_si.translation(); 211 | // Eigen::Quaterniond q(T_s0_si.rotation()); 212 | Eigen::Vector3d t = T_b0_bi.translation(); 213 | Eigen::Quaterniond q(T_b0_bi.rotation()); 214 | outfile << fixed << setprecision(12) << time_pose.first << " " << t.x() << " " << t.y() << " " << t.z() 215 | << " " << q.x() << " " << q.y() << " " << q.z() << " " << q.w() << endl; 216 | } 217 | outfile.close(); 218 | 219 | } 220 | 221 | void System::ExtracPointCloud(const cv::Mat &img, double timestamp, double theta, double tx, double ty, 222 | double scale) 223 | { 224 | cv::Mat gray = img.clone(); 225 | cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY); 226 | // Set the intensity threshold 227 | int threshold = 200; // Change this as required 228 | 229 | // To store points whose intensity is greater than threshold 230 | std::vector points; 231 | 232 | // Find points that are greater than threshold 233 | cv::Mat mask = gray > threshold; // This creates a binary mask 234 | cv::findNonZero(mask, points); 235 | 236 | // pcl::PointCloud cloud; 237 | // for (auto p: points) { 238 | // Eigen::Vector2d p_pixel(p.x, p.y); 239 | // Eigen::Vector3d p_3d = sonar2Dto3D(p_pixel, theta, tx, ty, scale); 240 | // pcl::PointXYZ point; 241 | // point.x = p_3d(0); 242 | // point.y = p_3d(1); 243 | // point.z = p_3d(2); 244 | // 245 | // cloud.points.push_back(point); 246 | // 247 | // } 248 | // ROS_INFO_STREAM("PointCloud Size: "< 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | 28 | using namespace std; 29 | 30 | class Track; 31 | class LocalMapping; 32 | class System 33 | { 34 | public: 35 | System(const string &strSettingFile); 36 | 37 | void frameLoad(const sensor_msgs::ImageConstPtr &image_msg, 38 | const geometry_msgs::PoseStamped::ConstPtr &odom_msg); 39 | 40 | void frameLoad2(const sensor_msgs::ImageConstPtr &image_msg, 41 | const nav_msgs::Odometry::ConstPtr &odom_msg); 42 | 43 | void ExtracPointCloud(const cv::Mat &img, double timestamp, double theta, double tx, double ty, 44 | double scale); 45 | void BroadcastTF(const 
Eigen::Isometry3d &T_c0_cj_orb, 46 | const ros::Time &stamp, 47 | const string &id, 48 | const string &child_id); 49 | void Save(); 50 | 51 | void runRos(); 52 | 53 | private: 54 | string mSonarTopic; 55 | string mOdomTopic; 56 | double mRange; 57 | double mGradientThreshold; 58 | int mPyramidLayer; 59 | double mFOV; 60 | 61 | //odometry initial pose 62 | Eigen::Isometry3d mT_bw_b0; 63 | //extrinsic parameter sonar to body 64 | Eigen::Isometry3d mT_b_s; 65 | map mOdom_Path; 66 | shared_ptr mpTracker; 67 | shared_ptr mpLocalMaper; 68 | shared_ptr mptLocalMaper; 69 | ros::Publisher mSonarPosePub; 70 | 71 | ros::Publisher mOdomPub; 72 | ros::Publisher mPointCloudPub; 73 | tf::TransformBroadcaster m_tb; 74 | // ros::Publisher mOdomPub; 75 | }; 76 | 77 | 78 | #endif //SRC_SYSTEM_H 79 | -------------------------------------------------------------------------------- /src/Track.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 3 | // 4 | 5 | #ifndef SRC_TRACK_H 6 | #define SRC_TRACK_H 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include "nanoflann.hpp" 17 | #include 18 | #include 19 | 20 | using namespace std; 21 | using namespace nanoflann; 22 | 23 | class Frame; 24 | 25 | class MapPoint; 26 | 27 | class Track; 28 | 29 | class LocalMapping; 30 | 31 | class TrackState{ 32 | protected: 33 | shared_ptr mpTracker; 34 | public: 35 | TrackState(shared_ptr pTracker):mpTracker(pTracker){}; 36 | virtual void TrackFrame(const Frame& f) = 0; 37 | }; 38 | 39 | class TrackToUpdate : public TrackState{ 40 | public: 41 | TrackToUpdate(shared_ptr pTracker):TrackState(pTracker){}; 42 | void TrackFrame(const Frame &f) override; 43 | }; 44 | class TrackUpToDate : public TrackState{ 45 | public: 46 | TrackUpToDate(shared_ptr pTracker):TrackState(pTracker){}; 47 | void TrackFrame(const Frame &f) override; 48 | }; 49 | 50 | class Track : public enable_shared_from_this 51 | { 52 | public: 53 | Track(double range, double fov, int pyramid_layer, int loss_threshold, double gradient_threshold, 54 | Eigen::Isometry3d& T_b_s); 55 | 56 | shared_ptr mpCurrentFrame; 57 | shared_ptr mpLastFrame; 58 | 59 | void TrackFrame2Frame(const Frame &f); 60 | 61 | Eigen::Isometry3d TrackFrame(const Frame &f); 62 | 63 | void TrackFromLastFrame(const Frame &f); 64 | 65 | void TrackFromWindow(const Frame &f); 66 | 67 | void PredictCurrentPose(shared_ptr f_pre, shared_ptr f_cur); 68 | 69 | // void OptimizeWindow(); 70 | 71 | bool PoseEstimationFrame2Frame(cv::Mat &pre_img, cv::Mat &img, Eigen::Isometry3d &Tsj_si, 72 | set> &obs, int layer, 73 | set> &inliers_last, 74 | set> &inliers_current, 75 | map, pair> &association); 76 | 77 | void PoseEstimationWindow2Frame(shared_ptr pF_pre, shared_ptr pF, 78 | set> &inliers_last, 79 | set> &inliers_current, 80 | map, pair> &association); 81 | 82 | void PoseEstimationWindow2FramePyramid(shared_ptr pF_pre, shared_ptr pF, 83 | set> &inliers_last, 84 | set> &inliers_current, 85 | map, pair> &association); 86 | 87 | 88 | 89 | void InitializeMap(); 90 | 91 | void Reset(shared_ptr f_pre); 92 | 93 | bool NeedNewKeyFrame(shared_ptr pF); 94 | 95 | void InsertKeyFrame(shared_ptr pF); 96 | 97 | void DrawTrack(); 98 | 99 | void PublishPose(); 100 | 101 | // void Visualize(); 102 | 103 | void SavePath(string file_path); 104 | 105 | private: 106 | // protected by mStateMutex 107 | shared_ptr mState; 108 | shared_mutex mStateMutex; 109 | public: 110 | void 
SetState(const shared_ptr &State); 111 | 112 | private: 113 | 114 | shared_ptr mpLocalMaper; 115 | public: 116 | void SetLocalMaper(const shared_ptr &pLocalMaper); 117 | 118 | private: 119 | 120 | // double mScale, mTheta, mTx, mTy; 121 | 122 | int mPyramidLayer; 123 | map mPath; 124 | map mOdomPath; 125 | 126 | 127 | 128 | //current pose 129 | Eigen::Isometry3d mT_w_sj = Eigen::Isometry3d::Identity(); 130 | 131 | //Local Frame window 132 | // int mWindowSize = 5; 133 | // deque> mActiveFrameWindow; 134 | 135 | //Local map points 136 | map> mMapPoints; 137 | 138 | //ros 139 | ros::Publisher mImagePub; 140 | ros::Publisher mPosePub; 141 | ros::Publisher mPathPub; 142 | ros::Publisher mOdomPub; 143 | ros::Publisher mOdomPathPub; 144 | public: 145 | Eigen::Isometry3d mT_b_s; 146 | //inlier threshold 147 | double mGradientInlierThreshold; 148 | //loss threshold 149 | int mLossThreshold; 150 | double mRange, mFOV; 151 | shared_mutex mTrackMutex; 152 | 153 | }; 154 | 155 | void AssociateFrameToLandmark(shared_ptr pF, shared_ptr pMP, 156 | pair p_pixel); 157 | 158 | void UnassociateFrameToLandmark(shared_ptr pF, shared_ptr pMP, 159 | pair p_pixel); 160 | 161 | void BuildAssociation(shared_ptr pF_last, shared_ptr pF_cur, 162 | map, pair> &association, int loss_threshold); 163 | 164 | void IncreaseMapPoints(shared_ptr pF); 165 | 166 | struct Point2D 167 | { 168 | double x, y; 169 | }; 170 | 171 | struct PointCloud 172 | { 173 | std::vector pts; 174 | 175 | // Must return the number of data points 176 | inline size_t kdtree_get_point_count() const { return pts.size(); } 177 | 178 | // Returns the dim'th component of the idx'th point in the class: 179 | inline double kdtree_get_pt(const size_t idx, int dim) const 180 | { 181 | if (dim == 0) { return pts[idx].x; } 182 | else { return pts[idx].y; } 183 | } 184 | 185 | // Optional bounding-box computation: return false to default to a standard bbox computation loop. 
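// nanoflann adaptor notes: KDTreeSingleIndexAdaptor never copies the data, it only calls
// kdtree_get_point_count() / kdtree_get_pt() (and this optional bbox hook) on the
// referenced PointCloud, so the cloud must outlive the index. Typical use, mirroring
// pyramid_test.cpp (query_xy is a hypothetical double[2]):
//
//   KDTree index(2 /*dim*/, cloud, KDTreeSingleIndexAdaptorParams(10 /*max leaf*/));
//   index.buildIndex();
//   size_t nn_idx; double nn_dist_sqr;
//   nanoflann::KNNResultSet<double> result(1);
//   result.init(&nn_idx, &nn_dist_sqr);
//   index.findNeighbors(result, query_xy, nanoflann::SearchParameters(10));
//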
186 | template 187 | bool kdtree_get_bbox(BBOX &) const { return false; } 188 | }; 189 | 190 | typedef KDTreeSingleIndexAdaptor, PointCloud, 2> KDTree; 191 | 192 | 193 | #endif //SRC_TRACK_H 194 | -------------------------------------------------------------------------------- /src/direct_semidense.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | 24 | using namespace std; 25 | using namespace g2o; 26 | 27 | 28 | struct Measurement 29 | { 30 | Measurement(Eigen::Vector3d p, double g) : pos_world(p), grayscale(g) {} 31 | 32 | Eigen::Vector3d pos_world; 33 | double grayscale; 34 | }; 35 | 36 | 37 | inline Eigen::Vector3d 38 | sonar2Dto3D(const Eigen::Vector2d &p_pixel, double theta, double tx, double ty, double scale) 39 | { 40 | Eigen::Affine2d T_pixel_sonar = Eigen::Affine2d::Identity(); 41 | Eigen::Vector2d t(tx, ty); 42 | Eigen::Rotation2Dd R(theta); 43 | T_pixel_sonar.pretranslate(t).rotate(R).scale(scale); 44 | Eigen::Affine2d T_sonar_pixel = T_pixel_sonar.inverse(); 45 | Eigen::Vector2d p_sonar2D = T_sonar_pixel * p_pixel; 46 | Eigen::Vector3d p_sonar3D(p_sonar2D.x(), p_sonar2D.y(), 0); 47 | return p_sonar3D; 48 | } 49 | 50 | inline Eigen::Vector2d 51 | sonar3Dto2D(const Eigen::Vector3d &p_sonar3D, double theta, double tx, double ty, double scale) 52 | { 53 | Eigen::Affine2d T_pixel_sonar = Eigen::Affine2d::Identity(); 54 | Eigen::Vector2d t(tx, ty); 55 | Eigen::Rotation2Dd R(theta); 56 | T_pixel_sonar.pretranslate(t).rotate(R).scale(scale); 57 | Eigen::Vector2d p_sonar2D(p_sonar3D.x(), p_sonar3D.y()); 58 | Eigen::Vector2d p_pixel = T_pixel_sonar * p_sonar2D; 59 | 60 | return p_pixel; 61 | } 62 | 63 | 64 | bool 65 | poseEstimationDirect(const vector &measurements, cv::Mat* gray, Eigen::Matrix3f &intrinsics, 66 | Eigen::Isometry3d &Tcw); 67 | 68 | bool 69 | poseEstimationSonarDirect(const vector &meas, cv::Mat gray, Eigen::Isometry3d &Tcw, 70 | double theta, double tx, double ty, double scale, cv::Mat& pre_img, cv::Mat& img); 71 | 72 | 73 | class EdgeSE3SonarDirect : public BaseUnaryEdge<1, double, VertexSE3Expmap> 74 | { 75 | public: 76 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 77 | 78 | EdgeSE3SonarDirect() {} 79 | 80 | EdgeSE3SonarDirect(const Eigen::Vector3d &point, double theta, double tx, double ty, double scale, 81 | cv::Mat image) : x_world_(point), theta_(theta), tx_(tx), ty_(ty), scale_(scale), 82 | image_(image.clone()) {} 83 | 84 | virtual void computeError() 85 | { 86 | const VertexSE3Expmap* v = static_cast ( _vertices[0] ); 87 | Eigen::Vector3d x_local = v->estimate().map(x_world_); 88 | Eigen::Vector2d x_pixel = sonar3Dto2D(x_local, theta_, tx_, ty_, scale_); 89 | 90 | double x = x_pixel.x(); 91 | double y = x_pixel.y(); 92 | // check x,y is in the image 93 | if (x - 4 < 0 || (x + 4) > image_.cols || (y - 4) < 0 || (y + 4) > image_.rows) { 94 | _error(0, 0) = 0.0; 95 | this->setLevel(1); 96 | } 97 | else { 98 | // _measurement is the pixel intensity on the reference frame 99 | // getPixelValue(x, y) is the pixel intensity on the current frame, it will update 100 | // with current sonar pose 101 | _error(0, 0) = getPixelValue(x, y) - _measurement; 102 | } 103 | } 104 | 105 | // plus in manifold 106 | virtual void linearizeOplus() 107 | { 108 | if (level() == 1) { 109 | 
_jacobianOplusXi = Eigen::Matrix::Zero(); 110 | return; 111 | } 112 | VertexSE3Expmap* vtx = static_cast ( _vertices[0] ); 113 | Eigen::Isometry3d T_si_w = vtx->estimate(); 114 | Eigen::Vector3d xyz_trans = vtx->estimate().map(x_world_); // q in book 115 | 116 | Eigen::Vector2d p_pixel = sonar3Dto2D(xyz_trans, theta_, tx_, ty_, scale_); 117 | double u = p_pixel.x(); 118 | double v = p_pixel.y(); 119 | 120 | // jacobian from se3 to u,v 121 | // NOTE that in g2o the Lie algebra is (\omega, \epsilon), where \omega is so(3) and \epsilon the translation 122 | 123 | // so(3) jacobian 124 | Eigen::Matrix jacobian_uv_pi; 125 | Eigen::Rotation2Dd R_pixel_sonar(theta_); 126 | jacobian_uv_pi = scale_ * R_pixel_sonar.matrix(); 127 | Eigen::Matrix jacobian_pi_tlocal; 128 | jacobian_pi_tlocal << 1, 0, 0, 0, 1, 0; 129 | Eigen::Matrix jacobian_tlocal_R_si_w; 130 | jacobian_tlocal_R_si_w = -skew_symetric_matrix(T_si_w.rotation() * x_world_); 131 | Eigen::Matrix jacobian_uv_R_si_w = 132 | jacobian_uv_pi * jacobian_pi_tlocal * jacobian_tlocal_R_si_w; 133 | 134 | Eigen::Matrix jacobian_tlocal_t_si_si_w = Eigen::Matrix::Identity(); 135 | Eigen::Matrix jacobian_uv_t_si_si_w = 136 | jacobian_uv_pi * jacobian_pi_tlocal * jacobian_tlocal_t_si_si_w; 137 | 138 | Eigen::Matrix jacobian_uv_ksai = Eigen::Matrix::Zero(); 139 | jacobian_uv_ksai.block<2, 3>(0, 0) = jacobian_uv_R_si_w; 140 | jacobian_uv_ksai.block<2, 3>(0, 3) = jacobian_uv_t_si_si_w; 141 | 142 | 143 | Eigen::Matrix jacobian_pixel_uv; 144 | 145 | jacobian_pixel_uv(0, 0) = (getPixelValue(u + 1, v) - getPixelValue(u - 1, v)) / 2; 146 | jacobian_pixel_uv(0, 1) = (getPixelValue(u, v + 1) - getPixelValue(u, v - 1)) / 2; 147 | 148 | Eigen::Matrix jacobian_intensity_xi = jacobian_pixel_uv * jacobian_uv_ksai; 149 | _jacobianOplusXi = jacobian_pixel_uv * jacobian_uv_ksai; 150 | } 151 | 152 | // dummy read and write functions because we don't care... 
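// Projection geometry used throughout this test (see main() below): with Range = 10 m
// and, say, a hypothetical 500x500 px image, scale = rows / range = 50 px/m,
// theta = -pi/2, tx = cols / 2 = 250, ty = rows = 500. A point 4 m straight ahead of
// the sonar, p = (4, 0, 0), then projects to
//
//   u = tx + scale * (cos(theta) * x - sin(theta) * y) = 250 + 50 * 0 = 250
//   v = ty + scale * (sin(theta) * x + cos(theta) * y) = 500 - 50 * 4 = 300
//
// i.e. the sonar origin maps to the bottom-centre pixel and +x (forward) points up the
// image, which is exactly what sonar2Dto3D / sonar3Dto2D encode.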
153 | virtual bool read(std::istream &in) {} 154 | 155 | virtual bool write(std::ostream &out) const {} 156 | 157 | protected: 158 | // get a gray scale value from reference image (bilinear interpolated) 159 | inline double getPixelValue(double x, double y) 160 | { 161 | uchar* data = &image_.data[int(y) * image_.step + int(x)]; 162 | double xx = x - floor(x); 163 | double yy = y - floor(y); 164 | return double((1 - xx) * (1 - yy) * data[0] + xx * (1 - yy) * data[1] + 165 | (1 - xx) * yy * data[image_.step] + xx * yy * data[image_.step + 1]); 166 | } 167 | 168 | inline Eigen::Matrix3d skew_symetric_matrix(const Eigen::Vector3d &v) 169 | { 170 | Eigen::Matrix3d S; // Skew-symmetric matrix 171 | S << 0, -v(2), v(1), v(2), 0, -v(0), -v(1), v(0), 0; 172 | return S; 173 | } 174 | 175 | public: 176 | Eigen::Vector3d x_world_; // 3D point in world frame 177 | double theta_, tx_, ty_, scale_; // Camera intrinsics 178 | cv::Mat image_; // reference image 179 | }; 180 | 181 | std::string type2str(int type) 182 | { 183 | std::string r; 184 | 185 | uchar depth = type & CV_MAT_DEPTH_MASK; 186 | uchar chans = 1 + (type >> CV_CN_SHIFT); 187 | 188 | switch (depth) { 189 | case CV_8U: 190 | r = "8U"; 191 | break; 192 | case CV_8S: 193 | r = "8S"; 194 | break; 195 | case CV_16U: 196 | r = "16U"; 197 | break; 198 | case CV_16S: 199 | r = "16S"; 200 | break; 201 | case CV_32S: 202 | r = "32S"; 203 | break; 204 | case CV_32F: 205 | r = "32F"; 206 | break; 207 | case CV_64F: 208 | r = "64F"; 209 | break; 210 | default: 211 | r = "User"; 212 | break; 213 | } 214 | 215 | r += "C"; 216 | r += (chans + '0'); 217 | 218 | return r; 219 | } 220 | 221 | 222 | int main(int argc, char** argv) 223 | { 224 | srand((unsigned int) time(0)); 225 | string path_to_dataset = "/home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/sonar_data"; 226 | 227 | Eigen::Isometry3d Tcw = Eigen::Isometry3d::Identity(); 228 | Eigen::AngleAxisd angle(0.01*M_PI,Eigen::Vector3d::UnitZ()); 229 | // Tcw.rotate(angle.matrix()); 230 | double range = 10; 231 | 232 | cv::Mat prev_color; 233 | 234 | vector measurements; 235 | for (int index = 1; index <= 5; index++) { 236 | string img_path = path_to_dataset + "/" + std::to_string(index) + ".png"; 237 | //open image 238 | cv::Mat img = cv::imread(img_path); 239 | if (img.data == nullptr) { 240 | cerr << "image " << img_path << " is empty!" 
<< endl; 241 | continue; 242 | } 243 | double scale = (img.rows) / range; 244 | double theta = -0.5 * M_PI; 245 | double tx = 0.5 * img.cols; 246 | double ty = img.rows; 247 | // output img opencv type 248 | cout << "image type: " << type2str(img.type()) << endl; 249 | // convert to uchar 250 | cv::Mat gray; 251 | cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY); 252 | cout << "after conversion image type: " << type2str(gray.type()) << endl; 253 | 254 | 255 | if (index == 1) { 256 | // select the pixels with high gradiants 257 | for (int x = 10; x < gray.cols - 10; x++) 258 | for (int y = 10; y < gray.rows - 10; y++) { 259 | Eigen::Vector2d delta(gray.ptr(y)[x + 1] - gray.ptr(y)[x - 1], 260 | gray.ptr(y + 1)[x] - gray.ptr(y - 1)[x]); 261 | if (delta.norm() < 100) { 262 | continue; 263 | } 264 | Eigen::Vector2d p_pixel(x, y); 265 | Eigen::Vector3d p3d = sonar2Dto3D(p_pixel, theta, tx, ty, scale); 266 | Eigen::Vector2d p2d_test = sonar3Dto2D(p3d, theta, tx, ty, scale); 267 | double grayscale = double(gray.ptr(y)[x]); 268 | measurements.push_back(Measurement(p3d, grayscale)); 269 | } 270 | prev_color = img.clone(); 271 | cout << "add total " << measurements.size() << " measurements." << endl; 272 | continue; 273 | } 274 | 275 | 276 | cv::Mat img_show(img.rows * 2, img.cols, CV_8UC3); 277 | prev_color.copyTo(img_show(cv::Rect(0, 0, img.cols, img.rows))); 278 | img.copyTo(img_show(cv::Rect(0, img.rows, img.cols, img.rows))); 279 | cv::imshow("original", img_show); 280 | cv::waitKey(1); 281 | 282 | chrono::steady_clock::time_point t1 = chrono::steady_clock::now(); 283 | poseEstimationSonarDirect(measurements, gray, Tcw, theta, tx, ty, scale,prev_color,img); 284 | chrono::steady_clock::time_point t2 = chrono::steady_clock::now(); 285 | chrono::duration time_used = chrono::duration_cast>(t2 - t1); 286 | cout << "direct method costs time: " << time_used.count() << " seconds." 
<< endl; 287 | cout << "Tcw=" << Tcw.matrix() << endl; 288 | 289 | 290 | } 291 | return 0; 292 | } 293 | 294 | bool 295 | poseEstimationSonarDirect(const vector &measurements, cv::Mat gray, Eigen::Isometry3d &Tcw, 296 | double theta, double tx, double ty, double scale, cv::Mat& pre_img, cv::Mat& img) 297 | { 298 | // setup g2o 299 | typedef g2o::BlockSolver> DirectBlock; 300 | DirectBlock::LinearSolverType* linearSolver = new g2o::LinearSolverDense(); 301 | DirectBlock* solver_ptr = new DirectBlock(linearSolver); 302 | // g2o::OptimizationAlgorithmGaussNewton* solver = new g2o::OptimizationAlgorithmGaussNewton( solver_ptr ); // G-N 303 | g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg( 304 | solver_ptr); // L-M 305 | g2o::SparseOptimizer optimizer; 306 | optimizer.setAlgorithm(solver); 307 | optimizer.setVerbose(true); 308 | 309 | g2o::VertexSE3Expmap* pose = new g2o::VertexSE3Expmap(); 310 | pose->setEstimate(g2o::SE3Quat(Tcw.rotation(), Tcw.translation())); 311 | pose->setId(0); 312 | optimizer.addVertex(pose); 313 | 314 | // add edge 315 | int id = 1; 316 | for (Measurement m: measurements) { 317 | EdgeSE3SonarDirect* edge = new EdgeSE3SonarDirect(m.pos_world, theta, tx, ty, scale, gray); 318 | edge->setVertex(0, pose); 319 | edge->setMeasurement(m.grayscale); 320 | edge->setInformation(Eigen::Matrix::Identity()); 321 | edge->setId(id++); 322 | //set robust kernel 323 | g2o::RobustKernelHuber *rk = new g2o::RobustKernelHuber; 324 | rk->setDelta(10.0); 325 | edge->setRobustKernel(rk); 326 | optimizer.addEdge(edge); 327 | } 328 | cout << "edges in graph: " << optimizer.edges().size() << endl; 329 | 330 | for(int i=0;i<1;i++){ 331 | optimizer.initializeOptimization(); 332 | optimizer.optimize(100); 333 | Eigen::Isometry3d T = pose->estimate(); 334 | Tcw = T; 335 | 336 | cout<<"Tcw=\n"<= img.cols || pixel_now(1, 0) < 0 || 348 | pixel_now(1, 0) >= img.rows) { 349 | continue; 350 | } 351 | 352 | float b = 0; 353 | float g = 250; 354 | float r = 0; 355 | img_show.ptr(pixel_prev(1, 0))[int(pixel_prev(0, 0)) * 3] = b; 356 | img_show.ptr(pixel_prev(1, 0))[int(pixel_prev(0, 0)) * 3 + 1] = g; 357 | img_show.ptr(pixel_prev(1, 0))[int(pixel_prev(0, 0)) * 3 + 2] = r; 358 | 359 | img_show.ptr(pixel_now(1, 0) + img.rows)[int(pixel_now(0, 0)) * 3] = b; 360 | img_show.ptr(pixel_now(1, 0) + img.rows)[int(pixel_now(0, 0)) * 3 + 1] = g; 361 | img_show.ptr(pixel_now(1, 0) + img.rows)[int(pixel_now(0, 0)) * 3 + 2] = r; 362 | cv::circle(img_show, cv::Point2d(pixel_prev(0, 0), pixel_prev(1, 0)), 2, cv::Scalar(b, g, r), 363 | 1); 364 | cv::circle(img_show, cv::Point2d(pixel_now(0, 0), pixel_now(1, 0) + img.rows), 2, 365 | cv::Scalar(b, g, r), 1); 366 | } 367 | cv::imshow("result", img_show); 368 | cv::waitKey(0); 369 | cv::imwrite("/home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/results/result.png", img_show); 370 | } 371 | return true; 372 | } -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by da on 03/08/23. 
3 | //
4 | #include "System.h"
5 | #include <ros/ros.h>
6 | using namespace std;
7 | int main(int argc, char **argv)
8 | {
9 |     ros::init(argc, argv, "direct_sonar_odometry");
10 |     string setting = argv[1];
11 |     System sys(setting);
12 |     sys.runRos();
13 | 
14 |     return 0;
15 | }
16 | 
--------------------------------------------------------------------------------
/src/pyramid_test.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 | #include <iomanip>
3 | #include <chrono>
4 | #include <vector>
5 | #include "nanoflann.hpp"
6 | 
7 | using namespace std;
8 | using namespace nanoflann;
9 | 
10 | // This is an example of a custom dataset class
11 | struct Point2D
12 | {
13 |     double x, y;
14 | };
15 | 
16 | struct PointCloud
17 | {
18 |     std::vector<Point2D> pts;
19 | 
20 |     // Must return the number of data points
21 |     inline size_t kdtree_get_point_count() const { return pts.size(); }
22 | 
23 |     // Returns the dim'th component of the idx'th point in the class:
24 |     inline double kdtree_get_pt(const size_t idx, int dim) const
25 |     {
26 |         if (dim == 0) { return pts[idx].x; }
27 |         else { return pts[idx].y; }
28 |     }
29 | 
30 |     // Optional bounding-box computation: return false to default to a standard bbox computation loop.
31 |     template <class BBOX>
32 |     bool kdtree_get_bbox(BBOX &) const { return false; }
33 | };
34 | 
35 | typedef KDTreeSingleIndexAdaptor<L2_Simple_Adaptor<double, PointCloud>, PointCloud, 2> KDTree;
36 | 
37 | int main()
38 | {
39 |     PointCloud cloud;
40 | 
41 |     // Fill the cloud with random points
42 |     for (int i = 0; i < 1000; i++) {
43 |         Point2D point;
44 |         point.x = rand() % 1000;
45 |         point.y = rand() % 1000;
46 |         cloud.pts.push_back(point);
47 |     }
48 | 
49 |     auto start = std::chrono::high_resolution_clock::now();
50 |     // build the KD-tree over the cloud
51 |     KDTree index(2 /*dim*/, cloud, KDTreeSingleIndexAdaptorParams(10 /* max leaf */));
52 |     index.buildIndex();
53 |     auto end = std::chrono::high_resolution_clock::now();
54 |     double time_taken = std::chrono::duration_cast<std::chrono::nanoseconds>(end - start).count();
55 |     time_taken *= 1e-9;
56 |     cout << "build KD tree takes " << fixed << setprecision(9) << time_taken << " sec" << endl;
57 | 
58 |     const size_t num_results = 1;
59 |     size_t ret_index;
60 |     double out_dist_sqr;
61 |     nanoflann::KNNResultSet<double> resultSet(num_results);
62 |     resultSet.init(&ret_index, &out_dist_sqr);
63 | 
64 |     for (int i = 0; i < 1000000; i++) {
65 |         Point2D point;
66 |         point.x = rand() % 1000;
67 |         point.y = rand() % 1000;
68 |         resultSet.init(&ret_index, &out_dist_sqr); // re-arm the result set before every query
69 |         double query_pt[2] = { point.x, point.y };
70 |         index.findNeighbors(resultSet, &query_pt[0], nanoflann::SearchParameters(10));
71 | 
72 | 
73 |     }
74 | 
75 | 
76 | 
77 |     end = std::chrono::high_resolution_clock::now();
78 |     time_taken = std::chrono::duration_cast<std::chrono::nanoseconds>(end - start).count();
79 |     time_taken *= 1e-9;
80 |     cout << "knn search takes " << fixed << setprecision(9) << time_taken << " sec" << endl;
81 | 
82 |     std::cout << "knnSearch(nn=" << num_results << "):" << std::endl;
6 | using namespace std;
7 | int main(int argc, char **argv)
8 | {
9 |     ros::init(argc, argv, "direct_sonar_odometry");
10 |     string setting = argv[1];
11 |     System sys(setting);
12 |     sys.runRos();
13 | 
14 |     return 0;
15 | }
16 | 
--------------------------------------------------------------------------------
/src/traj_se3_align.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by da on 15/08/23.
3 | //
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | 
15 | #include
16 | #include
17 | #include
18 | #include
19 | #include "opencv2/imgproc.hpp"
20 | #include "opencv2/imgcodecs.hpp"
21 | #include
22 | #include
23 | #include
24 | #include
25 | #include
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | 
32 | #include
33 | 
34 | #include
35 | #include
36 | #include
37 | 
38 | using namespace std;
39 | using namespace g2o;
40 | 
41 | class EdgeSE3Align : public BaseMultiEdge<6, SE3Quat>
42 | {
43 | public:
44 |     EdgeSE3Align()
45 |     {
46 |         resize(3);
47 |     }
48 | 
49 |     void computeError() override
50 |     {
51 |         const VertexSE3Expmap* v1 = static_cast<const VertexSE3Expmap*>(_vertices[0]);
52 |         const VertexSE3Expmap* v2 = static_cast<const VertexSE3Expmap*>(_vertices[1]);
53 |         const VertexSE3Expmap* v3 = static_cast<const VertexSE3Expmap*>(_vertices[2]);
54 | 
55 |         Eigen::Isometry3d T_s0_si = v1->estimate();
56 |         Eigen::Isometry3d T_e0_ei = v2->estimate();
57 |         Eigen::Isometry3d T_e_s = v3->estimate();
58 | 
59 |         Eigen::Isometry3d T_err = T_e_s * T_s0_si * T_e_s.inverse() * T_e0_ei.inverse();
60 |         Eigen::Quaterniond q_err(T_err.rotation());
61 |         Sophus::SO3d SO3_err(q_err);
62 |         Eigen::Vector3d so3_err = SO3_err.log();
63 |         // so3_err.setZero();
64 |         Eigen::Vector3d t_err = T_err.translation();
65 |         // t_err.setZero();
66 | 
67 |         _error << so3_err, t_err;
68 | 
69 |     }
70 | 
71 |     bool read(istream &is) override
72 |     {
73 |         return false;
74 |     }
75 | 
76 |     bool write(ostream &os) const override
77 |     {
78 |         return false;
79 |     }
80 | };
81 | 
82 | int main(int argc, char** argv)
83 | {
84 |     if (argc != 3) {
85 |         cout << "Usage: traj_se3_align stamped_traj_estimate.txt stamped_groundtruth.txt" << endl;
86 |         return 1;
87 |     }
88 | 
89 |     ifstream fin1(argv[1]);
90 |     ifstream fin2(argv[2]);
91 |     if (!fin1 || !fin2) {
92 |         cout << "trajectory file does not exist!" << endl;
93 |         return 1;
94 |     }
95 | 
96 |     ros::init(argc, argv, "traj_se3_align");
97 |     ros::NodeHandle nh;
98 | 
99 |     map<double, Eigen::Isometry3d> poses_sonar;
100 |     map<double, int> time_v1id;
101 |     map<double, Eigen::Isometry3d> poses_odom;
102 | 
103 |     std::string line;
104 |     while (std::getline(fin1, line)) {
105 |         if (line[0] == '#') {
106 |             continue;
107 |         }
108 |         std::istringstream iss(line);
109 |         double timestamp, tx, ty, tz, qx, qy, qz, qw;
110 |         if (iss >> timestamp >> tx >> ty >> tz >> qx >> qy >> qz >> qw) {
111 |             Eigen::Quaterniond q1(qw, qx, qy, qz);
112 |             Eigen::Isometry3d T_s0_si = Eigen::Isometry3d::Identity();
113 |             T_s0_si.rotate(q1.toRotationMatrix());
114 |             T_s0_si.pretranslate(Eigen::Vector3d(tx, ty, tz));
115 |             poses_sonar.insert(make_pair(timestamp, T_s0_si));
116 |         }
117 |     }
118 |     fin1.close();
119 | 
120 |     while (std::getline(fin2, line)) {
121 |         if (line[0] == '#') {
122 |             continue;
123 |         }
124 |         std::istringstream iss(line);
125 |         double timestamp, tx, ty, tz, qx, qy, qz, qw;
126 |         if (iss >> timestamp >> tx >> ty >> tz >> qx >> qy >> qz >> qw) {
127 |             Eigen::Quaterniond q1(qw, qx, qy, qz);
128 |             Eigen::Isometry3d T_e0_ei = Eigen::Isometry3d::Identity();
129 |             T_e0_ei.rotate(q1.toRotationMatrix());
130 |             T_e0_ei.pretranslate(Eigen::Vector3d(tx, ty, tz));
131 |             poses_odom.insert(make_pair(timestamp, T_e0_ei));
132 |         }
133 |     }
134 |     fin2.close();
135 | 
136 |     g2o::SparseOptimizer optimizer;
137 |     g2o::BlockSolver_6_3::LinearSolverType* linearSolver;
138 | 
139 |     linearSolver = new g2o::LinearSolverDense<g2o::BlockSolver_6_3::PoseMatrixType>();
140 | 
141 |     g2o::BlockSolver_6_3* solver_ptr = new g2o::BlockSolver_6_3(linearSolver);
142 |     g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg(solver_ptr);
143 |     optimizer.setAlgorithm(solver);
144 |     optimizer.setVerbose(true);
145 | 
146 |     int id = 0;
147 |     for (auto it: poses_sonar) {
148 |         double time = it.first;
149 |         Eigen::Isometry3d T_s0_si = it.second;
150 |         if (poses_odom.count(time) == 0) {
151 |             cout << fixed << setprecision(9) << "no corresponding pose in odom: " << time << endl;
152 |             continue;
153 |         }
154 |         g2o::SE3Quat est(T_s0_si.rotation(), T_s0_si.translation());
155 |         VertexSE3Expmap* vSE3 = new VertexSE3Expmap();
156 |         vSE3->setId(id);
157 |         vSE3->setEstimate(est);
158 |         vSE3->setFixed(true);
159 |         optimizer.addVertex(vSE3);
160 |         time_v1id.insert(make_pair(time, id));
161 |         id++;
162 |     }
163 | 
164 |     // set the extrinsic (initial guess for T_e_s, the only free vertex)
165 |     Eigen::Isometry3d T_e_s = Eigen::Isometry3d::Identity();
166 |     // Eigen::AngleAxisd a_x(M_PI, Eigen::Vector3d::UnitX());
167 |     // T_e_s.rotate(a_x.toRotationMatrix());
168 |     g2o::SE3Quat est_ex(T_e_s.rotation(), T_e_s.translation());
169 |     VertexSE3Expmap* vSE3_ex = new VertexSE3Expmap();
170 |     vSE3_ex->setId(id);
171 |     vSE3_ex->setEstimate(est_ex);
172 |     vSE3_ex->setFixed(false);
173 |     optimizer.addVertex(vSE3_ex);
174 |     id++;
175 | 
176 |     int pose_num = id;
177 |     for (auto it: poses_sonar) {
178 |         double time = it.first;
179 |         if (poses_odom.count(time) == 0) {
180 |             // cout << fixed << setprecision(9) << "no corresponding pose in odom: " << time << endl;
181 |             continue;
182 |         }
183 |         Eigen::Isometry3d T_e0_ei = poses_odom[time];
184 |         g2o::SE3Quat est(T_e0_ei.rotation(), T_e0_ei.translation());
185 |         VertexSE3Expmap* vSE3 = new VertexSE3Expmap();
186 |         vSE3->setId(id);
187 |         vSE3->setEstimate(est);
188 |         vSE3->setFixed(true);
189 |         optimizer.addVertex(vSE3);
190 |         id++;
191 | 
192 |         int v1id = time_v1id[time];
193 |         EdgeSE3Align* edge = new EdgeSE3Align();
194 |         // edge->setId(0);
195 |         edge->setVertex(0, dynamic_cast<g2o::OptimizableGraph::Vertex*>(optimizer.vertex(v1id)));
196 |         edge->setVertex(1, vSE3);
197 |         edge->setVertex(2, vSE3_ex);
198 |         edge->setInformation(Eigen::Matrix<double, 6, 6>::Identity());
199 |         optimizer.addEdge(edge);
200 | 
201 |         cout << fixed << setprecision(9) << "add pose: " << time << endl;
202 | 
203 |     }
204 | 
205 | 
206 |     cout << "before opt T_e_s: \n" << T_e_s.matrix() << endl;
207 |     optimizer.initializeOptimization();
208 |     optimizer.setVerbose(true);
209 |     optimizer.optimize(100);
210 |     Eigen::Isometry3d T_e_s_opt = vSE3_ex->estimate();
211 |     cout << "after opt T_e_s: \n" << T_e_s_opt.matrix() << endl;
212 | 
213 |     T_e_s.setIdentity();
214 |     T_e_s.rotate(T_e_s_opt.rotation());
215 |     T_e_s = T_e_s_opt;
216 | 
217 | 
218 |     ofstream outfile("/home/da/project/ros/direct_sonar_ws/src/direct_sonar_odometry/evaluation/stamped_traj_estimate_opt.txt", ios::out);
219 |     outfile << "#timestamp x y z qx qy qz qw\n";
220 |     for (auto it: poses_sonar) {
221 |         double time = it.first;
222 |         Eigen::Isometry3d T_s0_si = it.second;
223 |         Eigen::Isometry3d T_e0_ei = T_e_s * T_s0_si * T_e_s.inverse();
224 |         Eigen::Vector3d t = T_e0_ei.translation();
225 |         Eigen::Quaterniond q(T_e0_ei.rotation());
226 |         outfile << fixed << setprecision(12) << time << " " << t.x() << " " << t.y() << " " << t.z()
227 |                 << " " << q.x() << " " << q.y() << " " << q.z() << " " << q.w() << endl;
228 |     }
229 | 
230 |     // publish the poses as MarkerArrays for RViz
231 |     visualization_msgs::MarkerArray markers_sonar_pose;
232 |     id = 0;
233 |     for (auto it: poses_sonar) {
234 |         Eigen::Isometry3d T_s0_si = it.second;
235 |         Eigen::Quaterniond q_s0_si(T_s0_si.rotation());
236 |         visualization_msgs::Marker sonar_pose_marker;
237 |         sonar_pose_marker.header.frame_id = "map";
238 |         sonar_pose_marker.header.stamp = ros::Time::now();
239 |         sonar_pose_marker.ns = "sonar_pose";
240 |         sonar_pose_marker.id = id;
241 |         sonar_pose_marker.type = visualization_msgs::Marker::ARROW;
242 |         sonar_pose_marker.action = visualization_msgs::Marker::ADD;
243 |         sonar_pose_marker.pose.position.x = T_s0_si.translation().x();
244 |         sonar_pose_marker.pose.position.y = T_s0_si.translation().y();
245 |         sonar_pose_marker.pose.position.z = T_s0_si.translation().z();
246 |         sonar_pose_marker.pose.orientation.x = q_s0_si.x();
247 |         sonar_pose_marker.pose.orientation.y = q_s0_si.y();
248 |         sonar_pose_marker.pose.orientation.z = q_s0_si.z();
249 |         sonar_pose_marker.pose.orientation.w = q_s0_si.w();
250 |         sonar_pose_marker.scale.x = 0.5;
251 |         sonar_pose_marker.scale.y = 0.1;
252 |         sonar_pose_marker.scale.z = 0.1;
253 |         sonar_pose_marker.color.a = 0.5;
254 |         sonar_pose_marker.color.r = 0.0;
255 |         sonar_pose_marker.color.g = 0.0;
256 |         sonar_pose_marker.color.b = 1.0;
257 |         markers_sonar_pose.markers.push_back(sonar_pose_marker);
258 |         id++;
259 | 
260 |     }
261 | 
262 |     visualization_msgs::MarkerArray markers_odom_pose;
263 |     id = 0;
264 |     for (auto it: poses_odom) {
265 |         double time = it.first;
266 |         if (poses_sonar.count(time) == 0) {
267 |             continue;
268 |         }
269 |         Eigen::Isometry3d T_e0_ei = it.second;
270 |         Eigen::Quaterniond q_e0_ei(T_e0_ei.rotation());
271 |         visualization_msgs::Marker sonar_pose_marker;
272 |         sonar_pose_marker.header.frame_id = "map";
273 |         sonar_pose_marker.header.stamp = ros::Time::now();
274 |         sonar_pose_marker.ns = "odom_pose";
275 |         sonar_pose_marker.id = id;
276 |         sonar_pose_marker.type = visualization_msgs::Marker::ARROW;
277 |         sonar_pose_marker.action = visualization_msgs::Marker::ADD;
278 |         sonar_pose_marker.pose.position.x = T_e0_ei.translation().x();
279 |         sonar_pose_marker.pose.position.y = T_e0_ei.translation().y();
280 |         sonar_pose_marker.pose.position.z = T_e0_ei.translation().z();
281 |         sonar_pose_marker.pose.orientation.x = q_e0_ei.x();
282 |         sonar_pose_marker.pose.orientation.y = q_e0_ei.y();
283 |         sonar_pose_marker.pose.orientation.z = q_e0_ei.z();
284 |         sonar_pose_marker.pose.orientation.w = q_e0_ei.w();
285 |         sonar_pose_marker.scale.x = 0.5;
286 |         sonar_pose_marker.scale.y = 0.1;
287 |         sonar_pose_marker.scale.z = 0.1;
288 |         sonar_pose_marker.color.a = 0.5;
289 |         sonar_pose_marker.color.r = 1.0;
290 |         sonar_pose_marker.color.g = 0.0;
291 |         sonar_pose_marker.color.b = 0.0;
292 |         markers_odom_pose.markers.push_back(sonar_pose_marker);
293 |         id++;
294 | 
295 |     }
296 | 
297 |     visualization_msgs::MarkerArray markers_sonar_pose_opt;
298 |     id = 0;
299 |     for (auto it: poses_sonar) {
300 |         Eigen::Isometry3d T_s0_si = it.second;
301 |         Eigen::Isometry3d T_e0_ei = T_e_s * T_s0_si * T_e_s.inverse();
302 |         Eigen::Quaterniond q_e0_ei(T_e0_ei.rotation());
303 |         visualization_msgs::Marker sonar_pose_marker;
304 |         sonar_pose_marker.header.frame_id = "map";
305 |         sonar_pose_marker.header.stamp = ros::Time::now();
306 |         sonar_pose_marker.ns = "sonar_pose";
307 |         sonar_pose_marker.id = id;
308 |         sonar_pose_marker.type = visualization_msgs::Marker::ARROW;
309 |         sonar_pose_marker.action = visualization_msgs::Marker::ADD;
310 |         sonar_pose_marker.pose.position.x = T_e0_ei.translation().x();
311 |         sonar_pose_marker.pose.position.y = T_e0_ei.translation().y();
312 |         sonar_pose_marker.pose.position.z = T_e0_ei.translation().z();
313 |         sonar_pose_marker.pose.orientation.x = q_e0_ei.x();
314 |         sonar_pose_marker.pose.orientation.y = q_e0_ei.y();
315 |         sonar_pose_marker.pose.orientation.z = q_e0_ei.z();
316 |         sonar_pose_marker.pose.orientation.w = q_e0_ei.w();
317 |         sonar_pose_marker.scale.x = 0.5;
318 |         sonar_pose_marker.scale.y = 0.1;
319 |         sonar_pose_marker.scale.z = 0.1;
320 |         sonar_pose_marker.color.a = 0.5;
321 |         sonar_pose_marker.color.r = 0.0;
322 |         sonar_pose_marker.color.g = 1.0;
323 |         sonar_pose_marker.color.b = 0.0;
324 |         markers_sonar_pose_opt.markers.push_back(sonar_pose_marker);
325 |         id++;
326 | 
327 |     }
328 | 
329 |     ros::Publisher pub_sonar_pose = nh.advertise<visualization_msgs::MarkerArray>("sonar_pose_marker", 100);
330 |     ros::Publisher pub_odom_pose = nh.advertise<visualization_msgs::MarkerArray>("odom_pose_marker", 100);
331 |     ros::Publisher pub_odom_pose2 = nh.advertise<visualization_msgs::MarkerArray>("sonar_pose_opt_marker", 100);
332 |     ros::Rate loop_rate(1);
333 |     while (ros::ok()) {
334 |         pub_sonar_pose.publish(markers_sonar_pose);
335 |         pub_odom_pose.publish(markers_odom_pose);
336 |         pub_odom_pose2.publish(markers_sonar_pose_opt);
337 |         ros::spinOnce();
338 |         loop_rate.sleep();
339 |     }
340 | 
341 | 
342 | 
343 | 
344 | 
345 |     return 0;
346 | }
347 | 
--------------------------------------------------------------------------------
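The EdgeSE3Align edge in src/traj_se3_align.cpp keeps every trajectory vertex fixed and leaves only the extrinsic T_e_s free, so the optimizer searches for the single transform that maps the sonar trajectory onto the odometry trajectory by conjugation, T_e0_ei ≈ T_e_s * T_s0_si * T_e_s^-1. Below is a minimal standalone sanity check of that residual; the file name, the alignError helper and the synthetic poses are illustrative only and are not part of the package.

// se3_align_check.cpp -- hypothetical standalone check, not part of the package sources
#include <iostream>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <sophus/so3.hpp>

// Residual used by EdgeSE3Align: T_err = T_e_s * T_s0_si * T_e_s^-1 * T_e0_ei^-1,
// stacked as [log(R_err); t_err].
Eigen::Matrix<double, 6, 1> alignError(const Eigen::Isometry3d &T_s0_si,
                                       const Eigen::Isometry3d &T_e0_ei,
                                       const Eigen::Isometry3d &T_e_s)
{
    Eigen::Isometry3d T_err = T_e_s * T_s0_si * T_e_s.inverse() * T_e0_ei.inverse();
    Eigen::Matrix<double, 6, 1> err;
    err.head<3>() = Sophus::SO3d(Eigen::Quaterniond(T_err.rotation())).log();
    err.tail<3>() = T_err.translation();
    return err;
}

int main()
{
    // an arbitrary "true" extrinsic between the sonar frame and the odometry frame
    Eigen::Isometry3d T_e_s = Eigen::Isometry3d::Identity();
    T_e_s.rotate(Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitZ()));
    T_e_s.pretranslate(Eigen::Vector3d(0.5, -0.2, 0.1));

    // one sonar pose and the odometry pose generated from it by conjugation with the extrinsic
    Eigen::Isometry3d T_s0_si = Eigen::Isometry3d::Identity();
    T_s0_si.rotate(Eigen::AngleAxisd(0.7, Eigen::Vector3d::UnitY()));
    T_s0_si.pretranslate(Eigen::Vector3d(1.0, 2.0, 0.0));
    Eigen::Isometry3d T_e0_ei = T_e_s * T_s0_si * T_e_s.inverse();

    // consistent extrinsic -> near-zero residual; wrong extrinsic -> non-zero residual
    std::cout << "residual, true extrinsic:     " << alignError(T_s0_si, T_e0_ei, T_e_s).norm() << std::endl;
    std::cout << "residual, identity extrinsic: "
              << alignError(T_s0_si, T_e0_ei, Eigen::Isometry3d::Identity()).norm() << std::endl;
    return 0;
}

With a consistent extrinsic the residual vanishes for every matched timestamp, which is exactly the condition the Levenberg-Marquardt iterations drive toward; with a wrong extrinsic both the rotational and translational parts of T_err stay non-zero.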
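Both poseEstimationSonarDirect and traj_se3_align.cpp construct their solvers through raw-pointer g2o constructors, which matches older g2o releases such as the one expected under Thirdparty/g2o. If the package is instead built against a newer system g2o whose BlockSolver and OptimizationAlgorithmLevenberg constructors take std::unique_ptr, an equivalent setup would look like the sketch below (setupOptimizer is a hypothetical helper, not part of the sources).

// sketch: solver construction for a unique_ptr-based g2o API (assumption: a recent g2o release)
#include <memory>
#include <g2o/core/block_solver.h>
#include <g2o/core/optimization_algorithm_levenberg.h>
#include <g2o/core/sparse_optimizer.h>
#include <g2o/solvers/dense/linear_solver_dense.h>

void setupOptimizer(g2o::SparseOptimizer &optimizer)
{
    // 6-DoF pose block, 1-D (photometric) residual block, dense linear solver,
    // mirroring the choices made in poseEstimationSonarDirect
    using DirectBlock = g2o::BlockSolver<g2o::BlockSolverTraits<6, 1>>;
    auto linearSolver = std::make_unique<g2o::LinearSolverDense<DirectBlock::PoseMatrixType>>();
    auto blockSolver = std::make_unique<DirectBlock>(std::move(linearSolver));
    // the optimizer takes ownership of the algorithm pointer
    auto* algorithm = new g2o::OptimizationAlgorithmLevenberg(std::move(blockSolver));
    optimizer.setAlgorithm(algorithm);
    optimizer.setVerbose(true);
}

Only the ownership handling differs from the code above; the block ordering and the dense linear solver stay the same.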