├── .gitignore ├── README.md └── svo_ros ├── CMakeLists.txt ├── calib ├── 25000826_fisheye_mask.png ├── bluefox_25000742_pinhole.yaml ├── bluefox_25000826_equi.yaml ├── bluefox_25000826_fisheye.yaml ├── euroc_mono_calib.yaml ├── euroc_stereo_calib.yaml ├── fla_stereo_imu.yaml └── svo_test_pinhole.yaml ├── doc ├── advanced.md ├── camera_calibration.md ├── euroc.md ├── get_started.md ├── imgs │ ├── stereo_imu.png │ ├── test_rqt.png │ └── test_rviz.png ├── install.md └── run_on_arm.md ├── include └── svo_ros │ ├── svo_factory.h │ ├── svo_interface.h │ ├── svo_node_base.h │ ├── svo_nodelet.h │ └── visualizer.h ├── launch ├── euroc_mono_imu.launch ├── euroc_stereo_imu.launch ├── fla_stereo_imu.launch ├── live_nodelet.launch ├── live_nodelet_fisheye.launch ├── run_from_topic.launch └── run_from_topic_arm.launch ├── package.xml ├── param ├── euroc_mono_imu.yaml ├── euroc_stereo_imu.yaml ├── fast_pinhole.yaml ├── fisheye.yaml ├── fla_stereo_imu.yaml └── pinhole.yaml ├── rqt.perspective ├── rviz_config.rviz ├── scripts ├── kalibr_to_svo.py ├── omni_matlab_to_rpg.py ├── setupros_ip_arm.sh └── setupros_ip_pc.sh ├── src ├── svo_factory.cpp ├── svo_interface.cpp ├── svo_node.cpp ├── svo_node_base.cpp ├── svo_nodelet.cpp └── visualizer.cpp └── svo_nodelet.xml /.gitignore: -------------------------------------------------------------------------------- 1 | *.user 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This repository contains examples to use `SVO 2.0` binaries in various configurations: 2 | * monocular 3 | * stereo 4 | * stereo/monocular + IMU 5 | * pinhole/fisheye/catadioptric cameras are supported 6 | 7 | For detailed instructions, please refer to the documentation under `rpg_svo_example/svo_ros/doc`. 
8 | We provide several launch files under `rpg_svo_example/svo_ros/launch` for different datasets, including the [EuRoC](http://projects.asl.ethz.ch/datasets/doku.php?id=kmavvisualinertialdatasets) datasets. 9 | These can be used as a reference for customization according to specific needs. 10 | 11 | To use `SVO 2.0` as a binary form, you need to request the binaries of `SVO 2.0` from [here](http://rpg.ifi.uzh.ch/svo2.html). 12 | The binaries you get already include a copy of this repository, therefore they are self-contained. 13 | 14 | This repository will also be used for the following purposes: 15 | * Keep track of the update of the binaries 16 | * Add new launch files and example code to use the binaries 17 | 18 | If the binaries are updated, you can download the new version from the original links in the email you received. 19 | 20 | **ARM binaries**: You can replace the file name in the download link with `svo_binaries_1404_indigo_armhf.zip` to get the link for the `armhf` binaries. 
21 | 22 | **If you have any problem regarding using `SVO 2.0` binaries, 23 | you can either start an issue or contact Zichao Zhang (zzhang AT ifi DOT uzh DOT ch).** 24 | 25 | # Binaries changelog 26 | **22.01.2018** Update 16.04 binaries to OpenCV 3.3.1 27 | 28 | **29.09.2017** Add binaries for `armhf` (14.04 + `indigo`) 29 | 30 | **17.07.2017** Initial Release 31 | -------------------------------------------------------------------------------- /svo_ros/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(svo_ros) 2 | cmake_minimum_required(VERSION 2.8.3) 3 | 4 | find_package(catkin_simple REQUIRED) 5 | catkin_simple() 6 | 7 | ############################################################################# 8 | # Set build flags, set ARM_ARCHITECTURE environment variable on Odroid 9 | SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pthread -Wall -Werror -D_LINUX -D_REENTRANT -march=native -Wno-unused-variable -Wno-unused-but-set-variable -Wno-unknown-pragmas") 10 | 11 | IF(DEFINED ENV{ARM_ARCHITECTURE}) 12 | SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mfpu=neon -march=armv7-a") 13 | ADD_DEFINITIONS(-DHAVE_FAST_NEON) 14 | ELSE() 15 | SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mmmx -msse -msse -msse2 -msse3 -mssse3 -mno-avx") 16 | ENDIF() 17 | IF(CMAKE_COMPILER_IS_GNUCC) 18 | SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") 19 | ELSE() 20 | SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") 21 | ENDIF() 22 | SET(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS} -O3 -fsee -fomit-frame-pointer -fno-signed-zeros -fno-math-errno -funroll-loops -ffast-math -fno-finite-math-only") 23 | 24 | ############################################################################# 25 | # Build library 26 | 27 | set(HEADERS 28 | include/svo_ros/svo_interface.h 29 | include/svo_ros/svo_factory.h 30 | include/svo_ros/visualizer.h 31 | include/svo_ros/svo_nodelet.h 32 | ) 33 | 34 | set(SOURCES 35 | src/svo_interface.cpp 36 | src/svo_factory.cpp 37 | 
src/svo_node_base.cpp 38 | src/visualizer.cpp 39 | ) 40 | 41 | cs_add_library(${PROJECT_NAME} ${SOURCES} ${HEADERS}) 42 | 43 | ############################################################################# 44 | # Build executables 45 | cs_add_executable(svo_node src/svo_node.cpp) 46 | target_link_libraries(svo_node ${PROJECT_NAME}) 47 | 48 | cs_add_library(svo_nodelet src/svo_nodelet.cpp) 49 | target_link_libraries(svo_nodelet ${PROJECT_NAME}) 50 | 51 | ############################################################################# 52 | # Install 53 | cs_install() 54 | install(FILES svo_nodelet.xml 55 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} 56 | ) 57 | cs_export() 58 | -------------------------------------------------------------------------------- /svo_ros/calib/25000826_fisheye_mask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uzh-rpg/rpg_svo_example/d465dd9898f213e774e92c87bad7ccc481932321/svo_ros/calib/25000826_fisheye_mask.png -------------------------------------------------------------------------------- /svo_ros/calib/bluefox_25000742_pinhole.yaml: -------------------------------------------------------------------------------- 1 | cameras: 2 | - camera: 3 | distortion: 4 | parameters: 5 | cols: 1 6 | rows: 4 7 | data: [-0.277970, 0.060647, -0.002097, 0.000373] 8 | type: radial-tangential 9 | image_height: 480 10 | image_width: 752 11 | intrinsics: 12 | cols: 1 13 | rows: 4 14 | data: [416.401549, 416.375319, 385.554786, 237.640332] 15 | label: cam0 16 | line-delay-nanoseconds: 0 17 | type: pinhole 18 | T_B_C: 19 | cols: 4 20 | rows: 4 21 | data: [ 1.0, 0.0, 0.0, 0.038, 22 | 0.0, -1.0, 0.0, 0.0, 23 | 0.0, 0.0, -1.0, -0.02, 24 | 0.0, 0.0, 0.0, 1.0] 25 | serial_no: 25000742 26 | calib_date: 2015-03-05 27 | description: 'Bluefox 0742' 28 | label: bluefox-0742 29 | -------------------------------------------------------------------------------- 
/svo_ros/calib/bluefox_25000826_equi.yaml: -------------------------------------------------------------------------------- 1 | cameras: 2 | - camera: 3 | distortion: 4 | parameters: 5 | cols: 1 6 | rows: 4 7 | data: [-0.042748291579198384, -0.012338895220958084, 0.020575944694821845, 8 | -0.010432642512385784] 9 | type: equidistant 10 | image_height: 480 11 | image_width: 752 12 | intrinsics: 13 | cols: 1 14 | rows: 4 15 | data: [212.20595878579752, 211.93463862680093, 397.43146518249233, 242.44205491777075] 16 | label: cam0 17 | line-delay-nanoseconds: 0 18 | type: pinhole 19 | mask: 25000826_fisheye_mask.png 20 | T_B_C: 21 | cols: 4 22 | rows: 4 23 | data: [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 24 | 1.0] 25 | serial_no: 0 26 | calib_date: 0 27 | description: /camera/image_raw 28 | label: camchain-homezichaoworkspacecalib250008262016-10-02-15-20-30.yaml 29 | -------------------------------------------------------------------------------- /svo_ros/calib/bluefox_25000826_fisheye.yaml: -------------------------------------------------------------------------------- 1 | cameras: 2 | - camera: 3 | distortion: 4 | parameters: 5 | cols: 1 6 | rows: 0 7 | data: [] 8 | type: none 9 | image_height: 480 10 | image_width: 752 11 | intrinsics: 12 | cols: 1 13 | data: [-2.065170e+02, 0.000000e+00, 2.207161e-03, -4.879622e-06, 1.865656e-08, 394.552874, 241.800066, 1.000330, 0.000219, 0.000257, 294.182260, 152.289570, -12.191217, 27.959546, 8.241525, -1.970397, 10.689256, 1.871609, -6.437684, 0.430037, 3.215544, 0.921484, 0.0, 1.0] 14 | rows: 24 15 | label: cam0 16 | line-delay-nanoseconds: 0 17 | type: omni 18 | mask: 25000826_fisheye_mask.png 19 | T_B_C: 20 | cols: 4 21 | rows: 4 22 | data: [ 1., 0., 0., 0.0, 23 | 0., 1., 0., 0.0, 24 | 0., 0., 1., 0.0, 25 | 0., 0., 0., 1.] 
26 | label: /fisheye_ocam_model 27 | -------------------------------------------------------------------------------- /svo_ros/calib/euroc_mono_calib.yaml: -------------------------------------------------------------------------------- 1 | label: "Euroc" 2 | id: 412eab8e4058621f7036b5e765dfe812 3 | cameras: 4 | - camera: 5 | label: cam0 6 | id: 54812562fa109c40fe90b29a59dd7798 7 | line-delay-nanoseconds: 0 8 | image_height: 480 9 | image_width: 752 10 | type: pinhole 11 | intrinsics: 12 | cols: 1 13 | rows: 4 14 | data: [458.6548807207614, 457.2966964634893, 367.2158039615726, 248.37534060980727] 15 | distortion: 16 | type: radial-tangential 17 | parameters: 18 | cols: 1 19 | rows: 4 20 | data: [-0.28340811217029355, 0.07395907389290132, 0.00019359502856909603, 21 | 1.7618711454538528e-05] 22 | T_B_C: 23 | cols: 4 24 | rows: 4 25 | data: [0.0148655429818, -0.999880929698, 0.00414029679422, -0.0216401454975, 26 | 0.999557249008, 0.0149672133247, 0.025715529948, -0.064676986768, 27 | -0.0257744366974, 0.00375618835797, 0.999660727178, 0.00981073058949, 28 | 0.0, 0.0, 0.0, 1.0] 29 | imu_params: 30 | delay_imu_cam: 0.0 31 | max_imu_delta_t: 0.01 32 | acc_max: 176.0 33 | omega_max: 7.8 34 | sigma_omega_c: 12.0e-4 35 | sigma_acc_c: 8.0e-3 36 | sigma_omega_bias_c: 2.0e-5 37 | sigma_acc_bias_c: 5.5e-5 38 | sigma_integration: 0.0 39 | g: 9.8082 40 | imu_rate: 800 41 | 42 | imu_initialization: 43 | velocity: [0.0, 0, 0.0] 44 | omega_bias: [0.0, 0, 0.0] 45 | acc_bias: [0.0, 0.0, 0.0] 46 | velocity_sigma: 2.0 47 | omega_bias_sigma: 0.01 48 | acc_bias_sigma: 0.1 49 | -------------------------------------------------------------------------------- /svo_ros/calib/euroc_stereo_calib.yaml: -------------------------------------------------------------------------------- 1 | label: "Euroc" 2 | id: 412eab8e4058621f7036b5e765dfe812 3 | cameras: 4 | - camera: 5 | label: cam0 6 | id: 54812562fa109c40fe90b29a59dd7798 7 | line-delay-nanoseconds: 0 8 | image_height: 480 9 | image_width: 
752 10 | type: pinhole 11 | intrinsics: 12 | cols: 1 13 | rows: 4 14 | data: [458.6548807207614, 457.2966964634893, 367.2158039615726, 248.37534060980727] 15 | distortion: 16 | type: radial-tangential 17 | parameters: 18 | cols: 1 19 | rows: 4 20 | data: [-0.28340811217029355, 0.07395907389290132, 0.00019359502856909603, 21 | 1.7618711454538528e-05] 22 | T_B_C: 23 | cols: 4 24 | rows: 4 25 | data: [0.0148655429818, -0.999880929698, 0.00414029679422, -0.0216401454975, 26 | 0.999557249008, 0.0149672133247, 0.025715529948, -0.064676986768, 27 | -0.0257744366974, 0.00375618835797, 0.999660727178, 0.00981073058949, 28 | 0.0, 0.0, 0.0, 1.0] 29 | - camera: 30 | label: cam1 31 | id: 54812562fa109c40fe90b29a59dd7723 32 | line-delay-nanoseconds: 0 33 | image_height: 480 34 | image_width: 752 35 | type: pinhole 36 | 37 | intrinsics: 38 | cols: 1 39 | rows: 4 40 | data: [457.5874266035361, 456.13442556023665, 379.9994465203525, 255.2381853862733] 41 | distortion: 42 | type: radial-tangential 43 | parameters: 44 | cols: 1 45 | rows: 4 46 | data: [-0.283683654496, 0.0745128430929, -0.000104738949098, -3.55590700274e-05] 47 | T_B_C: 48 | cols: 4 49 | rows: 4 50 | data: [0.0125552670891, -0.999755099723, 0.0182237714554, -0.0198435579556, 51 | 0.999598781151, 0.0130119051815, 0.0251588363115, 0.0453689425024, 52 | -0.0253898008918, 0.0179005838253, 0.999517347078, 0.00786212447038, 53 | 0.0, 0.0, 0.0, 1.0] 54 | 55 | imu_params: 56 | delay_imu_cam: 0.0 57 | max_imu_delta_t: 0.01 58 | acc_max: 176.0 59 | omega_max: 7.8 60 | sigma_omega_c: 12.0e-4 61 | sigma_acc_c: 8.0e-3 62 | sigma_omega_bias_c: 2.0e-5 63 | sigma_acc_bias_c: 5.5e-5 64 | sigma_integration: 0.0 65 | g: 9.8082 66 | imu_rate: 800 67 | 68 | imu_initialization: 69 | velocity: [0.0, 0, 0.0] 70 | omega_bias: [0.0, 0, 0.0] 71 | acc_bias: [0.0, 0.0, 0.0] 72 | velocity_sigma: 2.0 73 | omega_bias_sigma: 0.01 74 | acc_bias_sigma: 0.1 75 | -------------------------------------------------------------------------------- 
/svo_ros/calib/fla_stereo_imu.yaml: -------------------------------------------------------------------------------- 1 | cameras: 2 | - camera: 3 | distortion: 4 | parameters: 5 | cols: 1 6 | rows: 4 7 | data: [-0.24252919348116794, 0.03930108103982388, 0.0008221249960346152, -0.002043418590796441] 8 | type: radial-tangential 9 | image_height: 1024 10 | image_width: 1280 11 | intrinsics: 12 | cols: 1 13 | rows: 4 14 | data: [637.7135749684152, 635.2114915927144, 664.9670062685065, 489.8734610241461] 15 | label: /sync/cam0/image_raw 16 | line-delay-nanoseconds: 0 17 | type: pinhole 18 | T_B_C: 19 | cols: 4 20 | rows: 4 21 | data: [0.99984983, -0.00373198, 0.01692284, -0.074783895, 22 | 0.00403133, 0.9998354 , -0.01768969, -0.0005 , 23 | -0.01685403, 0.01775525, 0.9997003 , -0.0041, 24 | 0. , 0. , 0. , 1. ] 25 | serial_no: 0 26 | calib_date: 0 27 | description: 'left cam' 28 | - camera: 29 | distortion: 30 | parameters: 31 | cols: 1 32 | rows: 4 33 | data: [-0.25170969692935957, 0.04478134226090527, 0.0002848490302781225, 0.002230802727272928] 34 | type: radial-tangential 35 | image_height: 1024 36 | image_width: 1280 37 | intrinsics: 38 | cols: 1 39 | rows: 4 40 | data: [645.9125584269032, 643.8200253341743, 606.2090195940297, 508.4909138768748] 41 | label: /sync/cam1/image_raw 42 | line-delay-nanoseconds: 0 43 | type: pinhole 44 | T_B_C: 45 | cols: 4 46 | rows: 4 47 | data: [0.99746136, -0.00394329, -0.07110054, 0.074783895, 48 | 0.00236645, 0.99974967, -0.02224833, 0.0005, 49 | 0.07117047, 0.02202359, 0.997221 , 0.0041, 50 | 0. , 0. , 0. , 1. 
] 51 | serial_no: 0 52 | calib_date: 0 53 | description: 'right cam' 54 | label: fla1_forward_stereo 55 | 56 | imu_params: 57 | delay_imu_cam: 0.0 58 | max_imu_delta_t: 0.01 59 | acc_max: 176.0 60 | omega_max: 7.8 61 | sigma_omega_c: 0.005 62 | sigma_acc_c: 0.01 63 | sigma_omega_bias_c: 4.0e-6 64 | sigma_acc_bias_c: 0.0002 65 | sigma_integration: 0.0 66 | g: 9.81007 67 | imu_rate: 200.0 68 | 69 | imu_initialization: 70 | velocity: [0, 0, 0] 71 | omega_bias: [0, 0, 0] 72 | acc_bias: [0, 0, 0] 73 | velocity_sigma: 0.5 74 | omega_bias_sigma: 0.005 75 | acc_bias_sigma: 0.05 76 | -------------------------------------------------------------------------------- /svo_ros/calib/svo_test_pinhole.yaml: -------------------------------------------------------------------------------- 1 | cameras: 2 | - camera: 3 | distortion: 4 | parameters: 5 | cols: 1 6 | rows: 1 7 | data: [0.9320] 8 | type: fisheye 9 | image_height: 480 10 | image_width: 752 11 | intrinsics: 12 | cols: 1 13 | rows: 4 14 | data: [383.013152, 382.39248, 344.7056, 244.32688] 15 | label: cam 16 | line-delay-nanoseconds: 0 17 | type: pinhole 18 | T_B_C: 19 | cols: 4 20 | rows: 4 21 | data: [1, 0, 0, 0, 22 | 0, -1, 0, 0, 23 | 0, 0, -1, 0, 24 | 0, 0, 0, 1] 25 | label: test_camera 26 | -------------------------------------------------------------------------------- /svo_ros/doc/advanced.md: -------------------------------------------------------------------------------- 1 | In this document, we will see how to use `SVO` with stereo setup and IMU information. We will also see how to adapt the parameters for a different resolution. 2 | 3 | Let's start with an example first. Download the dataset from http://rpg.ifi.uzh.ch/datasets/fla_stereo_imu.bag. This bag file contains synchronized stereo images and IMU measurements. 
You can run `SVO` on this bag by 4 | 5 | roslaunch svo_ros fla_stereo_imu.launch 6 | rosbag play fla_stereo_imu.bag 7 | 8 | You should be able to observe the output similar to ![stereo_imu](./imgs/stereo_imu.png) 9 | 10 | Now we will go through important things that need to be specified. 11 | 12 | ### The calibration file 13 | #### Stereo 14 | The calibration file for stereo cameras (e.g., `param/fla_stereo_imu.yaml`) would look like this: 15 | 16 | cameras: 17 | - camera: 18 | 19 | T_B_C: 20 | cols: 4 21 | rows: 4 22 | data: [0.99984983, -0.00373198, 0.01692284, -0.074783895, 23 | 0.00403133, 0.9998354 , -0.01768969, -0.0005 , 24 | -0.01685403, 0.01775525, 0.9997003 , -0.0041, 25 | 0. , 0. , 0. , 1. ] 26 | <......> 27 | - camera: 28 | distortion: 29 | 30 | T_B_C: 31 | cols: 4 32 | rows: 4 33 | data: [0.99746136, -0.00394329, -0.07110054, 0.074783895, 34 | 0.00236645, 0.99974967, -0.02224833, 0.0005, 35 | 0.07117047, 0.02202359, 0.997221 , 0.0041, 36 | 0. , 0. , 0. , 1. ] 37 | <......> 38 | label: fla_forward_stereo 39 | 40 | `T_B_C`s are the transformations of the camera in the body frame, and the body frame is usually defined the same as the IMU frame (see notations [here](https://github.com/uzh-rpg/rpg_svo/wiki/Notation)). 
41 | 42 | #### IMU 43 | To use IMU measurements, IMU noise characteristics/initial values need to be specified: 44 | 45 | imu_params: 46 | delay_imu_cam: 0.0 47 | max_imu_delta_t: 0.01 48 | acc_max: 176.0 49 | omega_max: 7.8 50 | sigma_omega_c: 0.005 51 | sigma_acc_c: 0.01 52 | sigma_omega_bias_c: 4.0e-6 53 | sigma_acc_bias_c: 0.0002 54 | sigma_integration: 0.0 55 | g: 9.81007 56 | imu_rate: 200.0 57 | 58 | imu_initialization: 59 | velocity: [0, 0, 0] 60 | omega_bias: [0, 0, 0] 61 | acc_bias: [0, 0, 0] 62 | velocity_sigma: 0.5 63 | omega_bias_sigma: 0.005 64 | acc_bias_sigma: 0.05 65 | 66 | Most importantly are the noise characteristics `sigma_omega_c`, `sigma_acc_c`, `sigma_omega_bias_c` and `sigma_acc_bias_c`, which are the continuous time noise sigma for additive noise and bias random walk. 67 | For a good description of IMU noise model, see [this page](https://github.com/ethz-asl/kalibr/wiki/IMU-Noise-Model). 68 | 69 | To calibrate the extrinsics and/or the intrinsics of the IMU-camera system, we recommend to use [Kalibr](https://github.com/ethz-asl/kalibr). Please refer to the manual of the package for more details. 70 | We have a script (`scripts/kalibr_to_svo.py`) to convert the output of `Kalibr` to `SVO`. 71 | In practice, we sometimes find that the translation components between the IMU and cameras are not well calibrated (probably due to the lack of excitation of the accelerometers), therefore it is better to double check your calibration result before using. 
72 | 73 | 74 | ### The launch file 75 | You need to specify the camera and IMU topics in the launch file (e.g., `launch/fla_stereo_imu.launch`) as: 76 | 77 | 78 | 79 | 80 | 81 | ### The parameters 82 | #### Stereo 83 | To use stereo, you need to specify the following in your parameter file: 84 | 85 | pipeline_is_stereo: True 86 | automatic_reinitialization: True # When lost, stereo can recover immediately 87 | 88 | The following parameters are also important: 89 | 90 | max_depth_inv: 0.05 91 | min_depth_inv: 2.0 92 | mean_depth_inv: 0.3 93 | 94 | These parameters affect the range of epipolar search for triangulation. It is usually OK to use the default range, but if your application scenario is very different, please adapt it accordingly. 95 | Another thing we can do in a stereo setup is to use multi-thread for reprojecting features 96 | 97 | use_async_reprojectors: True 98 | 99 | #### IMU 100 | In `SVO`, IMU measurements are mostly used for providing a rotational prior for image alignment and pose optimization. 101 | This is especially important for image alignment under aggressive motion (e.g., in the example bag), since the optimization needs a good initialization. 102 | To use IMU measurements, you need to specify the following 103 | 104 | use_imu: True 105 | img_align_prior_lambda_rot: 5.0 # Gyroscope prior in sparse image alignment 106 | poseoptim_prior_lambda: 2.0 # Gyroscope prior in pose optimization 107 | 108 | The higher the prior lambdas are, the more weight the IMU priors have. If you have a good IMU, you can set them higher and vice versa. If `use_imu` is set to false and the prior lambdas are nonzero, a constant velocity prior will be used, which often does not perform very well in practice. Therefore remember to set the prior lambdas to zero when IMU is not used. 109 | 110 | #### Image Resolutions 111 | The image resolution from the example bag is `1280x1040`. 
Therefore you can see some different parameters in `fla_stereo_imu.yaml` and `pinhole.yaml`, which is suited for `752x480`. Most notably are 112 | 113 | img_align_max_level: 5 # maximum pyramid level for the coarse-to-fine optimization 114 | n_pyr_levels: 4 # create more pyramid levels for large images, it is always image align max level minus one 115 | 116 | grid_size: 50 # use a bigger grid for feature bucketing for larger images 117 | poseoptim_thresh: 3.0 # outlier threshold in pixel, should be higher for larger images 118 | 119 | init_min_disparity: 30 # minimum disparity for monocular initialization, should be higher for larger images 120 | -------------------------------------------------------------------------------- /svo_ros/doc/camera_calibration.md: -------------------------------------------------------------------------------- 1 | Here we introduce how to calibrate cameras using several commonly used models and convert to svo format. 2 | 3 | ### Pinhole projection + Radial-tangential 4 | This is the distortion model used in opencv and ROS, also known as `plumb_bob`. We can calibrate it using the tool provided by ROS: 5 | ``` 6 | sudo apt-get install ros-indigo-camera-calibration 7 | rosrun camera_calibration cameracalibrator.py 8 | ``` 9 | Make sure to adapt size for the checkerboard actually used. What you get is in the format: 10 | ``` 11 | camera matrix 12 | fx 0 cx 13 | 0 fy cy 14 | 0 0 1 15 | 16 | distortion 17 | d0 d1 d2 d3 0 18 | 19 | ......... 
20 | ``` 21 | 22 | For use with svo, copy the values to the following template (values with $ prefix have to be filled in): 23 | ``` 24 | cameras: 25 | - camera: 26 | distortion: 27 | parameters: 28 | cols: 1 29 | rows: 4 30 | data: [$d0, $d1 , $d2 , $d3] 31 | type: radial-tangential 32 | image_height: $image_height 33 | image_width: $image_width 34 | intrinsics: 35 | cols: 1 36 | rows: 4 37 | data: [$fx, $fy, $cx, $cy] 38 | label: cam0 39 | line-delay-nanoseconds: 0 40 | type: pinhole 41 | T_B_C: 42 | cols: 4 43 | rows: 4 44 | data: [ 1., 0., 0., 0., 45 | 0., 1., 0., 0., 46 | 0., 0., 1., 0., 47 | 0., 0., 0., 1.] 48 | serial_no: 0 49 | calib_date: 0 50 | description: '$camera_name' 51 | label: $camera_name 52 | 53 | ``` 54 | 55 | `T_B_C` is the pose of the camera frame in the IMU frame. This is used when SVO is set to use the IMU. 56 | 57 | ### Pinhole projection + Equidistant 58 | This is a generic distortion model that can model very different field of views ([paper](http://www.ee.oulu.fi/mvg/files/pdf/pdf_697.pdf)), therefore we can use it for pinhole as well as fisheye cameras. OpenCV (from 3.0) also [supports this model](http://docs.opencv.org/master/db/d58/group__calib3d__fisheye.html). To calibrate a camera using a equidistant camera model, we can use [Kalibr](https://github.com/ethz-asl/kalibr). For details of Kalibr calibration, please refer to [this official manual](https://github.com/ethz-asl/kalibr/wiki/multiple-camera-calibration). 59 | 60 | Afterwards, you can use the script `kalibr_to_svo.py` under `svo_ros/scripts` to convert the output to svo format: 61 | 62 | ./kalibr_to_svo --kalibr 63 | 64 | 65 | ### Omnidirectional 66 | This is a special model that combines projection and distortion together. It works for fisheye as well as catadioptric cameras. To use this camera model, you need to calibrate the camera using [this Matlab Toolbox](https://sites.google.com/site/scarabotix/ocamcalib-toolbox). Please refer to the page of the toolbox for details. 
67 | 68 | Afterwards, you can use the script `omni_matlab_to_rpg.py` under `svo_ros/scripts` to convert the output to svo format. 69 | -------------------------------------------------------------------------------- /svo_ros/doc/euroc.md: -------------------------------------------------------------------------------- 1 | We provide two launch files for [EuRoC](http://projects.asl.ethz.ch/datasets/doku.php?id=kmavvisualinertialdatasets) for monocular and stereo setups: 2 | 3 | roslaunch svo_ros euroc_mono_imu.launch 4 | roslaunch svo_ros euroc_stereo_imu.launch 5 | 6 | These launch files read the parameters from `param/euroc_mono_imu.yaml` and `param/euroc_stereo_imu.yaml`. The parameters are not necessarily optimal for every sequence, but should be enough as a good starting point. 7 | 8 | ### Starting Position when Playing Bag 9 | The first several images of many EuRoC sequences are not good for initializing `SVO`, especially for the monocular case. Therefore it is better to start the bag at a later time, for example: 10 | 11 | rosbag play MH_01_easy.bag -s 50 12 | rosbag play MH_02_easy.bag -s 45 13 | rosbag play V1_01_easy.bag 14 | rosbag play V1_02_medium.bag -s 13 15 | rosbag play V2_01_easy.bag 16 | rosbag play V2_02_medium.bag -s 13 17 | 18 | This is to avoid, for example, strong rotation for monocular initialization. 19 | -------------------------------------------------------------------------------- /svo_ros/doc/get_started.md: -------------------------------------------------------------------------------- 1 | ### Run example bags 2 | First source the setup file of the overlay workspace: 3 | 4 | source ~/svo_install_overlay_ws/devel/setup.bash 5 | 6 | Download the test bag file from http://rpg.ifi.uzh.ch/datasets/airground_rig_s3_2013-03-18_21-38-48.bag. Launch svo node: 7 | 8 | roslaunch svo_ros run_from_topic.launch cam_name:=svo_test_pinhole 9 | 10 | This will also start RVIZ. 
Then play the bag file: 11 | 12 | rosbag play airground_rig_s3_2013-03-18_21-38-48.bag 13 | 14 | Now you should be able to observe the camera motion and sparse map in RVIZ. 15 | 16 | You can also download another bag recorded with a fisheye camera: http://rpg.ifi.uzh.ch/datasets/test_fisheye.bag. Then you need to change the following line in `run_from_topic.launch`: 17 | 18 | 19 | 20 | to 21 | 22 | 23 | And launch svo via: 24 | 25 | roslaunch svo_ros run_from_topic.launch cam_name:=bluefox_25000826_fisheye 26 | 27 | ### Customize launch files 28 | We provide several example launch files under `svo_ros/launch`: 29 | * `run_from_topic.launch`: run svo on a topic that publishes images 30 | * `live_nodelet.launch`/`live_nodelet_fisheye.launch`: run svo as nodelet. These files can not be launched directly, since they depend on some private packages. But they can be used as an example for using svo with other nodes. 31 | 32 | In the launch file, you have to specify the following for svo node/nodelet, as in `run_from_topic.launch`: 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | Note that SVO also supports stereo cameras. 44 | 45 | #### Parameter files 46 | We provide two example parameter files under `svo_ros/param`: 47 | * `pinhole.yaml`: for relatively small field of view cameras 48 | * `fisheye.yaml`: for cameras with wide angle lens (e.g., fisheye and catadioptric) 49 | 50 | The parameters in these files are typical values. If you wish to change the parameters, please refer to the comments in these two files. For an example of using `SVO` with a stereo-IMU setup and a different resolution, please refer to `advanced.md`. 51 | 52 | #### Camera calibration files 53 | If you want to use your own camera, make sure a global shutter camera is used. A good choice is [the Bluefox camera](https://www.matrix-vision.com/USB2.0-single-board-camera-mvbluefox-mlc.html) from MatrixVision. 
54 | You can put camera calibration files under `svo_ros/calib` and load them as in `run_from_topic.launch`. We use yaml files to specify camera parameters. Please refer to `camera_calibration.md` for more details. 55 | -------------------------------------------------------------------------------- /svo_ros/doc/imgs/stereo_imu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uzh-rpg/rpg_svo_example/d465dd9898f213e774e92c87bad7ccc481932321/svo_ros/doc/imgs/stereo_imu.png -------------------------------------------------------------------------------- /svo_ros/doc/imgs/test_rqt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uzh-rpg/rpg_svo_example/d465dd9898f213e774e92c87bad7ccc481932321/svo_ros/doc/imgs/test_rqt.png -------------------------------------------------------------------------------- /svo_ros/doc/imgs/test_rviz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uzh-rpg/rpg_svo_example/d465dd9898f213e774e92c87bad7ccc481932321/svo_ros/doc/imgs/test_rviz.png -------------------------------------------------------------------------------- /svo_ros/doc/install.md: -------------------------------------------------------------------------------- 1 | ### Prerequisites 2 | #### System 3 | Make sure your system meets the following requirements: 4 | 5 | * Ubuntu 14.04 64 bit 6 | * gcc version 4.8 7 | * ROS version `indigo` ([installation guide](http://wiki.ros.org/indigo/Installation/Ubuntu)). 8 | 9 | or 10 | 11 | * Ubuntu 16.04 64 bit 12 | * gcc version 5.4 13 | * ROS version `kinetic` ([installation guide](http://wiki.ros.org/kinetic/Installation/Ubuntu)). 
14 | 15 | We also support ARM architecture (tested on Odroid XU3/4) for 16 | 17 | * Ubuntu 14.04 32 bit 18 | * gcc version 4.8 19 | * ROS version `indigo` 20 | 21 | If you have multiple `gcc` installed, make sure the aforementioned version is the default one (use `update-alternatives`). 22 | 23 | #### Install catkin tools 24 | We use [catkin tools](https://catkin-tools.readthedocs.io/en/latest/) to build workspace. Use catkin 0.3.1: 25 | 26 | sudo apt-get install python-pip 27 | sudo pip install catkin-tools==0.3.1 28 | 29 | Remember to remove the previous version that you are using. 30 | 31 | 32 | ### Install 33 | 34 | If you are compiling on an ARM processor, make sure the `ARM_ARCHITECTURE` environment variable is defined, for example: 35 | ``` 36 | export ARM_ARCHITECTURE=armv7-a 37 | ``` 38 | 39 | #### Create the install workspace 40 | Copy the `svo_install_ws` to where you want to install the binaries (e.g., your home folder in this documentation): 41 | 42 | cp -r /svo_install_ws/ ~/ 43 | 44 | Now we should have a folder `~/svo_install_ws` with a subfolder named `install`. 45 | 46 | Run the script within the workspace to fix some hardcoded paths: 47 | 48 | cd ~ 49 | cd svo_install_ws 50 | ./fix_path.sh 51 | 52 | There may be some warnings with `opengv`, which can be safely ignored. 53 | 54 | #### Create an overlay workspace 55 | Now we will create a workspace to use the binaries we just downloaded. Before proceeding, make sure you have already sourced the setup file from ROS : 56 | 57 | source /opt/ros//setup.bash 58 | 59 | Then source the install workspace: 60 | 61 | cd ~ 62 | source svo_install_ws/install/setup.bash 63 | 64 | Create a new catkin workspace: 65 | 66 | mkdir svo_install_overlay_ws && cd svo_install_overlay_ws 67 | catkin config --init --mkdirs --cmake-args -DCMAKE_BUILD_TYPE=Release 68 | 69 | Now, this workspace should overlay both the ros installation and the `svo_install_ws`. 
Typing `catkin config`, you should see: 70 | 71 | Extending: [env] /home//svo_install_ws/install:/opt/ros/ 72 | 73 | Copy the `rpg_svo_example` folder to the `src` folder and build the `svo_install_overlay_ws` 74 | 75 | cp -r /rpg_svo_example ~/svo_install_overlay_ws/src 76 | cd ~/svo_install_overlay_ws 77 | catkin build 78 | 79 | ### Validate Your Installation 80 | 81 | If you are using an ARM processor, follow the instructions in `run_on_arm.md` from this point. 82 | 83 | Source the setup file of the overlay workspace: 84 | 85 | source ~/svo_install_overlay_ws/devel/setup.bash 86 | 87 | Download the test bag from [here](http://rpg.ifi.uzh.ch/svo2/svo_test_short.bag). Then run the following commands: 88 | 89 | roslaunch svo_ros run_from_topic.launch cam_name:=svo_test_pinhole 90 | rosbag play svo_test_short.bag 91 | 92 | Then you should be able to observe the camera motion and the sparse map in RVIZ, as shown below 93 | 94 | ![test rviz](./imgs/test_rviz.png) 95 | 96 | You can also check the number of feature tracked and the pipeline status in the following rqt window 97 | 98 | ![test rqt](./imgs/test_rqt.png) 99 | 100 | At the first launch, the `SVO Namespace` may be incorrect and the information is not displayed. Simply typing `svo` as shown in the screenshot will solve the problem. 101 | -------------------------------------------------------------------------------- /svo_ros/doc/run_on_arm.md: -------------------------------------------------------------------------------- 1 | # Running on ARM 2 | The ARM binaries do not include the GUI for SVO status (`rqt_svo`), since we usually run the applications on an ARM processor in a headless mode (e.g., via ssh). Therefore, you need to run the GUI (i.e., RVIZ and `rqt_svo` window) on another PC. In order to use the `rqt_svo` window, you need to have `SVO` set up on the PC as well. 3 | 4 | After installing `SVO` on both ARM and PC, you need to make the programs on the ARM and the PC share the same ROS master. 
Suppose the ARM and the PC are in the same network, first change the `ROS_MASTER_URI` to the IP address of the PC, where we will run the `roscore`, in `scripts/setupros_ip_arm.sh`: 5 | ``` 6 | # On the ARM, in setupros_ip_arm.sh 7 | ... 8 | # change the ip to your PC where you run the roscore 9 | export ROS_MASTER_URI=http://192.168.200.7:11311 10 | ... 11 | ``` 12 | Then run the following scripts (under `scripts` folder) respectively: 13 | ``` 14 | # On the PC 15 | source setupros_ip_pc.sh 16 | # On the ARM: 17 | source setupros_ip_arm.sh 18 | ``` 19 | You may want to put the above lines in your `.bashrc`, otherwise you have to do it for **every new terminal** you open. 20 | 21 | Then run `roscore` on the PC: 22 | ``` 23 | # On the PC 24 | roscore 25 | ``` 26 | You should double check that the output of `roscore` reflects that the master is on the IP you set. 27 | 28 | After setting up the `roscore`, run the following on the PC 29 | ``` 30 | # On the PC: you also need to manually load the rviz configuration `rviz_config.rviz` 31 | rosrun rviz rviz 32 | # After sourcing your SVO installation on the PC 33 | rqt_svo 34 | ``` 35 | and on the ARM: 36 | ``` 37 | # On the ARM 38 | source ~/svo_install_overlay_ws/devel/setup.bash 39 | roslaunch svo_ros run_from_topic_arm.launch cam_name:=svo_test_pinhole 40 | ``` 41 | Download the test bag from [here](http://rpg.ifi.uzh.ch/svo2/svo_test_short.bag), and run in another terminal on the ARM: 42 | ``` 43 | # On the ARM 44 | # Yes, you can play the bag on the PC but there will be some delay depending on your network status 45 | rosbag play svo_test_short.bag 46 | ``` 47 | Now you should see something similar to the output in `install.md`. Note that if you have different versions of ROS on the PC and ARM, some messages will not appear properly. 48 | 49 | If you do not care about visualization, you can just install `SVO` on the ARM, launch the node and play the bag and ignore the settings on PC and the ROS master. 
The topics containing the output of `SVO` are published anyway. 50 | 51 | ## Parameters for ARM 52 | Now we have a look at what settings you need to use to make `SVO` run on an ARM processor. The example launch file is `launch/run_from_topic_arm.launch`. 53 | 54 | Because the ARM processor has limited computational power, and publishing images requires quite some computation for serialization, we set the following: 55 | ``` 56 | 57 | 58 | ``` 59 | which means we publish once every 10 images and publish the down-sampled one. 60 | 61 | We also provide a more lightweight setting in `param/fast_pinhole.yaml` to further reduce the required computation (e.g., track fewer features). It is fine to use the normal setup `param/pinhole.yaml` if you only run `SVO`. But if you have other applications to run on the ARM processor, it is usually needed to tune the parameters to fit the computational budget. 62 | 63 | 64 | ## Nodelet 65 | As mentioned above, image serialization is expensive. A common use case for an ARM processor is to use the camera attached to the embedded system and do computation on-board. In this situation, we can use [nodelet](http://wiki.ros.org/nodelet) provided by ROS to reduce the overhead of serializing/deserializing the images. 66 | 67 | The binaries allow you to use `SVO` in a nodelet form, and an example can be found in `launch/run_from_topic_arm.launch`. You will need to run your camera driver in the same nodelet. For porting nodes to nodelets and running nodelets, please refer to [the official tutorial](http://wiki.ros.org/nodelet/Tutorials). 
68 | -------------------------------------------------------------------------------- /svo_ros/include/svo_ros/svo_factory.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | namespace svo { 8 | 9 | // forward declarations 10 | class ImuHandler; 11 | class FrameHandlerMono; 12 | class FrameHandlerStereo; 13 | class FrameHandlerArray; 14 | class FrameHandlerDenseMono; 15 | 16 | namespace factory { 17 | 18 | /// Get IMU Handler. 19 | std::shared_ptr getImuHandler( 20 | const ros::NodeHandle& pnh); 21 | 22 | /// Factory for Mono-SVO. 23 | std::shared_ptr makeMono( 24 | const ros::NodeHandle& pnh, 25 | const CameraBundlePtr& cam = nullptr); 26 | 27 | /// Factory for Stereo-SVO. 28 | std::shared_ptr makeStereo( 29 | const ros::NodeHandle& pnh, 30 | const CameraBundlePtr& cam = nullptr); 31 | 32 | /// Factory for Camera-Array-SVO. 33 | std::shared_ptr makeArray( 34 | const ros::NodeHandle& pnh, 35 | const CameraBundlePtr& cam = nullptr); 36 | 37 | /// Factory for Camera-Array-SVO 38 | std::shared_ptr makeDenseMono( 39 | const ros::NodeHandle& pnh); 40 | 41 | } // namespace factory 42 | } // namespace mono 43 | -------------------------------------------------------------------------------- /svo_ros/include/svo_ros/svo_interface.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | #include 6 | #include // user-input 7 | #include 8 | #include 9 | 10 | 11 | #include 12 | #include 13 | #include 14 | 15 | namespace svo { 16 | 17 | // forward declarations 18 | class FrameHandlerBase; 19 | class Visualizer; 20 | class ImuHandler; 21 | class BackendInterface; 22 | class BackendVisualizer; 23 | 24 | enum class PipelineType { 25 | kMono, 26 | kStereo, 27 | kArray 28 | }; 29 | 30 | /// SVO Interface 31 | class SvoInterface 32 | { 33 | public: 34 | 35 | // ROS subscription and publishing. 
36 | ros::NodeHandle nh_; 37 | ros::NodeHandle pnh_; 38 | PipelineType pipeline_type_; 39 | ros::Subscriber sub_remote_key_; 40 | std::string remote_input_; 41 | std::unique_ptr imu_thread_; 42 | std::unique_ptr image_thread_; 43 | 44 | // SVO modules. 45 | std::shared_ptr svo_; 46 | std::shared_ptr visualizer_; 47 | std::shared_ptr imu_handler_; 48 | std::shared_ptr backend_interface_; 49 | std::shared_ptr backend_visualizer_; 50 | 51 | CameraBundlePtr ncam_; 52 | 53 | // Parameters 54 | bool set_initial_attitude_from_gravity_ = true; 55 | 56 | // System state. 57 | bool quit_ = false; 58 | bool idle_ = false; 59 | bool automatic_reinitialization_ = false; 60 | 61 | SvoInterface(const PipelineType& pipeline_type, 62 | const ros::NodeHandle& nh, 63 | const ros::NodeHandle& private_nh); 64 | 65 | virtual ~SvoInterface(); 66 | 67 | // Processing 68 | void processImageBundle( 69 | const std::vector& images, 70 | int64_t timestamp_nanoseconds); 71 | 72 | void setImuPrior(const int64_t timestamp_nanoseconds); 73 | 74 | void publishResults( 75 | const std::vector& images, 76 | const int64_t timestamp_nanoseconds); 77 | 78 | // Subscription and callbacks 79 | void monoCallback(const sensor_msgs::ImageConstPtr& msg); 80 | void stereoCallback( 81 | const sensor_msgs::ImageConstPtr& msg0, 82 | const sensor_msgs::ImageConstPtr& msg1); 83 | void imuCallback(const sensor_msgs::ImuConstPtr& imu_msg); 84 | void inputKeyCallback(const std_msgs::StringConstPtr& key_input); 85 | 86 | 87 | // These functions are called before and after monoCallback or stereoCallback. 88 | // a derived class can implement some additional logic here. 
89 | virtual void imageCallbackPreprocessing(int64_t timestamp_nanoseconds) {} 90 | virtual void imageCallbackPostprocessing() {} 91 | 92 | void subscribeImu(); 93 | void subscribeImage(); 94 | void subscribeRemoteKey(); 95 | 96 | void imuLoop(); 97 | void monoLoop(); 98 | void stereoLoop(); 99 | }; 100 | 101 | } // namespace svo 102 | -------------------------------------------------------------------------------- /svo_ros/include/svo_ros/svo_node_base.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "svo_ros/svo_interface.h" 4 | 5 | namespace svo_ros { 6 | 7 | class SvoNodeBase { 8 | public: 9 | // Initializes glog, gflags and ROS. 10 | static void initThirdParty(int argc, char **argv); 11 | 12 | SvoNodeBase(); 13 | 14 | void run(); 15 | 16 | private: 17 | ros::NodeHandle node_handle_; 18 | ros::NodeHandle private_node_handle_; 19 | svo::PipelineType type_; 20 | 21 | public: 22 | svo::SvoInterface svo_interface_; 23 | }; 24 | 25 | } // namespace svo_ros 26 | -------------------------------------------------------------------------------- /svo_ros/include/svo_ros/svo_nodelet.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | 6 | namespace svo { 7 | 8 | // forward declarations 9 | class SvoInterface; 10 | 11 | class SvoNodelet : public nodelet::Nodelet 12 | { 13 | public: 14 | SvoNodelet() = default; 15 | virtual ~SvoNodelet(); 16 | 17 | private: 18 | virtual void onInit(); 19 | 20 | std::unique_ptr svo_interface_; 21 | }; 22 | 23 | } // namespace svo 24 | -------------------------------------------------------------------------------- /svo_ros/include/svo_ros/visualizer.h: -------------------------------------------------------------------------------- 1 | // This file is part of SVO - Semi-direct Visual Odometry. 
2 | // 3 | // Copyright (C) 2014 Christian Forster 4 | // (Robotics and Perception Group, University of Zurich, Switzerland). 5 | 6 | #pragma once 7 | 8 | #include // std::pair 9 | #include 10 | 11 | #include 12 | 13 | // ros 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | 28 | namespace svo { 29 | 30 | // forward declarations 31 | class FrameHandlerBase; 32 | 33 | /// Publish visualisation messages to ROS. 34 | class Visualizer 35 | { 36 | public: 37 | typedef std::shared_ptr Ptr; 38 | typedef pcl::PointXYZI PointType; 39 | typedef pcl::PointCloud PointCloud; 40 | 41 | const std::string kWorldFrame = "world"; 42 | 43 | ros::NodeHandle pnh_; 44 | size_t trace_id_ = 0; 45 | std::string trace_dir_; 46 | size_t img_pub_level_; 47 | size_t img_pub_nth_; 48 | size_t dense_pub_nth_; 49 | ros::Publisher pub_frames_; 50 | ros::Publisher pub_points_; 51 | ros::Publisher pub_imu_pose_; 52 | ros::Publisher pub_info_; 53 | ros::Publisher pub_markers_; 54 | ros::Publisher pub_pc_; 55 | PointCloud::Ptr pc_; 56 | std::vector pub_cam_poses_; 57 | std::vector pub_dense_; 58 | std::vector pub_images_; 59 | ros::Publisher pub_loop_closure_; 60 | 61 | tf::TransformBroadcaster br_; 62 | bool publish_world_in_cam_frame_; 63 | bool publish_map_every_frame_; 64 | ros::Duration publish_points_display_time_; 65 | bool publish_seeds_; 66 | bool publish_seeds_uncertainty_; 67 | bool trace_pointcloud_; 68 | double vis_scale_; 69 | std::ofstream ofs_states_; 70 | std::ofstream ofs_pointcloud_; 71 | 72 | Visualizer(const std::string& trace_dir, 73 | const ros::NodeHandle& nh_private, 74 | const size_t num_cameras); 75 | 76 | ~Visualizer() = default; 77 | 78 | void publishSvoInfo( 79 | const svo::FrameHandlerBase* const svo, 80 | const int64_t timestamp_nanoseconds); 81 | 82 | void publishImages( 83 | const std::vector& images, 84 | const int64_t 
timestamp_nanoseconds); 85 | 86 | void publishImagesWithFeatures( 87 | const FrameBundlePtr& frame_bundle, 88 | const int64_t timestamp); 89 | 90 | void publishImuPose( 91 | const Transformation& T_world_imu, 92 | const Eigen::Matrix Covariance, 93 | const int64_t timestamp_nanoseconds); 94 | 95 | void publishCameraPoses( 96 | const FrameBundlePtr& frame_bundle, 97 | const int64_t timestamp_nanoseconds); 98 | 99 | void publishBundleFeatureTracks( 100 | const FrameBundlePtr frames_ref, 101 | const FrameBundlePtr frames_cur, 102 | int64_t timestamp); 103 | 104 | void publishFeatureTracks( 105 | const Keypoints& px_ref, 106 | const Keypoints& px_cur, 107 | const std::vector>& matches_ref_cur, 108 | const ImgPyr& img_pyr, 109 | const Level& level, 110 | const uint64_t timestamp, 111 | const size_t frame_index); 112 | 113 | void visualizeHexacopter( 114 | const Transformation& T_frame_world, 115 | const uint64_t timestamp); 116 | 117 | void visualizeQuadrocopter( 118 | const Transformation& T_frame_world, 119 | const uint64_t timestamp); 120 | 121 | void visualizeMarkers( 122 | const FrameBundlePtr& frame_bundle, 123 | const std::vector& close_kfs, 124 | const MapPtr& map); 125 | 126 | void publishTrajectoryPoint( 127 | const Eigen::Vector3d& pos_in_vision, 128 | const uint64_t timestamp, 129 | const int id); 130 | 131 | void visualizeMarkersWithUncertainty( 132 | const FramePtr& frame, 133 | const std::vector& close_kfs, 134 | const MapPtr& map, 135 | const float sigma_threshold); 136 | 137 | void publishSeedsBinary(const MapPtr& map,const float sigma_threshold); 138 | 139 | void publishSeeds(const MapPtr& map); 140 | 141 | void publishSeedsAsPointcloud( 142 | const Frame& frame, 143 | bool only_converged_seeds, 144 | bool reset_pc_before_publishing = true); 145 | 146 | void publishVelocity( 147 | const Eigen::Vector3d& velocity_imu, 148 | const uint64_t timestamp); 149 | 150 | void publishMapRegion(const std::vector& frames); 151 | 152 | void 
publishKeyframeWithPoints( 153 | const FramePtr& frame, 154 | const uint64_t timestamp, 155 | const double marker_scale = 0.05); 156 | 157 | void publishLoopClosure( 158 | const FramePtr& query, const FramePtr& match, 159 | const Transformation& T_match_query); 160 | 161 | void exportToDense(const FrameBundlePtr& frame_bundle); 162 | 163 | void publishSeedsUncertainty(const MapPtr &map); 164 | 165 | void visualizeCoordinateFrames(const Transformation& T_world_cam); 166 | }; 167 | 168 | } // end namespace svo 169 | 170 | -------------------------------------------------------------------------------- /svo_ros/launch/euroc_mono_imu.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /svo_ros/launch/euroc_stereo_imu.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /svo_ros/launch/fla_stereo_imu.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /svo_ros/launch/live_nodelet.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /svo_ros/launch/live_nodelet_fisheye.launch: -------------------------------------------------------------------------------- 1 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 
| 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /svo_ros/launch/run_from_topic.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /svo_ros/launch/run_from_topic_arm.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /svo_ros/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | svo_ros 4 | 0.1.0 5 | 6 | Visual Odometry - ROS Nodes 7 | 8 | 9 | Christian Forster 10 | Christian Forster 11 | GPLv3 12 | 13 | catkin 14 | catkin_simple 15 | 16 | nodelet 17 | roscpp 18 | cmake_modules 19 | nav_msgs 20 | std_msgs 21 | visualization_msgs 22 | sensor_msgs 23 | image_transport 24 | cv_bridge 25 | tf 26 | svo 27 | svo_msgs 28 | svo_cmake 29 | svo_common 30 | svo_tracker 31 | vikit_common 32 | vikit_ros 33 | eigen_catkin 34 | minkindr 35 | pcl_ros 36 | rpg_common 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /svo_ros/param/euroc_mono_imu.yaml: -------------------------------------------------------------------------------- 1 | grid_size: 35 2 | use_imu: True 3 | poseoptim_prior_lambda: 0.0 4 | img_align_prior_lambda_rot: 0.5 5 | img_align_prior_lambda_trans: 0.0 6 | 7 | # If set to false, we process the next frame(s) only when the depth update is finished 8 | use_threaded_depthfilter: False 9 | # If the number of features are below this 
number, consider as failure 10 | quality_min_fts: 40 11 | # If the number of features reduce by this number for consecutive frames, consider as failure 12 | quality_max_drop_fts: 80 13 | 14 | map_scale: 5.0 15 | kfselect_criterion: FORWARD 16 | kfselect_numkfs_upper_thresh: 180 17 | kfselect_numkfs_lower_thresh: 90 18 | kfselect_min_dist_metric: 0.001 19 | kfselect_min_angle: 6 20 | kfselect_min_disparity: 40 21 | kfselect_min_num_frames_between_kfs: 1 22 | 23 | img_align_est_illumination_gain: true 24 | img_align_est_illumination_offset: true 25 | depth_filter_affine_est_offset: true 26 | depth_filter_affine_est_gain: true 27 | reprojector_affine_est_offset: true 28 | reprojector_affine_est_gain: true 29 | -------------------------------------------------------------------------------- /svo_ros/param/euroc_stereo_imu.yaml: -------------------------------------------------------------------------------- 1 | pipeline_is_stereo: True 2 | 3 | grid_size: 35 4 | use_async_reprojectors: True 5 | use_imu: False 6 | poseoptim_prior_lambda: 0.0 7 | img_align_prior_lambda_rot: 0.0 8 | img_align_prior_lambda_trans: 0.0 9 | 10 | # If set to false, we process the next frame(s) only when the depth update is finished 11 | use_threaded_depthfilter: False 12 | # if the number of features are below this number, consider as failure 13 | quality_min_fts: 40 14 | # if the number of features reduce by this number for consecutive frames, consider as failure 15 | quality_max_drop_fts: 80 16 | 17 | max_depth_inv: 0.1 18 | min_depth_inv: 10.0 19 | mean_depth_inv: 0.5 20 | 21 | map_scale: 5.0 22 | kfselect_criterion: FORWARD 23 | kfselect_numkfs_upper_thresh: 180 24 | kfselect_numkfs_lower_thresh: 90 25 | kfselect_min_dist_metric: 0.001 26 | kfselect_min_angle: 6 27 | kfselect_min_disparity: 40 28 | kfselect_min_num_frames_between_kfs: 0 29 | 30 | img_align_est_illumination_gain: true 31 | img_align_est_illumination_offset: true 32 | depth_filter_affine_est_offset: true 33 | 
depth_filter_affine_est_gain: true 34 | reprojector_affine_est_offset: true 35 | reprojector_affine_est_gain: true 36 | -------------------------------------------------------------------------------- /svo_ros/param/fast_pinhole.yaml: -------------------------------------------------------------------------------- 1 | map_scale: 1.0 2 | relocalization_max_trials: 100 3 | 4 | # The number of features and quality criteria 5 | max_fts: 120 6 | # If the number of features is lower, considered as failure. 7 | quality_min_fts: 70 8 | # If the number of features decreases more than this value, considered as failure. 9 | quality_max_drop_fts: 40 10 | 11 | # Use forward looking mode 12 | kfselect_criterion: FORWARD 13 | # Based on the number of landmarks (NOT seeds) 14 | kfselect_numkfs_upper_thresh: 110 15 | kfselect_numkfs_lower_thresh: 70 16 | # must have enough disparity w.r.t. the last keyframe 17 | kfselect_min_disparity: 40 18 | # should be sufficiently far from any keyframes 19 | kfselect_min_angle: 10 20 | kfselect_min_dist_metric: 0.01 21 | # If it is acutally used as a forward looking camera 22 | # set this value to true 23 | update_seeds_with_old_keyframes: True 24 | -------------------------------------------------------------------------------- /svo_ros/param/fisheye.yaml: -------------------------------------------------------------------------------- 1 | max_fts: 180 2 | grid_size: 25 3 | map_scale: 5 4 | 5 | kfselect_criterion: FORWARD 6 | kfselect_numkfs_upper_thresh: 120 7 | kfselect_numkfs_lower_thresh: 40 8 | kfselect_min_dist_metric: 0.2 9 | kfselect_min_angle: 10 10 | kfselect_min_disparity: 25 11 | update_seeds_with_old_keyframes: True 12 | 13 | T_world_imuinit/qx: 1 14 | T_world_imuinit/qy: 0 15 | T_world_imuinit/qz: 0 16 | T_world_imuinit/qw: 0 17 | 18 | publish_every_nth_dense_input: 5 19 | publish_marker_scale: 0.8 20 | 21 | # fisheye specific 22 | # the distortion of fisheye is not negligible, 23 | # therefore the jacobian need to be considered 
24 | img_align_use_distortion_jacobian: True 25 | # pose optimization and epipolar search should be done on unit sphere 26 | poseoptim_using_unit_sphere: True 27 | scan_epi_unit_sphere: True 28 | -------------------------------------------------------------------------------- /svo_ros/param/fla_stereo_imu.yaml: -------------------------------------------------------------------------------- 1 | # Basic 2 | pipeline_is_stereo: True 3 | automatic_reinitialization: True # When lost, stereo can recover immediately 4 | 5 | # Stereo triangulation depth range 6 | max_depth_inv: 0.05 7 | min_depth_inv: 2.0 8 | mean_depth_inv: 0.3 9 | 10 | # IMU 11 | use_imu: True 12 | img_align_prior_lambda_rot: 5.0 # Gyroscope prior in sparse image alignment 13 | poseoptim_prior_lambda: 2.0 # Gyroscope prior in pose optimization 14 | 15 | 16 | # Keyframe selection 17 | max_n_kfs: 10 18 | kfselect_criterion: FORWARD 19 | kfselect_numkfs_upper_thresh: 160 # If it has more features, it never creates a keyframe 20 | kfselect_numkfs_lower_thresh: 70 # If it has less features, it always creates a keyframe 21 | kfselect_min_num_frames_between_kfs: 2 # ..except this. 22 | kfselect_min_disparity: 30 23 | kfselect_min_angle: 20 24 | kfselect_min_dist_metric: 0.01 25 | update_seeds_with_old_keyframes: True 26 | 27 | # Image Alignment 28 | img_align_max_level: 5 # increase this level if image is double the size of VGA 29 | img_align_min_level: 2 30 | poseoptim_thresh: 3.0 # reprojection threshold, maybe make a bit larger for larger images. 31 | 32 | # Reprojection 33 | use_async_reprojectors: True # For stereo, use multithreading in reprojector 34 | reprojector_max_n_kfs: 10 # Local map size. Larger is computationally more intensive. More reduces drift. 
35 | 36 | # Feature detection 37 | max_fts: 180 38 | grid_size: 50 # Larger for larger images, for every cell you have max one feature 39 | n_pyr_levels: 4 # Increase for larger images (image align max minus one) 40 | detector_threshold_primary: 7 # Fast detector threshold 41 | detector_threshold_secondary: 250 # Edgelet detector threshold 42 | -------------------------------------------------------------------------------- /svo_ros/param/pinhole.yaml: -------------------------------------------------------------------------------- 1 | # Instructions: 2 | # - Most of the time you will just need to change the basic parameters. 3 | # - The parameters in this file are set for the resolution of 752x480. 4 | # If you use a different resolution, change the parameters for 5 | # each module according to the comments. 6 | 7 | ############################ 8 | ##### Basic parameters ##### 9 | ############################ 10 | 11 | # Pipeline type 12 | pipeline_is_stereo: False 13 | 14 | # Feature and keyframe number 15 | # To run faster, you can decrease `max_fts` and `max_n_kfs`, for example: 16 | # max_fts: 120 17 | # max_n_kfs: 5 18 | max_fts: 180 19 | max_n_kfs: 30 20 | 21 | # Map scale when initialized (not used for stereo) 22 | # Increase if the initial scene depth is larger 23 | map_scale: 1.0 24 | 25 | # Initial rotation 26 | T_world_imuinit/qx: 0 27 | T_world_imuinit/qy: 0 28 | T_world_imuinit/qz: 0 29 | T_world_imuinit/qw: 1 30 | 31 | # Keyframe selection 32 | kfselect_criterion: FORWARD # alterntive: DOWNLOOKING 33 | # The following kfselect_* ONLY affects FORWARD 34 | # If the number of features: >upper, no keyframe; d " 100 | } 101 | }, 102 | "groups": {} 103 | } 104 | } 105 | }, 106 | "plugin__rqt_image_view__ImageView__1": { 107 | "keys": {}, 108 | "groups": { 109 | "dock_widget__ImageViewWidget": { 110 | "keys": { 111 | "dockable": { 112 | "type": "repr", 113 | "repr": "True" 114 | }, 115 | "parent": { 116 | "type": "repr", 117 | "repr": "None" 118 | } 119 | 
}, 120 | "groups": {} 121 | }, 122 | "plugin": { 123 | "keys": { 124 | "dynamic_range": { 125 | "type": "repr", 126 | "repr": "False" 127 | }, 128 | "max_range": { 129 | "type": "repr", 130 | "repr": "10.0" 131 | }, 132 | "topic": { 133 | "type": "repr", 134 | "repr": "u'/svo/image'" 135 | }, 136 | "zoom1": { 137 | "type": "repr", 138 | "repr": "False" 139 | } 140 | }, 141 | "groups": {} 142 | } 143 | } 144 | }, 145 | "plugin__svo_rqt__Svo__1": { 146 | "keys": {}, 147 | "groups": { 148 | "dock_widget__SvoWidget": { 149 | "keys": { 150 | "dockable": { 151 | "type": "repr", 152 | "repr": "True" 153 | }, 154 | "parent": { 155 | "type": "repr", 156 | "repr": "None" 157 | } 158 | }, 159 | "groups": {} 160 | }, 161 | "dock_widget__SvoUi": { 162 | "keys": { 163 | "dockable": { 164 | "type": "repr", 165 | "repr": "u'true'" 166 | }, 167 | "parent": { 168 | "type": "repr", 169 | "repr": "None" 170 | } 171 | }, 172 | "groups": {} 173 | } 174 | } 175 | }, 176 | "plugin__rqt_shell__Shell__1": { 177 | "keys": {}, 178 | "groups": { 179 | "plugin": { 180 | "keys": { 181 | "shell_type": { 182 | "type": "repr", 183 | "repr": "u'0'" 184 | } 185 | }, 186 | "groups": {} 187 | } 188 | } 189 | }, 190 | "plugin__rqt_console__Console__1": { 191 | "keys": {}, 192 | "groups": { 193 | "plugin": { 194 | "keys": { 195 | "show_highlighted_only": { 196 | "type": "repr", 197 | "repr": "u'false'" 198 | }, 199 | "message_limit": { 200 | "type": "repr", 201 | "repr": "u'20000'" 202 | }, 203 | "exclude_filters": { 204 | "type": "repr", 205 | "repr": "u'severity'" 206 | }, 207 | "table_splitter": { 208 | "type": "repr(QByteArray.hex)", 209 | "repr(QByteArray.hex)": "QtCore.QByteArray('000000ff00000000000000020000004e000000ed01000000090100000002')", 210 | "pretty-print": " N " 211 | }, 212 | "highlight_filters": { 213 | "type": "repr", 214 | "repr": "u'message'" 215 | }, 216 | "paused": { 217 | "type": "repr", 218 | "repr": "u'false'" 219 | }, 220 | "settings_exist": { 221 | "type": "repr", 222 | 
"repr": "u'true'" 223 | }, 224 | "filter_splitter": { 225 | "type": "repr(QByteArray.hex)", 226 | "repr(QByteArray.hex)": "QtCore.QByteArray('000000ff0000000000000002000000720000007201000000090100000002')", 227 | "pretty-print": " r r " 228 | } 229 | }, 230 | "groups": { 231 | "exclude_filter_0": { 232 | "keys": { 233 | "enabled": { 234 | "type": "repr", 235 | "repr": "u'true'" 236 | }, 237 | "itemlist": { 238 | "type": "repr", 239 | "repr": "u''" 240 | } 241 | }, 242 | "groups": {} 243 | }, 244 | "highlight_filter_0": { 245 | "keys": { 246 | "regex": { 247 | "type": "repr", 248 | "repr": "u'false'" 249 | }, 250 | "text": { 251 | "type": "repr", 252 | "repr": "u''" 253 | }, 254 | "enabled": { 255 | "type": "repr", 256 | "repr": "u'true'" 257 | } 258 | }, 259 | "groups": {} 260 | } 261 | } 262 | } 263 | } 264 | } 265 | } 266 | }, 267 | "mainwindow": { 268 | "keys": { 269 | "geometry": { 270 | "type": "repr(QByteArray.hex)", 271 | "repr(QByteArray.hex)": "QtCore.QByteArray('01d9d0cb00010000000007a9fffffffc00001187000005a7000007b1000000180000117f0000059f000000000000')", 272 | "pretty-print": " " 273 | }, 274 | "state": { 275 | "type": "repr(QByteArray.hex)", 276 | "repr(QByteArray.hex)": 
"QtCore.QByteArray('000000ff00000000fd0000000100000003000009cf00000559fc0100000005fb0000002c00730076006f005f007200710074005f005f00530076006f005f005f0031005f005f00530076006f005500690100000000000003890000000000000000fb0000006a007200710074005f0072006f0062006f0074005f007300740065006500720069006e0067005f005f0052006f0062006f0074005300740065006500720069006e0067005f005f0031005f005f0052006f0062006f0074005300740065006500720069006e00670055006901000000000000020b0000000000000000fc00000000000003530000022e00fffffffc0200000004fb0000003400730076006f005f007200710074005f005f00530076006f005f005f0031005f005f00530076006f0057006900640067006500740100000019000000a30000009900fffffffb0000004c007200710074005f0063006f006e0073006f006c0065005f005f0043006f006e0073006f006c0065005f005f0031005f005f0043006f006e0073006f006c006500570069006400670065007401000000c3000003b50000000000000000fb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0031005f005f0049006d006100670065005600690065007700570069006400670065007401000000c2000002700000005100fffffffb0000006c007200710074005f007200650063006f006e006600690067007500720065005f005f0050006100720061006d005f005f0031005f005f005f0070006c007500670069006e0063006f006e007400610069006e00650072005f0074006f0070005f00770069006400670065007401000003380000023a0000010f00fffffffb00000026007200710074005f007200760069007a005f005f005200560069007a005f005f0031005f005f010000035900000676000000a700fffffffb00000040007200710074005f007300680065006c006c005f005f005300680065006c006c005f005f0031005f005f0058005400650072006d005700690064006700650074010000091c000000b30000000000000000000009cf0000000000000004000000040000000800000008fc00000001000000030000000100000036004d0069006e0069006d0069007a006500640044006f0063006b00570069006400670065007400730054006f006f006c0062006100720000000000ffffffff0000000000000000')", 277 | "pretty-print": " ,svo_rqt__Svo__1__SvoUi jrqt_robot_steering__RobotSteering__1__RobotSteeringUi S . 
p Q 8 : Y v " 278 | } 279 | }, 280 | "groups": { 281 | "toolbar_areas": { 282 | "keys": { 283 | "MinimizedDockWidgetsToolbar": { 284 | "type": "repr", 285 | "repr": "8" 286 | } 287 | }, 288 | "groups": {} 289 | } 290 | } 291 | } 292 | } 293 | } -------------------------------------------------------------------------------- /svo_ros/rviz_config.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 0 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - //svo/points1/Namespaces1 8 | - //svo/loop_closures1 9 | Splitter Ratio: 0.5 10 | Tree Height: 350 11 | - Class: rviz/Selection 12 | Name: Selection 13 | - Class: rviz/Tool Properties 14 | Expanded: 15 | - /2D Pose Estimate1 16 | - /2D Nav Goal1 17 | - /Publish Point1 18 | Name: Tool Properties 19 | Splitter Ratio: 0.588679 20 | - Class: rviz/Views 21 | Expanded: 22 | - /Current View1 23 | Name: Views 24 | Splitter Ratio: 0.5 25 | - Class: rviz/Time 26 | Experimental: false 27 | Name: Time 28 | SyncMode: 0 29 | SyncSource: /svo/image/0 30 | Visualization Manager: 31 | Class: "" 32 | Displays: 33 | - Alpha: 0.5 34 | Cell Size: 1 35 | Class: rviz/Grid 36 | Color: 160; 160; 164 37 | Enabled: true 38 | Line Style: 39 | Line Width: 0.03 40 | Value: Lines 41 | Name: Grid 42 | Normal Cell Count: 0 43 | Offset: 44 | X: 0 45 | Y: 0 46 | Z: 0 47 | Plane: XY 48 | Plane Cell Count: 10 49 | Reference Frame: 50 | Value: true 51 | - Class: rviz/TF 52 | Enabled: true 53 | Frame Timeout: 15 54 | Frames: 55 | All Enabled: true 56 | cam_pos: 57 | Value: true 58 | world: 59 | Value: true 60 | Marker Scale: 1 61 | Name: TF 62 | Show Arrows: true 63 | Show Axes: true 64 | Show Names: true 65 | Tree: 66 | world: 67 | {} 68 | Update Interval: 0 69 | Value: true 70 | - Class: rviz/Marker 71 | Enabled: false 72 | Marker Topic: /svo/keyframes 73 | Name: /svo/keyframes 74 | Namespaces: 75 | {} 76 | Queue Size: 100 77 | Value: false 78 | - Class: 
rviz/Marker 79 | Enabled: true 80 | Marker Topic: /svo/points 81 | Name: /svo/points 82 | Namespaces: 83 | {} 84 | Queue Size: 100 85 | Value: true 86 | - Class: rviz/Image 87 | Enabled: false 88 | Image Topic: /remode/depth 89 | Max Value: 1 90 | Median window: 5 91 | Min Value: 0 92 | Name: /remode/depth 93 | Normalize Range: true 94 | Queue Size: 2 95 | Transport Hint: raw 96 | Value: false 97 | - Alpha: 1 98 | Autocompute Intensity Bounds: true 99 | Autocompute Value Bounds: 100 | Max Value: 10 101 | Min Value: -10 102 | Value: true 103 | Axis: Z 104 | Channel Name: intensity 105 | Class: rviz/PointCloud2 106 | Color: 170; 0; 0 107 | Color Transformer: FlatColor 108 | Decay Time: 0 109 | Enabled: false 110 | Invert Rainbow: false 111 | Max Color: 255; 255; 255 112 | Max Intensity: 255 113 | Min Color: 0; 0; 0 114 | Min Intensity: 25 115 | Name: /remode/pointcloud 116 | Position Transformer: XYZ 117 | Queue Size: 10 118 | Selectable: true 119 | Size (Pixels): 3 120 | Size (m): 0.005 121 | Style: Flat Squares 122 | Topic: /remode/pointcloud 123 | Use Fixed Frame: true 124 | Use rainbow: false 125 | Value: false 126 | - Class: rviz/Image 127 | Enabled: true 128 | Image Topic: /svo/image/0 129 | Max Value: 1 130 | Median window: 5 131 | Min Value: 0 132 | Name: /svo/image/0 133 | Normalize Range: true 134 | Queue Size: 2 135 | Transport Hint: raw 136 | Value: true 137 | - Class: rviz/Marker 138 | Enabled: true 139 | Marker Topic: /svo/loop_closures 140 | Name: /svo/loop_closures 141 | Namespaces: 142 | {} 143 | Queue Size: 100 144 | Value: true 145 | Enabled: true 146 | Global Options: 147 | Background Color: 255; 255; 255 148 | Fixed Frame: world 149 | Frame Rate: 30 150 | Name: root 151 | Tools: 152 | - Class: rviz/Interact 153 | Hide Inactive Objects: true 154 | - Class: rviz/MoveCamera 155 | - Class: rviz/Select 156 | - Class: rviz/FocusCamera 157 | - Class: rviz/Measure 158 | - Class: rviz/SetInitialPose 159 | Topic: /initialpose 160 | - Class: rviz/SetGoal 
161 | Topic: /move_base_simple/goal 162 | - Class: rviz/PublishPoint 163 | Single click: true 164 | Topic: /clicked_point 165 | Value: true 166 | Views: 167 | Current: 168 | Class: rviz/Orbit 169 | Distance: 7.0305 170 | Enable Stereo Rendering: 171 | Stereo Eye Separation: 0.06 172 | Stereo Focal Distance: 1 173 | Swap Stereo Eyes: false 174 | Value: false 175 | Focal Point: 176 | X: 0.583405 177 | Y: 0.297882 178 | Z: -0.138986 179 | Name: Current View 180 | Near Clip Distance: 0.01 181 | Pitch: 0.414796 182 | Target Frame: 183 | Value: Orbit (rviz) 184 | Yaw: 4.51357 185 | Saved: ~ 186 | Window Geometry: 187 | /remode/depth: 188 | collapsed: false 189 | /svo/image/0: 190 | collapsed: false 191 | Displays: 192 | collapsed: false 193 | Height: 1056 194 | Hide Left Dock: false 195 | Hide Right Dock: false 196 | QMainWindow State: 000000ff00000000fd000000040000000000000285000003dafc020000000cfb0000001200530065006c0065006300740069006f006e00000000280000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000000280000019f000000dd00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb00000018002f00730076006f002f0069006d006100670065002f003001000001cd000002350000001600fffffffb0000001a002f00720065006d006f00640065002f0064006500700074006800000002f1000001110000001600fffffffb0000000a0049006d006100670065010000024e000003cc0000000000000000fb0000000a0049006d0061006700650100000433000001e70000000000000000000000010000010f000003dafc0200000003fb0000001e0054006f006f006c0020005000
72006f00700065007200740069006500730100000041000000780000000000000000fb0000000a005600690065007700730100000028000003da000000b000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000003ce0000003efc0100000002fb0000000800540069006d00650000000000000003ce000002f600fffffffb0000000800540069006d00650100000000000004500000000000000000000003e0000003da00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 197 | Selection: 198 | collapsed: false 199 | Time: 200 | collapsed: false 201 | Tool Properties: 202 | collapsed: false 203 | Views: 204 | collapsed: false 205 | Width: 1920 206 | X: 1200 207 | Y: 636 208 | -------------------------------------------------------------------------------- /svo_ros/scripts/kalibr_to_svo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import yaml 4 | import sys 5 | import argparse 6 | import numpy as np 7 | 8 | from collections import OrderedDict 9 | 10 | class UnsortableList(list): 11 | def sort(self, *args, **kwargs): 12 | pass 13 | 14 | class UnsortableOrderedDict(OrderedDict): 15 | def items(self, *args, **kwargs): 16 | return UnsortableList(OrderedDict.items(self, *args, **kwargs)) 17 | 18 | yaml.add_representer(UnsortableOrderedDict, yaml.representer.SafeRepresenter.represent_dict) 19 | parser = argparse.ArgumentParser(description='Convert calibration from Kalibr to SVO format.') 20 | parser.add_argument('--kalibr', help='kalibr camchain-imu yaml file.') 21 | parser.add_argument('--imu', help='imu parameters yaml file.') 22 | args = parser.parse_args(sys.argv[1:]) 23 | 24 | if args.kalibr is None: 25 | sys.exit('You must provide yaml files for the Kalibr format.') 26 | has_imu = True 27 | if args.imu is None: 28 | print('No IMU specification provided, will process 
camera parameters only.') 29 | has_imu = False 30 | 31 | # cameras 32 | with open(args.kalibr, 'r') as f: 33 | K = yaml.load(f) 34 | cams = [] 35 | for c in K: 36 | cam = UnsortableOrderedDict() 37 | cam['camera'] = UnsortableOrderedDict() 38 | cam['camera']['distortion'] = UnsortableOrderedDict() 39 | cam['camera']['distortion']['parameters'] = UnsortableOrderedDict() 40 | cam['camera']['distortion']['parameters']['cols'] = 1 41 | cam['camera']['distortion']['parameters']['rows'] = len(K[c]['distortion_coeffs']) 42 | cam['camera']['distortion']['parameters']['data'] = K[c]['distortion_coeffs'] 43 | cam['camera']['distortion']['type'] = K[c]['distortion_model'] 44 | cam['camera']['image_height'] = K[c]['resolution'][1] 45 | cam['camera']['image_width'] = K[c]['resolution'][0] 46 | cam['camera']['intrinsics'] = UnsortableOrderedDict() 47 | cam['camera']['intrinsics']['cols'] = 1 48 | cam['camera']['intrinsics']['rows'] = len(K[c]['intrinsics']) 49 | cam['camera']['intrinsics']['data'] = K[c]['intrinsics'] 50 | cam['camera']['label'] = c 51 | cam['camera']['line-delay-nanoseconds'] = 0 52 | cam['camera']['type'] = K[c]['camera_model'] 53 | 54 | cam['T_B_C'] = UnsortableOrderedDict() 55 | T_B_C = np.eye(4) 56 | if has_imu: 57 | T_C_B = np.reshape(np.array(K[c]['T_cam_imu']), (4, 4)) 58 | T_B_C = np.linalg.inv(T_C_B) 59 | else: 60 | print('No IMU is specified, so T_B_C is set to identity.') 61 | cam['T_B_C']['cols'] = T_B_C.shape[1] 62 | cam['T_B_C']['rows'] = T_B_C.shape[0] 63 | cam['T_B_C']['data'] = T_B_C.flatten().tolist() 64 | 65 | cam['serial_no'] = 0 66 | cam['calib_date'] = 0 67 | cam['description'] = K[c]['rostopic'] 68 | cams.append(cam) 69 | S = UnsortableOrderedDict() 70 | S['cameras'] = cams 71 | S['label'] = args.kalibr 72 | 73 | # IMU 74 | if has_imu: 75 | with open(args.imu, 'r') as f: 76 | I = yaml.load(f) 77 | Sip = UnsortableOrderedDict() 78 | Sip['imu_params'] = UnsortableOrderedDict() 79 | Sip['imu_params']['delay_imu_cam'] = 0.0 80 | 
Sip['imu_params']['max_imu_delta_t'] = 0.01 81 | Sip['imu_params']['acc_max'] = 176.0 82 | Sip['imu_params']['omega_max'] = 7.8 83 | Sip['imu_params']['sigma_omega_c'] = I['gyroscope_noise_density'] 84 | Sip['imu_params']['sigma_acc_c'] = I['accelerometer_noise_density'] 85 | Sip['imu_params']['sigma_omega_bias_c'] = I['gyroscope_random_walk'] 86 | Sip['imu_params']['sigma_acc_bias_c'] = I['accelerometer_random_walk'] 87 | Sip['imu_params']['sigma_integration'] = 0.0 88 | Sip['imu_params']['g'] = 9.8082 89 | Sip['imu_params']['imu_rate'] = I['update_rate'] 90 | 91 | Sii = UnsortableOrderedDict() 92 | Sii['imu_initialization'] = UnsortableOrderedDict() 93 | Sii['imu_initialization']['velocity'] = [0.0, 0.0, 0.0] 94 | Sii['imu_initialization']['omega_bias'] = [0.0, 0.0, 0.0] 95 | Sii['imu_initialization']['acc_bias'] = [0.0, 0.0, 0.0] 96 | Sii['imu_initialization']['velocity_sigma'] = 1.0 97 | Sii['imu_initialization']['omega_bias_sigma'] = I['gyroscope_noise_density'] 98 | Sii['imu_initialization']['acc_bias_sigma'] = I['accelerometer_noise_density'] 99 | 100 | output_file = 'svo_' + args.kalibr 101 | print('Writing to {0}.'.format(output_file)) 102 | f = open(output_file, 'w') 103 | f.write(yaml.dump(S, default_flow_style=None) ) 104 | if has_imu: 105 | f.write('\n') 106 | f.write( yaml.dump(Sip, default_flow_style=False) ) 107 | f.write('\n') 108 | f.write( yaml.dump(Sii, default_flow_style=None) ) 109 | f.close() 110 | print('Done.') 111 | -------------------------------------------------------------------------------- /svo_ros/scripts/omni_matlab_to_rpg.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import yaml 5 | import argparse 6 | import os 7 | import time 8 | import numpy as np 9 | from collections import OrderedDict 10 | 11 | 12 | class UnsortableList(list): 13 | def sort(self, *args, **kwargs): 14 | pass 15 | 16 | 17 | class 
UnsortableOrderedDict(OrderedDict): 18 | def items(self, *args, **kwargs): 19 | return UnsortableList(OrderedDict.items(self, *args, **kwargs)) 20 | 21 | 22 | def strToFloatList(raw_str, delimiter=' '): 23 | str_list = raw_str.split(' ') 24 | return [float(s) for s in str_list if s != ''] 25 | 26 | 27 | def parseMatlabFormat(matlab_filename): 28 | f = open(matlab_filename, 'r') 29 | with open(matlab_filename) as f: 30 | lines = f.readlines() 31 | filtered = [l[:-1] for l in lines if l[0] != '\n' and l[0] != '#'] 32 | 33 | pol = strToFloatList(filtered[0])[1:] 34 | invpol = strToFloatList(filtered[1])[1:] 35 | invpol += [0.0] * (12 - len(invpol)) 36 | center = strToFloatList(filtered[2])[::-1] 37 | affine = strToFloatList(filtered[3]) 38 | img_size = strToFloatList(filtered[4])[::-1] 39 | 40 | print('parsed parameters:') 41 | print('polynomial: {0}\ninverse polynomial: {1}'.format(pol, invpol)) 42 | print('image size: {0}\nimage center: {1}'.format(img_size, center)) 43 | print('affine correction: {0}'.format(affine)) 44 | 45 | return {'pol': pol, 'invpol': invpol, 46 | 'center': center, 'affine': affine, 'img_size': img_size} 47 | 48 | 49 | def dictToYaml(cam_params, serial_num, matlab_filename): 50 | cams = [] 51 | cam = UnsortableOrderedDict() 52 | cam['camera'] = UnsortableOrderedDict() 53 | cam['camera']['distortion'] = UnsortableOrderedDict() 54 | cam['camera']['distortion']['parameters'] = UnsortableOrderedDict() 55 | cam['camera']['distortion']['parameters']['cols'] = 1 56 | cam['camera']['distortion']['parameters']['rows'] = 0 57 | cam['camera']['distortion']['parameters']['data'] = [] 58 | cam['camera']['distortion']['type'] = 'none' 59 | cam['camera']['image_height'] = int(cam_params['img_size'][1]) 60 | cam['camera']['image_width'] = int(cam_params['img_size'][0]) 61 | cam['camera']['intrinsics'] = UnsortableOrderedDict() 62 | cam['camera']['intrinsics']['cols'] = 1 63 | cam['camera']['intrinsics']['rows'] = 24 64 | cam['camera']['intrinsics']['data'] = 
\ 65 | cam_params['pol'] + cam_params['center'] + \ 66 | cam_params['affine'] + cam_params['invpol'] + [0.0, 0.0] 67 | cam['camera']['label'] = 'cam0' 68 | cam['camera']['line-delay-nanoseconds'] = 0 69 | cam['camera']['type'] = 'omni' 70 | cam['camera']['mask'] = str(serial_num)+'_fisheye_mask.png' 71 | 72 | cam['T_B_C'] = UnsortableOrderedDict() 73 | T_B_C = np.eye(4, dtype=float) 74 | cam['T_B_C']['cols'] = T_B_C.shape[1] 75 | cam['T_B_C']['rows'] = T_B_C.shape[0] 76 | cam['T_B_C']['data'] = T_B_C.flatten().tolist() 77 | cam['serial_no'] = int(serial_num) 78 | cam['calib_date'] = time.strftime("%d/%m/%Y") 79 | cams.append(cam) 80 | 81 | S = UnsortableOrderedDict() 82 | S['cameras'] = cams 83 | S['label'] = matlab_filename 84 | 85 | return S 86 | 87 | 88 | if __name__ == '__main__': 89 | parser = argparse.ArgumentParser( 90 | description='convert the format of matlabtoolbox to rpg format') 91 | parser.add_argument('matlab_filename', help='matlab output file') 92 | parser.add_argument('serial_num', help='camera serial number') 93 | 94 | args = parser.parse_args() 95 | 96 | if not os.path.isfile(args.matlab_filename): 97 | print('please input the correct file name for the matlab format') 98 | exit(-1) 99 | 100 | rpg_filename = 'bluefox_' + args.serial_num + '_fisheye.yaml' 101 | print('converting {0} to {1}'.format(args.matlab_filename, rpg_filename)) 102 | 103 | cam_params = parseMatlabFormat(args.matlab_filename) 104 | cam_yaml = dictToYaml(cam_params, args.serial_num, args.matlab_filename) 105 | 106 | yaml.add_representer(UnsortableOrderedDict, 107 | yaml.representer.SafeRepresenter.represent_dict) 108 | 109 | f = open(rpg_filename, 'w') 110 | f.write(yaml.dump(cam_yaml, default_flow_style=None)) 111 | f.close() 112 | 113 | print('Finished. 
Do not forget to create the mask before usage.') 114 | -------------------------------------------------------------------------------- /svo_ros/scripts/setupros_ip_arm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # change the ip to your PC where you run the roscore 4 | export ROS_MASTER_URI=http://192.168.200.7:11311 5 | 6 | ip=$(ip addr | grep 'state UP' -A2 | tail -n1 | awk '{print $2}' | cut -f1 -d'/') 7 | 8 | export ROS_IP=$ip 9 | export ROS_HOSTNAME=$ip 10 | -------------------------------------------------------------------------------- /svo_ros/scripts/setupros_ip_pc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # if you run everything locally 4 | # ip=127.0.0.1 5 | # run on multiple platforms 6 | ip=$(ip addr | grep 'state UP' -A2 | tail -n1 | awk '{print $2}' | cut -f1 -d'/') 7 | 8 | core_ip=$ip 9 | 10 | export ROS_IP=$ip 11 | export ROS_HOSTNAME=$ip 12 | export ROS_MASTER_URI=http://${core_ip}:11311 13 | 14 | export ROSLAUNCH_SSH_UNKNOWN=1 15 | -------------------------------------------------------------------------------- /svo_ros/src/svo_factory.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #ifdef SVO_USE_VIN_BACKEND 11 | #include 12 | #include 13 | #include 14 | #endif 15 | 16 | namespace svo { 17 | namespace factory { 18 | 19 | BaseOptions loadBaseOptions(const ros::NodeHandle& pnh, bool forward_default) 20 | { 21 | BaseOptions o; 22 | o.max_n_kfs = vk::param(pnh, "max_n_kfs", 5); 23 | o.use_imu = vk::param(pnh, "use_imu", false); 24 | o.trace_dir = vk::param(pnh, "trace_dir", ros::package::getPath("svo")+"/trace"); 25 | o.quality_min_fts = vk::param(pnh, "quality_min_fts", 50); 26 | o.quality_max_fts_drop = vk::param(pnh, "quality_max_drop_fts", 40); 27 | 
o.relocalization_max_trials = vk::param(pnh, "relocalization_max_trials", 50); 28 | o.poseoptim_prior_lambda = vk::param(pnh, "poseoptim_prior_lambda", 0.0); 29 | o.poseoptim_using_unit_sphere = vk::param(pnh, "poseoptim_using_unit_sphere", false); 30 | o.img_align_prior_lambda_rot = vk::param(pnh, "img_align_prior_lambda_rot", 0.0); 31 | o.img_align_prior_lambda_trans = vk::param(pnh, "img_align_prior_lambda_trans", 0.0); 32 | o.structure_optimization_max_pts = vk::param(pnh, "structure_optimization_max_pts", 20); 33 | o.init_map_scale = vk::param(pnh, "map_scale", 1.0); 34 | std::string default_kf_criterion = forward_default ? "FORWARD" : "DOWNLOOKING"; 35 | if(vk::param(pnh, "kfselect_criterion", default_kf_criterion) == "FORWARD") 36 | o.kfselect_criterion = KeyframeCriterion::FORWARD; 37 | else 38 | o.kfselect_criterion = KeyframeCriterion::DOWNLOOKING; 39 | o.kfselect_min_dist = vk::param(pnh, "kfselect_min_dist", 0.12); 40 | o.kfselect_numkfs_upper_thresh = vk::param(pnh, "kfselect_numkfs_upper_thresh", 120); 41 | o.kfselect_numkfs_lower_thresh = vk::param(pnh, "kfselect_numkfs_lower_thresh", 70); 42 | o.kfselect_min_dist_metric = vk::param(pnh, "kfselect_min_dist_metric", 0.01); 43 | o.kfselect_min_angle = vk::param(pnh, "kfselect_min_angle", 20); 44 | o.kfselect_min_disparity = vk::param(pnh, "kfselect_min_disparity", 40); 45 | o.kfselect_min_num_frames_between_kfs = vk::param(pnh, "kfselect_min_num_frames_between_kfs", 2); 46 | o.img_align_max_level = vk::param(pnh, "img_align_max_level", 4); 47 | o.img_align_min_level = vk::param(pnh, "img_align_min_level", 2); 48 | o.img_align_robustification = vk::param(pnh, "img_align_robustification", false); 49 | o.img_align_use_distortion_jacobian = 50 | vk::param(pnh, "img_align_use_distortion_jacobian", false); 51 | o.img_align_est_illumination_gain = 52 | vk::param(pnh, "img_align_est_illumination_gain", false); 53 | o.img_align_est_illumination_offset = 54 | vk::param(pnh, "img_align_est_illumination_offset", 
false); 55 | o.poseoptim_thresh = vk::param(pnh, "poseoptim_thresh", 2.0); 56 | o.update_seeds_with_old_keyframes = 57 | vk::param(pnh, "update_seeds_with_old_keyframes", true); 58 | o.use_async_reprojectors = vk::param(pnh, "use_async_reprojectors", false); 59 | return o; 60 | } 61 | 62 | DetectorOptions loadDetectorOptions(const ros::NodeHandle& pnh) 63 | { 64 | DetectorOptions o; 65 | o.cell_size = vk::param(pnh, "grid_size", 35); 66 | o.max_level = vk::param(pnh, "n_pyr_levels", 3) - 1; 67 | o.threshold_primary = vk::param(pnh, "detector_threshold_primary", 10); 68 | o.threshold_secondary = vk::param(pnh, "detector_threshold_secondary", 200); 69 | if(vk::param(pnh, "use_edgelets", true)) 70 | o.detector_type = DetectorType::kFastGrad; 71 | else 72 | o.detector_type = DetectorType::kFast; 73 | return o; 74 | } 75 | 76 | DepthFilterOptions loadDepthFilterOptions(const ros::NodeHandle& pnh) 77 | { 78 | DepthFilterOptions o; 79 | o.max_search_level = vk::param(pnh, "n_pyr_levels", 3) - 1; 80 | o.use_threaded_depthfilter = vk::param(pnh, "use_threaded_depthfilter", true); 81 | o.seed_convergence_sigma2_thresh = vk::param(pnh, "seed_convergence_sigma2_thresh", 200.0); 82 | o.scan_epi_unit_sphere = vk::param(pnh, "scan_epi_unit_sphere", false); 83 | o.affine_est_offset= vk::param(pnh, "depth_filter_affine_est_offset", true); 84 | o.affine_est_gain = vk::param(pnh, "depth_filter_affine_est_gain", false); 85 | o.max_n_seeds_per_frame = 86 | vk::param(pnh, "max_fts", 120) * vk::param(pnh, "max_seeds_ratio", 3.0); 87 | return o; 88 | } 89 | 90 | InitializationOptions loadInitializationOptions(const ros::NodeHandle& pnh) 91 | { 92 | InitializationOptions o; 93 | o.init_min_features = vk::param(pnh, "init_min_features", 100); 94 | o.init_min_tracked = vk::param(pnh, "init_min_tracked", 80); 95 | o.init_min_inliers = vk::param(pnh, "init_min_inliers", 70); 96 | o.init_min_disparity = vk::param(pnh, "init_min_disparity", 40.0); 97 | o.init_min_features_factor = vk::param(pnh, 
"init_min_features_factor", 2.0); 98 | o.reproj_error_thresh = vk::param(pnh, "reproj_err_thresh", 2.0); 99 | o.init_disparity_pivot_ratio = vk::param(pnh, "init_disparity_pivot_ratio", 0.5); 100 | std::string init_method = vk::param(pnh, "init_method", "FivePoint"); 101 | if(init_method == "Homography") 102 | o.init_type = InitializerType::kHomography; 103 | else if(init_method == "TwoPoint") 104 | o.init_type = InitializerType::kTwoPoint; 105 | else if(init_method == "FivePoint") 106 | o.init_type = InitializerType::kFivePoint; 107 | else if(init_method == "OneShot") 108 | o.init_type = InitializerType::kOneShot; 109 | else 110 | SVO_ERROR_STREAM("Initialization Method not supported: " << init_method); 111 | return o; 112 | } 113 | 114 | FeatureTrackerOptions loadTrackerOptions(const ros::NodeHandle& pnh) 115 | { 116 | FeatureTrackerOptions o; 117 | o.klt_max_level = vk::param(pnh, "klt_max_level", 4); 118 | o.klt_min_level = vk::param(pnh, "klt_min_level", 0.001); 119 | return o; 120 | } 121 | 122 | ReprojectorOptions loadReprojectorOptions(const ros::NodeHandle& pnh) 123 | { 124 | ReprojectorOptions o; 125 | o.max_n_kfs = vk::param(pnh, "reprojector_max_n_kfs", 5); 126 | o.max_n_features_per_frame = vk::param(pnh, "max_fts", 160); 127 | o.cell_size = vk::param(pnh, "grid_size", 35); 128 | o.reproject_unconverged_seeds = vk::param(pnh, "reproject_unconverged_seeds", true); 129 | o.affine_est_offset = vk::param(pnh, "reprojector_affine_est_offset", true); 130 | o.affine_est_gain = vk::param(pnh, "reprojector_affine_est_gain", false); 131 | return o; 132 | } 133 | 134 | CameraBundle::Ptr loadCameraFromYaml(const ros::NodeHandle& pnh) 135 | { 136 | std::string calib_file = vk::param(pnh, "calib_file", "~/cam.yaml"); 137 | CameraBundle::Ptr ncam = CameraBundle::loadFromYaml(calib_file); 138 | std::cout << "loaded " << ncam->numCameras() << " cameras"; 139 | for(const auto& cam : ncam->getCameraVector()) 140 | cam->printParameters(std::cout, ""); 141 | return ncam; 
142 | } 143 | 144 | StereoTriangulationOptions loadStereoOptions(const ros::NodeHandle& pnh) 145 | { 146 | StereoTriangulationOptions o; 147 | o.triangulate_n_features = vk::param(pnh, "max_fts", 120); 148 | o.max_depth_inv = vk::param(pnh, "max_depth_inv", 1.0/50.0); 149 | o.min_depth_inv = vk::param(pnh, "min_depth_inv", 1.0/0.5); 150 | o.mean_depth_inv = vk::param(pnh, "mean_depth_inv", 1.0/2.0); 151 | return o; 152 | } 153 | 154 | ImuHandler::Ptr getImuHandler(const ros::NodeHandle& pnh) 155 | { 156 | std::string calib_file = vk::param(pnh, "calib_file", ""); 157 | ImuCalibration imu_calib = ImuHandler::loadCalibrationFromFile(calib_file); 158 | imu_calib.print("Loaded IMU Calibration"); 159 | ImuInitialization imu_init = ImuHandler::loadInitializationFromFile(calib_file); 160 | imu_init.print("Loaded IMU Initialization"); 161 | ImuHandler::Ptr imu_handler(new ImuHandler(imu_calib, imu_init)); 162 | return imu_handler; 163 | } 164 | 165 | void setInitialPose(const ros::NodeHandle& pnh, FrameHandlerBase& vo) 166 | { 167 | Transformation T_world_imuinit( 168 | Quaternion(vk::param(pnh, "T_world_imuinit/qw", 1.0), 169 | vk::param(pnh, "T_world_imuinit/qx", 0.0), 170 | vk::param(pnh, "T_world_imuinit/qy", 0.0), 171 | vk::param(pnh, "T_world_imuinit/qz", 0.0)), 172 | Vector3d(vk::param(pnh, "T_world_imuinit/tx", 0.0), 173 | vk::param(pnh, "T_world_imuinit/ty", 0.0), 174 | vk::param(pnh, "T_world_imuinit/tz", 0.0))); 175 | vo.setInitialImuPose(T_world_imuinit); 176 | } 177 | 178 | 179 | FrameHandlerMono::Ptr makeMono(const ros::NodeHandle& pnh, const CameraBundlePtr& cam) 180 | { 181 | // Create camera 182 | CameraBundle::Ptr ncam = (cam) ? 
cam : loadCameraFromYaml(pnh); 183 | 184 | // Init VO 185 | FrameHandlerMono::Ptr vo = 186 | std::make_shared( 187 | loadBaseOptions(pnh, false), 188 | loadDepthFilterOptions(pnh), 189 | loadDetectorOptions(pnh), 190 | loadInitializationOptions(pnh), 191 | loadReprojectorOptions(pnh), 192 | loadTrackerOptions(pnh), 193 | ncam); 194 | 195 | // Get initial position and orientation of IMU 196 | setInitialPose(pnh, *vo); 197 | 198 | return vo; 199 | } 200 | 201 | FrameHandlerStereo::Ptr makeStereo(const ros::NodeHandle& pnh, const CameraBundlePtr& cam) 202 | { 203 | // Load cameras 204 | CameraBundle::Ptr ncam = (cam) ? cam : loadCameraFromYaml(pnh); 205 | 206 | // Init VO 207 | InitializationOptions init_options = loadInitializationOptions(pnh); 208 | init_options.init_type = InitializerType::kStereo; 209 | FrameHandlerStereo::Ptr vo = 210 | std::make_shared( 211 | loadBaseOptions(pnh, true), 212 | loadDepthFilterOptions(pnh), 213 | loadDetectorOptions(pnh), 214 | init_options, 215 | loadStereoOptions(pnh), 216 | loadReprojectorOptions(pnh), 217 | loadTrackerOptions(pnh), 218 | ncam); 219 | 220 | // Get initial position and orientation of IMU 221 | setInitialPose(pnh, *vo); 222 | 223 | return vo; 224 | } 225 | 226 | FrameHandlerArray::Ptr makeArray(const ros::NodeHandle& pnh, const CameraBundlePtr& cam) 227 | { 228 | // Load cameras 229 | CameraBundle::Ptr ncam = (cam) ? 
cam : loadCameraFromYaml(pnh); 230 | 231 | // Init VO 232 | InitializationOptions init_options = loadInitializationOptions(pnh); 233 | init_options.init_type = InitializerType::kArrayGeometric; 234 | init_options.init_min_disparity = 25; 235 | DepthFilterOptions depth_filter_options = loadDepthFilterOptions(pnh); 236 | depth_filter_options.verbose = true; 237 | FrameHandlerArray::Ptr vo = 238 | std::make_shared( 239 | loadBaseOptions(pnh, true), 240 | depth_filter_options, 241 | loadDetectorOptions(pnh), 242 | init_options, 243 | loadReprojectorOptions(pnh), 244 | loadTrackerOptions(pnh), 245 | ncam); 246 | 247 | // Get initial position and orientation of IMU 248 | setInitialPose(pnh, *vo); 249 | 250 | return vo; 251 | } 252 | 253 | } // namespace factory 254 | } // namespace svo 255 | -------------------------------------------------------------------------------- /svo_ros/src/svo_interface.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | 28 | #ifdef SVO_USE_BACKEND 29 | #include 30 | #include 31 | #include 32 | #include 33 | #endif 34 | 35 | namespace svo { 36 | 37 | SvoInterface::SvoInterface( 38 | const PipelineType& pipeline_type, 39 | const ros::NodeHandle& nh, 40 | const ros::NodeHandle& private_nh) 41 | : nh_(nh) 42 | , pnh_(private_nh) 43 | , pipeline_type_(pipeline_type) 44 | , set_initial_attitude_from_gravity_( 45 | vk::param(pnh_, "set_initial_attitude_from_gravity", true)) 46 | , automatic_reinitialization_( 47 | vk::param(pnh_, "automatic_reinitialization", false)) 48 | { 49 | switch (pipeline_type) 50 | { 51 | case PipelineType::kMono: 52 | svo_ = factory::makeMono(pnh_); 53 | break; 54 
| case PipelineType::kStereo: 55 | svo_ = factory::makeStereo(pnh_); 56 | break; 57 | case PipelineType::kArray: 58 | svo_ = factory::makeArray(pnh_); 59 | break; 60 | default: 61 | LOG(FATAL) << "Unknown pipeline"; 62 | break; 63 | } 64 | ncam_ = svo_->getNCamera(); 65 | visualizer_.reset( 66 | new Visualizer(svo_->options_.trace_dir, pnh_, ncam_->getNumCameras())); 67 | if(vk::param(pnh_, "use_imu", false)) 68 | imu_handler_ = factory::getImuHandler(pnh_); 69 | 70 | #ifdef SVO_USE_BACKEND 71 | if(vk::param(pnh_, "use_backend", false)) 72 | { 73 | backend_interface_ = svo::backend_factory::makeBackend(pnh_); 74 | backend_visualizer_.reset(new BackendVisualizer(svo_->options_.trace_dir, pnh_)); 75 | svo_->setBundleAdjuster(backend_interface_); 76 | backend_interface_->imu_handler_ = imu_handler_; 77 | } 78 | #endif 79 | 80 | svo_->start(); 81 | } 82 | 83 | SvoInterface::~SvoInterface() 84 | { 85 | if (imu_thread_) 86 | imu_thread_->join(); 87 | if (image_thread_) 88 | image_thread_->join(); 89 | VLOG(1) << "Destructed SVO."; 90 | } 91 | 92 | void SvoInterface::processImageBundle( 93 | const std::vector& images, 94 | const int64_t timestamp_nanoseconds) 95 | { 96 | svo_->addImageBundle(images, timestamp_nanoseconds); 97 | } 98 | 99 | void SvoInterface::publishResults( 100 | const std::vector& images, 101 | const int64_t timestamp_nanoseconds) 102 | { 103 | CHECK_NOTNULL(svo_.get()); 104 | CHECK_NOTNULL(visualizer_.get()); 105 | visualizer_->publishSvoInfo(svo_.get(), timestamp_nanoseconds); 106 | switch (svo_->stage()) 107 | { 108 | case Stage::kTracking: { 109 | Eigen::Matrix Covariance; // TODO 110 | visualizer_->publishImuPose( 111 | svo_->getLastFrames()->get_T_W_B(), Covariance, timestamp_nanoseconds); 112 | visualizer_->publishCameraPoses(svo_->getLastFrames(), timestamp_nanoseconds); 113 | visualizer_->publishImagesWithFeatures( 114 | svo_->getLastFrames(), timestamp_nanoseconds); 115 | visualizer_->visualizeMarkers( 116 | svo_->getLastFrames(), 
svo_->closeKeyframes(), svo_->map()); 117 | visualizer_->exportToDense(svo_->getLastFrames()); 118 | break; 119 | } 120 | case Stage::kInitializing: { 121 | visualizer_->publishBundleFeatureTracks( 122 | svo_->initializer_->frames_ref_, svo_->getLastFrames(), timestamp_nanoseconds); 123 | break; 124 | } 125 | case Stage::kPaused: 126 | case Stage::kRelocalization: 127 | visualizer_->publishImages(images, timestamp_nanoseconds); 128 | break; 129 | default: 130 | LOG(FATAL) << "Unknown stage"; 131 | break; 132 | } 133 | 134 | #ifdef SVO_USE_BACKEND 135 | if(svo_->stage() == Stage::kTracking && backend_interface_) 136 | { 137 | if(svo_->getLastFrames()->isKeyframe()) 138 | { 139 | std::lock_guard estimate_lock(backend_interface_->optimizer_->estimate_mut_); 140 | const gtsam::Values& state = backend_interface_->optimizer_->estimate_; 141 | backend_visualizer_->visualizeFrames(state); 142 | if(backend_interface_->options_.add_imu_factors) 143 | backend_visualizer_->visualizeVelocity(state); 144 | backend_visualizer_->visualizePoints(state); 145 | backend_visualizer_->publishPointcloud(state); 146 | //vin_visualizer_->visualizeSmartFactors(ba_->graph_manager_->smart_factors_map_); 147 | } 148 | } 149 | #endif 150 | } 151 | 152 | void SvoInterface::setImuPrior(const int64_t timestamp_nanoseconds) 153 | { 154 | if(imu_handler_ && !svo_->hasStarted() && set_initial_attitude_from_gravity_) 155 | { 156 | // set initial orientation 157 | Quaternion R_imu_world; 158 | if(imu_handler_->getInitialAttitude( 159 | timestamp_nanoseconds * common::conversions::kNanoSecondsToSeconds, 160 | R_imu_world)) 161 | { 162 | VLOG(3) << "Set initial orientation from accelerometer measurements."; 163 | svo_->setRotationPrior(R_imu_world); 164 | } 165 | } 166 | else if(imu_handler_ && svo_->getLastFrames()) 167 | { 168 | // set incremental rotation prior 169 | ImuMeasurements imu_measurements; 170 | if(imu_handler_->getMeasurements( 171 | svo_->getLastFrames()->getMinTimestampNanoseconds() * 
172 | common::conversions::kNanoSecondsToSeconds, 173 | timestamp_nanoseconds * common::conversions::kNanoSecondsToSeconds, 174 | false, imu_measurements)) 175 | { 176 | Quaternion R_lastimu_newimu; 177 | if(imu_handler_->getRelativeRotationPrior( 178 | svo_->getLastFrames()->getMinTimestampNanoseconds() * 179 | common::conversions::kNanoSecondsToSeconds, 180 | timestamp_nanoseconds * common::conversions::kNanoSecondsToSeconds, 181 | false, R_lastimu_newimu)) 182 | { 183 | VLOG(3) << "Set incremental rotation prior from IMU."; 184 | svo_->setRotationIncrementPrior(R_lastimu_newimu); 185 | } 186 | } 187 | } 188 | } 189 | 190 | void SvoInterface::monoCallback(const sensor_msgs::ImageConstPtr& msg) 191 | { 192 | if(idle_) 193 | return; 194 | 195 | cv::Mat image; 196 | try 197 | { 198 | image = cv_bridge::toCvCopy(msg)->image; 199 | } 200 | catch (cv_bridge::Exception& e) 201 | { 202 | ROS_ERROR("cv_bridge exception: %s", e.what()); 203 | } 204 | 205 | std::vector images; 206 | images.push_back(image.clone()); 207 | 208 | setImuPrior(msg->header.stamp.toNSec()); 209 | 210 | imageCallbackPreprocessing(msg->header.stamp.toNSec()); 211 | 212 | processImageBundle(images, msg->header.stamp.toNSec()); 213 | publishResults(images, msg->header.stamp.toNSec()); 214 | 215 | if(svo_->stage() == Stage::kPaused && automatic_reinitialization_) 216 | svo_->start(); 217 | 218 | imageCallbackPostprocessing(); 219 | } 220 | 221 | void SvoInterface::stereoCallback( 222 | const sensor_msgs::ImageConstPtr& msg0, 223 | const sensor_msgs::ImageConstPtr& msg1) 224 | { 225 | if(idle_) 226 | return; 227 | 228 | cv::Mat img0, img1; 229 | try { 230 | img0 = cv_bridge::toCvShare(msg0, "mono8")->image; 231 | img1 = cv_bridge::toCvShare(msg1, "mono8")->image; 232 | } catch (cv_bridge::Exception& e) { 233 | ROS_ERROR("cv_bridge exception: %s", e.what()); 234 | } 235 | 236 | setImuPrior(msg0->header.stamp.toNSec()); 237 | 238 | imageCallbackPreprocessing(msg0->header.stamp.toNSec()); 239 | 240 | 
processImageBundle({img0, img1}, msg0->header.stamp.toNSec()); 241 | publishResults({img0, img1}, msg0->header.stamp.toNSec()); 242 | 243 | if(svo_->stage() == Stage::kPaused && automatic_reinitialization_) 244 | svo_->start(); 245 | 246 | imageCallbackPostprocessing(); 247 | } 248 | 249 | void SvoInterface::imuCallback(const sensor_msgs::ImuConstPtr& msg) 250 | { 251 | const Eigen::Vector3d omega_imu( 252 | msg->angular_velocity.x, msg->angular_velocity.y, msg->angular_velocity.z); 253 | const Eigen::Vector3d lin_acc_imu( 254 | msg->linear_acceleration.x, msg->linear_acceleration.y, msg->linear_acceleration.z); 255 | const ImuMeasurement m(msg->header.stamp.toSec(), omega_imu, lin_acc_imu); 256 | if(imu_handler_) 257 | imu_handler_->addImuMeasurement(m); 258 | else 259 | SVO_ERROR_STREAM("SvoNode has no ImuHandler"); 260 | } 261 | 262 | void SvoInterface::inputKeyCallback(const std_msgs::StringConstPtr& key_input) 263 | { 264 | std::string remote_input = key_input->data; 265 | char input = remote_input.c_str()[0]; 266 | switch(input) 267 | { 268 | case 'q': 269 | quit_ = true; 270 | SVO_INFO_STREAM("SVO user input: QUIT"); 271 | break; 272 | case 'r': 273 | svo_->reset(); 274 | idle_ = true; 275 | SVO_INFO_STREAM("SVO user input: RESET"); 276 | break; 277 | case 's': 278 | svo_->start(); 279 | idle_ = false; 280 | SVO_INFO_STREAM("SVO user input: START"); 281 | break; 282 | default: ; 283 | } 284 | } 285 | 286 | void SvoInterface::subscribeImu() 287 | { 288 | imu_thread_ = std::unique_ptr( 289 | new std::thread(&SvoInterface::imuLoop, this)); 290 | sleep(3); 291 | } 292 | 293 | void SvoInterface::subscribeImage() 294 | { 295 | if(pipeline_type_ == PipelineType::kMono) 296 | image_thread_ = std::unique_ptr( 297 | new std::thread(&SvoInterface::monoLoop, this)); 298 | else if(pipeline_type_ == PipelineType::kStereo) 299 | image_thread_ = std::unique_ptr( 300 | new std::thread(&SvoInterface::stereoLoop, this)); 301 | } 302 | 303 | void 
SvoInterface::subscribeRemoteKey() 304 | { 305 | std::string remote_key_topic = 306 | vk::param(pnh_, "remote_key_topic", "svo/remote_key"); 307 | sub_remote_key_ = 308 | nh_.subscribe(remote_key_topic, 5, &svo::SvoInterface::inputKeyCallback, this); 309 | } 310 | 311 | void SvoInterface::imuLoop() 312 | { 313 | SVO_INFO_STREAM("SvoNode: Started IMU loop."); 314 | ros::NodeHandle nh; 315 | ros::CallbackQueue queue; 316 | nh.setCallbackQueue(&queue); 317 | std::string imu_topic = vk::param(pnh_, "imu_topic", "imu"); 318 | ros::Subscriber sub_imu = 319 | nh.subscribe(imu_topic, 10, &svo::SvoInterface::imuCallback, this); 320 | while(ros::ok() && !quit_) 321 | { 322 | queue.callAvailable(ros::WallDuration(0.1)); 323 | } 324 | } 325 | 326 | void SvoInterface::monoLoop() 327 | { 328 | SVO_INFO_STREAM("SvoNode: Started Image loop."); 329 | 330 | ros::NodeHandle nh; 331 | ros::CallbackQueue queue; 332 | nh.setCallbackQueue(&queue); 333 | 334 | image_transport::ImageTransport it(nh); 335 | std::string image_topic = 336 | vk::param(pnh_, "cam0_topic", "camera/image_raw"); 337 | image_transport::Subscriber it_sub = 338 | it.subscribe(image_topic, 5, &svo::SvoInterface::monoCallback, this); 339 | 340 | while(ros::ok() && !quit_) 341 | { 342 | queue.callAvailable(ros::WallDuration(0.1)); 343 | } 344 | } 345 | 346 | void SvoInterface::stereoLoop() 347 | { 348 | typedef message_filters::sync_policies::ExactTime ExactPolicy; 349 | typedef message_filters::Synchronizer ExactSync; 350 | 351 | ros::NodeHandle nh(nh_, "image_thread"); 352 | ros::CallbackQueue queue; 353 | nh.setCallbackQueue(&queue); 354 | 355 | // subscribe to cam msgs 356 | std::string cam0_topic(vk::param(pnh_, "cam0_topic", "/cam0/image_raw")); 357 | std::string cam1_topic(vk::param(pnh_, "cam1_topic", "/cam1/image_raw")); 358 | image_transport::ImageTransport it(nh); 359 | image_transport::SubscriberFilter sub0(it, cam0_topic, 1, std::string("raw")); 360 | image_transport::SubscriberFilter sub1(it, cam1_topic, 
1, std::string("raw")); 361 | ExactSync sync_sub(ExactPolicy(5), sub0, sub1); 362 | sync_sub.registerCallback(boost::bind(&svo::SvoInterface::stereoCallback, this, _1, _2)); 363 | 364 | while(ros::ok() && !quit_) 365 | { 366 | queue.callAvailable(ros::WallDuration(0.1)); 367 | } 368 | } 369 | 370 | } // namespace svo 371 | -------------------------------------------------------------------------------- /svo_ros/src/svo_node.cpp: -------------------------------------------------------------------------------- 1 | #include "svo_ros/svo_node_base.h" 2 | 3 | int main(int argc, char **argv) 4 | { 5 | svo_ros::SvoNodeBase::initThirdParty(argc, argv); 6 | 7 | svo_ros::SvoNodeBase node; 8 | node.run(); 9 | } 10 | -------------------------------------------------------------------------------- /svo_ros/src/svo_node_base.cpp: -------------------------------------------------------------------------------- 1 | #include "svo_ros/svo_node_base.h" 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | namespace svo_ros { 10 | 11 | void SvoNodeBase::initThirdParty(int argc, char **argv) 12 | { 13 | google::InitGoogleLogging(argv[0]); 14 | google::ParseCommandLineFlags(&argc, &argv, true); 15 | google::InstallFailureSignalHandler(); 16 | 17 | ros::init(argc, argv, "svo"); 18 | } 19 | 20 | SvoNodeBase::SvoNodeBase() 21 | : node_handle_(), private_node_handle_("~"), type_( 22 | vk::param(private_node_handle_, "pipeline_is_stereo", false) ? 
23 | svo::PipelineType::kStereo : svo::PipelineType::kMono), 24 | svo_interface_(type_, node_handle_, private_node_handle_) 25 | { 26 | if (svo_interface_.imu_handler_) 27 | { 28 | svo_interface_.subscribeImu(); 29 | } 30 | svo_interface_.subscribeImage(); 31 | svo_interface_.subscribeRemoteKey(); 32 | } 33 | 34 | void SvoNodeBase::run() 35 | { 36 | ros::spin(); 37 | SVO_INFO_STREAM("SVO quit"); 38 | svo_interface_.quit_ = true; 39 | SVO_INFO_STREAM("SVO terminated.\n"); 40 | } 41 | 42 | } // namespace svo_ros 43 | -------------------------------------------------------------------------------- /svo_ros/src/svo_nodelet.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | 7 | PLUGINLIB_EXPORT_CLASS(svo::SvoNodelet, nodelet::Nodelet) 8 | 9 | namespace svo { 10 | 11 | SvoNodelet::~SvoNodelet() 12 | { 13 | NODELET_INFO_STREAM("SVO quit"); 14 | svo_interface_->quit_ = true; 15 | } 16 | 17 | void SvoNodelet::onInit() 18 | { 19 | ros::NodeHandle nh(getNodeHandle()); 20 | ros::NodeHandle pnh(getPrivateNodeHandle()); 21 | 22 | NODELET_INFO_STREAM("Initialized " << getName() << " nodelet."); 23 | svo::PipelineType type = svo::PipelineType::kMono; 24 | if(vk::param(pnh, "pipeline_is_stereo", false)) 25 | type = svo::PipelineType::kStereo; 26 | 27 | svo_interface_.reset(new SvoInterface(type, nh, pnh)); 28 | if(svo_interface_->imu_handler_) 29 | svo_interface_->subscribeImu(); 30 | svo_interface_->subscribeImage(); 31 | svo_interface_->subscribeRemoteKey(); 32 | } 33 | 34 | } // namespace svo 35 | -------------------------------------------------------------------------------- /svo_ros/src/visualizer.cpp: -------------------------------------------------------------------------------- 1 | // This file is part of SVO - Semi-direct Visual Odometry. 2 | // 3 | // Copyright (C) 2014 Christian Forster 4 | // (Robotics and Perception Group, University of Zurich, Switzerland). 
5 | 6 | #include 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | 13 | #include 14 | 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | 23 | #include 24 | #include 25 | #include 26 | 27 | #include 28 | #include 29 | #include 30 | 31 | #include 32 | #include 33 | #include 34 | #include 35 | #include 36 | #include 37 | #include 38 | #include 39 | #include 40 | #include 41 | 42 | namespace svo { 43 | 44 | Visualizer::Visualizer( 45 | const std::string& trace_dir, 46 | const ros::NodeHandle& nh_private, 47 | const size_t n_cameras) 48 | : pnh_(nh_private) 49 | , trace_dir_(trace_dir) 50 | , img_pub_level_(vk::param(pnh_, "publish_img_pyr_level", 0)) 51 | , img_pub_nth_(vk::param(pnh_, "publish_every_nth_img", 1)) 52 | , dense_pub_nth_(vk::param(pnh_, "publish_every_nth_dense_input", 1)) 53 | , pc_(new PointCloud) 54 | , publish_world_in_cam_frame_(vk::param(pnh_, "publish_world_in_cam_frame", true)) 55 | , publish_map_every_frame_(vk::param(pnh_, "publish_map_every_frame", false)) 56 | , publish_points_display_time_(vk::param(pnh_, "publish_point_display_time", 0)) 57 | , publish_seeds_(vk::param(pnh_, "publish_seeds", true)) 58 | , publish_seeds_uncertainty_(vk::param(pnh_, "publish_seeds_uncertainty", false)) 59 | , trace_pointcloud_(vk::param(pnh_, "trace_pointcloud", false)) 60 | , vis_scale_(vk::param(pnh_, "publish_marker_scale", 1.2)) 61 | { 62 | // Init ROS Marker Publishers 63 | pub_frames_ = pnh_.advertise("keyframes", 10); 64 | pub_points_ = pnh_.advertise("points", 10000); 65 | pub_imu_pose_ = pnh_.advertise("pose_imu",10); 66 | pub_info_ = pnh_.advertise("info", 10); 67 | pub_markers_ = pnh_.advertise("markers", 100); 68 | pub_pc_ = pnh_.advertise("pointcloud", 1); 69 | pub_dense_.resize(n_cameras); 70 | pub_images_.resize(n_cameras); 71 | pub_cam_poses_.resize(n_cameras); 72 | image_transport::ImageTransport it(pnh_); 73 | for(size_t i=0; i("dense_input/"+std::to_string(i), 2); 76 | 
pub_images_.at(i) = it.advertise("image/"+std::to_string(i), 10); 77 | pub_cam_poses_.at(i) = pnh_.advertise("pose_cam/"+std::to_string(i), 10); 78 | } 79 | pub_loop_closure_ = pnh_.advertise( 80 | "loop_closures", 10); 81 | 82 | // init tracing 83 | std::string states_trace_name(trace_dir_+"/trace_states.txt"); 84 | ofs_states_.open(states_trace_name.c_str()); 85 | ofs_states_.precision(10); 86 | } 87 | 88 | void Visualizer::publishSvoInfo( 89 | const svo::FrameHandlerBase* const svo, 90 | const int64_t timestamp_nanoseconds) 91 | { 92 | CHECK_NOTNULL(svo); 93 | ++trace_id_; 94 | 95 | if(pub_info_.getNumSubscribers() == 0) 96 | return; 97 | VLOG(100) << "Publish SVO info"; 98 | 99 | svo_msgs::Info msg_info; 100 | msg_info.header.frame_id = "/cam"; 101 | msg_info.header.seq = trace_id_; 102 | msg_info.header.stamp = ros::Time().fromNSec(timestamp_nanoseconds); 103 | msg_info.processing_time = svo->lastProcessingTime(); 104 | msg_info.stage = static_cast(svo->stage()); 105 | msg_info.tracking_quality = static_cast(svo->trackingQuality()); 106 | msg_info.num_matches = svo->lastNumObservations(); 107 | pub_info_.publish(msg_info); 108 | } 109 | 110 | void Visualizer::publishImuPose( 111 | const Transformation& T_world_imu, 112 | const Eigen::Matrix Covariance, 113 | const int64_t timestamp_nanoseconds) 114 | { 115 | if(pub_imu_pose_.getNumSubscribers() == 0) 116 | return; 117 | VLOG(100) << "Publish IMU Pose"; 118 | 119 | Eigen::Quaterniond q = T_world_imu.getRotation().toImplementation(); 120 | Eigen::Vector3d p = T_world_imu.getPosition(); 121 | geometry_msgs::PoseWithCovarianceStampedPtr msg_pose( 122 | new geometry_msgs::PoseWithCovarianceStamped); 123 | msg_pose->header.seq = trace_id_; 124 | msg_pose->header.stamp = ros::Time().fromNSec(timestamp_nanoseconds); 125 | msg_pose->header.frame_id = "/imu"; 126 | msg_pose->pose.pose.position.x = p[0]; 127 | msg_pose->pose.pose.position.y = p[1]; 128 | msg_pose->pose.pose.position.z = p[2]; 129 | 
msg_pose->pose.pose.orientation.x = q.x(); 130 | msg_pose->pose.pose.orientation.y = q.y(); 131 | msg_pose->pose.pose.orientation.z = q.z(); 132 | msg_pose->pose.pose.orientation.w = q.w(); 133 | for(size_t i=0; i<36; ++i) 134 | msg_pose->pose.covariance[i] = Covariance(i%6, i/6); 135 | pub_imu_pose_.publish(msg_pose); 136 | } 137 | 138 | void Visualizer::publishCameraPoses( 139 | const FrameBundlePtr& frame_bundle, 140 | const int64_t timestamp_nanoseconds) 141 | { 142 | vk::output_helper::publishTfTransform( 143 | frame_bundle->at(0)->T_cam_world(), ros::Time().fromNSec(timestamp_nanoseconds), 144 | "cam_pos", kWorldFrame, br_); 145 | 146 | for(size_t i = 0; i < frame_bundle->size(); ++i) 147 | { 148 | if(pub_cam_poses_.at(i).getNumSubscribers() == 0) 149 | return; 150 | VLOG(100) << "Publish camera pose " << i; 151 | 152 | Eigen::Quaterniond q = frame_bundle->at(i)->T_world_cam().getRotation().toImplementation(); 153 | Eigen::Vector3d p = frame_bundle->at(i)->T_world_cam().getPosition(); 154 | geometry_msgs::PoseStampedPtr msg_pose(new geometry_msgs::PoseStamped); 155 | msg_pose->header.seq = trace_id_; 156 | msg_pose->header.stamp = ros::Time().fromNSec(timestamp_nanoseconds); 157 | msg_pose->header.frame_id = "/cam"+std::to_string(i); 158 | msg_pose->pose.position.x = p[0]; 159 | msg_pose->pose.position.y = p[1]; 160 | msg_pose->pose.position.z = p[2]; 161 | msg_pose->pose.orientation.x = q.x(); 162 | msg_pose->pose.orientation.y = q.y(); 163 | msg_pose->pose.orientation.z = q.z(); 164 | msg_pose->pose.orientation.w = q.w(); 165 | pub_cam_poses_.at(i).publish(msg_pose); 166 | } 167 | } 168 | 169 | void Visualizer::publishBundleFeatureTracks( 170 | const FrameBundlePtr frames_ref, 171 | const FrameBundlePtr frames_cur, 172 | int64_t timestamp) 173 | { 174 | if(trace_id_ % img_pub_nth_ != 0 || !frames_ref) 175 | return; 176 | VLOG(100) << "Publish bundle feature tracks."; 177 | 178 | for(size_t i = 0; i < frames_ref->size(); ++i) 179 | { 180 | std::vector> 
matches_ref_cur; 181 | feature_tracking_utils::getFeatureMatches( 182 | *frames_ref->at(i), *frames_cur->at(i), &matches_ref_cur); 183 | publishFeatureTracks( 184 | frames_ref->at(i)->px_vec_, frames_cur->at(i)->px_vec_, 185 | matches_ref_cur, frames_cur->at(i)->img_pyr_, 186 | img_pub_level_, timestamp, i); 187 | } 188 | } 189 | 190 | void Visualizer::publishFeatureTracks( 191 | const Keypoints& px_ref, 192 | const Keypoints& px_cur, 193 | const std::vector>& matches_ref_cur, 194 | const ImgPyr& img_pyr, 195 | const Level& level, 196 | const uint64_t timestamp, 197 | const size_t frame_index) 198 | { 199 | if(pub_images_.at(frame_index).getNumSubscribers() == 0) 200 | return; 201 | VLOG(100) << "Publish feature tracks."; 202 | const int scale = (1<& images, 226 | const int64_t timestamp_nanoseconds) 227 | { 228 | if(trace_id_ % img_pub_nth_ != 0) 229 | return; 230 | VLOG(100) << "Publish images."; 231 | 232 | for(size_t i = 0; i < images.size(); ++i) 233 | { 234 | if(pub_images_.at(i).getNumSubscribers() == 0) 235 | continue; 236 | 237 | // Downsample image for publishing. 
238 | ImgPyr img_pyr; 239 | if (images[i].type() == CV_8UC1) 240 | { 241 | frame_utils::createImgPyramid(images[i], img_pub_level_+1, img_pyr); 242 | } 243 | else if (images[i].type() == CV_8UC3) 244 | { 245 | cv::Mat gray_image; 246 | cv::cvtColor(images[i], gray_image, CV_BGR2GRAY); 247 | frame_utils::createImgPyramid(gray_image, img_pub_level_+1, img_pyr); 248 | } 249 | else 250 | { 251 | LOG(FATAL) << "Unknown image type " << images[i].type() << "!"; 252 | } 253 | 254 | cv_bridge::CvImage img_msg; 255 | img_msg.header.stamp = ros::Time().fromNSec(timestamp_nanoseconds); 256 | img_msg.header.frame_id = "/cam"+std::to_string(i); 257 | img_msg.image = img_pyr.at(img_pub_level_); 258 | img_msg.encoding = sensor_msgs::image_encodings::MONO8; 259 | pub_images_.at(i).publish(img_msg.toImageMsg()); 260 | } 261 | } 262 | 263 | void Visualizer::publishImagesWithFeatures( 264 | const FrameBundlePtr& frame_bundle, 265 | const int64_t timestamp) 266 | { 267 | if(trace_id_ % img_pub_nth_ != 0) 268 | return; 269 | 270 | for(size_t i = 0; i < frame_bundle->size(); ++i) 271 | { 272 | if(pub_images_.at(i).getNumSubscribers() == 0) 273 | continue; 274 | VLOG(100) << "Publish image with features " << i; 275 | 276 | FramePtr frame = frame_bundle->at(i); 277 | cv::Mat img_rgb; 278 | feature_detection_utils::drawFeatures(*frame, img_pub_level_, true, &img_rgb); 279 | cv_bridge::CvImage img_msg; 280 | img_msg.header.frame_id = "/cam"; 281 | img_msg.header.seq = trace_id_; 282 | img_msg.header.stamp = ros::Time().fromNSec(timestamp); 283 | img_msg.image = img_rgb; 284 | img_msg.encoding = sensor_msgs::image_encodings::BGR8; 285 | pub_images_.at(i).publish(img_msg.toImageMsg()); 286 | } 287 | } 288 | 289 | void Visualizer::visualizeHexacopter( 290 | const Transformation& T_frame_world, 291 | const uint64_t timestamp) 292 | { 293 | if(pub_frames_.getNumSubscribers() > 0) 294 | { 295 | vk::output_helper::publishCameraMarker( 296 | pub_frames_, "cam_pos", "cams", 
ros::Time().fromNSec(timestamp), 297 | 1, 0, 0.8, Vector3d(0.,0.,1.)); 298 | } 299 | } 300 | 301 | void Visualizer::visualizeQuadrocopter( 302 | const Transformation& T_frame_world, 303 | const uint64_t timestamp) 304 | { 305 | vk::output_helper::publishTfTransform( 306 | T_frame_world, ros::Time().fromNSec(timestamp), "cam_pos", kWorldFrame, br_); 307 | 308 | if(pub_frames_.getNumSubscribers() > 0) 309 | { 310 | vk::output_helper::publishQuadrocopterMarkers( 311 | pub_frames_, "cam_pos", "cams", ros::Time().fromNSec(timestamp), 312 | 1, 0, 0.8, Vector3d(0.,0.,1.)); 313 | } 314 | } 315 | 316 | 317 | void Visualizer::visualizeMarkers( 318 | const FrameBundlePtr& frame_bundle, 319 | const std::vector& close_kfs, 320 | const Map::Ptr& map) 321 | { 322 | FramePtr frame = frame_bundle->at(0); // TODO 323 | visualizeHexacopter(frame->T_f_w_, ros::Time::now().toNSec()); 324 | publishTrajectoryPoint(frame->pos(), ros::Time::now().toNSec(), trace_id_); 325 | if(frame->isKeyframe() || publish_map_every_frame_) 326 | { 327 | std::vector frames_to_visualize = close_kfs; 328 | frames_to_visualize.push_back(frame); 329 | publishMapRegion(frames_to_visualize); 330 | } 331 | 332 | if(publish_seeds_) 333 | publishSeeds(map); 334 | if(publish_seeds_uncertainty_) 335 | publishSeedsUncertainty(map); 336 | } 337 | 338 | void Visualizer::publishTrajectoryPoint( 339 | const Eigen::Vector3d& pos_in_vision, 340 | const uint64_t timestamp, 341 | const int id) 342 | { 343 | if(pub_points_.getNumSubscribers() > 0) 344 | { 345 | VLOG(100) << "Publish trajectory point."; 346 | vk::output_helper::publishPointMarker( 347 | pub_points_, pos_in_vision, "trajectory", 348 | ros::Time().fromNSec(timestamp), id, 0, 0.03*vis_scale_, Vector3d(0.,0.,0.5)); 349 | } 350 | } 351 | 352 | void Visualizer::publishSeeds(const Map::Ptr& map) 353 | { 354 | VLOG(100) << "Publish seeds."; 355 | double marker_scale = 0.03*vis_scale_; 356 | visualization_msgs::Marker m; 357 | m.header.frame_id = kWorldFrame; 358 | 
m.header.stamp = ros::Time(); 359 | m.ns = "seeds"; 360 | m.id = 0; 361 | m.type = visualization_msgs::Marker::POINTS; 362 | m.action = 0; // add/modify 363 | m.scale.x = marker_scale; 364 | m.scale.y = marker_scale; 365 | m.scale.z = marker_scale; 366 | m.color.a = 1.0; 367 | m.color.r = 1.0; 368 | m.color.g = 0.0; 369 | m.color.b = 0.0; 370 | m.points.reserve(1000); 371 | for(auto kf : map->keyframes_) 372 | { 373 | const FramePtr& frame = kf.second; 374 | const Transformation T_w_f = frame->T_world_cam(); 375 | for(size_t i = 0; i < frame->num_features_; ++i) 376 | { 377 | if(isSeed(frame->type_vec_[i])) 378 | { 379 | CHECK(!frame->seed_ref_vec_[i].keyframe) << "Data inconsistent"; 380 | const Vector3d xyz = T_w_f * frame->getSeedPosInFrame(i); 381 | geometry_msgs::Point p; 382 | p.x = xyz.x(); 383 | p.y = xyz.y(); 384 | p.z = xyz.z(); 385 | m.points.push_back(p); 386 | } 387 | } 388 | } 389 | pub_points_.publish(m); 390 | } 391 | 392 | void Visualizer::publishSeedsAsPointcloud( 393 | const Frame& frame, 394 | bool only_converged_seeds, 395 | bool reset_pc_before_publishing) 396 | { 397 | if(pub_pc_.getNumSubscribers() == 0) 398 | return; 399 | VLOG(100) << "Publish seeds as pointcloud."; 400 | 401 | if(reset_pc_before_publishing) 402 | pc_->clear(); 403 | 404 | pc_->header.frame_id = kWorldFrame; 405 | pc_->header.stamp = ros::Time::now().toNSec(); 406 | pc_->reserve(frame.num_features_); 407 | for(size_t i = 0; i < frame.num_features_; ++i) 408 | { 409 | if((only_converged_seeds && isConvergedSeed(frame.type_vec_.at(i))) 410 | || !only_converged_seeds) 411 | { 412 | const Eigen::Vector3d xyz = frame.getSeedPosInFrame(i); 413 | PointType p; 414 | p.x = xyz.x(); 415 | p.y = xyz.y(); 416 | p.z = xyz.z(); 417 | p.intensity = frame.img().at(frame.px_vec_(1,i), frame.px_vec_(0,i)); 418 | pc_->push_back(p); 419 | } 420 | } 421 | VLOG(30) << "Publish pointcloud of size " << pc_->size(); 422 | pub_pc_.publish(pc_); 423 | } 424 | 425 | void 
Visualizer::publishSeedsUncertainty(const Map::Ptr& map) 426 | { 427 | VLOG(100) << "Publish seed uncertainty."; 428 | double marker_scale = 0.01*vis_scale_; 429 | visualization_msgs::Marker msg_variance; 430 | msg_variance.header.frame_id = kWorldFrame; 431 | msg_variance.header.stamp = ros::Time(); 432 | msg_variance.ns = "seeds_variance"; 433 | msg_variance.id = 0; 434 | msg_variance.type = visualization_msgs::Marker::LINE_LIST; 435 | msg_variance.action = 0; // add/modify 436 | msg_variance.scale.x = marker_scale; 437 | msg_variance.scale.y = marker_scale; 438 | msg_variance.scale.z = marker_scale; 439 | msg_variance.color.a = 1.0; 440 | msg_variance.color.r = 1.0; 441 | msg_variance.color.g = 0.0; 442 | msg_variance.color.b = 0.0; 443 | msg_variance.points.reserve(1000); 444 | for(auto kf : map->keyframes_) 445 | { 446 | const FramePtr& frame = kf.second; 447 | const Transformation T_w_f = frame->T_world_cam(); 448 | for(size_t i = 0; i < frame->num_features_; ++i) 449 | { 450 | if(isSeed(frame->type_vec_[i])) 451 | { 452 | CHECK(!frame->seed_ref_vec_[i].keyframe) << "Data inconsistent"; 453 | 454 | const FloatType z_inv_max = seed::getInvMaxDepth(frame->invmu_sigma2_a_b_vec_.col(i)); 455 | const FloatType z_inv_min = seed::getInvMinDepth(frame->invmu_sigma2_a_b_vec_.col(i)); 456 | const Vector3d p1 = T_w_f * (frame->f_vec_.col(i) * (1.0 / z_inv_min)); 457 | const Vector3d p2 = T_w_f * (frame->f_vec_.col(i) * (1.0 / z_inv_max)); 458 | 459 | geometry_msgs::Point msg_point; 460 | msg_point.x = p1.x(); 461 | msg_point.y = p1.y(); 462 | msg_point.z = p1.z(); 463 | msg_variance.points.push_back(msg_point); 464 | msg_point.x = p2.x(); 465 | msg_point.y = p2.y(); 466 | msg_point.z = p2.z(); 467 | msg_variance.points.push_back(msg_point); 468 | } 469 | } 470 | } 471 | pub_points_.publish(msg_variance); 472 | } 473 | 474 | void Visualizer::publishMapRegion(const std::vector& frames) 475 | { 476 | VLOG(100) << "Publish map region."; 477 | uint64_t ts = 
vk::Timer::getCurrentTime(); 478 | if(pub_pc_.getNumSubscribers() > 0) 479 | { 480 | pc_->header.frame_id = kWorldFrame; 481 | 482 | pcl_conversions::toPCL(ros::Time::now(), pc_->header.stamp); 483 | pc_->clear(); 484 | pc_->reserve(frames.size() * 150); 485 | PointType p; 486 | for(const FramePtr& frame : frames) 487 | { 488 | for(size_t i = 0; i < frame->num_features_; ++i) 489 | { 490 | if(frame->landmark_vec_[i] == nullptr) 491 | continue; 492 | 493 | Point& point = *frame->landmark_vec_[i]; 494 | if(point.last_published_ts_ == ts) 495 | continue; 496 | point.last_published_ts_ = ts; 497 | p.x = point.pos_.x(); 498 | p.y = point.pos_.y(); 499 | p.z = point.pos_.z(); 500 | p.intensity = isEdgelet(frame->type_vec_[i]) ? 60 : 0; 501 | pc_->push_back(p); 502 | } 503 | } 504 | VLOG(100) << "Publish pointcloud of size " << pc_->size(); 505 | pub_pc_.publish(pc_); 506 | } 507 | 508 | if(pub_points_.getNumSubscribers() > 0) 509 | { 510 | for(const FramePtr& frame : frames) 511 | publishKeyframeWithPoints(frame, ++ts); 512 | } 513 | } 514 | 515 | void Visualizer::publishKeyframeWithPoints( 516 | const FramePtr& frame, 517 | const uint64_t timestamp, 518 | const double marker_scale) 519 | { 520 | // publish keyframe 521 | Transformation T_world_cam(frame->T_f_w_.inverse()); 522 | vk::output_helper::publishFrameMarker( 523 | pub_frames_, T_world_cam.getRotationMatrix(), 524 | T_world_cam.getPosition(), "kfs", ros::Time::now(), frame->id_*10, 0, marker_scale*2.0); 525 | 526 | // publish point cloud and links 527 | Position xyz_world; 528 | int id = 0; 529 | for(size_t i = 0; i < frame->num_features_; ++i) 530 | { 531 | if(frame->landmark_vec_[i] != nullptr) 532 | { 533 | PointPtr& point = frame->landmark_vec_[i]; 534 | if(point->last_published_ts_ == timestamp) 535 | continue; 536 | point->last_published_ts_ = timestamp; 537 | xyz_world = point->pos(); 538 | id = point->id(); 539 | } 540 | else if(frame->seed_ref_vec_[i].keyframe != nullptr) 541 | { 542 | const SeedRef& 
ref = frame->seed_ref_vec_[i]; 543 | xyz_world = ref.keyframe->T_world_cam() * ref.keyframe->getSeedPosInFrame(ref.seed_id); 544 | id = -ref.keyframe->id()*1000+ref.seed_id; 545 | } 546 | else 547 | continue; 548 | 549 | if(isEdgelet(frame->type_vec_[i])) 550 | { 551 | vk::output_helper::publishPointMarker( 552 | pub_points_, xyz_world, "pts", 553 | ros::Time::now(), id, 0, marker_scale*vis_scale_, 554 | Vector3d(0,0.6,0), publish_points_display_time_); 555 | } 556 | else 557 | { 558 | vk::output_helper::publishPointMarker( 559 | pub_points_, xyz_world, "pts", 560 | ros::Time::now(), id, 0, marker_scale*vis_scale_, 561 | Vector3d(1,0,1), publish_points_display_time_); 562 | } 563 | 564 | if(trace_pointcloud_) 565 | { 566 | if(!ofs_pointcloud_.is_open()) 567 | ofs_pointcloud_.open(trace_dir_+"/pointcloud.txt"); 568 | ofs_pointcloud_ << xyz_world.x() << " " 569 | << xyz_world.y() << " " 570 | << xyz_world.z() << std::endl; 571 | } 572 | 573 | /* 574 | if(point->normal_set_) 575 | { 576 | vk::output_helper::publishArrowMarker( 577 | pub_points_, T_world_from_vision_*point->pos_, 578 | T_world_from_vision_.rotation_matrix()*point->normal_, 0.1, 579 | "normal", ros::Time::now(), point->id_, 0, 0.005, 580 | Vector3d(0.0, 0., 1.0)); 581 | } 582 | */ 583 | 584 | } 585 | } 586 | 587 | void Visualizer::publishLoopClosure( 588 | const FramePtr& query, const FramePtr& match, 589 | const Transformation& T_match_query) 590 | { 591 | CHECK(query); 592 | const Eigen::Vector3d p_w_query = query->T_f_w_.inverse().getPosition(); 593 | const Eigen::Vector3d p_w_match = match->T_f_w_.inverse().getPosition(); 594 | const Eigen::Vector3d p_w_query_should = 595 | (match->T_f_w_.inverse() * T_match_query).getPosition(); 596 | 597 | visualization_msgs::Marker marker; 598 | marker.header.frame_id = "/world"; 599 | marker.header.stamp = ros::Time::now(); 600 | marker.ns = "lcs"; 601 | marker.id = query->id() * 2; 602 | marker.type = visualization_msgs::Marker::ARROW; 603 | marker.action = 
visualization_msgs::Marker::ADD; 604 | marker.points.reserve(2); 605 | geometry_msgs::Point point; 606 | point.x = static_cast(p_w_match.x()); 607 | point.y = static_cast(p_w_match.y()); 608 | point.z = static_cast(p_w_match.z()); 609 | marker.points.push_back(point); 610 | point.x = static_cast(p_w_query_should.x()); 611 | point.y = static_cast(p_w_query_should.y()); 612 | point.z = static_cast(p_w_query_should.z()); 613 | marker.points.push_back(point); 614 | marker.scale.x = 0.01; 615 | marker.scale.y = 0.01; 616 | marker.color.a = 1.0; 617 | marker.color.r = 0.0; 618 | marker.color.g = 1.0; 619 | marker.color.b = 0.0; 620 | pub_loop_closure_.publish(marker); 621 | 622 | marker.id = query->id() * 2 + 1; 623 | marker.points.clear(); 624 | point.x = static_cast(p_w_query.x()); 625 | point.y = static_cast(p_w_query.y()); 626 | point.z = static_cast(p_w_query.z()); 627 | marker.points.push_back(point); 628 | point.x = static_cast(p_w_query_should.x()); 629 | point.y = static_cast(p_w_query_should.y()); 630 | point.z = static_cast(p_w_query_should.z()); 631 | marker.points.push_back(point); 632 | marker.color.r = 1.0; 633 | marker.color.g = 0.0; 634 | marker.color.b = 0.0; 635 | pub_loop_closure_.publish(marker); 636 | } 637 | 638 | void Visualizer::exportToDense(const FrameBundlePtr& frame_bundle) 639 | { 640 | VLOG(100) << "Publish dense input."; 641 | for(size_t cam_index = 0; cam_index < frame_bundle->size(); ++cam_index) 642 | { 643 | if(dense_pub_nth_ > 0 644 | && trace_id_%dense_pub_nth_ == 0 645 | && pub_dense_.at(cam_index).getNumSubscribers() > 0) 646 | { 647 | const FramePtr& frame = frame_bundle->at(cam_index); 648 | svo_msgs::DenseInputWithFeatures msg; 649 | msg.header.stamp = ros::Time().fromNSec(frame->getTimestampNSec()); 650 | msg.header.frame_id = "/world"; 651 | msg.frame_id = frame->id_; 652 | 653 | cv_bridge::CvImage img_msg; 654 | img_msg.header.stamp = msg.header.stamp; 655 | img_msg.header.frame_id = "camera"; 656 | if 
(!frame->original_color_image_.empty()) 657 | { 658 | img_msg.image = frame->original_color_image_; 659 | img_msg.encoding = sensor_msgs::image_encodings::BGR8; 660 | } 661 | else 662 | { 663 | img_msg.image = frame->img(); 664 | img_msg.encoding = sensor_msgs::image_encodings::MONO8; 665 | } 666 | msg.image = *img_msg.toImageMsg(); 667 | 668 | double min_z = std::numeric_limits::max(); 669 | double max_z = std::numeric_limits::min(); 670 | 671 | Position xyz_world; 672 | for(size_t i = 0; i < frame->num_features_; ++i) 673 | { 674 | if(frame->landmark_vec_[i] != nullptr) 675 | { 676 | xyz_world = frame->landmark_vec_[i]->pos(); 677 | } 678 | else if(frame->seed_ref_vec_[i].keyframe != nullptr) 679 | { 680 | const SeedRef& ref = frame->seed_ref_vec_[i]; 681 | xyz_world = ref.keyframe->T_world_cam() * ref.keyframe->getSeedPosInFrame(ref.seed_id); 682 | } 683 | else 684 | continue; 685 | 686 | svo_msgs::Feature feature; 687 | feature.x = xyz_world(0); 688 | feature.y = xyz_world(1); 689 | feature.z = xyz_world(2); 690 | msg.features.push_back(feature); 691 | 692 | Position pos_in_frame = frame->T_f_w_ * xyz_world; 693 | min_z = std::min(pos_in_frame[2], min_z); 694 | max_z = std::max(pos_in_frame[2], max_z); 695 | } 696 | msg.min_depth = (float) min_z; 697 | msg.max_depth = (float) max_z; 698 | 699 | // publish cam in world frame 700 | Transformation T_world_from_cam(frame->T_f_w_.inverse()); 701 | const Eigen::Quaterniond& q = T_world_from_cam.getRotation().toImplementation(); 702 | const Vector3d& p = T_world_from_cam.getPosition(); 703 | 704 | msg.pose.position.x = p[0]; 705 | msg.pose.position.y = p[1]; 706 | msg.pose.position.z = p[2]; 707 | msg.pose.orientation.w = q.w(); 708 | msg.pose.orientation.x = q.x(); 709 | msg.pose.orientation.y = q.y(); 710 | msg.pose.orientation.z = q.z(); 711 | pub_dense_.at(cam_index).publish(msg); 712 | } 713 | } 714 | } 715 | 716 | void Visualizer::visualizeCoordinateFrames(const Transformation& T_world_cam) 717 | { 718 | 
if(pub_markers_.getNumSubscribers() == 0) 719 | return; 720 | 721 | // camera frame 722 | vk::output_helper::publishFrameMarker( 723 | pub_markers_, T_world_cam.getRotationMatrix(), T_world_cam.getPosition(), 724 | "cam", ros::Time::now(), 0, 0, 0.2); 725 | 726 | // origin frame 727 | vk::output_helper::publishFrameMarker( 728 | pub_markers_, Matrix3d::Identity(), Vector3d::Zero(), 729 | kWorldFrame, ros::Time::now(), 0, 0, 0.2); 730 | } 731 | 732 | } // end namespace svo 733 | -------------------------------------------------------------------------------- /svo_ros/svo_nodelet.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | A nodelet wrapper for SVO. 7 | 8 | 9 | 10 | --------------------------------------------------------------------------------