├── CMakeLists.txt ├── LICENSE ├── README.md ├── config ├── doc │ ├── demo.gif │ ├── device-boat.png │ ├── device-hand-2.png │ ├── device-hand.png │ ├── device-jackal.png │ ├── gps-demo.gif │ ├── imu-debug.gif │ ├── imu-transform.png │ ├── kitti-demo.gif │ ├── kitti-map.png │ ├── kitti2bag │ │ ├── README.md │ │ └── kitti2bag.py │ ├── loop-closure-2.gif │ ├── loop-closure.gif │ ├── ouster-demo.gif │ ├── ouster-device.jpg │ ├── paper.pdf │ └── system.png ├── params.yaml ├── params_outdoor.yaml └── params_park.yaml ├── factor.png ├── frames.pdf ├── include └── utility.h ├── launch ├── include │ ├── config │ │ ├── robot.urdf.xacro │ │ └── rviz.rviz │ ├── module_loam.launch │ ├── module_navsat.launch │ ├── module_robot_state_publisher.launch │ ├── module_rviz.launch │ └── rosconsole │ │ ├── rosconsole_error.conf │ │ ├── rosconsole_info.conf │ │ └── rosconsole_warn.conf └── run.launch ├── msg └── cloud_info.msg ├── package.xml ├── rosgraph.png ├── src ├── featureExtraction.cpp ├── imageProjection.cpp ├── imuPreintegration.cpp └── mapOptmization.cpp ├── srv └── save_map.srv ├── system.png └── 运行截图.png /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(lio_sam) 3 | 4 | set(CMAKE_BUILD_TYPE "Debug") 5 | set(CMAKE_CXX_FLAGS "-std=c++14") 6 | set(CMAKE_CXX_FLAGS_DEBUG "-O3 -Wall -g -pthread") 7 | 8 | find_package(catkin REQUIRED COMPONENTS 9 | tf 10 | roscpp 11 | rospy 12 | cv_bridge 13 | # pcl library 14 | pcl_conversions 15 | # msgs 16 | std_msgs 17 | sensor_msgs 18 | geometry_msgs 19 | nav_msgs 20 | message_generation 21 | visualization_msgs 22 | ) 23 | 24 | find_package( GTSAMCMakeTools ) 25 | 26 | find_package(OpenMP REQUIRED) 27 | find_package(PCL REQUIRED QUIET) 28 | find_package(OpenCV REQUIRED QUIET) 29 | find_package(GTSAM REQUIRED QUIET) 30 | 31 | add_message_files( 32 | DIRECTORY msg 33 | FILES 34 | cloud_info.msg 35 | ) 36 | 37 | add_service_files( 38 | DIRECTORY srv 39 | FILES 40 | save_map.srv 41 | ) 42 | 43 | generate_messages( 44 | DEPENDENCIES 45 | geometry_msgs 46 | std_msgs 47 | nav_msgs 48 | sensor_msgs 49 | ) 50 | 51 | catkin_package( 52 | INCLUDE_DIRS include 53 | DEPENDS PCL GTSAM 54 | 55 | CATKIN_DEPENDS 56 | std_msgs 57 | nav_msgs 58 | geometry_msgs 59 | sensor_msgs 60 | message_runtime 61 | message_generation 62 | visualization_msgs 63 | ) 64 | 65 | # include directories 66 | include_directories( 67 | include 68 | ${catkin_INCLUDE_DIRS} 69 | ${PCL_INCLUDE_DIRS} 70 | ${OpenCV_INCLUDE_DIRS} 71 | ${GTSAM_INCLUDE_DIR} 72 | ) 73 | 74 | # link directories 75 | link_directories( 76 | include 77 | ${PCL_LIBRARY_DIRS} 78 | ${OpenCV_LIBRARY_DIRS} 79 | ${GTSAM_LIBRARY_DIRS} 80 | ) 81 | 82 | ########### 83 | ## Build ## 84 | ########### 85 | 86 | # Range Image Projection 87 | add_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp) 88 | add_dependencies(${PROJECT_NAME}_imageProjection ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) 89 | target_link_libraries(${PROJECT_NAME}_imageProjection ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES}) 90 | 91 | # Feature Association 92 | add_executable(${PROJECT_NAME}_featureExtraction src/featureExtraction.cpp) 93 | add_dependencies(${PROJECT_NAME}_featureExtraction ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) 94 | target_link_libraries(${PROJECT_NAME}_featureExtraction ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES}) 95 | 96 | # Mapping Optimization 97 | 
add_executable(${PROJECT_NAME}_mapOptmization src/mapOptmization.cpp) 98 | add_dependencies(${PROJECT_NAME}_mapOptmization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) 99 | target_compile_options(${PROJECT_NAME}_mapOptmization PRIVATE ${OpenMP_CXX_FLAGS}) 100 | target_link_libraries(${PROJECT_NAME}_mapOptmization ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam) 101 | 102 | # IMU Preintegration 103 | add_executable(${PROJECT_NAME}_imuPreintegration src/imuPreintegration.cpp) 104 | target_link_libraries(${PROJECT_NAME}_imuPreintegration ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam) 105 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, Tixiao Shan 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # LIO-SAM-DetailedNote
2 | Detailed source-code annotations for LIO-SAM: 3D SLAM that fuses lidar, IMU and GPS, with factor-graph optimization.
3 |
4 | The LIO-SAM codebase is very lightweight (only four cpp files) and is well worth reading.
5 |
6 | There are already many articles online that interpret the LIO-SAM paper, and interpretations of the related LOAM, A-LOAM and LeGO-LOAM are easy to find as well. These notes therefore focus on reading and studying the source code itself and collecting some engineering experience along the way. I am recording them here in the hope that they help others; if you find mistakes, please point them out.
7 |
8 | :) If this helps you, please give the repo a star~
9 |
10 | ## Table of contents (Zhihu)
11 | - [LIO-SAM source code analysis: preliminaries](https://zhuanlan.zhihu.com/p/352039509)
12 | - [LIO-SAM source code analysis (1): ImageProjection](https://zhuanlan.zhihu.com/p/352120054)
13 | - [LIO-SAM source code analysis (2): FeatureExtraction](https://zhuanlan.zhihu.com/p/352144126)
14 | - [LIO-SAM source code analysis (3): IMUPreintegration](https://zhuanlan.zhihu.com/p/352146800)
15 | - [LIO-SAM source code analysis (4): MapOptimization](https://zhuanlan.zhihu.com/p/352148894)
16 |
17 | ## Overall pipeline
18 |
19 | Code structure diagram
20 | ![Image](https://github.com/smilefacehh/LIO-SAM-DetailedNote/blob/main/system.png)
21 |
22 | Factor graph
23 | ![Image](https://github.com/smilefacehh/LIO-SAM-DetailedNote/blob/main/factor.png)
24 |
25 | 1. Lidar motion-distortion correction (deskewing). IMU data and IMU-odometry data between the start and end of the current scan are used to compute the pose at each laser point's timestamp; each point is then transformed into the lidar frame at the scan start time, which removes the motion distortion.
26 |
27 | 2. Feature extraction. For the deskewed point cloud of the current scan, the curvature of every point is computed, and corner and planar features are extracted from it.
28 |
29 | 3. Scan-to-map matching. Feature points are extracted from a local keyframe map and matched against the features of the current scan, updating the current pose.
30 |
31 | 4. Factor-graph optimization. Lidar-odometry factors, GPS factors and loop-closure factors are added, the factor graph is optimized, and all keyframe poses are updated.
32 |
33 | 5. Loop-closure detection. Candidate loop frames are searched among the historical keyframes, scan-to-map matching yields the relative pose, and the resulting loop-closure factor is added to the factor graph and optimized together with everything else.
34 |
35 | ## 1. Lidar motion-distortion correction (ImageProjection)
36 |
37 | **Overview**
38 |
39 | 1. IMU data between the start and end of the current lidar scan is used to compute the rotation increment, and IMU-odometry data (from ImuPreintegration) the translation increment; each laser point of the frame is then deskewed by transforming it, using the pose increment relative to the scan start, into the coordinate frame of the first point of the scan.
40 |
41 | 2. The IMU attitude (roll, pitch, yaw) and the IMU-odometry pose are also used to give the current scan a rough initial pose.
42 |
43 | **Subscribes**
44 |
45 | 1. Raw IMU data;
46 |
47 | 2. IMU odometry, from ImuPreintegration, giving the pose at every instant;
48 |
49 | 3. The raw lidar point cloud.
50 |
51 | **Publishes**
52 |
53 | 1. The valid, deskewed point cloud of the current scan, for display in rviz;
54 |
55 | 2. The cloud-info of the current deskewed scan (point cloud, initial pose, attitude, valid point data, etc.), sent to FeatureExtraction for feature extraction.
56 |
57 |
58 | ## 2. Point-cloud feature extraction (FeatureExtraction)
59 |
60 | **Overview**
61 |
62 | For the deskewed point cloud of the current scan, the curvature of every point is computed, and corner and planar features are extracted (classified by the curvature value).
63 |
64 | **Subscribes**
65 |
66 | The deskewed cloud-info of the current lidar scan, from ImageProjection.
67 |
68 | **Publishes**
69 |
70 | 1. The cloud-info of the current scan after feature extraction (deskewed point cloud, initial pose, attitude, valid point data, corner cloud, planar cloud, etc.), sent to MapOptimization;
71 |
72 | 2. The corner-feature cloud of the current scan, for rviz;
73 |
74 | 3. The planar-feature cloud of the current scan, for rviz.
75 |
76 |
77 | ## 3. IMU preintegration (ImuPreintegration)
78 |
79 | ### 1. TransformFusion class
80 |
81 | **Overview**
82 |
83 | Subscribes to the lidar odometry (from MapOptimization) and the IMU odometry; from the most recent lidar-odometry pose and the IMU-odometry increment between that time and the current time, it computes the IMU odometry at the current time and shows the (local) IMU-odometry trajectory in rviz.
84 |
85 | **Subscribes**
86 |
87 | 1. Lidar odometry, from MapOptimization;
88 |
89 | 2. IMU odometry, from ImuPreintegration.
90 |
91 | **Publishes**
92 |
93 | 1. IMU odometry, for rviz;
94 |
95 | 2. The IMU-odometry trajectory, covering only the interval from the latest lidar-odometry time to the current time.
96 |
97 |
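The fusion performed by TransformFusion boils down to composing the latest lidar-odometry pose with the IMU-odometry increment accumulated since that pose. The sketch below is a minimal Eigen illustration of that idea; the function and argument names are mine, not the repository's, and imuPreintegration.cpp does the equivalent with tf/ROS types inside its odometry callbacks:

```cpp
#include <Eigen/Dense>

// Minimal sketch of the TransformFusion idea (names are illustrative only):
// take the last (accurate, low-rate) lidar-odometry pose and push it forward
// with the IMU-odometry increment accumulated since that time, yielding a
// high-rate pose suitable for rviz display.
Eigen::Isometry3d fuseOdometry(const Eigen::Isometry3d& T_odom_lidar,    // latest lidar-odometry pose
                               const Eigen::Isometry3d& T_odom_imu_then, // IMU-odometry pose at that same time
                               const Eigen::Isometry3d& T_odom_imu_now)  // newest IMU-odometry pose
{
    // IMU-odometry increment since the last lidar-odometry update.
    const Eigen::Isometry3d T_incre = T_odom_imu_then.inverse() * T_odom_imu_now;
    // Apply the increment on top of the lidar-odometry pose.
    return T_odom_lidar * T_incre;
}
```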
98 | ### 2. ImuPreintegration class
99 |
100 | **Overview**
101 |
102 | 1. Uses the lidar odometry, plus the IMU preintegration between two lidar-odometry frames, to build a factor graph and optimize the current state (pose, velocity, bias);
103 |
104 | 2. Starting from the optimized state, applies the IMU preintegration to obtain the IMU odometry at every instant.
105 |
106 | **Subscribes**
107 |
108 | 1. Raw IMU data; starting from the factor-graph-optimized lidar-odometry pose, the IMU preintegration of the interval is applied to predict the IMU odometry at every instant (at IMU rate);
109 |
110 | 2. Lidar odometry (from MapOptimization); the IMU preintegration between two frames is used to build the factor graph and optimize the current pose (this pose is only used to update the per-instant IMU odometry and to seed the next round of factor-graph optimization).
111 |
112 | **Publishes**
113 |
114 | 1. IMU odometry.
115 |
116 |
117 | ## 4. Factor-graph optimization (MapOptimization)
118 |
119 | **Overview**
120 |
121 | 1. Scan-to-map matching: extracts the feature points of the current scan (corner, planar) and the feature points of a local keyframe map, runs an iterative scan-to-map optimization, and updates the current pose;
122 |
123 | 2. Keyframe factor-graph optimization: adds keyframes to the factor graph along with lidar-odometry factors, GPS factors and loop-closure factors, optimizes the graph, and updates all keyframe poses;
124 |
125 | 3. Loop-closure detection: among the historical keyframes, picks one that is close in space but far apart in time as the match, builds a local keyframe map around it, runs scan-to-map matching to obtain the relative pose, and adds the resulting loop-closure factor to the graph for optimization.
126 |
127 | **Subscribes**
128 |
129 | 1. The cloud-info of the current lidar scan, from FeatureExtraction;
130 |
131 | 2. GPS odometry;
132 |
133 | 3. Loop-closure data from an external loop-detection program; none is provided with this project, so this input is effectively unused.
134 |
135 | **Publishes**
136 |
137 | 1. Historical keyframe odometry;
138 |
139 | 2. The feature point cloud of the local keyframe map;
140 |
141 | 3. Lidar odometry, shown in rviz as a coordinate axis;
142 |
143 | 4. Lidar odometry (a second odometry topic);
144 |
145 | 5. The lidar-odometry path, shown in rviz as the vehicle trajectory;
146 |
147 | 6. A map-saving service;
148 |
149 | 7. The local keyframe map used for loop-closure matching;
150 |
151 | 8. The feature cloud of the current keyframe after applying the loop-closure-optimized pose;
152 |
153 | 9. Loop-closure edges, shown in rviz as lines between looped frames;
154 |
155 | 10. The downsampled planar-point set of the local map;
156 |
157 | 11. The downsampled corner and planar points of the (accumulated) historical frames;
158 |
159 | 12. The raw point cloud of the current frame after registration.
160 |
161 |
162 | If you find mistakes, please point them out; I hope these notes are helpful. More detail can be found in the code comments~
163 |
-------------------------------------------------------------------------------- /config/doc/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/demo.gif -------------------------------------------------------------------------------- /config/doc/device-boat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/device-boat.png -------------------------------------------------------------------------------- /config/doc/device-hand-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/device-hand-2.png -------------------------------------------------------------------------------- /config/doc/device-hand.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/device-hand.png -------------------------------------------------------------------------------- /config/doc/device-jackal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/device-jackal.png -------------------------------------------------------------------------------- /config/doc/gps-demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/gps-demo.gif -------------------------------------------------------------------------------- /config/doc/imu-debug.gif: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/imu-debug.gif -------------------------------------------------------------------------------- /config/doc/imu-transform.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/imu-transform.png -------------------------------------------------------------------------------- /config/doc/kitti-demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/kitti-demo.gif -------------------------------------------------------------------------------- /config/doc/kitti-map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/kitti-map.png -------------------------------------------------------------------------------- /config/doc/kitti2bag/README.md: -------------------------------------------------------------------------------- 1 | # kitti2bag 2 | 3 | ## How to run it? 4 | 5 | ```bash 6 | wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_sync.zip 7 | wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_extract.zip 8 | wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_calib.zip 9 | unzip 2011_09_26_drive_0084_sync.zip 10 | unzip 2011_09_26_drive_0084_extract.zip 11 | unzip 2011_09_26_calib.zip 12 | python kitti2bag.py -t 2011_09_26 -r 0084 raw_synced . 13 | ``` 14 | 15 | That's it. You have a bag that contains your data. 16 | 17 | Other source files can be found at [KITTI raw data](http://www.cvlibs.net/datasets/kitti/raw_data.php) page. -------------------------------------------------------------------------------- /config/doc/kitti2bag/kitti2bag.py: -------------------------------------------------------------------------------- 1 | #!env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import sys 5 | 6 | try: 7 | import pykitti 8 | except ImportError as e: 9 | print('Could not load module \'pykitti\'. 
Please run `pip install pykitti`') 10 | sys.exit(1) 11 | 12 | import tf 13 | import os 14 | import cv2 15 | import rospy 16 | import rosbag 17 | from tqdm import tqdm 18 | from tf2_msgs.msg import TFMessage 19 | from datetime import datetime 20 | from std_msgs.msg import Header 21 | from sensor_msgs.msg import CameraInfo, Imu, PointField, NavSatFix 22 | import sensor_msgs.point_cloud2 as pcl2 23 | from geometry_msgs.msg import TransformStamped, TwistStamped, Transform 24 | from cv_bridge import CvBridge 25 | import numpy as np 26 | import argparse 27 | 28 | def save_imu_data(bag, kitti, imu_frame_id, topic): 29 | print("Exporting IMU") 30 | for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): 31 | q = tf.transformations.quaternion_from_euler(oxts.packet.roll, oxts.packet.pitch, oxts.packet.yaw) 32 | imu = Imu() 33 | imu.header.frame_id = imu_frame_id 34 | imu.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) 35 | imu.orientation.x = q[0] 36 | imu.orientation.y = q[1] 37 | imu.orientation.z = q[2] 38 | imu.orientation.w = q[3] 39 | imu.linear_acceleration.x = oxts.packet.af 40 | imu.linear_acceleration.y = oxts.packet.al 41 | imu.linear_acceleration.z = oxts.packet.au 42 | imu.angular_velocity.x = oxts.packet.wf 43 | imu.angular_velocity.y = oxts.packet.wl 44 | imu.angular_velocity.z = oxts.packet.wu 45 | bag.write(topic, imu, t=imu.header.stamp) 46 | 47 | def save_imu_data_raw(bag, kitti, imu_frame_id, topic): 48 | print("Exporting IMU Raw") 49 | synced_path = kitti.data_path 50 | unsynced_path = synced_path.replace('sync', 'extract') 51 | imu_path = os.path.join(unsynced_path, 'oxts') 52 | 53 | # read time stamp (convert to ros seconds format) 54 | with open(os.path.join(imu_path, 'timestamps.txt')) as f: 55 | lines = f.readlines() 56 | imu_datetimes = [] 57 | for line in lines: 58 | if len(line) == 1: 59 | continue 60 | timestamp = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f') 61 | imu_datetimes.append(float(timestamp.strftime("%s.%f"))) 62 | 63 | # fix imu time using a linear model (may not be ideal, ^_^) 64 | imu_index = np.asarray(range(len(imu_datetimes)), dtype=np.float64) 65 | z = np.polyfit(imu_index, imu_datetimes, 1) 66 | imu_datetimes_new = z[0] * imu_index + z[1] 67 | imu_datetimes = imu_datetimes_new.tolist() 68 | 69 | # get all imu data 70 | imu_data_dir = os.path.join(imu_path, 'data') 71 | imu_filenames = sorted(os.listdir(imu_data_dir)) 72 | imu_data = [None] * len(imu_filenames) 73 | for i, imu_file in enumerate(imu_filenames): 74 | imu_data_file = open(os.path.join(imu_data_dir, imu_file), "r") 75 | for line in imu_data_file: 76 | if len(line) == 1: 77 | continue 78 | stripped_line = line.strip() 79 | line_list = stripped_line.split() 80 | imu_data[i] = line_list 81 | 82 | assert len(imu_datetimes) == len(imu_data) 83 | 84 | for timestamp, data in zip(imu_datetimes, imu_data): 85 | roll, pitch, yaw = float(data[3]), float(data[4]), float(data[5]), 86 | q = tf.transformations.quaternion_from_euler(roll, pitch, yaw) 87 | imu = Imu() 88 | imu.header.frame_id = imu_frame_id 89 | imu.header.stamp = rospy.Time.from_sec(timestamp) 90 | imu.orientation.x = q[0] 91 | imu.orientation.y = q[1] 92 | imu.orientation.z = q[2] 93 | imu.orientation.w = q[3] 94 | imu.linear_acceleration.x = float(data[11]) 95 | imu.linear_acceleration.y = float(data[12]) 96 | imu.linear_acceleration.z = float(data[13]) 97 | imu.angular_velocity.x = float(data[17]) 98 | imu.angular_velocity.y = float(data[18]) 99 | imu.angular_velocity.z = float(data[19]) 100 | 
bag.write(topic, imu, t=imu.header.stamp) 101 | 102 | imu.header.frame_id = 'imu_enu_link' 103 | bag.write('/imu_correct', imu, t=imu.header.stamp) # for LIO-SAM GPS 104 | 105 | def save_dynamic_tf(bag, kitti, kitti_type, initial_time): 106 | print("Exporting time dependent transformations") 107 | if kitti_type.find("raw") != -1: 108 | for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): 109 | tf_oxts_msg = TFMessage() 110 | tf_oxts_transform = TransformStamped() 111 | tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) 112 | tf_oxts_transform.header.frame_id = 'world' 113 | tf_oxts_transform.child_frame_id = 'base_link' 114 | 115 | transform = (oxts.T_w_imu) 116 | t = transform[0:3, 3] 117 | q = tf.transformations.quaternion_from_matrix(transform) 118 | oxts_tf = Transform() 119 | 120 | oxts_tf.translation.x = t[0] 121 | oxts_tf.translation.y = t[1] 122 | oxts_tf.translation.z = t[2] 123 | 124 | oxts_tf.rotation.x = q[0] 125 | oxts_tf.rotation.y = q[1] 126 | oxts_tf.rotation.z = q[2] 127 | oxts_tf.rotation.w = q[3] 128 | 129 | tf_oxts_transform.transform = oxts_tf 130 | tf_oxts_msg.transforms.append(tf_oxts_transform) 131 | 132 | bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp) 133 | 134 | elif kitti_type.find("odom") != -1: 135 | timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps) 136 | for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0): 137 | tf_msg = TFMessage() 138 | tf_stamped = TransformStamped() 139 | tf_stamped.header.stamp = rospy.Time.from_sec(timestamp) 140 | tf_stamped.header.frame_id = 'world' 141 | tf_stamped.child_frame_id = 'camera_left' 142 | 143 | t = tf_matrix[0:3, 3] 144 | q = tf.transformations.quaternion_from_matrix(tf_matrix) 145 | transform = Transform() 146 | 147 | transform.translation.x = t[0] 148 | transform.translation.y = t[1] 149 | transform.translation.z = t[2] 150 | 151 | transform.rotation.x = q[0] 152 | transform.rotation.y = q[1] 153 | transform.rotation.z = q[2] 154 | transform.rotation.w = q[3] 155 | 156 | tf_stamped.transform = transform 157 | tf_msg.transforms.append(tf_stamped) 158 | 159 | bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp) 160 | 161 | def save_camera_data(bag, kitti_type, kitti, util, bridge, camera, camera_frame_id, topic, initial_time): 162 | print("Exporting camera {}".format(camera)) 163 | if kitti_type.find("raw") != -1: 164 | camera_pad = '{0:02d}'.format(camera) 165 | image_dir = os.path.join(kitti.data_path, 'image_{}'.format(camera_pad)) 166 | image_path = os.path.join(image_dir, 'data') 167 | image_filenames = sorted(os.listdir(image_path)) 168 | with open(os.path.join(image_dir, 'timestamps.txt')) as f: 169 | image_datetimes = map(lambda x: datetime.strptime(x[:-4], '%Y-%m-%d %H:%M:%S.%f'), f.readlines()) 170 | 171 | calib = CameraInfo() 172 | calib.header.frame_id = camera_frame_id 173 | calib.width, calib.height = tuple(util['S_rect_{}'.format(camera_pad)].tolist()) 174 | calib.distortion_model = 'plumb_bob' 175 | calib.K = util['K_{}'.format(camera_pad)] 176 | calib.R = util['R_rect_{}'.format(camera_pad)] 177 | calib.D = util['D_{}'.format(camera_pad)] 178 | calib.P = util['P_rect_{}'.format(camera_pad)] 179 | 180 | elif kitti_type.find("odom") != -1: 181 | camera_pad = '{0:01d}'.format(camera) 182 | image_path = os.path.join(kitti.sequence_path, 'image_{}'.format(camera_pad)) 183 | image_filenames = sorted(os.listdir(image_path)) 184 | image_datetimes = map(lambda x: initial_time + x.total_seconds(), 
kitti.timestamps) 185 | 186 | calib = CameraInfo() 187 | calib.header.frame_id = camera_frame_id 188 | calib.P = util['P{}'.format(camera_pad)] 189 | 190 | iterable = zip(image_datetimes, image_filenames) 191 | for dt, filename in tqdm(iterable, total=len(image_filenames)): 192 | image_filename = os.path.join(image_path, filename) 193 | cv_image = cv2.imread(image_filename) 194 | calib.height, calib.width = cv_image.shape[:2] 195 | if camera in (0, 1): 196 | cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) 197 | encoding = "mono8" if camera in (0, 1) else "bgr8" 198 | image_message = bridge.cv2_to_imgmsg(cv_image, encoding=encoding) 199 | image_message.header.frame_id = camera_frame_id 200 | if kitti_type.find("raw") != -1: 201 | image_message.header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f"))) 202 | topic_ext = "/image_raw" 203 | elif kitti_type.find("odom") != -1: 204 | image_message.header.stamp = rospy.Time.from_sec(dt) 205 | topic_ext = "/image_rect" 206 | calib.header.stamp = image_message.header.stamp 207 | bag.write(topic + topic_ext, image_message, t = image_message.header.stamp) 208 | bag.write(topic + '/camera_info', calib, t = calib.header.stamp) 209 | 210 | def save_velo_data(bag, kitti, velo_frame_id, topic): 211 | print("Exporting velodyne data") 212 | velo_path = os.path.join(kitti.data_path, 'velodyne_points') 213 | velo_data_dir = os.path.join(velo_path, 'data') 214 | velo_filenames = sorted(os.listdir(velo_data_dir)) 215 | with open(os.path.join(velo_path, 'timestamps.txt')) as f: 216 | lines = f.readlines() 217 | velo_datetimes = [] 218 | for line in lines: 219 | if len(line) == 1: 220 | continue 221 | dt = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f') 222 | velo_datetimes.append(dt) 223 | 224 | iterable = zip(velo_datetimes, velo_filenames) 225 | 226 | count = 0 227 | 228 | for dt, filename in tqdm(iterable, total=len(velo_filenames)): 229 | if dt is None: 230 | continue 231 | 232 | velo_filename = os.path.join(velo_data_dir, filename) 233 | 234 | # read binary data 235 | scan = (np.fromfile(velo_filename, dtype=np.float32)).reshape(-1, 4) 236 | 237 | # get ring channel 238 | depth = np.linalg.norm(scan, 2, axis=1) 239 | pitch = np.arcsin(scan[:, 2] / depth) # arcsin(z, depth) 240 | fov_down = -24.8 / 180.0 * np.pi 241 | fov = (abs(-24.8) + abs(2.0)) / 180.0 * np.pi 242 | proj_y = (pitch + abs(fov_down)) / fov # in [0.0, 1.0] 243 | proj_y *= 64 # in [0.0, H] 244 | proj_y = np.floor(proj_y) 245 | proj_y = np.minimum(64 - 1, proj_y) 246 | proj_y = np.maximum(0, proj_y).astype(np.int32) # in [0,H-1] 247 | proj_y = proj_y.reshape(-1, 1) 248 | scan = np.concatenate((scan,proj_y), axis=1) 249 | scan = scan.tolist() 250 | for i in range(len(scan)): 251 | scan[i][-1] = int(scan[i][-1]) 252 | 253 | # create header 254 | header = Header() 255 | header.frame_id = velo_frame_id 256 | header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f"))) 257 | 258 | # fill pcl msg 259 | fields = [PointField('x', 0, PointField.FLOAT32, 1), 260 | PointField('y', 4, PointField.FLOAT32, 1), 261 | PointField('z', 8, PointField.FLOAT32, 1), 262 | PointField('intensity', 12, PointField.FLOAT32, 1), 263 | PointField('ring', 16, PointField.UINT16, 1)] 264 | pcl_msg = pcl2.create_cloud(header, fields, scan) 265 | pcl_msg.is_dense = True 266 | # print(pcl_msg) 267 | 268 | bag.write(topic, pcl_msg, t=pcl_msg.header.stamp) 269 | 270 | # count += 1 271 | # if count > 200: 272 | # break 273 | 274 | def get_static_transform(from_frame_id, to_frame_id, 
transform): 275 | t = transform[0:3, 3] 276 | q = tf.transformations.quaternion_from_matrix(transform) 277 | tf_msg = TransformStamped() 278 | tf_msg.header.frame_id = from_frame_id 279 | tf_msg.child_frame_id = to_frame_id 280 | tf_msg.transform.translation.x = float(t[0]) 281 | tf_msg.transform.translation.y = float(t[1]) 282 | tf_msg.transform.translation.z = float(t[2]) 283 | tf_msg.transform.rotation.x = float(q[0]) 284 | tf_msg.transform.rotation.y = float(q[1]) 285 | tf_msg.transform.rotation.z = float(q[2]) 286 | tf_msg.transform.rotation.w = float(q[3]) 287 | return tf_msg 288 | 289 | 290 | def inv(transform): 291 | "Invert rigid body transformation matrix" 292 | R = transform[0:3, 0:3] 293 | t = transform[0:3, 3] 294 | t_inv = -1 * R.T.dot(t) 295 | transform_inv = np.eye(4) 296 | transform_inv[0:3, 0:3] = R.T 297 | transform_inv[0:3, 3] = t_inv 298 | return transform_inv 299 | 300 | 301 | def save_static_transforms(bag, transforms, timestamps): 302 | print("Exporting static transformations") 303 | tfm = TFMessage() 304 | for transform in transforms: 305 | t = get_static_transform(from_frame_id=transform[0], to_frame_id=transform[1], transform=transform[2]) 306 | tfm.transforms.append(t) 307 | for timestamp in timestamps: 308 | time = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) 309 | for i in range(len(tfm.transforms)): 310 | tfm.transforms[i].header.stamp = time 311 | bag.write('/tf_static', tfm, t=time) 312 | 313 | 314 | def save_gps_fix_data(bag, kitti, gps_frame_id, topic): 315 | for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): 316 | navsatfix_msg = NavSatFix() 317 | navsatfix_msg.header.frame_id = gps_frame_id 318 | navsatfix_msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) 319 | navsatfix_msg.latitude = oxts.packet.lat 320 | navsatfix_msg.longitude = oxts.packet.lon 321 | navsatfix_msg.altitude = oxts.packet.alt 322 | navsatfix_msg.status.service = 1 323 | bag.write(topic, navsatfix_msg, t=navsatfix_msg.header.stamp) 324 | 325 | 326 | def save_gps_vel_data(bag, kitti, gps_frame_id, topic): 327 | for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): 328 | twist_msg = TwistStamped() 329 | twist_msg.header.frame_id = gps_frame_id 330 | twist_msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) 331 | twist_msg.twist.linear.x = oxts.packet.vf 332 | twist_msg.twist.linear.y = oxts.packet.vl 333 | twist_msg.twist.linear.z = oxts.packet.vu 334 | twist_msg.twist.angular.x = oxts.packet.wf 335 | twist_msg.twist.angular.y = oxts.packet.wl 336 | twist_msg.twist.angular.z = oxts.packet.wu 337 | bag.write(topic, twist_msg, t=twist_msg.header.stamp) 338 | 339 | 340 | if __name__ == "__main__": 341 | parser = argparse.ArgumentParser(description = "Convert KITTI dataset to ROS bag file the easy way!") 342 | # Accepted argument values 343 | kitti_types = ["raw_synced", "odom_color", "odom_gray"] 344 | odometry_sequences = [] 345 | for s in range(22): 346 | odometry_sequences.append(str(s).zfill(2)) 347 | 348 | parser.add_argument("kitti_type", choices = kitti_types, help = "KITTI dataset type") 349 | parser.add_argument("dir", nargs = "?", default = os.getcwd(), help = "base directory of the dataset, if no directory passed the deafult is current working directory") 350 | parser.add_argument("-t", "--date", help = "date of the raw dataset (i.e. 2011_09_26), option is only for RAW datasets.") 351 | parser.add_argument("-r", "--drive", help = "drive number of the raw dataset (i.e. 
0001), option is only for RAW datasets.") 352 | parser.add_argument("-s", "--sequence", choices = odometry_sequences,help = "sequence of the odometry dataset (between 00 - 21), option is only for ODOMETRY datasets.") 353 | args = parser.parse_args() 354 | 355 | bridge = CvBridge() 356 | compression = rosbag.Compression.NONE 357 | # compression = rosbag.Compression.BZ2 358 | # compression = rosbag.Compression.LZ4 359 | 360 | # CAMERAS 361 | cameras = [ 362 | (0, 'camera_gray_left', '/kitti/camera_gray_left'), 363 | (1, 'camera_gray_right', '/kitti/camera_gray_right'), 364 | (2, 'camera_color_left', '/kitti/camera_color_left'), 365 | (3, 'camera_color_right', '/kitti/camera_color_right') 366 | ] 367 | 368 | if args.kitti_type.find("raw") != -1: 369 | 370 | if args.date == None: 371 | print("Date option is not given. It is mandatory for raw dataset.") 372 | print("Usage for raw dataset: kitti2bag raw_synced [dir] -t -r ") 373 | sys.exit(1) 374 | elif args.drive == None: 375 | print("Drive option is not given. It is mandatory for raw dataset.") 376 | print("Usage for raw dataset: kitti2bag raw_synced [dir] -t -r ") 377 | sys.exit(1) 378 | 379 | bag = rosbag.Bag("kitti_{}_drive_{}_{}.bag".format(args.date, args.drive, args.kitti_type[4:]), 'w', compression=compression) 380 | kitti = pykitti.raw(args.dir, args.date, args.drive) 381 | if not os.path.exists(kitti.data_path): 382 | print('Path {} does not exists. Exiting.'.format(kitti.data_path)) 383 | sys.exit(1) 384 | 385 | if len(kitti.timestamps) == 0: 386 | print('Dataset is empty? Exiting.') 387 | sys.exit(1) 388 | 389 | try: 390 | # IMU 391 | imu_frame_id = 'imu_link' 392 | imu_topic = '/kitti/oxts/imu' 393 | imu_raw_topic = '/imu_raw' 394 | gps_fix_topic = '/gps/fix' 395 | gps_vel_topic = '/gps/vel' 396 | velo_frame_id = 'velodyne' 397 | velo_topic = '/points_raw' 398 | 399 | T_base_link_to_imu = np.eye(4, 4) 400 | T_base_link_to_imu[0:3, 3] = [-2.71/2.0-0.05, 0.32, 0.93] 401 | 402 | # tf_static 403 | transforms = [ 404 | ('base_link', imu_frame_id, T_base_link_to_imu), 405 | (imu_frame_id, velo_frame_id, inv(kitti.calib.T_velo_imu)), 406 | (imu_frame_id, cameras[0][1], inv(kitti.calib.T_cam0_imu)), 407 | (imu_frame_id, cameras[1][1], inv(kitti.calib.T_cam1_imu)), 408 | (imu_frame_id, cameras[2][1], inv(kitti.calib.T_cam2_imu)), 409 | (imu_frame_id, cameras[3][1], inv(kitti.calib.T_cam3_imu)) 410 | ] 411 | 412 | util = pykitti.utils.read_calib_file(os.path.join(kitti.calib_path, 'calib_cam_to_cam.txt')) 413 | 414 | # Export 415 | # save_static_transforms(bag, transforms, kitti.timestamps) 416 | # save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=None) 417 | # save_imu_data(bag, kitti, imu_frame_id, imu_topic) 418 | save_imu_data_raw(bag, kitti, imu_frame_id, imu_raw_topic) 419 | save_gps_fix_data(bag, kitti, imu_frame_id, gps_fix_topic) 420 | save_gps_vel_data(bag, kitti, imu_frame_id, gps_vel_topic) 421 | for camera in cameras: 422 | save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=None) 423 | break 424 | save_velo_data(bag, kitti, velo_frame_id, velo_topic) 425 | 426 | finally: 427 | print("## OVERVIEW ##") 428 | print(bag) 429 | bag.close() 430 | 431 | elif args.kitti_type.find("odom") != -1: 432 | 433 | if args.sequence == None: 434 | print("Sequence option is not given. 
It is mandatory for odometry dataset.") 435 | print("Usage for odometry dataset: kitti2bag {odom_color, odom_gray} [dir] -s ") 436 | sys.exit(1) 437 | 438 | bag = rosbag.Bag("kitti_data_odometry_{}_sequence_{}.bag".format(args.kitti_type[5:],args.sequence), 'w', compression=compression) 439 | 440 | kitti = pykitti.odometry(args.dir, args.sequence) 441 | if not os.path.exists(kitti.sequence_path): 442 | print('Path {} does not exists. Exiting.'.format(kitti.sequence_path)) 443 | sys.exit(1) 444 | 445 | kitti.load_calib() 446 | kitti.load_timestamps() 447 | 448 | if len(kitti.timestamps) == 0: 449 | print('Dataset is empty? Exiting.') 450 | sys.exit(1) 451 | 452 | if args.sequence in odometry_sequences[:11]: 453 | print("Odometry dataset sequence {} has ground truth information (poses).".format(args.sequence)) 454 | kitti.load_poses() 455 | 456 | try: 457 | util = pykitti.utils.read_calib_file(os.path.join(args.dir,'sequences',args.sequence, 'calib.txt')) 458 | current_epoch = (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() 459 | # Export 460 | if args.kitti_type.find("gray") != -1: 461 | used_cameras = cameras[:2] 462 | elif args.kitti_type.find("color") != -1: 463 | used_cameras = cameras[-2:] 464 | 465 | save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=current_epoch) 466 | for camera in used_cameras: 467 | save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=current_epoch) 468 | 469 | finally: 470 | print("## OVERVIEW ##") 471 | print(bag) 472 | bag.close() 473 | 474 | -------------------------------------------------------------------------------- /config/doc/loop-closure-2.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/loop-closure-2.gif -------------------------------------------------------------------------------- /config/doc/loop-closure.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/loop-closure.gif -------------------------------------------------------------------------------- /config/doc/ouster-demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/ouster-demo.gif -------------------------------------------------------------------------------- /config/doc/ouster-device.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/ouster-device.jpg -------------------------------------------------------------------------------- /config/doc/paper.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/paper.pdf -------------------------------------------------------------------------------- /config/doc/system.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/config/doc/system.png 
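A note on the `extrinsicRot` / `extrinsicRPY` entries that appear in the parameter files below: they encode the lidar-to-IMU extrinsics and are applied to every raw IMU sample so that accelerations, angular rates and orientation are expressed in the lidar (base_link) frame before use. The following is a minimal Eigen sketch, assuming the member naming used in include/utility.h (`extRot`, `extQRPY`); it is an illustration of the idea, not the repository's exact implementation:

```cpp
#include <Eigen/Dense>

// Illustrative IMU sample (fields mirror sensor_msgs/Imu in spirit).
struct ImuSample {
    Eigen::Vector3d acc;            // linear acceleration
    Eigen::Vector3d gyr;            // angular velocity
    Eigen::Quaterniond orientation; // attitude estimate
};

// Sketch of rotating an IMU sample into the lidar frame using the extrinsics
// loaded from params.yaml (extrinsicRot -> extRot, extrinsicRPY -> extQRPY).
// Names and structure are assumptions for illustration only.
ImuSample imuToLidarFrame(const ImuSample& in,
                          const Eigen::Matrix3d& extRot,
                          const Eigen::Quaterniond& extQRPY)
{
    ImuSample out;
    out.acc = extRot * in.acc;                   // rotate acceleration
    out.gyr = extRot * in.gyr;                   // rotate angular velocity
    out.orientation = in.orientation * extQRPY;  // adjust the orientation convention
    return out;
}
```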
-------------------------------------------------------------------------------- /config/params.yaml: -------------------------------------------------------------------------------- 1 | lio_sam: 2 | 3 | # Topics 4 | pointCloudTopic: "points_raw" # Point cloud data 5 | imuTopic: "imu_correct" # IMU data 6 | odomTopic: "odometry/imu" # IMU pre-preintegration odometry, same frequency as IMU 7 | gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file 8 | 9 | # Frames 10 | lidarFrame: "base_link" 11 | baselinkFrame: "base_link" 12 | odometryFrame: "odom" 13 | mapFrame: "map" 14 | 15 | # GPS Settings 16 | useImuHeadingInitialization: false # if using GPS data, set to "true" 17 | useGpsElevation: false # if GPS elevation is bad, set to "false" 18 | gpsCovThreshold: 2.0 # m^2, threshold for using GPS data 19 | poseCovThreshold: 25.0 # m^2, threshold for using GPS data 20 | 21 | # Export settings 22 | savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3 23 | savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation 24 | 25 | # Sensor Settings 26 | sensor: velodyne # lidar sensor type, either 'velodyne' or 'ouster' 27 | N_SCAN: 16 # number of lidar channel (i.e., 16, 32, 64, 128) 28 | Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048) 29 | downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1 30 | lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used 31 | lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used 32 | 33 | # IMU Settings 34 | imuAccNoise: 3.9939570888238808e-03 35 | imuGyrNoise: 1.5636343949698187e-03 36 | imuAccBiasN: 6.4356659353532566e-05 37 | imuGyrBiasN: 3.5640318696367613e-05 38 | imuGravity: 9.80511 39 | imuRPYWeight: 0.01 40 | 41 | # Extrinsics (lidar -> IMU) 42 | extrinsicTrans: [0.0, 0.0, 0.0] 43 | # extrinsicRot: [-1, 0, 0, 44 | # 0, 1, 0, 45 | # 0, 0, -1] 46 | # extrinsicRPY: [0, 1, 0, 47 | # -1, 0, 0, 48 | # 0, 0, 1] 49 | extrinsicRot: [1, 0, 0, 50 | 0, 1, 0, 51 | 0, 0, 1] 52 | extrinsicRPY: [1, 0, 0, 53 | 0, 1, 0, 54 | 0, 0, 1] 55 | 56 | # LOAM feature threshold 57 | edgeThreshold: 1.0 58 | surfThreshold: 0.1 59 | edgeFeatureMinValidNum: 10 60 | surfFeatureMinValidNum: 100 61 | 62 | # voxel filter paprams 63 | odometrySurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor 64 | mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor 65 | mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor 66 | 67 | # robot motion constraint (in case you are using a 2D robot) 68 | z_tollerance: 1000 # meters 69 | rotation_tollerance: 1000 # radians 70 | 71 | # CPU Params 72 | numberOfCores: 4 # number of cores for mapping optimization 73 | mappingProcessInterval: 0.15 # seconds, regulate mapping frequency 74 | 75 | # Surrounding map 76 | surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold 77 | surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold 78 | surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses 79 | surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled) 80 | 81 | # Loop closure 82 | loopClosureEnableFlag: true 83 | loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency 84 | 
surroundingKeyframeSize: 50 # submap size (when loop closure enabled) 85 | historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considerd for loop closure 86 | historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure 87 | historyKeyframeSearchNum: 25 # number of hostory key frames will be fused into a submap for loop closure 88 | historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment 89 | 90 | # Visualization 91 | globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius 92 | globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density 93 | globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density 94 | 95 | 96 | 97 | 98 | # Navsat (convert GPS coordinates to Cartesian) 99 | navsat: 100 | frequency: 50 101 | wait_for_datum: false 102 | delay: 0.0 103 | magnetic_declination_radians: 0 104 | yaw_offset: 0 105 | zero_altitude: true 106 | broadcast_utm_transform: false 107 | broadcast_utm_transform_as_parent_frame: false 108 | publish_filtered_gps: false 109 | 110 | # EKF for Navsat 111 | ekf_gps: 112 | publish_tf: false 113 | map_frame: map 114 | odom_frame: odom 115 | base_link_frame: base_link 116 | world_frame: odom 117 | 118 | frequency: 50 119 | two_d_mode: false 120 | sensor_timeout: 0.01 121 | # ------------------------------------- 122 | # External IMU: 123 | # ------------------------------------- 124 | imu0: imu_correct 125 | # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. EKF can also transform the data using tf between your imu and base_link 126 | imu0_config: [false, false, false, 127 | true, true, true, 128 | false, false, false, 129 | false, false, true, 130 | true, true, true] 131 | imu0_differential: false 132 | imu0_queue_size: 50 133 | imu0_remove_gravitational_acceleration: true 134 | # ------------------------------------- 135 | # Odometry (From Navsat): 136 | # ------------------------------------- 137 | odom0: odometry/gps 138 | odom0_config: [true, true, true, 139 | false, false, false, 140 | false, false, false, 141 | false, false, false, 142 | false, false, false] 143 | odom0_differential: false 144 | odom0_queue_size: 10 145 | 146 | # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot 147 | process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148 | 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 149 | 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 150 | 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 151 | 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 152 | 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 153 | 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 154 | 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 155 | 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 156 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 157 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 158 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 159 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 160 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 161 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015] 162 | -------------------------------------------------------------------------------- /config/params_outdoor.yaml: -------------------------------------------------------------------------------- 1 | lio_sam: 2 | 3 | # Topics 4 | pointCloudTopic: "points_raw" # Point cloud data 5 
| imuTopic: "imu_correct" # IMU data 6 | odomTopic: "odometry/imu" # IMU pre-preintegration odometry, same frequency as IMU 7 | gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file 8 | 9 | # Frames 10 | lidarFrame: "base_link" 11 | baselinkFrame: "base_link" 12 | odometryFrame: "odom" 13 | mapFrame: "map" 14 | 15 | # GPS Settings 16 | useImuHeadingInitialization: false # if using GPS data, set to "true" 17 | useGpsElevation: false # if GPS elevation is bad, set to "false" 18 | gpsCovThreshold: 2.0 # m^2, threshold for using GPS data 19 | poseCovThreshold: 25.0 # m^2, threshold for using GPS data 20 | 21 | # Export settings 22 | savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3 23 | savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation 24 | 25 | # Sensor Settings 26 | sensor: velodyne # lidar sensor type, either 'velodyne' or 'ouster' 27 | N_SCAN: 16 # number of lidar channel (i.e., 16, 32, 64, 128) 28 | Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048) 29 | downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1 30 | lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used 31 | lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used 32 | 33 | # IMU Settings 34 | imuAccNoise: 3.9939570888238808e-03 35 | imuGyrNoise: 1.5636343949698187e-03 36 | imuAccBiasN: 6.4356659353532566e-05 37 | imuGyrBiasN: 3.5640318696367613e-05 38 | imuGravity: 9.80511 39 | imuRPYWeight: 0.01 40 | 41 | # Extrinsics (lidar -> IMU) 42 | extrinsicTrans: [0.0, 0.0, 0.0] 43 | # extrinsicRot: [-1, 0, 0, 44 | # 0, 1, 0, 45 | # 0, 0, -1] 46 | #extrinsicRPY: [0, 1, 0, 47 | # -1, 0, 0, 48 | # 0, 0, 1] 49 | extrinsicRot: [1, 0, 0, 50 | 0, 1, 0, 51 | 0, 0, 1] 52 | extrinsicRPY: [1, 0, 0, 53 | 0, 1, 0, 54 | 0, 0, 1] 55 | 56 | # LOAM feature threshold 57 | edgeThreshold: 1.0 58 | surfThreshold: 0.1 59 | edgeFeatureMinValidNum: 10 60 | surfFeatureMinValidNum: 100 61 | 62 | # voxel filter paprams 63 | odometrySurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor 64 | mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor 65 | mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor 66 | 67 | # robot motion constraint (in case you are using a 2D robot) 68 | z_tollerance: 1000 # meters 69 | rotation_tollerance: 1000 # radians 70 | 71 | # CPU Params 72 | numberOfCores: 4 # number of cores for mapping optimization 73 | mappingProcessInterval: 0.15 # seconds, regulate mapping frequency 74 | 75 | # Surrounding map 76 | surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold 77 | surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold 78 | surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses 79 | surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled) 80 | 81 | # Loop closure 82 | loopClosureEnableFlag: true 83 | loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency 84 | surroundingKeyframeSize: 50 # submap size (when loop closure enabled) 85 | historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considerd for loop closure 86 | historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds 
older will be considered for loop closure 87 | historyKeyframeSearchNum: 25 # number of hostory key frames will be fused into a submap for loop closure 88 | historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment 89 | 90 | # Visualization 91 | globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius 92 | globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density 93 | globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density 94 | 95 | 96 | 97 | 98 | # Navsat (convert GPS coordinates to Cartesian) 99 | navsat: 100 | frequency: 50 101 | wait_for_datum: false 102 | delay: 0.0 103 | magnetic_declination_radians: 0 104 | yaw_offset: 0 105 | zero_altitude: true 106 | broadcast_utm_transform: false 107 | broadcast_utm_transform_as_parent_frame: false 108 | publish_filtered_gps: false 109 | 110 | # EKF for Navsat 111 | ekf_gps: 112 | publish_tf: false 113 | map_frame: map 114 | odom_frame: odom 115 | base_link_frame: base_link 116 | world_frame: odom 117 | 118 | frequency: 50 119 | two_d_mode: false 120 | sensor_timeout: 0.01 121 | # ------------------------------------- 122 | # External IMU: 123 | # ------------------------------------- 124 | imu0: imu_correct 125 | # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. EKF can also transform the data using tf between your imu and base_link 126 | imu0_config: [false, false, false, 127 | true, true, true, 128 | false, false, false, 129 | false, false, true, 130 | true, true, true] 131 | imu0_differential: false 132 | imu0_queue_size: 50 133 | imu0_remove_gravitational_acceleration: true 134 | # ------------------------------------- 135 | # Odometry (From Navsat): 136 | # ------------------------------------- 137 | odom0: odometry/gps 138 | odom0_config: [true, true, true, 139 | false, false, false, 140 | false, false, false, 141 | false, false, false, 142 | false, false, false] 143 | odom0_differential: false 144 | odom0_queue_size: 10 145 | 146 | # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot 147 | process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148 | 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 149 | 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 150 | 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 151 | 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 152 | 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 153 | 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 154 | 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 155 | 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 156 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 157 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 158 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 159 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 160 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 161 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015] 162 | 163 | -------------------------------------------------------------------------------- /config/params_park.yaml: -------------------------------------------------------------------------------- 1 | lio_sam: 2 | 3 | # Topics 4 | pointCloudTopic: "points_raw" # Point cloud data 5 | imuTopic: "imu_raw" # IMU data 6 | odomTopic: "odometry/imu" # IMU pre-preintegration odometry, same frequency as IMU 7 | gpsTopic: "odometry/gps" # GPS odometry topic from navsat, see module_navsat.launch file 8 | 9 | # Frames 10 | lidarFrame: "base_link" 11 | baselinkFrame: 
"base_link" 12 | odometryFrame: "odom" 13 | mapFrame: "map" 14 | 15 | # GPS Settings 16 | useImuHeadingInitialization: true # if using GPS data, set to "true" 17 | useGpsElevation: false # if GPS elevation is bad, set to "false" 18 | gpsCovThreshold: 2.0 # m^2, threshold for using GPS data 19 | poseCovThreshold: 25.0 # m^2, threshold for using GPS data 20 | 21 | # Export settings 22 | savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3 23 | savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation 24 | 25 | # Sensor Settings 26 | sensor: velodyne # lidar sensor type, either 'velodyne' or 'ouster' 27 | N_SCAN: 16 # number of lidar channel (i.e., 16, 32, 64, 128) 28 | Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048) 29 | downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1 30 | lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used 31 | lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used 32 | 33 | # IMU Settings 34 | imuAccNoise: 3.9939570888238808e-03 35 | imuGyrNoise: 1.5636343949698187e-03 36 | imuAccBiasN: 6.4356659353532566e-05 37 | imuGyrBiasN: 3.5640318696367613e-05 38 | imuGravity: 9.80511 39 | imuRPYWeight: 0.01 40 | 41 | # Extrinsics (lidar -> IMU) 42 | extrinsicTrans: [0.0, 0.0, 0.0] 43 | extrinsicRot: [-1, 0, 0, 44 | 0, 1, 0, 45 | 0, 0, -1] 46 | extrinsicRPY: [0, 1, 0, 47 | -1, 0, 0, 48 | 0, 0, 1] 49 | # extrinsicRot: [1, 0, 0, 50 | # 0, 1, 0, 51 | # 0, 0, 1] 52 | # extrinsicRPY: [1, 0, 0, 53 | # 0, 1, 0, 54 | # 0, 0, 1] 55 | 56 | # LOAM feature threshold 57 | edgeThreshold: 1.0 58 | surfThreshold: 0.1 59 | edgeFeatureMinValidNum: 10 60 | surfFeatureMinValidNum: 100 61 | 62 | # voxel filter paprams 63 | odometrySurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor 64 | mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor 65 | mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor 66 | 67 | # robot motion constraint (in case you are using a 2D robot) 68 | z_tollerance: 1000 # meters 69 | rotation_tollerance: 1000 # radians 70 | 71 | # CPU Params 72 | numberOfCores: 4 # number of cores for mapping optimization 73 | mappingProcessInterval: 0.15 # seconds, regulate mapping frequency 74 | 75 | # Surrounding map 76 | surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold 77 | surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold 78 | surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses 79 | surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled) 80 | 81 | # Loop closure 82 | loopClosureEnableFlag: true 83 | loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency 84 | surroundingKeyframeSize: 50 # submap size (when loop closure enabled) 85 | historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considerd for loop closure 86 | historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure 87 | historyKeyframeSearchNum: 25 # number of hostory key frames will be fused into a submap for loop closure 88 | historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment 89 | 90 | # Visualization 91 | 
globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius 92 | globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density 93 | globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density 94 | 95 | 96 | 97 | 98 | # Navsat (convert GPS coordinates to Cartesian) 99 | navsat: 100 | frequency: 50 101 | wait_for_datum: false 102 | delay: 0.0 103 | magnetic_declination_radians: 0 104 | yaw_offset: 0 105 | zero_altitude: true 106 | broadcast_utm_transform: false 107 | broadcast_utm_transform_as_parent_frame: false 108 | publish_filtered_gps: false 109 | 110 | # EKF for Navsat 111 | ekf_gps: 112 | publish_tf: false 113 | map_frame: map 114 | odom_frame: odom 115 | base_link_frame: base_link 116 | world_frame: odom 117 | 118 | frequency: 50 119 | two_d_mode: false 120 | sensor_timeout: 0.01 121 | # ------------------------------------- 122 | # External IMU: 123 | # ------------------------------------- 124 | imu0: imu_correct 125 | # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. EKF can also transform the data using tf between your imu and base_link 126 | imu0_config: [false, false, false, 127 | true, true, true, 128 | false, false, false, 129 | false, false, true, 130 | true, true, true] 131 | imu0_differential: false 132 | imu0_queue_size: 50 133 | imu0_remove_gravitational_acceleration: true 134 | # ------------------------------------- 135 | # Odometry (From Navsat): 136 | # ------------------------------------- 137 | odom0: odometry/gps 138 | odom0_config: [true, true, true, 139 | false, false, false, 140 | false, false, false, 141 | false, false, false, 142 | false, false, false] 143 | odom0_differential: false 144 | odom0_queue_size: 10 145 | 146 | # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot 147 | process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148 | 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 149 | 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 150 | 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 151 | 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 152 | 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 153 | 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 154 | 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 155 | 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 156 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 157 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 158 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 159 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 160 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 161 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015] 162 | 163 | -------------------------------------------------------------------------------- /factor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/factor.png -------------------------------------------------------------------------------- /frames.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/frames.pdf -------------------------------------------------------------------------------- /include/utility.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef _UTILITY_LIDAR_ODOMETRY_H_ 
3 | #define _UTILITY_LIDAR_ODOMETRY_H_ 4 | 5 | #include 6 | 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | #include 18 | 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | 33 | #include 34 | #include 35 | #include 36 | #include 37 | 38 | #include 39 | #include 40 | #include 41 | #include 42 | #include 43 | #include 44 | #include 45 | #include 46 | #include 47 | #include 48 | #include 49 | #include 50 | #include 51 | #include 52 | #include 53 | #include 54 | #include 55 | 56 | using namespace std; 57 | 58 | typedef pcl::PointXYZI PointType; 59 | 60 | // 传感器型号 61 | enum class SensorType { VELODYNE, OUSTER }; 62 | 63 | class ParamServer 64 | { 65 | public: 66 | 67 | ros::NodeHandle nh; 68 | 69 | std::string robot_id; 70 | 71 | // 话题 72 | string pointCloudTopic; // points_raw 原始点云数据 73 | string imuTopic; // imu_raw 对应park数据集,imu_correct对应outdoor数据集,都是原始imu数据,不同的坐标系表示 74 | string odomTopic; // odometry/imu,imu里程计,imu积分计算得到 75 | string gpsTopic; // odometry/gps,gps里程计 76 | 77 | // 坐标系 78 | string lidarFrame; // 激光坐标系 79 | string baselinkFrame; // 载体坐标系 80 | string odometryFrame; // 里程计坐标系 81 | string mapFrame; // 世界坐标系 82 | 83 | // GPS参数 84 | bool useImuHeadingInitialization; // 85 | bool useGpsElevation; 86 | float gpsCovThreshold; 87 | float poseCovThreshold; 88 | 89 | // 保存PCD 90 | bool savePCD; // 是否保存地图 91 | string savePCDDirectory; // 保存路径 92 | 93 | // 激光传感器参数 94 | SensorType sensor; // 传感器型号 95 | int N_SCAN; // 扫描线数,例如16、64 96 | int Horizon_SCAN; // 扫描一周计数,例如每隔0.2°扫描一次,一周360°可以扫描1800次 97 | int downsampleRate; // 扫描线降采样,跳过一些扫描线 98 | float lidarMinRange; // 最小范围 99 | float lidarMaxRange; // 最大范围 100 | 101 | // IMU参数 102 | float imuAccNoise; // 加速度噪声标准差 103 | float imuGyrNoise; // 角速度噪声标准差 104 | float imuAccBiasN; // 105 | float imuGyrBiasN; 106 | float imuGravity; // 重力加速度 107 | float imuRPYWeight; 108 | vector extRotV; 109 | vector extRPYV; 110 | vector extTransV; 111 | Eigen::Matrix3d extRot; // xyz坐标系旋转 112 | Eigen::Matrix3d extRPY; // RPY欧拉角的变换关系 113 | Eigen::Vector3d extTrans; // xyz坐标系平移 114 | Eigen::Quaterniond extQRPY; 115 | 116 | // LOAM 117 | float edgeThreshold; 118 | float surfThreshold; 119 | int edgeFeatureMinValidNum; 120 | int surfFeatureMinValidNum; 121 | 122 | // voxel filter paprams 123 | float odometrySurfLeafSize; 124 | float mappingCornerLeafSize; 125 | float mappingSurfLeafSize ; 126 | 127 | float z_tollerance; 128 | float rotation_tollerance; 129 | 130 | // CPU Params 131 | int numberOfCores; 132 | double mappingProcessInterval; 133 | 134 | // Surrounding map 135 | float surroundingkeyframeAddingDistThreshold; 136 | float surroundingkeyframeAddingAngleThreshold; 137 | float surroundingKeyframeDensity; 138 | float surroundingKeyframeSearchRadius; 139 | 140 | // Loop closure 141 | bool loopClosureEnableFlag; 142 | float loopClosureFrequency; 143 | int surroundingKeyframeSize; 144 | float historyKeyframeSearchRadius; 145 | float historyKeyframeSearchTimeDiff; 146 | int historyKeyframeSearchNum; 147 | float historyKeyframeFitnessScore; 148 | 149 | // global map visualization radius 150 | float globalMapVisualizationSearchRadius; 151 | float globalMapVisualizationPoseDensity; 152 | float globalMapVisualizationLeafSize; 153 | 154 | ParamServer() 155 | { 156 | nh.param("/robot_id", robot_id, "roboat"); 157 | 158 | // 从param 
server中读取key为"lio_sam/pointCloudTopic"对应的参数,存pointCloudTopic,第三个参数是默认值 159 | // launch文件中定义,从yaml文件加载参数 160 | nh.param("lio_sam/pointCloudTopic", pointCloudTopic, "points_raw"); 161 | nh.param("lio_sam/imuTopic", imuTopic, "imu_correct"); 162 | nh.param("lio_sam/odomTopic", odomTopic, "odometry/imu"); 163 | nh.param("lio_sam/gpsTopic", gpsTopic, "odometry/gps"); 164 | 165 | nh.param("lio_sam/lidarFrame", lidarFrame, "base_link"); 166 | nh.param("lio_sam/baselinkFrame", baselinkFrame, "base_link"); 167 | nh.param("lio_sam/odometryFrame", odometryFrame, "odom"); 168 | nh.param("lio_sam/mapFrame", mapFrame, "map"); 169 | 170 | nh.param("lio_sam/useImuHeadingInitialization", useImuHeadingInitialization, false); 171 | nh.param("lio_sam/useGpsElevation", useGpsElevation, false); 172 | nh.param("lio_sam/gpsCovThreshold", gpsCovThreshold, 2.0); 173 | nh.param("lio_sam/poseCovThreshold", poseCovThreshold, 25.0); 174 | 175 | nh.param("lio_sam/savePCD", savePCD, false); 176 | nh.param("lio_sam/savePCDDirectory", savePCDDirectory, "/Downloads/LOAM/"); 177 | 178 | std::string sensorStr; 179 | nh.param("lio_sam/sensor", sensorStr, ""); 180 | if (sensorStr == "velodyne") 181 | { 182 | sensor = SensorType::VELODYNE; 183 | } 184 | else if (sensorStr == "ouster") 185 | { 186 | sensor = SensorType::OUSTER; 187 | } 188 | else 189 | { 190 | ROS_ERROR_STREAM( 191 | "Invalid sensor type (must be either 'velodyne' or 'ouster'): " << sensorStr); 192 | ros::shutdown(); 193 | } 194 | 195 | nh.param("lio_sam/N_SCAN", N_SCAN, 16); 196 | nh.param("lio_sam/Horizon_SCAN", Horizon_SCAN, 1800); 197 | nh.param("lio_sam/downsampleRate", downsampleRate, 1); 198 | nh.param("lio_sam/lidarMinRange", lidarMinRange, 1.0); 199 | nh.param("lio_sam/lidarMaxRange", lidarMaxRange, 1000.0); 200 | 201 | nh.param("lio_sam/imuAccNoise", imuAccNoise, 0.01); 202 | nh.param("lio_sam/imuGyrNoise", imuGyrNoise, 0.001); 203 | nh.param("lio_sam/imuAccBiasN", imuAccBiasN, 0.0002); 204 | nh.param("lio_sam/imuGyrBiasN", imuGyrBiasN, 0.00003); 205 | nh.param("lio_sam/imuGravity", imuGravity, 9.80511); 206 | nh.param("lio_sam/imuRPYWeight", imuRPYWeight, 0.01); 207 | nh.param>("lio_sam/extrinsicRot", extRotV, vector()); 208 | nh.param>("lio_sam/extrinsicRPY", extRPYV, vector()); 209 | nh.param>("lio_sam/extrinsicTrans", extTransV, vector()); 210 | extRot = Eigen::Map>(extRotV.data(), 3, 3); 211 | extRPY = Eigen::Map>(extRPYV.data(), 3, 3); 212 | extTrans = Eigen::Map>(extTransV.data(), 3, 1); 213 | extQRPY = Eigen::Quaterniond(extRPY); 214 | 215 | nh.param("lio_sam/edgeThreshold", edgeThreshold, 0.1); 216 | nh.param("lio_sam/surfThreshold", surfThreshold, 0.1); 217 | nh.param("lio_sam/edgeFeatureMinValidNum", edgeFeatureMinValidNum, 10); 218 | nh.param("lio_sam/surfFeatureMinValidNum", surfFeatureMinValidNum, 100); 219 | 220 | nh.param("lio_sam/odometrySurfLeafSize", odometrySurfLeafSize, 0.2); 221 | nh.param("lio_sam/mappingCornerLeafSize", mappingCornerLeafSize, 0.2); 222 | nh.param("lio_sam/mappingSurfLeafSize", mappingSurfLeafSize, 0.2); 223 | 224 | nh.param("lio_sam/z_tollerance", z_tollerance, FLT_MAX); 225 | nh.param("lio_sam/rotation_tollerance", rotation_tollerance, FLT_MAX); 226 | 227 | nh.param("lio_sam/numberOfCores", numberOfCores, 2); 228 | nh.param("lio_sam/mappingProcessInterval", mappingProcessInterval, 0.15); 229 | 230 | nh.param("lio_sam/surroundingkeyframeAddingDistThreshold", surroundingkeyframeAddingDistThreshold, 1.0); 231 | nh.param("lio_sam/surroundingkeyframeAddingAngleThreshold", surroundingkeyframeAddingAngleThreshold, 
0.2); 232 | nh.param("lio_sam/surroundingKeyframeDensity", surroundingKeyframeDensity, 1.0); 233 | nh.param("lio_sam/surroundingKeyframeSearchRadius", surroundingKeyframeSearchRadius, 50.0); 234 | 235 | nh.param("lio_sam/loopClosureEnableFlag", loopClosureEnableFlag, false); 236 | nh.param("lio_sam/loopClosureFrequency", loopClosureFrequency, 1.0); 237 | nh.param("lio_sam/surroundingKeyframeSize", surroundingKeyframeSize, 50); 238 | nh.param("lio_sam/historyKeyframeSearchRadius", historyKeyframeSearchRadius, 10.0); 239 | nh.param("lio_sam/historyKeyframeSearchTimeDiff", historyKeyframeSearchTimeDiff, 30.0); 240 | nh.param("lio_sam/historyKeyframeSearchNum", historyKeyframeSearchNum, 25); 241 | nh.param("lio_sam/historyKeyframeFitnessScore", historyKeyframeFitnessScore, 0.3); 242 | 243 | nh.param("lio_sam/globalMapVisualizationSearchRadius", globalMapVisualizationSearchRadius, 1e3); 244 | nh.param("lio_sam/globalMapVisualizationPoseDensity", globalMapVisualizationPoseDensity, 10.0); 245 | nh.param("lio_sam/globalMapVisualizationLeafSize", globalMapVisualizationLeafSize, 1.0); 246 | 247 | usleep(100); 248 | } 249 | 250 | /** 251 | * imu原始测量数据转换到lidar系,加速度、角速度、RPY 252 | */ 253 | sensor_msgs::Imu imuConverter(const sensor_msgs::Imu& imu_in) 254 | { 255 | sensor_msgs::Imu imu_out = imu_in; 256 | // 加速度,只跟xyz坐标系的旋转有关系 257 | Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z); 258 | acc = extRot * acc; 259 | imu_out.linear_acceleration.x = acc.x(); 260 | imu_out.linear_acceleration.y = acc.y(); 261 | imu_out.linear_acceleration.z = acc.z(); 262 | // 角速度,只跟xyz坐标系的旋转有关系 263 | Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z); 264 | gyr = extRot * gyr; 265 | imu_out.angular_velocity.x = gyr.x(); 266 | imu_out.angular_velocity.y = gyr.y(); 267 | imu_out.angular_velocity.z = gyr.z(); 268 | // RPY 269 | Eigen::Quaterniond q_from(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z); 270 | // 为什么是右乘,可以动手画一下看看 271 | Eigen::Quaterniond q_final = q_from * extQRPY; 272 | imu_out.orientation.x = q_final.x(); 273 | imu_out.orientation.y = q_final.y(); 274 | imu_out.orientation.z = q_final.z(); 275 | imu_out.orientation.w = q_final.w(); 276 | 277 | if (sqrt(q_final.x()*q_final.x() + q_final.y()*q_final.y() + q_final.z()*q_final.z() + q_final.w()*q_final.w()) < 0.1) 278 | { 279 | ROS_ERROR("Invalid quaternion, please use a 9-axis IMU!"); 280 | ros::shutdown(); 281 | } 282 | 283 | return imu_out; 284 | } 285 | }; 286 | 287 | 288 | /** 289 | * 发布thisCloud,返回thisCloud对应msg格式 290 | */ 291 | sensor_msgs::PointCloud2 publishCloud(ros::Publisher *thisPub, pcl::PointCloud::Ptr thisCloud, ros::Time thisStamp, std::string thisFrame) 292 | { 293 | sensor_msgs::PointCloud2 tempCloud; 294 | pcl::toROSMsg(*thisCloud, tempCloud); 295 | tempCloud.header.stamp = thisStamp; 296 | tempCloud.header.frame_id = thisFrame; 297 | if (thisPub->getNumSubscribers() != 0) 298 | thisPub->publish(tempCloud); 299 | return tempCloud; 300 | } 301 | 302 | /** 303 | * msg时间戳 304 | */ 305 | template 306 | double ROS_TIME(T msg) 307 | { 308 | return msg->header.stamp.toSec(); 309 | } 310 | 311 | /** 312 | * 提取imu角速度 313 | */ 314 | template 315 | void imuAngular2rosAngular(sensor_msgs::Imu *thisImuMsg, T *angular_x, T *angular_y, T *angular_z) 316 | { 317 | *angular_x = thisImuMsg->angular_velocity.x; 318 | *angular_y = thisImuMsg->angular_velocity.y; 319 | *angular_z = thisImuMsg->angular_velocity.z; 320 | 
} 321 | 322 | /** 323 | * 提取imu加速度 324 | */ 325 | template 326 | void imuAccel2rosAccel(sensor_msgs::Imu *thisImuMsg, T *acc_x, T *acc_y, T *acc_z) 327 | { 328 | *acc_x = thisImuMsg->linear_acceleration.x; 329 | *acc_y = thisImuMsg->linear_acceleration.y; 330 | *acc_z = thisImuMsg->linear_acceleration.z; 331 | } 332 | 333 | /** 334 | * 提取imu姿态角RPY 335 | */ 336 | template 337 | void imuRPY2rosRPY(sensor_msgs::Imu *thisImuMsg, T *rosRoll, T *rosPitch, T *rosYaw) 338 | { 339 | double imuRoll, imuPitch, imuYaw; 340 | tf::Quaternion orientation; 341 | tf::quaternionMsgToTF(thisImuMsg->orientation, orientation); 342 | tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw); 343 | 344 | *rosRoll = imuRoll; 345 | *rosPitch = imuPitch; 346 | *rosYaw = imuYaw; 347 | } 348 | 349 | /** 350 | * 点到坐标系原点距离 351 | */ 352 | float pointDistance(PointType p) 353 | { 354 | return sqrt(p.x*p.x + p.y*p.y + p.z*p.z); 355 | } 356 | 357 | /** 358 | * 两点之间距离 359 | */ 360 | float pointDistance(PointType p1, PointType p2) 361 | { 362 | return sqrt((p1.x-p2.x)*(p1.x-p2.x) + (p1.y-p2.y)*(p1.y-p2.y) + (p1.z-p2.z)*(p1.z-p2.z)); 363 | } 364 | 365 | #endif 366 | -------------------------------------------------------------------------------- /launch/include/config/robot.urdf.xacro: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /launch/include/config/rviz.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 191 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /links1 8 | - /Odometry1 9 | - /Point Cloud1 10 | - /Feature Cloud1 11 | - /Mapping1 12 | Splitter Ratio: 0.5600000023841858 13 | Tree Height: 824 14 | - Class: rviz/Selection 15 | Name: Selection 16 | - Class: rviz/Views 17 | Expanded: 18 | - /Current View1 19 | Name: Views 20 | Splitter Ratio: 0.5 21 | - Class: rviz/Tool Properties 22 | Expanded: ~ 23 | Name: Tool Properties 24 | Splitter Ratio: 0.5 25 | Preferences: 26 | PromptSaveOnExit: true 27 | Toolbars: 28 | toolButtonStyle: 2 29 | Visualization Manager: 30 | Class: "" 31 | Displays: 32 | - Class: rviz/Group 33 | Displays: 34 | - Class: rviz/TF 35 | Enabled: false 36 | Frame Timeout: 999 37 | Frames: 38 | All Enabled: false 39 | Marker Scale: 3 40 | Name: TF 41 | Show Arrows: true 42 | Show Axes: true 43 | Show Names: true 44 | Tree: 45 | {} 46 | Update Interval: 0 47 | Value: false 48 | - Class: rviz/Axes 49 | Enabled: true 50 | Length: 2 51 | Name: map 52 | Radius: 0.5 53 | Reference Frame: map 54 | Value: true 55 | - Class: rviz/Axes 56 | Enabled: true 57 | Length: 1 58 | Name: base_link 59 | Radius: 0.30000001192092896 60 | Reference Frame: base_link 61 | Value: true 62 | Enabled: true 63 | Name: links 64 | - Class: rviz/Group 65 | Displays: 66 | - Angle Tolerance: 0.10000000149011612 67 | Class: rviz/Odometry 68 | Covariance: 69 | Orientation: 70 | Alpha: 0.5 71 | Color: 255; 255; 127 72 | Color Style: Unique 73 | Frame: Local 74 | Offset: 1 75 | Scale: 1 76 | Value: true 77 | Position: 78 | Alpha: 0.30000001192092896 79 | Color: 204; 51; 204 80 | Scale: 1 81 | Value: true 82 | Value: false 83 | Enabled: true 84 | Keep: 300 85 | Name: Odom IMU 86 | Position Tolerance: 0.10000000149011612 87 | Shape: 88 | Alpha: 1 89 
| Axes Length: 0.5 90 | Axes Radius: 0.10000000149011612 91 | Color: 255; 25; 0 92 | Head Length: 0.30000001192092896 93 | Head Radius: 0.10000000149011612 94 | Shaft Length: 1 95 | Shaft Radius: 0.05000000074505806 96 | Value: Axes 97 | Topic: /odometry/imu 98 | Unreliable: false 99 | Value: true 100 | - Angle Tolerance: 0.10000000149011612 101 | Class: rviz/Odometry 102 | Covariance: 103 | Orientation: 104 | Alpha: 0.5 105 | Color: 255; 255; 127 106 | Color Style: Unique 107 | Frame: Local 108 | Offset: 1 109 | Scale: 1 110 | Value: true 111 | Position: 112 | Alpha: 0.30000001192092896 113 | Color: 204; 51; 204 114 | Scale: 1 115 | Value: true 116 | Value: false 117 | Enabled: true 118 | Keep: 300 119 | Name: Odom GPS 120 | Position Tolerance: 0.10000000149011612 121 | Shape: 122 | Alpha: 1 123 | Axes Length: 1 124 | Axes Radius: 0.30000001192092896 125 | Color: 255; 25; 0 126 | Head Length: 0.30000001192092896 127 | Head Radius: 0.10000000149011612 128 | Shaft Length: 1 129 | Shaft Radius: 0.05000000074505806 130 | Value: Axes 131 | Topic: /odometry/gps 132 | Unreliable: false 133 | Value: true 134 | - Angle Tolerance: 0.10000000149011612 135 | Class: rviz/Odometry 136 | Covariance: 137 | Orientation: 138 | Alpha: 0.5 139 | Color: 255; 255; 127 140 | Color Style: Unique 141 | Frame: Local 142 | Offset: 1 143 | Scale: 1 144 | Value: true 145 | Position: 146 | Alpha: 0.30000001192092896 147 | Color: 204; 51; 204 148 | Scale: 1 149 | Value: true 150 | Value: false 151 | Enabled: false 152 | Keep: 300 153 | Name: Odom lidar 154 | Position Tolerance: 0.10000000149011612 155 | Shape: 156 | Alpha: 1 157 | Axes Length: 0.25 158 | Axes Radius: 0.10000000149011612 159 | Color: 255; 25; 0 160 | Head Length: 0.30000001192092896 161 | Head Radius: 0.10000000149011612 162 | Shaft Length: 1 163 | Shaft Radius: 0.05000000074505806 164 | Value: Axes 165 | Topic: /lio_sam/mapping/odometry 166 | Unreliable: false 167 | Value: false 168 | Enabled: false 169 | Name: Odometry 170 | - Class: rviz/Group 171 | Displays: 172 | - Alpha: 1 173 | Autocompute Intensity Bounds: false 174 | Autocompute Value Bounds: 175 | Max Value: 10 176 | Min Value: -10 177 | Value: true 178 | Axis: Z 179 | Channel Name: intensity 180 | Class: rviz/PointCloud2 181 | Color: 255; 255; 255 182 | Color Transformer: Intensity 183 | Decay Time: 0 184 | Enabled: true 185 | Invert Rainbow: false 186 | Max Color: 255; 255; 255 187 | Max Intensity: 128 188 | Min Color: 0; 0; 0 189 | Min Intensity: 0 190 | Name: Velodyne 191 | Position Transformer: XYZ 192 | Queue Size: 10 193 | Selectable: false 194 | Size (Pixels): 2 195 | Size (m): 0.009999999776482582 196 | Style: Points 197 | Topic: /lio_sam/deskew/cloud_deskewed 198 | Unreliable: false 199 | Use Fixed Frame: true 200 | Use rainbow: true 201 | Value: true 202 | - Alpha: 1 203 | Autocompute Intensity Bounds: true 204 | Autocompute Value Bounds: 205 | Max Value: 10 206 | Min Value: -10 207 | Value: true 208 | Axis: Z 209 | Channel Name: intensity 210 | Class: rviz/PointCloud2 211 | Color: 255; 255; 255 212 | Color Transformer: Intensity 213 | Decay Time: 0 214 | Enabled: false 215 | Invert Rainbow: false 216 | Max Color: 255; 255; 255 217 | Max Intensity: 0.7900000214576721 218 | Min Color: 0; 0; 0 219 | Min Intensity: 0 220 | Name: Reg Cloud 221 | Position Transformer: XYZ 222 | Queue Size: 10 223 | Selectable: true 224 | Size (Pixels): 3 225 | Size (m): 0.009999999776482582 226 | Style: Points 227 | Topic: /lio_sam/mapping/cloud_registered 228 | Unreliable: false 229 | Use Fixed Frame: 
true 230 | Use rainbow: true 231 | Value: false 232 | Enabled: true 233 | Name: Point Cloud 234 | - Class: rviz/Group 235 | Displays: 236 | - Alpha: 1 237 | Autocompute Intensity Bounds: true 238 | Autocompute Value Bounds: 239 | Max Value: 10 240 | Min Value: -10 241 | Value: true 242 | Axis: Z 243 | Channel Name: intensity 244 | Class: rviz/PointCloud2 245 | Color: 0; 255; 0 246 | Color Transformer: FlatColor 247 | Decay Time: 0 248 | Enabled: true 249 | Invert Rainbow: false 250 | Max Color: 255; 255; 255 251 | Max Intensity: 15.077729225158691 252 | Min Color: 0; 0; 0 253 | Min Intensity: 0.019985778257250786 254 | Name: Edge Feature 255 | Position Transformer: XYZ 256 | Queue Size: 10 257 | Selectable: true 258 | Size (Pixels): 5 259 | Size (m): 0.009999999776482582 260 | Style: Points 261 | Topic: /lio_sam/feature/cloud_corner 262 | Unreliable: false 263 | Use Fixed Frame: true 264 | Use rainbow: true 265 | Value: true 266 | - Alpha: 1 267 | Autocompute Intensity Bounds: true 268 | Autocompute Value Bounds: 269 | Max Value: 10 270 | Min Value: -10 271 | Value: true 272 | Axis: Z 273 | Channel Name: intensity 274 | Class: rviz/PointCloud2 275 | Color: 255; 85; 255 276 | Color Transformer: FlatColor 277 | Decay Time: 0 278 | Enabled: true 279 | Invert Rainbow: false 280 | Max Color: 255; 255; 255 281 | Max Intensity: 15.100607872009277 282 | Min Color: 0; 0; 0 283 | Min Intensity: 0.0007188677554950118 284 | Name: Plannar Feature 285 | Position Transformer: XYZ 286 | Queue Size: 10 287 | Selectable: true 288 | Size (Pixels): 3 289 | Size (m): 0.009999999776482582 290 | Style: Points 291 | Topic: /lio_sam/feature/cloud_surface 292 | Unreliable: false 293 | Use Fixed Frame: true 294 | Use rainbow: true 295 | Value: true 296 | Enabled: false 297 | Name: Feature Cloud 298 | - Class: rviz/Group 299 | Displays: 300 | - Alpha: 0.30000001192092896 301 | Autocompute Intensity Bounds: false 302 | Autocompute Value Bounds: 303 | Max Value: 7.4858574867248535 304 | Min Value: -1.2309398651123047 305 | Value: true 306 | Axis: Z 307 | Channel Name: intensity 308 | Class: rviz/PointCloud2 309 | Color: 255; 255; 255 310 | Color Transformer: Intensity 311 | Decay Time: 300 312 | Enabled: true 313 | Invert Rainbow: false 314 | Max Color: 255; 255; 255 315 | Max Intensity: 128 316 | Min Color: 0; 0; 0 317 | Min Intensity: 0 318 | Name: Map (cloud) 319 | Position Transformer: XYZ 320 | Queue Size: 10 321 | Selectable: false 322 | Size (Pixels): 2 323 | Size (m): 0.009999999776482582 324 | Style: Points 325 | Topic: /lio_sam/mapping/cloud_registered 326 | Unreliable: false 327 | Use Fixed Frame: true 328 | Use rainbow: true 329 | Value: true 330 | - Alpha: 1 331 | Autocompute Intensity Bounds: true 332 | Autocompute Value Bounds: 333 | Max Value: 36.61034393310547 334 | Min Value: -2.3476977348327637 335 | Value: true 336 | Axis: Z 337 | Channel Name: intensity 338 | Class: rviz/PointCloud2 339 | Color: 255; 255; 255 340 | Color Transformer: AxisColor 341 | Decay Time: 0 342 | Enabled: false 343 | Invert Rainbow: false 344 | Max Color: 255; 255; 255 345 | Max Intensity: 15.100604057312012 346 | Min Color: 0; 0; 0 347 | Min Intensity: 0.014545874670147896 348 | Name: Map (local) 349 | Position Transformer: XYZ 350 | Queue Size: 10 351 | Selectable: true 352 | Size (Pixels): 2 353 | Size (m): 0.10000000149011612 354 | Style: Flat Squares 355 | Topic: /lio_sam/mapping/map_local 356 | Unreliable: false 357 | Use Fixed Frame: true 358 | Use rainbow: true 359 | Value: false 360 | - Alpha: 1 361 | Autocompute 
Intensity Bounds: true 362 | Autocompute Value Bounds: 363 | Max Value: 10 364 | Min Value: -10 365 | Value: true 366 | Axis: Z 367 | Channel Name: intensity 368 | Class: rviz/PointCloud2 369 | Color: 255; 255; 255 370 | Color Transformer: Intensity 371 | Decay Time: 0 372 | Enabled: false 373 | Invert Rainbow: false 374 | Max Color: 255; 255; 255 375 | Max Intensity: 15.100096702575684 376 | Min Color: 0; 0; 0 377 | Min Intensity: 0.007923072203993797 378 | Name: Map (global) 379 | Position Transformer: XYZ 380 | Queue Size: 10 381 | Selectable: true 382 | Size (Pixels): 2 383 | Size (m): 0.009999999776482582 384 | Style: Points 385 | Topic: /lio_sam/mapping/map_global 386 | Unreliable: false 387 | Use Fixed Frame: true 388 | Use rainbow: true 389 | Value: false 390 | - Alpha: 1 391 | Buffer Length: 1 392 | Class: rviz/Path 393 | Color: 85; 255; 255 394 | Enabled: true 395 | Head Diameter: 0.30000001192092896 396 | Head Length: 0.20000000298023224 397 | Length: 0.30000001192092896 398 | Line Style: Billboards 399 | Line Width: 0.10000000149011612 400 | Name: Path (global) 401 | Offset: 402 | X: 0 403 | Y: 0 404 | Z: 0 405 | Pose Color: 255; 85; 255 406 | Pose Style: None 407 | Radius: 0.029999999329447746 408 | Shaft Diameter: 0.10000000149011612 409 | Shaft Length: 0.10000000149011612 410 | Topic: /lio_sam/mapping/path 411 | Unreliable: false 412 | Value: true 413 | - Alpha: 1 414 | Buffer Length: 1 415 | Class: rviz/Path 416 | Color: 255; 85; 255 417 | Enabled: true 418 | Head Diameter: 0.30000001192092896 419 | Head Length: 0.20000000298023224 420 | Length: 0.30000001192092896 421 | Line Style: Billboards 422 | Line Width: 0.10000000149011612 423 | Name: Path (local) 424 | Offset: 425 | X: 0 426 | Y: 0 427 | Z: 0 428 | Pose Color: 255; 85; 255 429 | Pose Style: None 430 | Radius: 0.029999999329447746 431 | Shaft Diameter: 0.10000000149011612 432 | Shaft Length: 0.10000000149011612 433 | Topic: /lio_sam/imu/path 434 | Unreliable: false 435 | Value: true 436 | - Alpha: 1 437 | Autocompute Intensity Bounds: true 438 | Autocompute Value Bounds: 439 | Max Value: 20.802837371826172 440 | Min Value: -0.03934507071971893 441 | Value: true 442 | Axis: Z 443 | Channel Name: intensity 444 | Class: rviz/PointCloud2 445 | Color: 255; 255; 255 446 | Color Transformer: AxisColor 447 | Decay Time: 0 448 | Enabled: false 449 | Invert Rainbow: false 450 | Max Color: 255; 255; 255 451 | Max Intensity: 15.100604057312012 452 | Min Color: 0; 0; 0 453 | Min Intensity: 0.014545874670147896 454 | Name: Loop closure 455 | Position Transformer: XYZ 456 | Queue Size: 10 457 | Selectable: true 458 | Size (Pixels): 2 459 | Size (m): 0.30000001192092896 460 | Style: Flat Squares 461 | Topic: /lio_sam/mapping/icp_loop_closure_corrected_cloud 462 | Unreliable: false 463 | Use Fixed Frame: true 464 | Use rainbow: true 465 | Value: false 466 | - Class: rviz/MarkerArray 467 | Enabled: true 468 | Marker Topic: /lio_sam/mapping/loop_closure_constraints 469 | Name: Loop constraint 470 | Namespaces: 471 | {} 472 | Queue Size: 100 473 | Value: true 474 | Enabled: true 475 | Name: Mapping 476 | Enabled: true 477 | Global Options: 478 | Background Color: 0; 0; 0 479 | Default Light: true 480 | Fixed Frame: map 481 | Frame Rate: 30 482 | Name: root 483 | Tools: 484 | - Class: rviz/Interact 485 | Hide Inactive Objects: true 486 | - Class: rviz/FocusCamera 487 | - Class: rviz/Measure 488 | Value: true 489 | Views: 490 | Current: 491 | Class: rviz/Orbit 492 | Distance: 97.43701171875 493 | Enable Stereo Rendering: 494 | Stereo Eye 
Separation: 0.05999999865889549 495 | Stereo Focal Distance: 1 496 | Swap Stereo Eyes: false 497 | Value: false 498 | Focal Point: 499 | X: 0 500 | Y: 0 501 | Z: 0 502 | Focal Shape Fixed Size: false 503 | Focal Shape Size: 0.05000000074505806 504 | Invert Z Axis: false 505 | Name: Current View 506 | Near Clip Distance: 0.009999999776482582 507 | Pitch: 1.1253981590270996 508 | Target Frame: base_link 509 | Value: Orbit (rviz) 510 | Yaw: 3.3316705226898193 511 | Saved: ~ 512 | Window Geometry: 513 | Displays: 514 | collapsed: false 515 | Height: 1190 516 | Hide Left Dock: false 517 | Hide Right Dock: true 518 | QMainWindow State: 000000ff00000000fd0000000400000000000001a30000043efc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006e00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c00610079007301000000470000043e000000f100fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000006e00ffffff000000010000011100000435fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e00000435000000c200fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d00650100000000000004500000000000000000000005f20000043e00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 519 | Selection: 520 | collapsed: false 521 | Tool Properties: 522 | collapsed: false 523 | Views: 524 | collapsed: true 525 | Width: 1948 526 | X: 725 527 | Y: 300 528 | -------------------------------------------------------------------------------- /launch/include/module_loam.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /launch/include/module_navsat.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /launch/include/module_robot_state_publisher.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /launch/include/module_rviz.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /launch/include/rosconsole/rosconsole_error.conf: 
-------------------------------------------------------------------------------- 1 | # Set the default ros output to warning and higher 2 | log4j.logger.ros = ERROR -------------------------------------------------------------------------------- /launch/include/rosconsole/rosconsole_info.conf: -------------------------------------------------------------------------------- 1 | # Set the default ros output to warning and higher 2 | log4j.logger.ros = INFO -------------------------------------------------------------------------------- /launch/include/rosconsole/rosconsole_warn.conf: -------------------------------------------------------------------------------- 1 | # Set the default ros output to warning and higher 2 | log4j.logger.ros = WARN -------------------------------------------------------------------------------- /launch/run.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /msg/cloud_info.msg: -------------------------------------------------------------------------------- 1 | # Cloud Info 2 | Header header 3 | 4 | int32[] startRingIndex 5 | int32[] endRingIndex 6 | 7 | int32[] pointColInd # point column index in range image 8 | float32[] pointRange # point range 9 | 10 | int64 imuAvailable 11 | int64 odomAvailable 12 | 13 | # Attitude for LOAM initialization 14 | float32 imuRollInit 15 | float32 imuPitchInit 16 | float32 imuYawInit 17 | 18 | # Initial guess from imu pre-integration 19 | float32 initialGuessX 20 | float32 initialGuessY 21 | float32 initialGuessZ 22 | float32 initialGuessRoll 23 | float32 initialGuessPitch 24 | float32 initialGuessYaw 25 | 26 | # Point cloud messages 27 | sensor_msgs/PointCloud2 cloud_deskewed # original cloud deskewed 28 | sensor_msgs/PointCloud2 cloud_corner # extracted corner feature 29 | sensor_msgs/PointCloud2 cloud_surface # extracted surface feature -------------------------------------------------------------------------------- /package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | lio_sam 4 | 1.0.0 5 | Lidar Odometry 6 | 7 | Tixiao Shan 8 | 9 | TODO 10 | 11 | Tixiao Shan 12 | 13 | catkin 14 | 15 | roscpp 16 | roscpp 17 | rospy 18 | rospy 19 | 20 | tf 21 | tf 22 | 23 | cv_bridge 24 | cv_bridge 25 | 26 | pcl_conversions 27 | pcl_conversions 28 | 29 | std_msgs 30 | std_msgs 31 | sensor_msgs 32 | sensor_msgs 33 | geometry_msgs 34 | geometry_msgs 35 | nav_msgs 36 | nav_msgs 37 | visualization_msgs 38 | visualization_msgs 39 | 40 | message_generation 41 | message_generation 42 | message_runtime 43 | message_runtime 44 | 45 | GTSAM 46 | GTSAM 47 | 48 | 49 | -------------------------------------------------------------------------------- /rosgraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/rosgraph.png -------------------------------------------------------------------------------- /src/featureExtraction.cpp: -------------------------------------------------------------------------------- 1 | /************************************************* 2 | GitHub: https://github.com/smilefacehh/LIO-SAM-DetailedNote 3 | Author: lutao2014@163.com 4 | Date: 2021-02-21 5 | -------------------------------------------------- 6 | 功能简介: 7 | 
对经过运动畸变校正之后的当前帧激光点云,计算每个点的曲率,进而提取角点、平面点(用曲率的大小进行判定)。 8 | 9 | 订阅: 10 | 1、订阅当前激光帧运动畸变校正后的点云信息,来自ImageProjection。 11 | 12 | 发布: 13 | 1、发布当前激光帧提取特征之后的点云信息,包括的历史数据有:运动畸变校正,点云数据,初始位姿,姿态角,有效点云数据,角点点云,平面点点云等,发布给MapOptimization; 14 | 2、发布当前激光帧提取的角点点云,用于rviz展示; 15 | 3、发布当前激光帧提取的平面点点云,用于rviz展示。 16 | **************************************************/ 17 | #include "utility.h" 18 | #include "lio_sam/cloud_info.h" 19 | 20 | /** 21 | * 激光点曲率 22 | */ 23 | struct smoothness_t{ 24 | float value; // 曲率值 25 | size_t ind; // 激光点一维索引 26 | }; 27 | 28 | /** 29 | * 曲率比较函数,从小到大排序 30 | */ 31 | struct by_value{ 32 | bool operator()(smoothness_t const &left, smoothness_t const &right) { 33 | return left.value < right.value; 34 | } 35 | }; 36 | 37 | class FeatureExtraction : public ParamServer 38 | { 39 | 40 | public: 41 | 42 | ros::Subscriber subLaserCloudInfo; 43 | 44 | // 发布当前激光帧提取特征之后的点云信息 45 | ros::Publisher pubLaserCloudInfo; 46 | // 发布当前激光帧提取的角点点云 47 | ros::Publisher pubCornerPoints; 48 | // 发布当前激光帧提取的平面点点云 49 | ros::Publisher pubSurfacePoints; 50 | 51 | // 当前激光帧运动畸变校正后的有效点云 52 | pcl::PointCloud::Ptr extractedCloud; 53 | // 当前激光帧角点点云集合 54 | pcl::PointCloud::Ptr cornerCloud; 55 | // 当前激光帧平面点点云集合 56 | pcl::PointCloud::Ptr surfaceCloud; 57 | 58 | pcl::VoxelGrid downSizeFilter; 59 | 60 | // 当前激光帧点云信息,包括的历史数据有:运动畸变校正,点云数据,初始位姿,姿态角,有效点云数据,角点点云,平面点点云等 61 | lio_sam::cloud_info cloudInfo; 62 | std_msgs::Header cloudHeader; 63 | 64 | // 当前激光帧点云的曲率 65 | std::vector cloudSmoothness; 66 | float *cloudCurvature; 67 | // 特征提取标记,1表示遮挡、平行,或者已经进行特征提取的点,0表示还未进行特征提取处理 68 | int *cloudNeighborPicked; 69 | // 1表示角点,-1表示平面点 70 | int *cloudLabel; 71 | 72 | /** 73 | * 构造函数 74 | */ 75 | FeatureExtraction() 76 | { 77 | // 订阅当前激光帧运动畸变校正后的点云信息 78 | subLaserCloudInfo = nh.subscribe("lio_sam/deskew/cloud_info", 1, &FeatureExtraction::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay()); 79 | 80 | // 发布当前激光帧提取特征之后的点云信息 81 | pubLaserCloudInfo = nh.advertise ("lio_sam/feature/cloud_info", 1); 82 | // 发布当前激光帧的角点点云 83 | pubCornerPoints = nh.advertise("lio_sam/feature/cloud_corner", 1); 84 | // 发布当前激光帧的面点点云 85 | pubSurfacePoints = nh.advertise("lio_sam/feature/cloud_surface", 1); 86 | 87 | // 初始化 88 | initializationValue(); 89 | } 90 | 91 | // 初始化 92 | void initializationValue() 93 | { 94 | cloudSmoothness.resize(N_SCAN*Horizon_SCAN); 95 | 96 | downSizeFilter.setLeafSize(odometrySurfLeafSize, odometrySurfLeafSize, odometrySurfLeafSize); 97 | 98 | extractedCloud.reset(new pcl::PointCloud()); 99 | cornerCloud.reset(new pcl::PointCloud()); 100 | surfaceCloud.reset(new pcl::PointCloud()); 101 | 102 | cloudCurvature = new float[N_SCAN*Horizon_SCAN]; 103 | cloudNeighborPicked = new int[N_SCAN*Horizon_SCAN]; 104 | cloudLabel = new int[N_SCAN*Horizon_SCAN]; 105 | } 106 | 107 | /** 108 | * 订阅当前激光帧运动畸变校正后的点云信息 109 | * 1、计算当前激光帧点云中每个点的曲率 110 | * 2、标记属于遮挡、平行两种情况的点,不做特征提取 111 | * 3、点云角点、平面点特征提取 112 | * 1) 遍历扫描线,每根扫描线扫描一周的点云划分为6段,针对每段提取20个角点、不限数量的平面点,加入角点集合、平面点集合 113 | * 2) 认为非角点的点都是平面点,加入平面点云集合,最后降采样 114 | * 4、发布角点、面点点云,发布带特征点云数据的当前激光帧点云信息 115 | */ 116 | void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn) 117 | { 118 | cloudInfo = *msgIn; 119 | cloudHeader = msgIn->header; 120 | // 当前激光帧运动畸变校正后的有效点云 121 | pcl::fromROSMsg(msgIn->cloud_deskewed, *extractedCloud); 122 | 123 | // 计算当前激光帧点云中每个点的曲率 124 | calculateSmoothness(); 125 | 126 | // 标记属于遮挡、平行两种情况的点,不做特征提取 127 | markOccludedPoints(); 128 | 129 | // 点云角点、平面点特征提取 130 | // 1、遍历扫描线,每根扫描线扫描一周的点云划分为6段,针对每段提取20个角点、不限数量的平面点,加入角点集合、平面点集合 131 | // 2、认为非角点的点都是平面点,加入平面点云集合,最后降采样 132 | 
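// Rough English gloss of the criterion used below: calculateSmoothness() stored
// cloudCurvature[i] = diffRange^2, where diffRange is the sum of the ranges of the
// 5 neighbours on each side of point i minus 10x the range of point i itself. On a
// locally flat, evenly sampled surface the neighbour terms roughly cancel the 10x
// term, so the curvature stays near zero; at a sharp edge or depth discontinuity it
// becomes large. extractFeatures() marks unpicked points with curvature above
// edgeThreshold as corner features (at most 20 per segment), treats everything else
// as a surface point, and voxel-downsamples the surface cloud.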
extractFeatures(); 133 | 134 | // 发布角点、面点点云,发布带特征点云数据的当前激光帧点云信息 135 | publishFeatureCloud(); 136 | } 137 | 138 | /** 139 | * 计算当前激光帧点云中每个点的曲率 140 | */ 141 | void calculateSmoothness() 142 | { 143 | // 遍历当前激光帧运动畸变校正后的有效点云 144 | int cloudSize = extractedCloud->points.size(); 145 | for (int i = 5; i < cloudSize - 5; i++) 146 | { 147 | // 用当前激光点前后5个点计算当前点的曲率,平坦位置处曲率较小,角点处曲率较大;这个方法很简单但有效 148 | float diffRange = cloudInfo.pointRange[i-5] + cloudInfo.pointRange[i-4] 149 | + cloudInfo.pointRange[i-3] + cloudInfo.pointRange[i-2] 150 | + cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i] * 10 151 | + cloudInfo.pointRange[i+1] + cloudInfo.pointRange[i+2] 152 | + cloudInfo.pointRange[i+3] + cloudInfo.pointRange[i+4] 153 | + cloudInfo.pointRange[i+5]; 154 | 155 | // 距离差值平方作为曲率 156 | cloudCurvature[i] = diffRange*diffRange; 157 | 158 | cloudNeighborPicked[i] = 0; 159 | cloudLabel[i] = 0; 160 | 161 | // 存储该点曲率值、激光点一维索引 162 | cloudSmoothness[i].value = cloudCurvature[i]; 163 | cloudSmoothness[i].ind = i; 164 | } 165 | } 166 | 167 | /** 168 | * 标记属于遮挡、平行两种情况的点,不做特征提取 169 | */ 170 | void markOccludedPoints() 171 | { 172 | int cloudSize = extractedCloud->points.size(); 173 | // mark occluded points and parallel beam points 174 | 175 | for (int i = 5; i < cloudSize - 6; ++i) 176 | { 177 | // 当前点和下一个点的range值 178 | float depth1 = cloudInfo.pointRange[i]; 179 | float depth2 = cloudInfo.pointRange[i+1]; 180 | // 两个激光点之间的一维索引差值,如果在一条扫描线上,那么值为1;如果两个点之间有一些无效点被剔除了,可能会比1大,但不会特别大 181 | // 如果恰好前一个点在扫描一周的结束时刻,下一个点是另一条扫描线的起始时刻,那么值会很大 182 | int columnDiff = std::abs(int(cloudInfo.pointColInd[i+1] - cloudInfo.pointColInd[i])); 183 | 184 | // 两个点在同一扫描线上,且距离相差大于0.3,认为存在遮挡关系(也就是这两个点不在同一平面上,如果在同一平面上,距离相差不会太大) 185 | // 远处的点会被遮挡,标记一下该点以及相邻的5个点,后面不再进行特征提取 186 | if (columnDiff < 10){ 187 | 188 | if (depth1 - depth2 > 0.3){ 189 | cloudNeighborPicked[i - 5] = 1; 190 | cloudNeighborPicked[i - 4] = 1; 191 | cloudNeighborPicked[i - 3] = 1; 192 | cloudNeighborPicked[i - 2] = 1; 193 | cloudNeighborPicked[i - 1] = 1; 194 | cloudNeighborPicked[i] = 1; 195 | }else if (depth2 - depth1 > 0.3){ 196 | cloudNeighborPicked[i + 1] = 1; 197 | cloudNeighborPicked[i + 2] = 1; 198 | cloudNeighborPicked[i + 3] = 1; 199 | cloudNeighborPicked[i + 4] = 1; 200 | cloudNeighborPicked[i + 5] = 1; 201 | cloudNeighborPicked[i + 6] = 1; 202 | } 203 | } 204 | 205 | // 用前后相邻点判断当前点所在平面是否与激光束方向平行 206 | float diff1 = std::abs(float(cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i])); 207 | float diff2 = std::abs(float(cloudInfo.pointRange[i+1] - cloudInfo.pointRange[i])); 208 | 209 | // 平行则标记一下 210 | if (diff1 > 0.02 * cloudInfo.pointRange[i] && diff2 > 0.02 * cloudInfo.pointRange[i]) 211 | cloudNeighborPicked[i] = 1; 212 | } 213 | } 214 | 215 | /** 216 | * 点云角点、平面点特征提取 217 | * 1、遍历扫描线,每根扫描线扫描一周的点云划分为6段,针对每段提取20个角点、不限数量的平面点,加入角点集合、平面点集合 218 | * 2、认为非角点的点都是平面点,加入平面点云集合,最后降采样 219 | */ 220 | void extractFeatures() 221 | { 222 | cornerCloud->clear(); 223 | surfaceCloud->clear(); 224 | 225 | pcl::PointCloud::Ptr surfaceCloudScan(new pcl::PointCloud()); 226 | pcl::PointCloud::Ptr surfaceCloudScanDS(new pcl::PointCloud()); 227 | 228 | // 遍历扫描线 229 | for (int i = 0; i < N_SCAN; i++) 230 | { 231 | surfaceCloudScan->clear(); 232 | 233 | // 将一条扫描线扫描一周的点云数据,划分为6段,每段分开提取有限数量的特征,保证特征均匀分布 234 | for (int j = 0; j < 6; j++) 235 | { 236 | // 每段点云的起始、结束索引;startRingIndex为扫描线起始第5个激光点在一维数组中的索引 237 | int sp = (cloudInfo.startRingIndex[i] * (6 - j) + cloudInfo.endRingIndex[i] * j) / 6; 238 | int ep = (cloudInfo.startRingIndex[i] * (5 - j) + cloudInfo.endRingIndex[i] * (j + 1)) / 6 - 1; 
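// Worked example (hypothetical indices, assuming Horizon_SCAN = 1800 and a fully
// valid scan line with startRingIndex[i] = 5 and endRingIndex[i] = 1795): the
// integer arithmetic above gives segment j = 0 the span sp = 5 .. ep = 302 and
// segment j = 5 the span sp = 1496 .. ep = 1794, i.e. six segments of roughly
// 300 points each, so at most 20 corner points are taken from every ~300-point slice.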
239 | 240 | if (sp >= ep) 241 | continue; 242 | 243 | // 按照曲率从小到大排序点云 244 | std::sort(cloudSmoothness.begin()+sp, cloudSmoothness.begin()+ep, by_value()); 245 | 246 | // 按照曲率从大到小遍历 247 | int largestPickedNum = 0; 248 | for (int k = ep; k >= sp; k--) 249 | { 250 | // 激光点的索引 251 | int ind = cloudSmoothness[k].ind; 252 | // 当前激光点还未被处理,且曲率大于阈值,则认为是角点 253 | if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] > edgeThreshold) 254 | { 255 | // 每段只取20个角点,如果单条扫描线扫描一周是1800个点,则划分6段,每段300个点,从中提取20个角点 256 | largestPickedNum++; 257 | if (largestPickedNum <= 20){ 258 | // 标记为角点 259 | cloudLabel[ind] = 1; 260 | // 加入角点点云 261 | cornerCloud->push_back(extractedCloud->points[ind]); 262 | } else { 263 | break; 264 | } 265 | 266 | // 标记已被处理 267 | cloudNeighborPicked[ind] = 1; 268 | // 同一条扫描线上后5个点标记一下,不再处理,避免特征聚集 269 | for (int l = 1; l <= 5; l++) 270 | { 271 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1])); 272 | if (columnDiff > 10) 273 | break; 274 | cloudNeighborPicked[ind + l] = 1; 275 | } 276 | // 同一条扫描线上前5个点标记一下,不再处理,避免特征聚集 277 | for (int l = -1; l >= -5; l--) 278 | { 279 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1])); 280 | if (columnDiff > 10) 281 | break; 282 | cloudNeighborPicked[ind + l] = 1; 283 | } 284 | } 285 | } 286 | 287 | // 按照曲率从小到大遍历 288 | for (int k = sp; k <= ep; k++) 289 | { 290 | // 激光点的索引 291 | int ind = cloudSmoothness[k].ind; 292 | // 当前激光点还未被处理,且曲率小于阈值,则认为是平面点 293 | if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] < surfThreshold) 294 | { 295 | // 标记为平面点 296 | cloudLabel[ind] = -1; 297 | // 标记已被处理 298 | cloudNeighborPicked[ind] = 1; 299 | 300 | // 同一条扫描线上后5个点标记一下,不再处理,避免特征聚集 301 | for (int l = 1; l <= 5; l++) { 302 | 303 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1])); 304 | if (columnDiff > 10) 305 | break; 306 | 307 | cloudNeighborPicked[ind + l] = 1; 308 | } 309 | // 同一条扫描线上前5个点标记一下,不再处理,避免特征聚集 310 | for (int l = -1; l >= -5; l--) { 311 | 312 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1])); 313 | if (columnDiff > 10) 314 | break; 315 | 316 | cloudNeighborPicked[ind + l] = 1; 317 | } 318 | } 319 | } 320 | 321 | // 平面点和未被处理的点,都认为是平面点,加入平面点云集合 322 | for (int k = sp; k <= ep; k++) 323 | { 324 | if (cloudLabel[k] <= 0){ 325 | surfaceCloudScan->push_back(extractedCloud->points[k]); 326 | } 327 | } 328 | } 329 | 330 | // 平面点云降采样 331 | surfaceCloudScanDS->clear(); 332 | downSizeFilter.setInputCloud(surfaceCloudScan); 333 | downSizeFilter.filter(*surfaceCloudScanDS); 334 | 335 | // 加入平面点云集合 336 | *surfaceCloud += *surfaceCloudScanDS; 337 | } 338 | } 339 | 340 | /** 341 | * 清理 342 | */ 343 | void freeCloudInfoMemory() 344 | { 345 | cloudInfo.startRingIndex.clear(); 346 | cloudInfo.endRingIndex.clear(); 347 | cloudInfo.pointColInd.clear(); 348 | cloudInfo.pointRange.clear(); 349 | } 350 | 351 | /** 352 | * 发布角点、面点点云,发布带特征点云数据的当前激光帧点云信息 353 | */ 354 | void publishFeatureCloud() 355 | { 356 | // 清理 357 | freeCloudInfoMemory(); 358 | // 发布角点、面点点云,用于rviz展示 359 | cloudInfo.cloud_corner = publishCloud(&pubCornerPoints, cornerCloud, cloudHeader.stamp, lidarFrame); 360 | cloudInfo.cloud_surface = publishCloud(&pubSurfacePoints, surfaceCloud, cloudHeader.stamp, lidarFrame); 361 | // 发布当前激光帧点云信息,加入了角点、面点点云数据,发布给mapOptimization 362 | pubLaserCloudInfo.publish(cloudInfo); 363 | } 364 | }; 365 | 366 | 367 | int main(int argc, char** argv) 368 | { 369 | ros::init(argc, argv, 
"lio_sam"); 370 | 371 | FeatureExtraction FE; 372 | 373 | ROS_INFO("\033[1;32m----> Feature Extraction Started.\033[0m"); 374 | 375 | ros::spin(); 376 | 377 | return 0; 378 | } 379 | -------------------------------------------------------------------------------- /src/imageProjection.cpp: -------------------------------------------------------------------------------- 1 | /************************************************* 2 | GitHub: https://github.com/smilefacehh/LIO-SAM-DetailedNote 3 | Author: lutao2014@163.com 4 | Date: 2021-02-21 5 | -------------------------------------------------- 6 | 功能简介: 7 | 1、利用当前激光帧起止时刻间的imu数据计算旋转增量,IMU里程计数据(来自ImuPreintegration)计算平移增量,进而对该帧激光每一时刻的激光点进行运动畸变校正(利用相对于激光帧起始时刻的位姿增量,变换当前激光点到起始时刻激光点的坐标系下,实现校正); 8 | 2、同时用IMU数据的姿态角(RPY,roll、pitch、yaw)、IMU里程计数据的的位姿,对当前帧激光位姿进行粗略初始化。 9 | 10 | 订阅: 11 | 1、订阅原始IMU数据; 12 | 2、订阅IMU里程计数据,来自ImuPreintegration,表示每一时刻对应的位姿; 13 | 3、订阅原始激光点云数据。 14 | 15 | 发布: 16 | 1、发布当前帧激光运动畸变校正之后的有效点云,用于rviz展示; 17 | 2、发布当前帧激光运动畸变校正之后的点云信息,包括点云数据、初始位姿、姿态角、有效点云数据等,发布给FeatureExtraction进行特征提取。 18 | **************************************************/ 19 | #include "utility.h" 20 | #include "lio_sam/cloud_info.h" 21 | 22 | 23 | /** 24 | * Velodyne点云结构,变量名XYZIRT是每个变量的首字母 25 | */ 26 | struct VelodynePointXYZIRT 27 | { 28 | PCL_ADD_POINT4D // 位置 29 | PCL_ADD_INTENSITY; // 激光点反射强度,也可以存点的索引 30 | uint16_t ring; // 扫描线 31 | float time; // 时间戳,记录相对于当前帧第一个激光点的时差,第一个点time=0 32 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 33 | } EIGEN_ALIGN16; // 内存16字节对齐,EIGEN SSE优化要求 34 | // 注册为PCL点云格式 35 | POINT_CLOUD_REGISTER_POINT_STRUCT (VelodynePointXYZIRT, 36 | (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity) 37 | (uint16_t, ring, ring) (float, time, time) 38 | ) 39 | 40 | /** 41 | * Ouster点云结构 42 | */ 43 | struct OusterPointXYZIRT { 44 | PCL_ADD_POINT4D; 45 | float intensity; 46 | uint32_t t; 47 | uint16_t reflectivity; 48 | uint8_t ring; 49 | uint16_t noise; 50 | uint32_t range; 51 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW 52 | } EIGEN_ALIGN16; 53 | POINT_CLOUD_REGISTER_POINT_STRUCT(OusterPointXYZIRT, 54 | (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity) 55 | (uint32_t, t, t) (uint16_t, reflectivity, reflectivity) 56 | (uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range) 57 | ) 58 | 59 | // 本程序使用Velodyne点云结构 60 | using PointXYZIRT = VelodynePointXYZIRT; 61 | 62 | // imu数据队列长度 63 | const int queueLength = 2000; 64 | 65 | class ImageProjection : public ParamServer 66 | { 67 | private: 68 | 69 | // imu队列、odom队列互斥锁 70 | std::mutex imuLock; 71 | std::mutex odoLock; 72 | 73 | // 订阅原始激光点云 74 | ros::Subscriber subLaserCloud; 75 | ros::Publisher pubLaserCloud; 76 | 77 | // 发布当前帧校正后点云,有效点 78 | ros::Publisher pubExtractedCloud; 79 | ros::Publisher pubLaserCloudInfo; 80 | 81 | // imu数据队列(原始数据,转lidar系下) 82 | ros::Subscriber subImu; 83 | std::deque imuQueue; 84 | 85 | // imu里程计队列 86 | ros::Subscriber subOdom; 87 | std::deque odomQueue; 88 | 89 | // 激光点云数据队列 90 | std::deque cloudQueue; 91 | // 队列front帧,作为当前处理帧点云 92 | sensor_msgs::PointCloud2 currentCloudMsg; 93 | 94 | // 当前激光帧起止时刻间对应的imu数据,计算相对于起始时刻的旋转增量,以及时时间戳;用于插值计算当前激光帧起止时间范围内,每一时刻的旋转姿态 95 | double *imuTime = new double[queueLength]; 96 | double *imuRotX = new double[queueLength]; 97 | double *imuRotY = new double[queueLength]; 98 | double *imuRotZ = new double[queueLength]; 99 | 100 | int imuPointerCur; 101 | bool firstPointFlag; 102 | Eigen::Affine3f transStartInverse; 103 | 104 | // 当前帧原始激光点云 105 | pcl::PointCloud::Ptr laserCloudIn; 106 | pcl::PointCloud::Ptr tmpOusterCloudIn; 107 | // 
当期帧运动畸变校正之后的激光点云 108 | pcl::PointCloud::Ptr fullCloud; 109 | // 从fullCloud中提取有效点 110 | pcl::PointCloud::Ptr extractedCloud; 111 | 112 | int deskewFlag; 113 | cv::Mat rangeMat; 114 | 115 | bool odomDeskewFlag; 116 | // 当前激光帧起止时刻对应imu里程计位姿变换,该变换对应的平移增量;用于插值计算当前激光帧起止时间范围内,每一时刻的位置 117 | float odomIncreX; 118 | float odomIncreY; 119 | float odomIncreZ; 120 | 121 | // 当前帧激光点云运动畸变校正之后的数据,包括点云数据、初始位姿、姿态角等,发布给featureExtraction进行特征提取 122 | lio_sam::cloud_info cloudInfo; 123 | // 当前帧起始时刻 124 | double timeScanCur; 125 | // 当前帧结束时刻 126 | double timeScanEnd; 127 | // 当前帧header,包含时间戳信息 128 | std_msgs::Header cloudHeader; 129 | 130 | 131 | public: 132 | /** 133 | * 构造函数 134 | */ 135 | ImageProjection(): 136 | deskewFlag(0) 137 | { 138 | // 订阅原始imu数据 139 | subImu = nh.subscribe(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay()); 140 | // 订阅imu里程计,由imuPreintegration积分计算得到的每时刻imu位姿 141 | subOdom = nh.subscribe(odomTopic+"_incremental", 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay()); 142 | // 订阅原始lidar数据 143 | subLaserCloud = nh.subscribe(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay()); 144 | 145 | // 发布当前激光帧运动畸变校正后的点云,有效点 146 | pubExtractedCloud = nh.advertise ("lio_sam/deskew/cloud_deskewed", 1); 147 | // 发布当前激光帧运动畸变校正后的点云信息 148 | pubLaserCloudInfo = nh.advertise ("lio_sam/deskew/cloud_info", 1); 149 | 150 | // 初始化 151 | allocateMemory(); 152 | // 重置参数 153 | resetParameters(); 154 | 155 | // pcl日志级别,只打ERROR日志 156 | pcl::console::setVerbosityLevel(pcl::console::L_ERROR); 157 | } 158 | 159 | /** 160 | * 初始化 161 | */ 162 | void allocateMemory() 163 | { 164 | laserCloudIn.reset(new pcl::PointCloud()); 165 | tmpOusterCloudIn.reset(new pcl::PointCloud()); 166 | fullCloud.reset(new pcl::PointCloud()); 167 | extractedCloud.reset(new pcl::PointCloud()); 168 | 169 | fullCloud->points.resize(N_SCAN*Horizon_SCAN); 170 | 171 | cloudInfo.startRingIndex.assign(N_SCAN, 0); 172 | cloudInfo.endRingIndex.assign(N_SCAN, 0); 173 | 174 | cloudInfo.pointColInd.assign(N_SCAN*Horizon_SCAN, 0); 175 | cloudInfo.pointRange.assign(N_SCAN*Horizon_SCAN, 0); 176 | 177 | resetParameters(); 178 | } 179 | 180 | /** 181 | * 重置参数,接收每帧lidar数据都要重置这些参数 182 | */ 183 | void resetParameters() 184 | { 185 | laserCloudIn->clear(); 186 | extractedCloud->clear(); 187 | 188 | rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX)); 189 | 190 | imuPointerCur = 0; 191 | firstPointFlag = true; 192 | odomDeskewFlag = false; 193 | 194 | for (int i = 0; i < queueLength; ++i) 195 | { 196 | imuTime[i] = 0; 197 | imuRotX[i] = 0; 198 | imuRotY[i] = 0; 199 | imuRotZ[i] = 0; 200 | } 201 | } 202 | 203 | ~ImageProjection(){} 204 | 205 | /** 206 | * 订阅原始imu数据 207 | * 1、imu原始测量数据转换到lidar系,加速度、角速度、RPY 208 | */ 209 | void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg) 210 | { 211 | // imu原始测量数据转换到lidar系,加速度、角速度、RPY 212 | sensor_msgs::Imu thisImu = imuConverter(*imuMsg); 213 | 214 | // 上锁,添加数据的时候队列不可用 215 | std::lock_guard lock1(imuLock); 216 | imuQueue.push_back(thisImu); 217 | 218 | // debug IMU data 219 | // cout << std::setprecision(6); 220 | // cout << "IMU acc: " << endl; 221 | // cout << "x: " << thisImu.linear_acceleration.x << 222 | // ", y: " << thisImu.linear_acceleration.y << 223 | // ", z: " << thisImu.linear_acceleration.z << endl; 224 | // cout << "IMU gyro: " << endl; 225 | // cout << "x: " << thisImu.angular_velocity.x << 226 | // ", y: " << thisImu.angular_velocity.y << 227 | // ", z: " << 
thisImu.angular_velocity.z << endl; 228 | // double imuRoll, imuPitch, imuYaw; 229 | // tf::Quaternion orientation; 230 | // tf::quaternionMsgToTF(thisImu.orientation, orientation); 231 | // tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw); 232 | // cout << "IMU roll pitch yaw: " << endl; 233 | // cout << "roll: " << imuRoll << ", pitch: " << imuPitch << ", yaw: " << imuYaw << endl << endl; 234 | } 235 | 236 | /** 237 | * 订阅imu里程计,由imuPreintegration积分计算得到的每时刻imu位姿 238 | */ 239 | void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg) 240 | { 241 | std::lock_guard lock2(odoLock); 242 | odomQueue.push_back(*odometryMsg); 243 | } 244 | 245 | /** 246 | * 订阅原始lidar数据 247 | * 1、添加一帧激光点云到队列,取出最早一帧作为当前帧,计算起止时间戳,检查数据有效性 248 | * 2、当前帧起止时刻对应的imu数据、imu里程计数据处理 249 | * imu数据: 250 | * 1) 遍历当前激光帧起止时刻之间的imu数据,初始时刻对应imu的姿态角RPY设为当前帧的初始姿态角 251 | * 2) 用角速度、时间积分,计算每一时刻相对于初始时刻的旋转量,初始时刻旋转设为0 252 | * imu里程计数据: 253 | * 1) 遍历当前激光帧起止时刻之间的imu里程计数据,初始时刻对应imu里程计设为当前帧的初始位姿 254 | * 2) 用起始、终止时刻对应imu里程计,计算相对位姿变换,保存平移增量 255 | * 3、当前帧激光点云运动畸变校正 256 | * 1) 检查激光点距离、扫描线是否合规 257 | * 2) 激光运动畸变校正,保存激光点 258 | * 4、提取有效激光点,存extractedCloud 259 | * 5、发布当前帧校正后点云,有效点 260 | * 6、重置参数,接收每帧lidar数据都要重置这些参数 261 | */ 262 | void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg) 263 | { 264 | // 添加一帧激光点云到队列,取出最早一帧作为当前帧,计算起止时间戳,检查数据有效性 265 | if (!cachePointCloud(laserCloudMsg)) 266 | return; 267 | 268 | // 当前帧起止时刻对应的imu数据、imu里程计数据处理 269 | if (!deskewInfo()) 270 | return; 271 | 272 | // 当前帧激光点云运动畸变校正 273 | // 1、检查激光点距离、扫描线是否合规 274 | // 2、激光运动畸变校正,保存激光点 275 | projectPointCloud(); 276 | 277 | // 提取有效激光点,存extractedCloud 278 | cloudExtraction(); 279 | 280 | // 发布当前帧校正后点云,有效点 281 | publishClouds(); 282 | 283 | // 重置参数,接收每帧lidar数据都要重置这些参数 284 | resetParameters(); 285 | } 286 | 287 | /** 288 | * 添加一帧激光点云到队列,取出最早一帧作为当前帧,计算起止时间戳,检查数据有效性 289 | */ 290 | bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg) 291 | { 292 | cloudQueue.push_back(*laserCloudMsg); 293 | if (cloudQueue.size() <= 2) 294 | return false; 295 | 296 | // 取出激光点云队列中最早的一帧 297 | currentCloudMsg = std::move(cloudQueue.front()); 298 | cloudQueue.pop_front(); 299 | if (sensor == SensorType::VELODYNE) 300 | { 301 | // 转换成pcl点云格式 302 | pcl::moveFromROSMsg(currentCloudMsg, *laserCloudIn); 303 | } 304 | else if (sensor == SensorType::OUSTER) 305 | { 306 | // 转换成Velodyne格式 307 | pcl::moveFromROSMsg(currentCloudMsg, *tmpOusterCloudIn); 308 | laserCloudIn->points.resize(tmpOusterCloudIn->size()); 309 | laserCloudIn->is_dense = tmpOusterCloudIn->is_dense; 310 | for (size_t i = 0; i < tmpOusterCloudIn->size(); i++) 311 | { 312 | auto &src = tmpOusterCloudIn->points[i]; 313 | auto &dst = laserCloudIn->points[i]; 314 | dst.x = src.x; 315 | dst.y = src.y; 316 | dst.z = src.z; 317 | dst.intensity = src.intensity; 318 | dst.ring = src.ring; 319 | dst.time = src.t * 1e-9f; 320 | } 321 | } 322 | else 323 | { 324 | ROS_ERROR_STREAM("Unknown sensor type: " << int(sensor)); 325 | ros::shutdown(); 326 | } 327 | 328 | // 当前帧头部 329 | cloudHeader = currentCloudMsg.header; 330 | // 当前帧起始时刻 331 | timeScanCur = cloudHeader.stamp.toSec(); 332 | // 当前帧结束时刻,注:点云中激光点的time记录相对于当前帧第一个激光点的时差,第一个点time=0 333 | timeScanEnd = timeScanCur + laserCloudIn->points.back().time; 334 | 335 | // 存在无效点,Nan或者Inf 336 | if (laserCloudIn->is_dense == false) 337 | { 338 | ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!"); 339 | ros::shutdown(); 340 | } 341 | 342 | // 检查是否存在ring通道,注意static只检查一次 343 | static int ringFlag = 0; 344 | if (ringFlag == 0) 
345 | { 346 | ringFlag = -1; 347 | for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i) 348 | { 349 | if (currentCloudMsg.fields[i].name == "ring") 350 | { 351 | ringFlag = 1; 352 | break; 353 | } 354 | } 355 | if (ringFlag == -1) 356 | { 357 | ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!"); 358 | ros::shutdown(); 359 | } 360 | } 361 | 362 | // 检查是否存在time通道 363 | if (deskewFlag == 0) 364 | { 365 | deskewFlag = -1; 366 | for (auto &field : currentCloudMsg.fields) 367 | { 368 | if (field.name == "time" || field.name == "t") 369 | { 370 | deskewFlag = 1; 371 | break; 372 | } 373 | } 374 | if (deskewFlag == -1) 375 | ROS_WARN("Point cloud timestamp not available, deskew function disabled, system will drift significantly!"); 376 | } 377 | 378 | return true; 379 | } 380 | 381 | /** 382 | * 当前帧起止时刻对应的imu数据、imu里程计数据处理 383 | */ 384 | bool deskewInfo() 385 | { 386 | std::lock_guard lock1(imuLock); 387 | std::lock_guard lock2(odoLock); 388 | 389 | // 要求imu数据包含激光数据,否则不往下处理了 390 | if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur || imuQueue.back().header.stamp.toSec() < timeScanEnd) 391 | { 392 | ROS_DEBUG("Waiting for IMU data ..."); 393 | return false; 394 | } 395 | 396 | // 当前帧对应imu数据处理 397 | // 1、遍历当前激光帧起止时刻之间的imu数据,初始时刻对应imu的姿态角RPY设为当前帧的初始姿态角 398 | // 2、用角速度、时间积分,计算每一时刻相对于初始时刻的旋转量,初始时刻旋转设为0 399 | // 注:imu数据都已经转换到lidar系下了 400 | imuDeskewInfo(); 401 | 402 | // 当前帧对应imu里程计处理 403 | // 1、遍历当前激光帧起止时刻之间的imu里程计数据,初始时刻对应imu里程计设为当前帧的初始位姿 404 | // 2、用起始、终止时刻对应imu里程计,计算相对位姿变换,保存平移增量 405 | // 注:imu数据都已经转换到lidar系下了 406 | odomDeskewInfo(); 407 | 408 | return true; 409 | } 410 | 411 | /** 412 | * 当前帧对应imu数据处理 413 | * 1、遍历当前激光帧起止时刻之间的imu数据,初始时刻对应imu的姿态角RPY设为当前帧的初始姿态角 414 | * 2、用角速度、时间积分,计算每一时刻相对于初始时刻的旋转量,初始时刻旋转设为0 415 | * 注:imu数据都已经转换到lidar系下了 416 | */ 417 | void imuDeskewInfo() 418 | { 419 | cloudInfo.imuAvailable = false; 420 | 421 | // 从imu队列中删除当前激光帧0.01s前面时刻的imu数据 422 | while (!imuQueue.empty()) 423 | { 424 | if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01) 425 | imuQueue.pop_front(); 426 | else 427 | break; 428 | } 429 | 430 | if (imuQueue.empty()) 431 | return; 432 | 433 | imuPointerCur = 0; 434 | 435 | // 遍历当前激光帧起止时刻(前后扩展0.01s)之间的imu数据 436 | for (int i = 0; i < (int)imuQueue.size(); ++i) 437 | { 438 | sensor_msgs::Imu thisImuMsg = imuQueue[i]; 439 | double currentImuTime = thisImuMsg.header.stamp.toSec(); 440 | 441 | // 提取imu姿态角RPY,作为当前lidar帧初始姿态角 442 | if (currentImuTime <= timeScanCur) 443 | imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit); 444 | 445 | // 超过当前激光帧结束时刻0.01s,结束 446 | if (currentImuTime > timeScanEnd + 0.01) 447 | break; 448 | 449 | // 第一帧imu旋转角初始化 450 | if (imuPointerCur == 0){ 451 | imuRotX[0] = 0; 452 | imuRotY[0] = 0; 453 | imuRotZ[0] = 0; 454 | imuTime[0] = currentImuTime; 455 | ++imuPointerCur; 456 | continue; 457 | } 458 | 459 | // 提取imu角速度 460 | double angular_x, angular_y, angular_z; 461 | imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z); 462 | 463 | // imu帧间时差 464 | double timeDiff = currentImuTime - imuTime[imuPointerCur-1]; 465 | // 当前时刻旋转角 = 前一时刻旋转角 + 角速度 * 时差 466 | imuRotX[imuPointerCur] = imuRotX[imuPointerCur-1] + angular_x * timeDiff; 467 | imuRotY[imuPointerCur] = imuRotY[imuPointerCur-1] + angular_y * timeDiff; 468 | imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur-1] + angular_z * timeDiff; 469 | imuTime[imuPointerCur] = currentImuTime; 470 | ++imuPointerCur; 471 | } 472 | 473 | --imuPointerCur; 474 | 475 | // 
没有合规的imu数据 476 | if (imuPointerCur <= 0) 477 | return; 478 | 479 | cloudInfo.imuAvailable = true; 480 | } 481 | 482 | /** 483 | * 当前帧对应imu里程计处理 484 | * 1、遍历当前激光帧起止时刻之间的imu里程计数据,初始时刻对应imu里程计设为当前帧的初始位姿 485 | * 2、用起始、终止时刻对应imu里程计,计算相对位姿变换,保存平移增量 486 | * 注:imu数据都已经转换到lidar系下了 487 | */ 488 | void odomDeskewInfo() 489 | { 490 | cloudInfo.odomAvailable = false; 491 | 492 | // 从imu里程计队列中删除当前激光帧0.01s前面时刻的imu数据 493 | while (!odomQueue.empty()) 494 | { 495 | if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01) 496 | odomQueue.pop_front(); 497 | else 498 | break; 499 | } 500 | 501 | if (odomQueue.empty()) 502 | return; 503 | 504 | // 要求必须有当前激光帧时刻之前的imu里程计数据 505 | if (odomQueue.front().header.stamp.toSec() > timeScanCur) 506 | return; 507 | 508 | // 提取当前激光帧起始时刻的imu里程计 509 | nav_msgs::Odometry startOdomMsg; 510 | 511 | for (int i = 0; i < (int)odomQueue.size(); ++i) 512 | { 513 | startOdomMsg = odomQueue[i]; 514 | 515 | if (ROS_TIME(&startOdomMsg) < timeScanCur) 516 | continue; 517 | else 518 | break; 519 | } 520 | 521 | // 提取imu里程计姿态角 522 | tf::Quaternion orientation; 523 | tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation); 524 | 525 | double roll, pitch, yaw; 526 | tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw); 527 | 528 | // 用当前激光帧起始时刻的imu里程计,初始化lidar位姿,后面用于mapOptmization 529 | cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x; 530 | cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y; 531 | cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z; 532 | cloudInfo.initialGuessRoll = roll; 533 | cloudInfo.initialGuessPitch = pitch; 534 | cloudInfo.initialGuessYaw = yaw; 535 | 536 | cloudInfo.odomAvailable = true; 537 | 538 | odomDeskewFlag = false; 539 | 540 | // 如果当前激光帧结束时刻之后没有imu里程计数据,返回 541 | if (odomQueue.back().header.stamp.toSec() < timeScanEnd) 542 | return; 543 | 544 | // 提取当前激光帧结束时刻的imu里程计 545 | nav_msgs::Odometry endOdomMsg; 546 | 547 | for (int i = 0; i < (int)odomQueue.size(); ++i) 548 | { 549 | endOdomMsg = odomQueue[i]; 550 | 551 | if (ROS_TIME(&endOdomMsg) < timeScanEnd) 552 | continue; 553 | else 554 | break; 555 | } 556 | 557 | // 如果起止时刻对应imu里程计的方差不等,返回 558 | if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0]))) 559 | return; 560 | 561 | Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x, startOdomMsg.pose.pose.position.y, startOdomMsg.pose.pose.position.z, roll, pitch, yaw); 562 | 563 | tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation); 564 | tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw); 565 | Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x, endOdomMsg.pose.pose.position.y, endOdomMsg.pose.pose.position.z, roll, pitch, yaw); 566 | 567 | // 起止时刻imu里程计的相对变换 568 | Eigen::Affine3f transBt = transBegin.inverse() * transEnd; 569 | 570 | // 相对变换,提取增量平移、旋转(欧拉角) 571 | float rollIncre, pitchIncre, yawIncre; 572 | pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre); 573 | 574 | odomDeskewFlag = true; 575 | } 576 | 577 | /** 578 | * 在当前激光帧起止时间范围内,计算某一时刻的旋转(相对于起始时刻的旋转增量) 579 | */ 580 | void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur) 581 | { 582 | *rotXCur = 0; *rotYCur = 0; *rotZCur = 0; 583 | 584 | // 查找当前时刻在imuTime下的索引 585 | int imuPointerFront = 0; 586 | while (imuPointerFront < imuPointerCur) 587 | { 588 | if (pointTime < imuTime[imuPointerFront]) 589 | break; 590 | ++imuPointerFront; 591 | } 592 | 593 | // 
设为离当前时刻最近的旋转增量 594 | if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0) 595 | { 596 | *rotXCur = imuRotX[imuPointerFront]; 597 | *rotYCur = imuRotY[imuPointerFront]; 598 | *rotZCur = imuRotZ[imuPointerFront]; 599 | } else { 600 | // 前后时刻插值计算当前时刻的旋转增量 601 | int imuPointerBack = imuPointerFront - 1; 602 | double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]); 603 | double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]); 604 | *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack; 605 | *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack; 606 | *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack; 607 | } 608 | } 609 | 610 | /** 611 | * 在当前激光帧起止时间范围内,计算某一时刻的平移(相对于起始时刻的平移增量) 612 | */ 613 | void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur) 614 | { 615 | // 如果传感器移动速度较慢,例如人行走的速度,那么可以认为激光在一帧时间范围内,平移量小到可以忽略不计 616 | *posXCur = 0; *posYCur = 0; *posZCur = 0; 617 | 618 | // If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. Thus code below is commented. 619 | 620 | // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false) 621 | // return; 622 | 623 | // float ratio = relTime / (timeScanEnd - timeScanCur); 624 | 625 | // *posXCur = ratio * odomIncreX; 626 | // *posYCur = ratio * odomIncreY; 627 | // *posZCur = ratio * odomIncreZ; 628 | } 629 | 630 | /** 631 | * 激光运动畸变校正 632 | * 利用当前帧起止时刻之间的imu数据计算旋转增量,imu里程计数据计算平移增量,进而将每一时刻激光点位置变换到第一个激光点坐标系下,进行运动补偿 633 | */ 634 | PointType deskewPoint(PointType *point, double relTime) 635 | { 636 | if (deskewFlag == -1 || cloudInfo.imuAvailable == false) 637 | return *point; 638 | 639 | // relTime是当前激光点相对于激光帧起始时刻的时间,pointTime则是当前激光点的时间戳 640 | double pointTime = timeScanCur + relTime; 641 | 642 | // 在当前激光帧起止时间范围内,计算某一时刻的旋转(相对于起始时刻的旋转增量) 643 | float rotXCur, rotYCur, rotZCur; 644 | findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur); 645 | 646 | // 在当前激光帧起止时间范围内,计算某一时刻的平移(相对于起始时刻的平移增量) 647 | float posXCur, posYCur, posZCur; 648 | findPosition(relTime, &posXCur, &posYCur, &posZCur); 649 | 650 | // 第一个点的位姿增量(0),求逆 651 | if (firstPointFlag == true) 652 | { 653 | transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur)).inverse(); 654 | firstPointFlag = false; 655 | } 656 | 657 | // 当前时刻激光点与第一个激光点的位姿变换 658 | Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur); 659 | Eigen::Affine3f transBt = transStartInverse * transFinal; 660 | 661 | // 当前激光点在第一个激光点坐标系下的坐标 662 | PointType newPoint; 663 | newPoint.x = transBt(0,0) * point->x + transBt(0,1) * point->y + transBt(0,2) * point->z + transBt(0,3); 664 | newPoint.y = transBt(1,0) * point->x + transBt(1,1) * point->y + transBt(1,2) * point->z + transBt(1,3); 665 | newPoint.z = transBt(2,0) * point->x + transBt(2,1) * point->y + transBt(2,2) * point->z + transBt(2,3); 666 | newPoint.intensity = point->intensity; 667 | 668 | return newPoint; 669 | } 670 | 671 | /** 672 | * 当前帧激光点云运动畸变校正 673 | * 1、检查激光点距离、扫描线是否合规 674 | * 2、激光运动畸变校正,保存激光点 675 | */ 676 | void projectPointCloud() 677 | { 678 | int cloudSize = laserCloudIn->points.size(); 679 | 680 | // 遍历当前帧激光点云 681 | for (int i = 0; i < cloudSize; ++i) 682 | { 683 | // pcl格式 684 | PointType thisPoint; 685 | thisPoint.x = laserCloudIn->points[i].x; 686 | thisPoint.y = 
laserCloudIn->points[i].y; 687 | thisPoint.z = laserCloudIn->points[i].z; 688 | thisPoint.intensity = laserCloudIn->points[i].intensity; 689 | 690 | // 距离检查 691 | float range = pointDistance(thisPoint); 692 | if (range < lidarMinRange || range > lidarMaxRange) 693 | continue; 694 | 695 | // 扫描线检查 696 | int rowIdn = laserCloudIn->points[i].ring; 697 | if (rowIdn < 0 || rowIdn >= N_SCAN) 698 | continue; 699 | 700 | // 扫描线如果有降采样,跳过采样的扫描线这里要跳过 701 | if (rowIdn % downsampleRate != 0) 702 | continue; 703 | 704 | float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI; 705 | 706 | // 水平扫描角度步长,例如一周扫描1800次,则两次扫描间隔角度0.2° 707 | static float ang_res_x = 360.0/float(Horizon_SCAN); 708 | int columnIdn = -round((horizonAngle-90.0)/ang_res_x) + Horizon_SCAN/2; 709 | if (columnIdn >= Horizon_SCAN) 710 | columnIdn -= Horizon_SCAN; 711 | 712 | if (columnIdn < 0 || columnIdn >= Horizon_SCAN) 713 | continue; 714 | 715 | // 已经存过该点,不再处理 716 | if (rangeMat.at(rowIdn, columnIdn) != FLT_MAX) 717 | continue; 718 | 719 | // 激光运动畸变校正 720 | // 利用当前帧起止时刻之间的imu数据计算旋转增量,imu里程计数据计算平移增量,进而将每一时刻激光点位置变换到第一个激光点坐标系下,进行运动补偿 721 | thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time); 722 | 723 | // 矩阵存激光点的距离 724 | rangeMat.at(rowIdn, columnIdn) = range; 725 | 726 | // 转换成一维索引,存校正之后的激光点 727 | int index = columnIdn + rowIdn * Horizon_SCAN; 728 | fullCloud->points[index] = thisPoint; 729 | } 730 | } 731 | 732 | /** 733 | * 提取有效激光点,存extractedCloud 734 | */ 735 | void cloudExtraction() 736 | { 737 | // 有效激光点数量 738 | int count = 0; 739 | // 遍历所有激光点 740 | for (int i = 0; i < N_SCAN; ++i) 741 | { 742 | // 记录每根扫描线起始第5个激光点在一维数组中的索引 743 | cloudInfo.startRingIndex[i] = count - 1 + 5; 744 | 745 | for (int j = 0; j < Horizon_SCAN; ++j) 746 | { 747 | // 有效激光点 748 | if (rangeMat.at(i,j) != FLT_MAX) 749 | { 750 | // 记录激光点对应的Horizon_SCAN方向上的索引 751 | cloudInfo.pointColInd[count] = j; 752 | // 激光点距离 753 | cloudInfo.pointRange[count] = rangeMat.at(i,j); 754 | // 加入有效激光点 755 | extractedCloud->push_back(fullCloud->points[j + i*Horizon_SCAN]); 756 | ++count; 757 | } 758 | } 759 | // 记录每根扫描线倒数第5个激光点在一维数组中的索引 760 | cloudInfo.endRingIndex[i] = count -1 - 5; 761 | } 762 | } 763 | 764 | /** 765 | * 发布当前帧校正后点云,有效点 766 | */ 767 | void publishClouds() 768 | { 769 | cloudInfo.header = cloudHeader; 770 | cloudInfo.cloud_deskewed = publishCloud(&pubExtractedCloud, extractedCloud, cloudHeader.stamp, lidarFrame); 771 | pubLaserCloudInfo.publish(cloudInfo); 772 | } 773 | }; 774 | 775 | int main(int argc, char** argv) 776 | { 777 | ros::init(argc, argv, "lio_sam"); 778 | 779 | ImageProjection IP; 780 | 781 | ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m"); 782 | 783 | ros::MultiThreadedSpinner spinner(3); 784 | spinner.spin(); 785 | 786 | return 0; 787 | } 788 | -------------------------------------------------------------------------------- /src/imuPreintegration.cpp: -------------------------------------------------------------------------------- 1 | /************************************************* 2 | GitHub: https://github.com/smilefacehh/LIO-SAM-DetailedNote 3 | Author: lutao2014@163.com 4 | Date: 2021-02-21 5 | -------------------------------------------------- 6 | TransformFusion类 7 | 功能简介: 8 | 主要功能是订阅激光里程计(来自MapOptimization)和IMU里程计,根据前一时刻激光里程计,和该时刻到当前时刻的IMU里程计变换增量,计算当前时刻IMU里程计;rviz展示IMU里程计轨迹(局部)。 9 | 10 | 订阅: 11 | 1、订阅激光里程计,来自MapOptimization; 12 | 2、订阅imu里程计,来自ImuPreintegration。 13 | 14 | 15 | 发布: 16 | 1、发布IMU里程计,用于rviz展示; 17 | 2、发布IMU里程计轨迹,仅展示最近一帧激光里程计时刻到当前时刻之间的轨迹。 18 | -------------------------------------------------- 
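    A quick summary of the fusion step, written with the member names used in imuOdometryHandler() below
    (the handler performs exactly this composition):

        imuOdomAffineIncre = imuOdomAffineFront.inverse() * imuOdomAffineBack;  // IMU-odometry increment since the latest lidar keyframe
        imuOdomAffineLast  = lidarOdomAffine * imuOdomAffineIncre;              // fused pose published at IMU rate
--------------------------------------------------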
19 | IMUPreintegration class
20 | Overview:
21 |     1. Builds a factor graph from laser odometry plus the IMU preintegration between two laser-odometry frames, and optimizes the current frame's state (pose, velocity, bias);
22 |     2. Starting from the optimized state, applies the IMU preintegration to obtain an IMU-rate odometry estimate at every timestamp.
23 | 
24 | Subscribes:
25 |     1. Raw IMU data: starting from the factor-graph-optimized laser odometry, applies the IMU preintegration between two frames to predict the IMU odometry at every timestamp (IMU rate);
26 |     2. Laser odometry (from MapOptimization): builds a factor graph from the IMU preintegration between two frames and optimizes the current frame pose (this pose is only used to update the per-timestamp IMU odometry and the next factor-graph optimization).
27 | 
28 | Publishes:
29 |     1. IMU odometry.
30 | **************************************************/
31 | #include "utility.h"
32 | 
33 | #include <gtsam/geometry/Rot3.h>
34 | #include <gtsam/geometry/Pose3.h>
35 | #include <gtsam/slam/PriorFactor.h>
36 | #include <gtsam/slam/BetweenFactor.h>
37 | #include <gtsam/navigation/GPSFactor.h>
38 | #include <gtsam/navigation/ImuFactor.h>
39 | #include <gtsam/navigation/CombinedImuFactor.h>
40 | #include <gtsam/nonlinear/NonlinearFactorGraph.h>
41 | #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
42 | #include <gtsam/nonlinear/Marginals.h>
43 | #include <gtsam/nonlinear/Values.h>
44 | #include <gtsam/inference/Symbol.h>
45 | 
46 | #include <gtsam/nonlinear/ISAM2.h>
47 | #include <gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h>
48 | 
49 | using gtsam::symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
50 | using gtsam::symbol_shorthand::V; // Vel (xdot,ydot,zdot)
51 | using gtsam::symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz)
52 | 
53 | class TransformFusion : public ParamServer
54 | {
55 | public:
56 |     std::mutex mtx;
57 | 
58 |     ros::Subscriber subImuOdometry;
59 |     ros::Subscriber subLaserOdometry;
60 | 
61 |     ros::Publisher pubImuOdometry;
62 |     ros::Publisher pubImuPath;
63 | 
64 |     Eigen::Affine3f lidarOdomAffine;
65 |     Eigen::Affine3f imuOdomAffineFront;
66 |     Eigen::Affine3f imuOdomAffineBack;
67 | 
68 |     tf::TransformListener tfListener;
69 |     tf::StampedTransform lidar2Baselink;
70 | 
71 |     double lidarOdomTime = -1;
72 |     deque<nav_msgs::Odometry> imuOdomQueue;
73 | 
74 |     /**
75 |      * Constructor
76 |      */
77 |     TransformFusion()
78 |     {
79 |         // if the lidar frame and the baselink frame differ (sensor frame vs. body frame), the transform between them must be provided externally
80 |         if(lidarFrame != baselinkFrame)
81 |         {
82 |             try
83 |             {
84 |                 // wait up to 3 s
85 |                 tfListener.waitForTransform(lidarFrame, baselinkFrame, ros::Time(0), ros::Duration(3.0));
86 |                 // transform from the lidar frame to the baselink frame
87 |                 tfListener.lookupTransform(lidarFrame, baselinkFrame, ros::Time(0), lidar2Baselink);
88 |             }
89 |             catch (tf::TransformException ex)
90 |             {
91 |                 ROS_ERROR("%s",ex.what());
92 |             }
93 |         }
94 | 
95 |         // subscribe to laser odometry, from mapOptimization
96 |         subLaserOdometry = nh.subscribe<nav_msgs::Odometry>("lio_sam/mapping/odometry", 5, &TransformFusion::lidarOdometryHandler, this, ros::TransportHints().tcpNoDelay());
97 |         // subscribe to IMU odometry, from IMUPreintegration
98 |         subImuOdometry = nh.subscribe<nav_msgs::Odometry>(odomTopic+"_incremental", 2000, &TransformFusion::imuOdometryHandler, this, ros::TransportHints().tcpNoDelay());
99 | 
100 |         // publish IMU odometry, for rviz visualization
101 |         pubImuOdometry = nh.advertise<nav_msgs::Odometry>(odomTopic, 2000);
102 |         // publish the IMU odometry path
103 |         pubImuPath = nh.advertise<nav_msgs::Path>("lio_sam/imu/path", 1);
104 |     }
105 | 
106 |     /**
107 |      * Odometry message to transform matrix
108 |      */
109 |     Eigen::Affine3f odom2affine(nav_msgs::Odometry odom)
110 |     {
111 |         double x, y, z, roll, pitch, yaw;
112 |         x = odom.pose.pose.position.x;
113 |         y = odom.pose.pose.position.y;
114 |         z = odom.pose.pose.position.z;
115 |         tf::Quaternion orientation;
116 |         tf::quaternionMsgToTF(odom.pose.pose.orientation, orientation);
117 |         tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
118 |         return pcl::getTransformation(x, y, z, roll, pitch, yaw);
119 |     }
120 | 
121 |     /**
122 |      * Laser odometry callback, from mapOptimization
123 |      */
124 |     void lidarOdometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
125 |     {
126 |         std::lock_guard<std::mutex> lock(mtx);
127 |         // transform matrix of the laser odometry
128 |         lidarOdomAffine = odom2affine(*odomMsg);
129 |         // laser odometry timestamp
130 |         lidarOdomTime = odomMsg->header.stamp.toSec();
131 |     }
132 | 
133 |     /**
134 |      * IMU odometry callback, from IMUPreintegration
135 |      * 1. Starting from the latest laser-odometry pose, compute the incremental IMU-odometry transform from that time to now, and compose the two to get the current IMU-odometry pose
136 |      * 2. Publish the current odometry pose for rviz; publish the IMU odometry path (only the segment between the latest laser-odometry time and now)
137 |      */
138 |     void imuOdometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
139 |     {
140 |         // publish tf; the map and odom frames are treated as the same frame
141 |         static tf::TransformBroadcaster tfMap2Odom;
142 |         static tf::Transform map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0));
143 |         tfMap2Odom.sendTransform(tf::StampedTransform(map_to_odom, odomMsg->header.stamp, mapFrame, odometryFrame));
144 | 
145 |         std::lock_guard<std::mutex> lock(mtx);
146 | 
147 |         // add the IMU odometry to the queue
148 |         imuOdomQueue.push_back(*odomMsg);
149 | 
150 |         // drop IMU odometry older than the current (latest) laser-odometry time
151 |         if (lidarOdomTime == -1)
152 |             return;
153 |         while (!imuOdomQueue.empty())
154 |         {
155 |             if (imuOdomQueue.front().header.stamp.toSec() <= lidarOdomTime)
156 |                 imuOdomQueue.pop_front();
157 |             else
158 |                 break;
159 |         }
160 |         // IMU-odometry pose at the latest laser-odometry time
161 |         Eigen::Affine3f imuOdomAffineFront = odom2affine(imuOdomQueue.front());
162 |         // IMU-odometry pose at the current time
163 |         Eigen::Affine3f imuOdomAffineBack = odom2affine(imuOdomQueue.back());
164 |         // incremental IMU-odometry transform
165 |         Eigen::Affine3f imuOdomAffineIncre = imuOdomAffineFront.inverse() * imuOdomAffineBack;
166 |         // latest laser-odometry pose * IMU-odometry increment = current IMU-odometry pose
167 |         Eigen::Affine3f imuOdomAffineLast = lidarOdomAffine * imuOdomAffineIncre;
168 |         float x, y, z, roll, pitch, yaw;
169 |         pcl::getTranslationAndEulerAngles(imuOdomAffineLast, x, y, z, roll, pitch, yaw);
170 | 
171 |         // publish the current odometry pose
172 |         nav_msgs::Odometry laserOdometry = imuOdomQueue.back();
173 |         laserOdometry.pose.pose.position.x = x;
174 |         laserOdometry.pose.pose.position.y = y;
175 |         laserOdometry.pose.pose.position.z = z;
176 |         laserOdometry.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
177 |         pubImuOdometry.publish(laserOdometry);
178 |         // publish tf: current odom-to-baselink transform
179 |         static tf::TransformBroadcaster tfOdom2BaseLink;
180 |         tf::Transform tCur;
181 |         tf::poseMsgToTF(laserOdometry.pose.pose, tCur);
182 |         if(lidarFrame != baselinkFrame)
183 |             tCur = tCur * lidar2Baselink;
184 |         tf::StampedTransform odom_2_baselink = tf::StampedTransform(tCur, odomMsg->header.stamp, odometryFrame, baselinkFrame);
185 |         tfOdom2BaseLink.sendTransform(odom_2_baselink);
186 | 
187 |         // publish the IMU odometry path; note: only the segment between the latest laser-odometry time and now
188 |         static nav_msgs::Path imuPath;
189 |         static double last_path_time = -1;
190 |         double imuTime = imuOdomQueue.back().header.stamp.toSec();
191 |         // add a pose every 0.1 s
192 |         if (imuTime - last_path_time > 0.1)
193 |         {
194 |             last_path_time = imuTime;
195 |             geometry_msgs::PoseStamped pose_stamped;
196 |             pose_stamped.header.stamp = imuOdomQueue.back().header.stamp;
197 |             pose_stamped.header.frame_id = odometryFrame;
198 |             pose_stamped.pose = laserOdometry.pose.pose;
199 |             imuPath.poses.push_back(pose_stamped);
200 |             // drop path poses older than the latest laser-odometry time
201 |             while(!imuPath.poses.empty() && imuPath.poses.front().header.stamp.toSec() < lidarOdomTime - 1.0)
202 |                 imuPath.poses.erase(imuPath.poses.begin());
203 |             if (pubImuPath.getNumSubscribers() != 0)
204 |             {
205 |                 imuPath.header.stamp = imuOdomQueue.back().header.stamp;
206 |                 imuPath.header.frame_id = odometryFrame;
207 |                 pubImuPath.publish(imuPath);
208 |             }
209 |         }
210 |     }
211 | };
212 | 
213 | class IMUPreintegration : public ParamServer
214 | {
215 | public:
216 | 
217 |     std::mutex mtx;
218 | 
219 |     // subscribers and publishers
220 |     ros::Subscriber subImu;
221 |     ros::Subscriber subOdometry;
222 |     ros::Publisher pubImuOdometry;
223 | 
224 |     bool systemInitialized = false;
225 | 
226 |     // noise covariances
227 |     gtsam::noiseModel::Diagonal::shared_ptr priorPoseNoise;
228 |     gtsam::noiseModel::Diagonal::shared_ptr priorVelNoise;
229 |     gtsam::noiseModel::Diagonal::shared_ptr priorBiasNoise;
230 |     gtsam::noiseModel::Diagonal::shared_ptr correctionNoise;
231 |     gtsam::noiseModel::Diagonal::shared_ptr correctionNoise2;
232 |     gtsam::Vector noiseModelBetweenBias;
233 | 
234 |     // IMU preintegrators
235 |     gtsam::PreintegratedImuMeasurements *imuIntegratorOpt_;
236 |     gtsam::PreintegratedImuMeasurements *imuIntegratorImu_;
237 | 
238 |     // IMU data queues
239 |     std::deque<sensor_msgs::Imu> imuQueOpt;
240 |     std::deque<sensor_msgs::Imu> imuQueImu;
241 | 
242 |     // state variables used in the IMU factor-graph optimization
243 |     gtsam::Pose3 prevPose_;
244 |     gtsam::Vector3 prevVel_;
245 |     gtsam::NavState prevState_;
246 |     gtsam::imuBias::ConstantBias prevBias_;
247 | 
248 |     // IMU state used for odometry propagation
249 |     gtsam::NavState prevStateOdom;
250 |     gtsam::imuBias::ConstantBias prevBiasOdom;
251 | 
252 |     bool doneFirstOpt = false;
253 |     double lastImuT_imu = -1;
254 |     double lastImuT_opt = -1;
255 | 
256 |     // ISAM2 optimizer
257 |     gtsam::ISAM2 optimizer;
258 |     gtsam::NonlinearFactorGraph graphFactors;
259 |     gtsam::Values graphValues;
260 | 
261 |     const double delta_t = 0;
262 | 
263 |     int key = 1;
264 | 
265 |     // imu-lidar extrinsic transforms
266 |     gtsam::Pose3 imu2Lidar = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(-extTrans.x(), -extTrans.y(), -extTrans.z()));
267 |     gtsam::Pose3 lidar2Imu = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(extTrans.x(), extTrans.y(), extTrans.z()));
268 | 
269 |     /**
270 |      * Constructor
271 |      */
272 |     IMUPreintegration()
273 |     {
274 |         // subscribe to raw IMU data; starting from the factor-graph-optimized laser odometry below, apply the preintegration between two frames to predict the IMU odometry at every timestamp (IMU rate)
275 |         subImu = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &IMUPreintegration::imuHandler, this, ros::TransportHints().tcpNoDelay());
276 |         // subscribe to laser odometry from mapOptimization; build a factor graph from the preintegration between two frames and optimize the current frame pose (this pose is only used to update the per-timestamp IMU odometry and the next factor-graph optimization)
277 |         subOdometry = nh.subscribe<nav_msgs::Odometry>("lio_sam/mapping/odometry_incremental", 5, &IMUPreintegration::odometryHandler, this, ros::TransportHints().tcpNoDelay());
278 | 
279 |         // publish IMU odometry
280 |         pubImuOdometry = nh.advertise<nav_msgs::Odometry>(odomTopic+"_incremental", 2000);
281 | 
282 |         // noise covariances for IMU preintegration
283 |         boost::shared_ptr<gtsam::PreintegrationParams> p = gtsam::PreintegrationParams::MakeSharedU(imuGravity);
284 |         p->accelerometerCovariance = gtsam::Matrix33::Identity(3,3) * pow(imuAccNoise, 2); // acc white noise in continuous
285 |         p->gyroscopeCovariance = gtsam::Matrix33::Identity(3,3) * pow(imuGyrNoise, 2); // gyro white noise in continuous
286 |         p->integrationCovariance = gtsam::Matrix33::Identity(3,3) * pow(1e-4, 2); // error committed in integrating position from velocities
287 |         gtsam::imuBias::ConstantBias prior_imu_bias((gtsam::Vector(6) << 0, 0, 0, 0, 0, 0).finished()); // assume zero initial bias
288 | 
289 |         // prior noise models
290 |         priorPoseNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-2, 1e-2, 1e-2).finished()); // rad,rad,rad,m, m, m
291 |         priorVelNoise = gtsam::noiseModel::Isotropic::Sigma(3, 1e4); // m/s
292 |         priorBiasNoise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-3); // 1e-2 ~ 1e-3 seems to be good
293 |         // if the scan-to-map optimization of the laser odometry is degenerate, use a larger covariance
294 |         correctionNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 0.05, 0.05, 0.05, 0.1, 0.1, 0.1).finished()); // rad,rad,rad,m, m, m
295 |         correctionNoise2 = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1, 1, 1, 1, 1, 1).finished()); // rad,rad,rad,m, m, m
296 |         noiseModelBetweenBias = (gtsam::Vector(6) << imuAccBiasN, imuAccBiasN, imuAccBiasN, imuGyrBiasN, imuGyrBiasN, imuGyrBiasN).finished();
297 | 
298 |         // IMU preintegrator used to predict the IMU odometry at every timestamp (in the lidar frame, the same frame as the laser odometry)
299 |         imuIntegratorImu_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for IMU message thread
300 |         // IMU preintegrator used for factor-graph optimization
301 |         imuIntegratorOpt_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for optimization
302 |     }
303 | 
304 |     /**
305 |      * Reset the ISAM2 optimizer
306 |      */
307 |     void resetOptimization()
308 |     {
309 |         gtsam::ISAM2Params optParameters;
310 |         optParameters.relinearizeThreshold = 0.1;
311 |         optParameters.relinearizeSkip = 1;
312 |         optimizer = gtsam::ISAM2(optParameters);
313 | 
314 |         gtsam::NonlinearFactorGraph newGraphFactors;
315 |         graphFactors = newGraphFactors;
316 | 
317 |         gtsam::Values NewGraphValues;
318 |         graphValues = NewGraphValues;
319 |     }
320 | 
321 |     /**
322 |      * Reset parameters
323 |      */
324 |     void resetParams()
325 |     {
326 |         lastImuT_imu = -1;
327 |         doneFirstOpt = false;
328 |         systemInitialized = false;
329 |     }
330 | 
331 |     /**
332 |      * Laser odometry callback, from mapOptimization
333 |      * 1. Every 100 laser-odometry frames, reset the ISAM2 optimizer, add pose/velocity/bias prior factors, and run the optimization
334 |      * 2. Compute the IMU preintegration between the previous and current laser-odometry frames, apply it to the previous state to get an initial estimate of the current state, add the current pose from mapOptimization, run the factor-graph optimization, and update the current state
335 |      * 3. After optimization, re-propagate: the optimization updated the IMU bias, so re-integrate the IMU data after the current laser-odometry time with the latest bias; this preintegration is used to compute the per-timestamp poses
336 |      */
337 |     void odometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
338 |     {
339 |         std::lock_guard<std::mutex> lock(mtx);
340 |         // timestamp of the current laser-odometry frame
341 |         double currentCorrectionTime = ROS_TIME(odomMsg);
342 | 
343 |         // make sure there is IMU data in the optimization queue to preintegrate
344 |         if (imuQueOpt.empty())
345 |             return;
346 | 
347 |         // current lidar pose, from scan-to-map matching and factor-graph optimization
348 |         float p_x = odomMsg->pose.pose.position.x;
349 |         float p_y = odomMsg->pose.pose.position.y;
350 |         float p_z = odomMsg->pose.pose.position.z;
351 |         float r_x = odomMsg->pose.pose.orientation.x;
352 |         float r_y = odomMsg->pose.pose.orientation.y;
353 |         float r_z = odomMsg->pose.pose.orientation.z;
354 |         float r_w = odomMsg->pose.pose.orientation.w;
355 |         bool degenerate = (int)odomMsg->pose.covariance[0] == 1 ? true : false;
356 |         gtsam::Pose3 lidarPose = gtsam::Pose3(gtsam::Rot3::Quaternion(r_w, r_x, r_y, r_z), gtsam::Point3(p_x, p_y, p_z));
357 | 
358 | 
359 |         // 0. System initialization, first frame
360 |         if (systemInitialized == false)
361 |         {
362 |             // reset the ISAM2 optimizer
363 |             resetOptimization();
364 | 
365 |             // drop IMU data older than the current laser-odometry time from the optimization queue
366 |             while (!imuQueOpt.empty())
367 |             {
368 |                 if (ROS_TIME(&imuQueOpt.front()) < currentCorrectionTime - delta_t)
369 |                 {
370 |                     lastImuT_opt = ROS_TIME(&imuQueOpt.front());
371 |                     imuQueOpt.pop_front();
372 |                 }
373 |                 else
374 |                     break;
375 |             }
376 |             // add the pose prior factor
377 |             prevPose_ = lidarPose.compose(lidar2Imu);
378 |             gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, priorPoseNoise);
379 |             graphFactors.add(priorPose);
380 |             // add the velocity prior factor
381 |             prevVel_ = gtsam::Vector3(0, 0, 0);
382 |             gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, priorVelNoise);
383 |             graphFactors.add(priorVel);
384 |             // add the IMU bias prior factor
385 |             prevBias_ = gtsam::imuBias::ConstantBias();
386 |             gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, priorBiasNoise);
387 |             graphFactors.add(priorBias);
388 |             // set initial values for the variable nodes
389 |             graphValues.insert(X(0), prevPose_);
390 |             graphValues.insert(V(0), prevVel_);
391 |             graphValues.insert(B(0), prevBias_);
392 |             // run one optimization
393 |             optimizer.update(graphFactors, graphValues);
394 |             graphFactors.resize(0);
395 |             graphValues.clear();
396 | 
397 |             // reset the preintegrators with the optimized bias
398 |             imuIntegratorImu_->resetIntegrationAndSetBias(prevBias_);
399 |             imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
400 | 
401 |             key = 1;
402 |             systemInitialized = true;
403 |             return;
404 |         }
405 | 
406 | 
407 |         // every 100 laser-odometry frames, reset the ISAM2 optimizer to keep the optimization efficient
408 |         if (key == 100)
409 |         {
410 |             // noise models of the previous frame's pose, velocity, and bias
411 |             gtsam::noiseModel::Gaussian::shared_ptr updatedPoseNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(X(key-1)));
412 |             gtsam::noiseModel::Gaussian::shared_ptr updatedVelNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(V(key-1)));
413 |             gtsam::noiseModel::Gaussian::shared_ptr updatedBiasNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(B(key-1)));
414 |             // reset the ISAM2 optimizer
415 |             resetOptimization();
416 |             // add the pose prior factor, initialized with the previous frame's value
417 |             gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, updatedPoseNoise);
418 |             graphFactors.add(priorPose);
419 |             // add the velocity prior factor, initialized with the previous frame's value
420 |             gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, updatedVelNoise);
421 |             graphFactors.add(priorVel);
422 |             // add the bias prior factor, initialized with the previous frame's value
423 |             gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, updatedBiasNoise);
424 |             graphFactors.add(priorBias);
425 |             // set initial values for the variable nodes with the previous frame's values
426 |             graphValues.insert(X(0), prevPose_);
427 |             graphValues.insert(V(0), prevVel_);
428 |             graphValues.insert(B(0), prevBias_);
429 |             // run one optimization
430 |             optimizer.update(graphFactors, graphValues);
431 |             graphFactors.resize(0);
432 |             graphValues.clear();
433 | 
434 |             key = 1;
435 |         }
436 | 
437 | 
438 |         // 1. Compute the IMU preintegration between the previous and current frames, apply it to the previous state to get an initial estimate of the current state, add the current pose from mapOptimization, run the factor-graph optimization, and update the current state
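        // A minimal sketch of the factor graph built for each new key `key` by the code below
        // (X = pose, V = velocity, B = bias, per the symbol shorthand at the top of this file):
        //
        //   ImuFactor(X(key-1), V(key-1), X(key), V(key), B(key-1), preint_imu)                                      <- IMU preintegration between keyframes
        //   BetweenFactor<ConstantBias>(B(key-1), B(key), ConstantBias(), sqrt(deltaTij()) * noiseModelBetweenBias)  <- bias random walk
        //   PriorFactor<Pose3>(X(key), lidarPose.compose(lidar2Imu), correctionNoise)                                <- scan-to-map pose from mapOptimization
        //
        // Initial values for X(key) and V(key) come from imuIntegratorOpt_->predict(prevState_, prevBias_), and B(key) from prevBias_.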
439 |         while (!imuQueOpt.empty())
440 |         {
441 |             // take IMU samples between the previous and the current frame and integrate them
442 |             sensor_msgs::Imu *thisImu = &imuQueOpt.front();
443 |             double imuTime = ROS_TIME(thisImu);
444 |             if (imuTime < currentCorrectionTime - delta_t)
445 |             {
446 |                 // time interval between two IMU samples
447 |                 double dt = (lastImuT_opt < 0) ? (1.0 / 500.0) : (imuTime - lastImuT_opt);
448 |                 // preintegration inputs: acceleration, angular velocity, dt
449 |                 imuIntegratorOpt_->integrateMeasurement(
450 |                         gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z),
451 |                         gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt);
452 | 
453 |                 lastImuT_opt = imuTime;
454 |                 // remove the processed IMU sample from the queue
455 |                 imuQueOpt.pop_front();
456 |             }
457 |             else
458 |                 break;
459 |         }
460 |         // add the IMU preintegration factor
461 |         const gtsam::PreintegratedImuMeasurements& preint_imu = dynamic_cast<const gtsam::PreintegratedImuMeasurements&>(*imuIntegratorOpt_);
462 |         // arguments: previous pose, previous velocity, current pose, current velocity, previous bias, preintegrated measurements
463 |         gtsam::ImuFactor imu_factor(X(key - 1), V(key - 1), X(key), V(key), B(key - 1), preint_imu);
464 |         graphFactors.add(imu_factor);
465 |         // add the IMU bias between-factor: previous bias, current bias, measurement, noise covariance; deltaTij() is the duration of the integration interval
466 |         graphFactors.add(gtsam::BetweenFactor<gtsam::imuBias::ConstantBias>(B(key - 1), B(key), gtsam::imuBias::ConstantBias(),
467 |                          gtsam::noiseModel::Diagonal::Sigmas(sqrt(imuIntegratorOpt_->deltaTij()) * noiseModelBetweenBias)));
468 |         // add the pose factor
469 |         gtsam::Pose3 curPose = lidarPose.compose(lidar2Imu);
470 |         gtsam::PriorFactor<gtsam::Pose3> pose_factor(X(key), curPose, degenerate ? correctionNoise2 : correctionNoise);
471 |         graphFactors.add(pose_factor);
472 |         // apply the IMU preintegration to the previous state and bias to predict the current state
473 |         gtsam::NavState propState_ = imuIntegratorOpt_->predict(prevState_, prevBias_);
474 |         // set initial values for the variable nodes
475 |         graphValues.insert(X(key), propState_.pose());
476 |         graphValues.insert(V(key), propState_.v());
477 |         graphValues.insert(B(key), prevBias_);
478 |         // optimize
479 |         optimizer.update(graphFactors, graphValues);
480 |         optimizer.update();
481 |         graphFactors.resize(0);
482 |         graphValues.clear();
483 |         // optimization result
484 |         gtsam::Values result = optimizer.calculateEstimate();
485 |         // update the current pose and velocity
486 |         prevPose_ = result.at<gtsam::Pose3>(X(key));
487 |         prevVel_ = result.at<gtsam::Vector3>(V(key));
488 |         // update the current state
489 |         prevState_ = gtsam::NavState(prevPose_, prevVel_);
490 |         // update the current IMU bias
491 |         prevBias_ = result.at<gtsam::imuBias::ConstantBias>(B(key));
492 |         // reset the preintegrator and set the new bias, so that when the next laser-odometry frame arrives the preintegration covers exactly the interval between the two frames
493 |         imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
494 | 
495 |         // if the optimized velocity or bias is too large, treat the optimization as failed
496 |         if (failureDetection(prevVel_, prevBias_))
497 |         {
498 |             // reset parameters
499 |             resetParams();
500 |             return;
501 |         }
502 | 
503 | 
504 |         // 2. After optimization, re-propagate: the optimization updated the IMU bias, so re-integrate the IMU data after the current laser-odometry time with the latest bias; this preintegration is used to compute the per-timestamp poses
505 |         prevStateOdom = prevState_;
506 |         prevBiasOdom = prevBias_;
507 |         // drop IMU data older than the current laser-odometry time from the IMU queue
508 |         double lastImuQT = -1;
509 |         while (!imuQueImu.empty() && ROS_TIME(&imuQueImu.front()) < currentCorrectionTime - delta_t)
510 |         {
511 |             lastImuQT = ROS_TIME(&imuQueImu.front());
512 |             imuQueImu.pop_front();
513 |         }
514 |         // integrate the remaining IMU data
515 |         if (!imuQueImu.empty())
516 |         {
517 |             // reset the preintegrator with the latest bias
518 |             imuIntegratorImu_->resetIntegrationAndSetBias(prevBiasOdom);
519 |             // compute the preintegration
520 |             for (int i = 0; i < (int)imuQueImu.size(); ++i)
521 |             {
522 |                 sensor_msgs::Imu *thisImu = &imuQueImu[i];
523 |                 double imuTime = ROS_TIME(thisImu);
524 |                 double dt = (lastImuQT < 0) ? 
(1.0 / 500.0) :(imuTime - lastImuQT); 525 | 526 | imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z), 527 | gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt); 528 | lastImuQT = imuTime; 529 | } 530 | } 531 | 532 | ++key; 533 | doneFirstOpt = true; 534 | } 535 | 536 | /** 537 | * imu因子图优化结果,速度或者偏置过大,认为失败 538 | */ 539 | bool failureDetection(const gtsam::Vector3& velCur, const gtsam::imuBias::ConstantBias& biasCur) 540 | { 541 | Eigen::Vector3f vel(velCur.x(), velCur.y(), velCur.z()); 542 | if (vel.norm() > 30) 543 | { 544 | ROS_WARN("Large velocity, reset IMU-preintegration!"); 545 | return true; 546 | } 547 | 548 | Eigen::Vector3f ba(biasCur.accelerometer().x(), biasCur.accelerometer().y(), biasCur.accelerometer().z()); 549 | Eigen::Vector3f bg(biasCur.gyroscope().x(), biasCur.gyroscope().y(), biasCur.gyroscope().z()); 550 | if (ba.norm() > 1.0 || bg.norm() > 1.0) 551 | { 552 | ROS_WARN("Large bias, reset IMU-preintegration!"); 553 | return true; 554 | } 555 | 556 | return false; 557 | } 558 | 559 | /** 560 | * 订阅imu原始数据 561 | * 1、用上一帧激光里程计时刻对应的状态、偏置,施加从该时刻开始到当前时刻的imu预计分量,得到当前时刻的状态,也就是imu里程计 562 | * 2、imu里程计位姿转到lidar系,发布里程计 563 | */ 564 | void imuHandler(const sensor_msgs::Imu::ConstPtr& imu_raw) 565 | { 566 | std::lock_guard lock(mtx); 567 | // imu原始测量数据转换到lidar系,加速度、角速度、RPY 568 | sensor_msgs::Imu thisImu = imuConverter(*imu_raw); 569 | 570 | // 添加当前帧imu数据到队列 571 | imuQueOpt.push_back(thisImu); 572 | imuQueImu.push_back(thisImu); 573 | 574 | // 要求上一次imu因子图优化执行成功,确保更新了上一帧(激光里程计帧)的状态、偏置,预积分重新计算了 575 | if (doneFirstOpt == false) 576 | return; 577 | 578 | double imuTime = ROS_TIME(&thisImu); 579 | double dt = (lastImuT_imu < 0) ? 
(1.0 / 500.0) : (imuTime - lastImuT_imu); 580 | lastImuT_imu = imuTime; 581 | 582 | // imu预积分器添加一帧imu数据,注:这个预积分器的起始时刻是上一帧激光里程计时刻 583 | imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu.linear_acceleration.x, thisImu.linear_acceleration.y, thisImu.linear_acceleration.z), 584 | gtsam::Vector3(thisImu.angular_velocity.x, thisImu.angular_velocity.y, thisImu.angular_velocity.z), dt); 585 | 586 | // 用上一帧激光里程计时刻对应的状态、偏置,施加从该时刻开始到当前时刻的imu预计分量,得到当前时刻的状态 587 | gtsam::NavState currentState = imuIntegratorImu_->predict(prevStateOdom, prevBiasOdom); 588 | 589 | // 发布imu里程计(转到lidar系,与激光里程计同一个系) 590 | nav_msgs::Odometry odometry; 591 | odometry.header.stamp = thisImu.header.stamp; 592 | odometry.header.frame_id = odometryFrame; 593 | odometry.child_frame_id = "odom_imu"; 594 | 595 | // 变换到lidar系 596 | gtsam::Pose3 imuPose = gtsam::Pose3(currentState.quaternion(), currentState.position()); 597 | gtsam::Pose3 lidarPose = imuPose.compose(imu2Lidar); 598 | 599 | odometry.pose.pose.position.x = lidarPose.translation().x(); 600 | odometry.pose.pose.position.y = lidarPose.translation().y(); 601 | odometry.pose.pose.position.z = lidarPose.translation().z(); 602 | odometry.pose.pose.orientation.x = lidarPose.rotation().toQuaternion().x(); 603 | odometry.pose.pose.orientation.y = lidarPose.rotation().toQuaternion().y(); 604 | odometry.pose.pose.orientation.z = lidarPose.rotation().toQuaternion().z(); 605 | odometry.pose.pose.orientation.w = lidarPose.rotation().toQuaternion().w(); 606 | 607 | odometry.twist.twist.linear.x = currentState.velocity().x(); 608 | odometry.twist.twist.linear.y = currentState.velocity().y(); 609 | odometry.twist.twist.linear.z = currentState.velocity().z(); 610 | odometry.twist.twist.angular.x = thisImu.angular_velocity.x + prevBiasOdom.gyroscope().x(); 611 | odometry.twist.twist.angular.y = thisImu.angular_velocity.y + prevBiasOdom.gyroscope().y(); 612 | odometry.twist.twist.angular.z = thisImu.angular_velocity.z + prevBiasOdom.gyroscope().z(); 613 | pubImuOdometry.publish(odometry); 614 | } 615 | }; 616 | 617 | 618 | int main(int argc, char** argv) 619 | { 620 | ros::init(argc, argv, "roboat_loam"); 621 | 622 | IMUPreintegration ImuP; 623 | 624 | TransformFusion TF; 625 | 626 | ROS_INFO("\033[1;32m----> IMU Preintegration Started.\033[0m"); 627 | 628 | ros::MultiThreadedSpinner spinner(4); 629 | spinner.spin(); 630 | 631 | return 0; 632 | } 633 | -------------------------------------------------------------------------------- /srv/save_map.srv: -------------------------------------------------------------------------------- 1 | float32 resolution 2 | string destination 3 | --- 4 | bool success 5 | -------------------------------------------------------------------------------- /system.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/system.png -------------------------------------------------------------------------------- /运行截图.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/smilefacehh/LIO-SAM-DetailedNote/a77630a8390a7ab32fd4d92afb2e483ffe48cc2e/运行截图.png --------------------------------------------------------------------------------