├── .github
└── stale.yml
├── CMakeLists.txt
├── Dockerfile
├── LICENSE
├── README.md
├── config
├── doc
│ ├── demo.gif
│ ├── device-boat.png
│ ├── device-hand-2.png
│ ├── device-hand.png
│ ├── device-jackal.png
│ ├── device-livox-horizon.png
│ ├── gps-demo.gif
│ ├── imu-debug.gif
│ ├── imu-transform.png
│ ├── kitti-demo.gif
│ ├── kitti-map.png
│ ├── kitti2bag
│ │ ├── README.md
│ │ └── kitti2bag.py
│ ├── livox-demo.gif
│ ├── loop-closure-2.gif
│ ├── loop-closure.gif
│ ├── ouster-demo.gif
│ ├── ouster-device.jpg
│ ├── paper.pdf
│ └── system.png
└── params.yaml
├── include
└── utility.h
├── launch
├── include
│ ├── config
│ │ ├── robot.urdf.xacro
│ │ └── rviz.rviz
│ ├── module_loam.launch
│ ├── module_navsat.launch
│ ├── module_robot_state_publisher.launch
│ ├── module_rviz.launch
│ └── rosconsole
│ │ ├── rosconsole_error.conf
│ │ ├── rosconsole_info.conf
│ │ └── rosconsole_warn.conf
└── run.launch
├── msg
└── cloud_info.msg
├── package.xml
├── src
├── featureExtraction.cpp
├── imageProjection.cpp
├── imuPreintegration.cpp
└── mapOptmization.cpp
└── srv
└── save_map.srv
/.github/stale.yml:
--------------------------------------------------------------------------------
1 | # Number of days of inactivity before an issue becomes stale
2 | daysUntilStale: 14
3 | # Number of days of inactivity before a stale issue is closed
4 | daysUntilClose: 1
5 | # Issues with these labels will never be considered stale
6 | exemptLabels:
7 | - pinned
8 | - security
9 | # Label to use when marking an issue as stale
10 | staleLabel: stale
11 | # Comment to post when marking an issue as stale. Set to `false` to disable
12 | markComment: >
13 | This issue has been automatically marked as stale because it has not had
14 | recent activity. It will be closed if no further activity occurs. Thank you
15 | for your contributions.
16 | # Comment to post when closing a stale issue. Set to `false` to disable
17 | closeComment: false
18 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 3.0.2)
project(lio_sam)

# Default to an optimized build, but let the user override on the command line
# (the original force-set CMAKE_BUILD_TYPE unconditionally).
if(NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE "Release")
endif()

# Request the C++ standard portably instead of clobbering CMAKE_CXX_FLAGS
# with a hand-written -std flag.
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_FLAGS_RELEASE "-O3 -Wall -g -pthread")

find_package(catkin REQUIRED COMPONENTS
  tf
  roscpp
  rospy
  cv_bridge
  # pcl library
  pcl_conversions
  # msgs
  std_msgs
  sensor_msgs
  geometry_msgs
  nav_msgs
  message_generation
  visualization_msgs
)

find_package(OpenMP REQUIRED)
find_package(PCL REQUIRED QUIET)
find_package(OpenCV REQUIRED QUIET)
find_package(GTSAM REQUIRED QUIET)
find_package(Boost REQUIRED COMPONENTS timer)

# Custom message carrying the deskewed cloud plus per-scan metadata.
add_message_files(
  DIRECTORY msg
  FILES
  cloud_info.msg
)

# Service to dump the optimized map to a PCD file.
add_service_files(
  DIRECTORY srv
  FILES
  save_map.srv
)

generate_messages(
  DEPENDENCIES
  geometry_msgs
  std_msgs
  nav_msgs
  sensor_msgs
)

catkin_package(
  INCLUDE_DIRS include
  DEPENDS PCL GTSAM

  CATKIN_DEPENDS
  std_msgs
  nav_msgs
  geometry_msgs
  sensor_msgs
  message_runtime
  message_generation
  visualization_msgs
)

# include directories
include_directories(
  include
  ${catkin_INCLUDE_DIRS}
  ${PCL_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
  ${GTSAM_INCLUDE_DIR}
)

# link directories
# NOTE: "include" was removed here — it holds headers, not libraries, and
# does not belong in the linker search path.
link_directories(
  ${PCL_LIBRARY_DIRS}
  ${OpenCV_LIBRARY_DIRS}
  ${GTSAM_LIBRARY_DIRS}
)

###########
## Build ##
###########

# Range Image Projection
add_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp)
add_dependencies(${PROJECT_NAME}_imageProjection ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_link_libraries(${PROJECT_NAME}_imageProjection ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})

# Feature Association
add_executable(${PROJECT_NAME}_featureExtraction src/featureExtraction.cpp)
add_dependencies(${PROJECT_NAME}_featureExtraction ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_link_libraries(${PROJECT_NAME}_featureExtraction ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})

# Mapping Optimization
add_executable(${PROJECT_NAME}_mapOptmization src/mapOptmization.cpp)
add_dependencies(${PROJECT_NAME}_mapOptmization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_compile_options(${PROJECT_NAME}_mapOptmization PRIVATE ${OpenMP_CXX_FLAGS})
target_link_libraries(${PROJECT_NAME}_mapOptmization Boost::timer ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)

# IMU Preintegration
add_executable(${PROJECT_NAME}_imuPreintegration src/imuPreintegration.cpp)
# Same dependency on generated message headers as the other nodes; without it,
# parallel builds can race against message generation.
add_dependencies(${PROJECT_NAME}_imuPreintegration ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_link_libraries(${PROJECT_NAME}_imuPreintegration Boost::timer ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam)
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# ROS1 Kinetic on Ubuntu Xenial with the full desktop stack (rviz, etc.).
FROM osrf/ros:kinetic-desktop-full-xenial

# ROS packages required by LIO-SAM at runtime, plus the tools used below:
# curl for refreshing the ROS apt key, git for cloning the repository
# (git is not guaranteed to be present in the base image).
RUN apt-get update \
    && apt-get install -y curl git \
    && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | apt-key add - \
    && apt-get update \
    && apt-get install -y ros-kinetic-navigation \
    && apt-get install -y ros-kinetic-robot-localization \
    && apt-get install -y ros-kinetic-robot-state-publisher \
    && rm -rf /var/lib/apt/lists/*

# GTSAM 4.0 (factor-graph library used by LIO-SAM) from the borglab PPA.
# Use apt-get consistently — `apt` is intended for interactive use.
RUN apt-get update \
    && apt-get install -y software-properties-common \
    && add-apt-repository -y ppa:borglab/gtsam-release-4.0 \
    && apt-get update \
    && apt-get install -y libgtsam-dev libgtsam-unstable-dev \
    && rm -rf /var/lib/apt/lists/*

# `source` is a bash builtin, so the build step below needs bash, not sh.
SHELL ["/bin/bash", "-c"]

# Clone and build LIO-SAM in a fresh catkin workspace.
RUN mkdir -p ~/catkin_ws/src \
    && cd ~/catkin_ws/src \
    && git clone https://github.com/TixiaoShan/LIO-SAM.git \
    && cd .. \
    && source /opt/ros/kinetic/setup.bash \
    && catkin_make

# Source ROS and the workspace automatically in interactive shells.
RUN echo "source /opt/ros/kinetic/setup.bash" >> /root/.bashrc \
    && echo "source /root/catkin_ws/devel/setup.bash" >> /root/.bashrc

WORKDIR /root/catkin_ws
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2020, Tixiao Shan
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LIO-SAM
2 |
3 | **A real-time lidar-inertial odometry package. We strongly recommend the users read this document thoroughly and test the package with the provided dataset first. A video of the demonstration of the method can be found on [YouTube](https://www.youtube.com/watch?v=A0H8CoORZJU).**
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | ## Menu
17 |
18 | - [**System architecture**](#system-architecture)
19 |
20 | - [**Package dependency**](#dependency)
21 |
22 | - [**Package install**](#install)
23 |
24 | - [**Prepare lidar data**](#prepare-lidar-data) (must read)
25 |
26 | - [**Prepare IMU data**](#prepare-imu-data) (must read)
27 |
28 | - [**Sample datasets**](#sample-datasets)
29 |
30 | - [**Run the package**](#run-the-package)
31 |
32 | - [**Other notes**](#other-notes)
33 |
34 | - [**Issues**](#issues)
35 |
36 | - [**Paper**](#paper)
37 |
38 | - [**TODO**](#todo)
39 |
40 | - [**Related Package**](#related-package)
41 |
42 | - [**Acknowledgement**](#acknowledgement)
43 |
44 | ## System architecture
45 |
46 |
47 |
48 |
49 |
50 | We design a system that maintains two graphs and runs up to 10x faster than real-time.
51 | - The factor graph in "mapOptimization.cpp" optimizes lidar odometry factor and GPS factor. This factor graph is maintained consistently throughout the whole test.
52 | - The factor graph in "imuPreintegration.cpp" optimizes IMU and lidar odometry factor and estimates IMU bias. This factor graph is reset periodically and guarantees real-time odometry estimation at IMU frequency.
53 |
54 | ## Dependency
55 |
56 | This is the original ROS1 implementation of LIO-SAM. For a ROS2 implementation see branch `ros2`.
57 |
58 | - [ROS](http://wiki.ros.org/ROS/Installation) (tested with Kinetic and Melodic. Refer to [#206](https://github.com/TixiaoShan/LIO-SAM/issues/206) for Noetic)
59 | ```
60 | sudo apt-get install -y ros-kinetic-navigation
61 | sudo apt-get install -y ros-kinetic-robot-localization
62 | sudo apt-get install -y ros-kinetic-robot-state-publisher
63 | ```
64 | - [gtsam](https://gtsam.org/get_started/) (Georgia Tech Smoothing and Mapping library)
65 | ```
66 | sudo add-apt-repository ppa:borglab/gtsam-release-4.0
67 | sudo apt install libgtsam-dev libgtsam-unstable-dev
68 | ```
69 |
70 | ## Install
71 |
72 | Use the following commands to download and compile the package.
73 |
74 | ```
75 | cd ~/catkin_ws/src
76 | git clone https://github.com/TixiaoShan/LIO-SAM.git
77 | cd ..
78 | catkin_make
79 | ```
80 |
81 | ## Using Docker
82 | Build image (based on ROS1 Kinetic):
83 |
84 | ```bash
85 | docker build -t liosam-kinetic-xenial .
86 | ```
87 |
88 | Once you have the image, start a container as follows:
89 |
90 | ```bash
91 | docker run --init -it -d \
92 | -v /etc/localtime:/etc/localtime:ro \
93 | -v /etc/timezone:/etc/timezone:ro \
94 | -v /tmp/.X11-unix:/tmp/.X11-unix \
95 | -e DISPLAY=$DISPLAY \
96 | liosam-kinetic-xenial \
97 | bash
98 | ```
99 |
100 |
101 | ## Prepare lidar data
102 |
103 | The user needs to prepare the point cloud data in the correct format for cloud deskewing, which is mainly done in "imageProjection.cpp". The two requirements are:
104 | - **Provide point time stamp**. LIO-SAM uses IMU data to perform point cloud deskew. Thus, the relative point time in a scan needs to be known. The up-to-date Velodyne ROS driver should output this information directly. Here, we assume the point time channel is called "time." The definition of the point type is located at the top of the "imageProjection.cpp." "deskewPoint()" function utilizes this relative time to obtain the transformation of this point relative to the beginning of the scan. When the lidar rotates at 10Hz, the timestamp of a point should vary between 0 and 0.1 seconds. If you are using other lidar sensors, you may need to change the name of this time channel and make sure that it is the relative time in a scan.
105 | - **Provide point ring number**. LIO-SAM uses this information to organize the point correctly in a matrix. The ring number indicates which channel of the sensor that this point belongs to. The definition of the point type is located at the top of "imageProjection.cpp." The up-to-date Velodyne ROS driver should output this information directly. Again, if you are using other lidar sensors, you may need to rename this information. Note that only mechanical lidars are supported by the package currently.
106 |
107 | ## Prepare IMU data
108 |
109 | - **IMU requirement**. Like the original LOAM implementation, LIO-SAM only works with a 9-axis IMU, which gives roll, pitch, and yaw estimation. The roll and pitch estimation is mainly used to initialize the system at the correct attitude. The yaw estimation initializes the system at the right heading when using GPS data. Theoretically, an initialization procedure like VINS-Mono will enable LIO-SAM to work with a 6-axis IMU. (**New**: [liorf](https://github.com/YJZLuckyBoy/liorf) has added support for 6-axis IMU.) The performance of the system largely depends on the quality of the IMU measurements. The higher the IMU data rate, the better the system accuracy. We use Microstrain 3DM-GX5-25, which outputs data at 500Hz. We recommend using an IMU that gives at least a 200Hz output rate. Note that the internal IMU of Ouster lidar is a 6-axis IMU.
110 |
111 | - **IMU alignment**. LIO-SAM transforms IMU raw data from the IMU frame to the Lidar frame, which follows the ROS REP-105 convention (x - forward, y - left, z - upward). To make the system function properly, the correct extrinsic transformation needs to be provided in "params.yaml" file. **The reason why there are two extrinsics is that my IMU (Microstrain 3DM-GX5-25) acceleration and attitude have different coordinates. Depending on your IMU manufacturer, the two extrinsics for your IMU may or may not be the same**. Using our setup as an example:
112 | - we need to set the readings of x-z acceleration and gyro negative to transform the IMU data in the lidar frame, which is indicated by "extrinsicRot" in "params.yaml."
113 | - The transformation of attitude readings might be slightly different. IMU's attitude measurement `q_wb` usually means the rotation of points in the IMU coordinate system to the world coordinate system (e.g. ENU). However, the algorithm requires `q_wl`, the rotation from lidar to world. So we need a rotation from lidar to IMU `q_bl`, where `q_wl = q_wb * q_bl`. For convenience, the user only needs to provide `q_lb` as "extrinsicRPY" in "params.yaml" (same as the "extrinsicRot" if acceleration and attitude have the same coordinate).
114 |
115 | - **IMU debug**. It's strongly recommended that the user uncomment the debug lines in "imuHandler()" of "imageProjection.cpp" and test the output of the transformed IMU data. The user can rotate the sensor suite to check whether the readings correspond to the sensor's movement. A YouTube video that shows the corrected IMU data can be found [here (link to YouTube)](https://youtu.be/BOUK8LYQhHs).
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 | ## Sample datasets
126 |
127 | * Download some sample datasets to test the functionality of the package. The datasets below are configured to run using the default settings:
128 | - **Walking dataset:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
129 | - **Park dataset:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
130 | - **Garden dataset:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
131 |
132 | * The datasets below need the parameters to be configured. In these datasets, the point cloud topic is "points_raw." The IMU topic is "imu_correct," which gives the IMU data in ROS REP105 standard. Because no IMU transformation is needed for this dataset, the following configurations need to be changed to run this dataset successfully:
133 | - The "imuTopic" parameter in "config/params.yaml" needs to be set to "imu_correct".
134 | - The "extrinsicRot" and "extrinsicRPY" in "config/params.yaml" needs to be set as identity matrices.
135 | - **Rotation dataset:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
136 | - **Campus dataset (large):** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
137 | - **Campus dataset (small):** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
138 |
139 | * Ouster (OS1-128) dataset. No extrinsics need to be changed for this dataset if you are using the default settings. Please follow the Ouster notes below to configure the package to run with Ouster data. A video of the dataset can be found on [YouTube](https://youtu.be/O7fKgZQzkEo):
140 | - **Rooftop dataset:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
141 |
142 | * Livox Horizon dataset. Please refer to the following notes section for parameter changes.
143 | - **Livox Horizon:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
144 |
145 | * KITTI dataset. The extrinsics can be found in the Notes KITTI section below. To generate more bags using other KITTI raw data, you can use the python script provided in "config/doc/kitti2bag".
146 | - **2011_09_30_drive_0028:** [[Google Drive](https://drive.google.com/drive/folders/1gJHwfdHCRdjP7vuT556pv8atqrCJPbUq?usp=sharing)]
147 |
148 | ## Run the package
149 |
150 | 1. Run the launch file:
151 | ```
152 | roslaunch lio_sam run.launch
153 | ```
154 |
155 | 2. Play existing bag files:
156 | ```
157 | rosbag play your-bag.bag -r 3
158 | ```
159 |
160 | ## Other notes
161 |
162 | - **Loop closure:** The loop function here gives an example of proof of concept. It is directly adapted from LeGO-LOAM loop closure. For more advanced loop closure implementation, please refer to [ScanContext](https://github.com/irapkaist/SC-LeGO-LOAM). Set the "loopClosureEnableFlag" in "params.yaml" to "true" to test the loop closure function. In Rviz, uncheck "Map (cloud)" and check "Map (global)". This is because the visualized map - "Map (cloud)" - is simply a stack of point clouds in Rviz. Their position will not be updated after pose correction. The loop closure function here is simply adapted from LeGO-LOAM, which is an ICP-based method. Because ICP runs pretty slow, it is suggested that the playback speed is set to be "-r 1". You can try the Garden dataset for testing.
163 |
164 |
165 |
166 |
167 |
168 |
169 | - **Using GPS:** The park dataset is provided for testing LIO-SAM with GPS data. This dataset is gathered by [Yewei Huang](https://robustfieldautonomylab.github.io/people.html). To enable the GPS function, change "gpsTopic" in "params.yaml" to "odometry/gps". In Rviz, uncheck "Map (cloud)" and check "Map (global)". Also check "Odom GPS", which visualizes the GPS odometry. "gpsCovThreshold" can be adjusted to filter bad GPS readings. "poseCovThreshold" can be used to adjust the frequency of adding GPS factor to the graph. For example, you will notice the trajectory is constantly corrected by GPS when you set "poseCovThreshold" to 1.0. Because of the heavy iSAM optimization, it's recommended that the playback speed is "-r 1".
170 |
171 |
172 |
173 |
174 |
175 | - **KITTI:** Since LIO-SAM needs a high-frequency IMU to function properly, we need to use KITTI raw data for testing. One problem that remains unsolved is that the intrinsics of the IMU are unknown, which has a big impact on the accuracy of LIO-SAM. Download the provided sample data and make the following changes in "params.yaml":
176 | - extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01]
177 | - extrinsicRot: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]
178 | - extrinsicRPY: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]
179 | - N_SCAN: 64
180 | - downsampleRate: 2 or 4
181 | - loopClosureEnableFlag: true or false
182 |
183 |
184 |
185 |
186 |
187 |
188 | - **Ouster lidar:** To make LIO-SAM work with Ouster lidar, some preparations need to be done on hardware and software level.
189 | - Hardware:
190 | - Use an external IMU. LIO-SAM does not work with the internal 6-axis IMU of Ouster lidar. You need to attach a 9-axis IMU to the lidar and perform data-gathering.
191 | - Configure the driver. Change "timestamp_mode" in your Ouster launch file to "TIME_FROM_PTP_1588" so you can have ROS format timestamp for the point clouds.
192 | - Config:
193 | - Change "sensor" in "params.yaml" to "ouster".
194 | - Change "N_SCAN" and "Horizon_SCAN" in "params.yaml" according to your lidar, i.e., N_SCAN=128, Horizon_SCAN=1024.
195 | - Gen 1 and Gen 2 Ouster:
196 | It seems that the point coordinate definition might be different in different generations. Please refer to [Issue #94](https://github.com/TixiaoShan/LIO-SAM/issues/94) for debugging.
197 |
198 |
199 |
200 |
201 |
202 |
203 | - **Livox Horizon lidar:** Please note that solid-state lidar hasn't been extensively tested with LIO-SAM yet. An external IMU is also used here rather than the internal one. The support for such lidars is based on minimal change of the codebase from mechanical lidars. A customized [livox_ros_driver](https://github.com/TixiaoShan/livox_ros_driver) needs to be used to publish point cloud format that can be processed by LIO-SAM. Other SLAM solutions may offer better implementations. More studies and suggestions are welcome. Please change the following parameters to make LIO-SAM work with Livox Horizon lidar:
204 | - sensor: livox
205 | - N_SCAN: 6
206 | - Horizon_SCAN: 4000
207 | - edgeFeatureMinValidNum: 1
208 | - Use [livox_ros_driver](https://github.com/TixiaoShan/livox_ros_driver) for data recording
209 |
210 |
211 |
212 |
213 |
214 | ## Service
215 | - /lio_sam/save_map
216 | - save map as a PCD file.
217 | ``` bash
218 | rosservice call [service] [resolution] [destination]
219 | ```
220 | - Example:
221 | ``` bash
222 | $ rosservice call /lio_sam/save_map 0.2 "/Downloads/LOAM/"
223 | ```
224 |
225 | ## Issues
226 |
227 | - **Zigzag or jerking behavior**: if your lidar and IMU data formats are consistent with the requirement of LIO-SAM, this problem is likely caused by un-synced timestamp of lidar and IMU data.
228 |
229 | - **Jumping up and down**: if you start testing your bag file and the base_link starts to jump up and down immediately, it is likely your IMU extrinsics are wrong. For example, the gravity acceleration has negative value.
230 |
231 | - **mapOptimization crash**: it is usually caused by GTSAM. Please install the GTSAM specified in the README.md. More similar issues can be found [here](https://github.com/TixiaoShan/LIO-SAM/issues).
232 |
233 | - **gps odometry unavailable**: it is generally caused due to unavailable transform between message frame_ids and robot frame_id (for example: transform should be available from "imu_frame_id" and "gps_frame_id" to "base_link" frame). Please read the Robot Localization documentation found [here](http://docs.ros.org/en/melodic/api/robot_localization/html/preparing_sensor_data.html).
234 |
235 | ## Paper
236 |
237 | Thank you for citing [LIO-SAM (IROS-2020)](./config/doc/paper.pdf) if you use any of this code.
238 | ```
239 | @inproceedings{liosam2020shan,
240 | title={LIO-SAM: Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping},
241 | author={Shan, Tixiao and Englot, Brendan and Meyers, Drew and Wang, Wei and Ratti, Carlo and Rus Daniela},
242 | booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
243 | pages={5135-5142},
244 | year={2020},
245 | organization={IEEE}
246 | }
247 | ```
248 |
249 | Part of the code is adapted from [LeGO-LOAM](https://github.com/RobustFieldAutonomyLab/LeGO-LOAM).
250 | ```
251 | @inproceedings{legoloam2018shan,
252 | title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},
253 | author={Shan, Tixiao and Englot, Brendan},
254 | booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
255 | pages={4758-4765},
256 | year={2018},
257 | organization={IEEE}
258 | }
259 | ```
260 |
261 | ## TODO
262 |
263 | - [ ] [Bug within imuPreintegration](https://github.com/TixiaoShan/LIO-SAM/issues/104)
264 |
265 | ## Related Package
266 |
267 | - [liorf](https://github.com/YJZLuckyBoy/liorf) LIO-SAM with 6-axis IMU and more lidar support.
268 | - [Lidar-IMU calibration](https://github.com/chennuo0125-HIT/lidar_imu_calib)
269 | - [LIO-SAM with Scan Context](https://github.com/gisbi-kim/SC-LIO-SAM)
270 | - [LIO-SAM with 6-axis IMU](https://github.com/JokerJohn/LIO_SAM_6AXIS)
271 |
272 | ## Acknowledgement
273 |
274 | - LIO-SAM is based on LOAM (J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time).
275 |
--------------------------------------------------------------------------------
/config/doc/demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/demo.gif
--------------------------------------------------------------------------------
/config/doc/device-boat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/device-boat.png
--------------------------------------------------------------------------------
/config/doc/device-hand-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/device-hand-2.png
--------------------------------------------------------------------------------
/config/doc/device-hand.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/device-hand.png
--------------------------------------------------------------------------------
/config/doc/device-jackal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/device-jackal.png
--------------------------------------------------------------------------------
/config/doc/device-livox-horizon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/device-livox-horizon.png
--------------------------------------------------------------------------------
/config/doc/gps-demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/gps-demo.gif
--------------------------------------------------------------------------------
/config/doc/imu-debug.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/imu-debug.gif
--------------------------------------------------------------------------------
/config/doc/imu-transform.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/imu-transform.png
--------------------------------------------------------------------------------
/config/doc/kitti-demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/kitti-demo.gif
--------------------------------------------------------------------------------
/config/doc/kitti-map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/kitti-map.png
--------------------------------------------------------------------------------
/config/doc/kitti2bag/README.md:
--------------------------------------------------------------------------------
1 | # kitti2bag
2 |
3 | ## How to run it?
4 |
5 | ```bash
6 | wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_sync.zip
7 | wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_extract.zip
8 | wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_calib.zip
9 | unzip 2011_09_26_drive_0084_sync.zip
10 | unzip 2011_09_26_drive_0084_extract.zip
11 | unzip 2011_09_26_calib.zip
12 | python kitti2bag.py -t 2011_09_26 -r 0084 raw_synced .
13 | ```
14 |
15 | That's it. You have a bag that contains your data.
16 |
17 | Other source files can be found at [KITTI raw data](http://www.cvlibs.net/datasets/kitti/raw_data.php) page.
--------------------------------------------------------------------------------
/config/doc/kitti2bag/kitti2bag.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | import sys
5 |
6 | try:
7 | import pykitti
8 | except ImportError as e:
9 | print('Could not load module \'pykitti\'. Please run `pip install pykitti`')
10 | sys.exit(1)
11 |
12 | import tf
13 | import os
14 | import cv2
15 | import rospy
16 | import rosbag
17 | from tqdm import tqdm
18 | from tf2_msgs.msg import TFMessage
19 | from datetime import datetime
20 | from std_msgs.msg import Header
21 | from sensor_msgs.msg import CameraInfo, Imu, PointField, NavSatFix
22 | import sensor_msgs.point_cloud2 as pcl2
23 | from geometry_msgs.msg import TransformStamped, TwistStamped, Transform
24 | from cv_bridge import CvBridge
25 | import numpy as np
26 | import argparse
27 |
def save_imu_data(bag, kitti, imu_frame_id, topic):
    """Write one sensor_msgs/Imu message per synced OXTS packet to the bag.

    Orientation is built from the packet's roll/pitch/yaw; linear
    acceleration comes from (af, al, au) and angular velocity from
    (wf, wl, wu). Each message is stamped with the packet timestamp.
    """
    print("Exporting IMU")
    for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
        packet = oxts.packet
        msg = Imu()
        msg.header.frame_id = imu_frame_id
        msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
        quat = tf.transformations.quaternion_from_euler(packet.roll, packet.pitch, packet.yaw)
        msg.orientation.x = quat[0]
        msg.orientation.y = quat[1]
        msg.orientation.z = quat[2]
        msg.orientation.w = quat[3]
        msg.linear_acceleration.x = packet.af
        msg.linear_acceleration.y = packet.al
        msg.linear_acceleration.z = packet.au
        msg.angular_velocity.x = packet.wf
        msg.angular_velocity.y = packet.wl
        msg.angular_velocity.z = packet.wu
        bag.write(topic, msg, t=msg.header.stamp)
46 |
def save_imu_data_raw(bag, kitti, imu_frame_id, topic):
    """Export raw (unsynced/'extract') OXTS IMU data to the bag.

    Timestamps from the extract dataset are jittery, so they are replaced by a
    linear fit over the sample index before being attached to the messages.
    Every message is written twice: once on `topic`, and once on
    '/imu_correct' with frame_id 'imu_enu_link' for LIO-SAM's GPS fusion.
    """
    print("Exporting IMU Raw")
    synced_path = kitti.data_path
    unsynced_path = synced_path.replace('sync', 'extract')
    imu_path = os.path.join(unsynced_path, 'oxts')

    # read time stamps (convert to ros seconds format)
    with open(os.path.join(imu_path, 'timestamps.txt')) as f:
        lines = f.readlines()
    imu_datetimes = []
    for line in lines:
        if len(line) == 1:
            # skip blank lines (a lone newline)
            continue
        timestamp = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
        imu_datetimes.append(float(timestamp.strftime("%s.%f")))

    # fix imu time using a linear model (may not be ideal, ^_^)
    imu_index = np.asarray(range(len(imu_datetimes)), dtype=np.float64)
    z = np.polyfit(imu_index, imu_datetimes, 1)
    imu_datetimes_new = z[0] * imu_index + z[1]
    imu_datetimes = imu_datetimes_new.tolist()

    # get all imu data; each per-sample file holds one whitespace-separated record
    imu_data_dir = os.path.join(imu_path, 'data')
    imu_filenames = sorted(os.listdir(imu_data_dir))
    imu_data = [None] * len(imu_filenames)
    for i, imu_file in enumerate(imu_filenames):
        # context manager so the handle is closed; the original leaked
        # one open file descriptor per sample
        with open(os.path.join(imu_data_dir, imu_file), "r") as imu_data_file:
            for line in imu_data_file:
                if len(line) == 1:
                    continue
                stripped_line = line.strip()
                line_list = stripped_line.split()
                imu_data[i] = line_list

    assert len(imu_datetimes) == len(imu_data)

    for timestamp, data in zip(imu_datetimes, imu_data):
        # OXTS record layout: [3..5] roll/pitch/yaw, [11..13] accel (af, al, au),
        # [17..19] angular rate (wf, wl, wu) — TODO confirm against the KITTI
        # raw-data dataformat.txt
        roll, pitch, yaw = float(data[3]), float(data[4]), float(data[5])
        q = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
        imu = Imu()
        imu.header.frame_id = imu_frame_id
        imu.header.stamp = rospy.Time.from_sec(timestamp)
        imu.orientation.x = q[0]
        imu.orientation.y = q[1]
        imu.orientation.z = q[2]
        imu.orientation.w = q[3]
        imu.linear_acceleration.x = float(data[11])
        imu.linear_acceleration.y = float(data[12])
        imu.linear_acceleration.z = float(data[13])
        imu.angular_velocity.x = float(data[17])
        imu.angular_velocity.y = float(data[18])
        imu.angular_velocity.z = float(data[19])
        bag.write(topic, imu, t=imu.header.stamp)

        # duplicate the message in the ENU frame for LIO-SAM GPS fusion
        imu.header.frame_id = 'imu_enu_link'
        bag.write('/imu_correct', imu, t=imu.header.stamp)
104 |
def save_dynamic_tf(bag, kitti, kitti_type, initial_time):
    """Write the moving-platform pose to /tf, one TFMessage per frame.

    raw datasets: world -> base_link from each OXTS T_w_imu matrix, stamped
    from kitti.timestamps.  odom datasets: world -> camera_left from
    kitti.T_w_cam0, stamped as initial_time plus the per-frame offset.
    """
    print("Exporting time dependent transformations")

    def _write_tf(stamp, parent_frame, child_frame, matrix):
        # Convert a 4x4 homogeneous matrix into a single-entry TFMessage
        # and write it to the bag at the given stamp.
        tf_message = TFMessage()
        stamped = TransformStamped()
        stamped.header.stamp = stamp
        stamped.header.frame_id = parent_frame
        stamped.child_frame_id = child_frame

        translation = matrix[0:3, 3]
        quat = tf.transformations.quaternion_from_matrix(matrix)
        xform = Transform()

        xform.translation.x = translation[0]
        xform.translation.y = translation[1]
        xform.translation.z = translation[2]

        xform.rotation.x, xform.rotation.y, xform.rotation.z, xform.rotation.w = quat

        stamped.transform = xform
        tf_message.transforms.append(stamped)

        bag.write('/tf', tf_message, tf_message.transforms[0].header.stamp)

    if kitti_type.find("raw") != -1:
        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
            stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
            _write_tf(stamp, 'world', 'base_link', oxts.T_w_imu)

    elif kitti_type.find("odom") != -1:
        timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)
        for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0):
            _write_tf(rospy.Time.from_sec(timestamp), 'world', 'camera_left', tf_matrix)
160 |
def save_camera_data(bag, kitti_type, kitti, util, bridge, camera, camera_frame_id, topic, initial_time):
    """Export one camera's images plus CameraInfo to the bag.

    raw datasets: per-frame stamps from the camera's timestamps.txt and full
    calibration (K/R/D/P) from the `util` dict (calib_cam_to_cam.txt).
    odom datasets: stamps are `initial_time` + per-frame offset; only the
    projection matrix P is filled in.
    Cameras 0/1 are written as mono8; cameras 2/3 stay bgr8.
    NOTE(review): if kitti_type matches neither "raw" nor "odom", `calib` and
    `image_path` are never bound and the loop below raises NameError; callers
    only pass validated types.
    """
    print("Exporting camera {}".format(camera))
    if kitti_type.find("raw") != -1:
        camera_pad = '{0:02d}'.format(camera)
        image_dir = os.path.join(kitti.data_path, 'image_{}'.format(camera_pad))
        image_path = os.path.join(image_dir, 'data')
        image_filenames = sorted(os.listdir(image_path))
        with open(os.path.join(image_dir, 'timestamps.txt')) as f:
            # x[:-4] drops the newline plus excess sub-microsecond digits so
            # '%f' (microseconds) can parse the stamp
            image_datetimes = map(lambda x: datetime.strptime(x[:-4], '%Y-%m-%d %H:%M:%S.%f'), f.readlines())

        calib = CameraInfo()
        calib.header.frame_id = camera_frame_id
        calib.width, calib.height = tuple(util['S_rect_{}'.format(camera_pad)].tolist())
        calib.distortion_model = 'plumb_bob'
        calib.K = util['K_{}'.format(camera_pad)]
        calib.R = util['R_rect_{}'.format(camera_pad)]
        calib.D = util['D_{}'.format(camera_pad)]
        calib.P = util['P_rect_{}'.format(camera_pad)]

    elif kitti_type.find("odom") != -1:
        camera_pad = '{0:01d}'.format(camera)
        image_path = os.path.join(kitti.sequence_path, 'image_{}'.format(camera_pad))
        image_filenames = sorted(os.listdir(image_path))
        image_datetimes = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)

        calib = CameraInfo()
        calib.header.frame_id = camera_frame_id
        calib.P = util['P{}'.format(camera_pad)]

    iterable = zip(image_datetimes, image_filenames)
    for dt, filename in tqdm(iterable, total=len(image_filenames)):
        image_filename = os.path.join(image_path, filename)
        cv_image = cv2.imread(image_filename)
        # actual image size overrides the calibration's nominal width/height
        calib.height, calib.width = cv_image.shape[:2]
        if camera in (0, 1):
            cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
        encoding = "mono8" if camera in (0, 1) else "bgr8"
        image_message = bridge.cv2_to_imgmsg(cv_image, encoding=encoding)
        image_message.header.frame_id = camera_frame_id
        if kitti_type.find("raw") != -1:
            # NOTE(review): "%s" (epoch seconds) is a libc extension -- works
            # on Linux, confirm elsewhere
            image_message.header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f")))
            topic_ext = "/image_raw"
        elif kitti_type.find("odom") != -1:
            image_message.header.stamp = rospy.Time.from_sec(dt)
            topic_ext = "/image_rect"
        calib.header.stamp = image_message.header.stamp
        bag.write(topic + topic_ext, image_message, t = image_message.header.stamp)
        bag.write(topic + '/camera_info', calib, t = calib.header.stamp)
209 |
def save_velo_data(bag, kitti, velo_frame_id, topic):
    """Export velodyne scans as sensor_msgs/PointCloud2 with a 'ring' field.

    Each .bin file holds float32 (x, y, z, intensity) points.  The ring
    index is reconstructed from the vertical angle assuming a 64-ring
    layout spanning +2.0 to -24.8 degrees (HDL-64E), which downstream
    LIO-SAM expects in the 'ring' channel.
    """
    print("Exporting velodyne data")
    velo_path = os.path.join(kitti.data_path, 'velodyne_points')
    velo_data_dir = os.path.join(velo_path, 'data')
    velo_filenames = sorted(os.listdir(velo_data_dir))
    with open(os.path.join(velo_path, 'timestamps.txt')) as f:
        lines = f.readlines()
        velo_datetimes = []
        for line in lines:
            if len(line) == 1:
                continue
            dt = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
            velo_datetimes.append(dt)

    iterable = zip(velo_datetimes, velo_filenames)

    for dt, filename in tqdm(iterable, total=len(velo_filenames)):
        if dt is None:
            continue

        velo_filename = os.path.join(velo_data_dir, filename)

        # read binary data: N x (x, y, z, intensity) float32
        scan = (np.fromfile(velo_filename, dtype=np.float32)).reshape(-1, 4)

        # get ring channel from the vertical angle.
        # BUGFIX: the range must be computed from the x/y/z columns only; the
        # original took the norm over all four columns, letting the intensity
        # channel inflate the depth and skew the pitch angle.
        depth = np.linalg.norm(scan[:, 0:3], 2, axis=1)
        pitch = np.arcsin(scan[:, 2] / depth)  # arcsin(z / range)
        fov_down = -24.8 / 180.0 * np.pi
        fov = (abs(-24.8) + abs(2.0)) / 180.0 * np.pi
        proj_y = (pitch + abs(fov_down)) / fov  # in [0.0, 1.0]
        proj_y *= 64  # in [0.0, H]
        proj_y = np.floor(proj_y)
        proj_y = np.minimum(64 - 1, proj_y)
        proj_y = np.maximum(0, proj_y).astype(np.int32)  # in [0, H-1]
        proj_y = proj_y.reshape(-1, 1)
        scan = np.concatenate((scan, proj_y), axis=1)
        # pcl2.create_cloud needs native ints for the UINT16 'ring' field
        # (concatenation promoted the column to float)
        scan = scan.tolist()
        for i in range(len(scan)):
            scan[i][-1] = int(scan[i][-1])

        # create header
        header = Header()
        header.frame_id = velo_frame_id
        # NOTE(review): "%s" (epoch seconds) is a libc extension -- works on
        # Linux, confirm elsewhere
        header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f")))

        # fill pcl msg
        fields = [PointField('x', 0, PointField.FLOAT32, 1),
                  PointField('y', 4, PointField.FLOAT32, 1),
                  PointField('z', 8, PointField.FLOAT32, 1),
                  PointField('intensity', 12, PointField.FLOAT32, 1),
                  PointField('ring', 16, PointField.UINT16, 1)]
        pcl_msg = pcl2.create_cloud(header, fields, scan)
        pcl_msg.is_dense = True

        bag.write(topic, pcl_msg, t=pcl_msg.header.stamp)
273 |
def get_static_transform(from_frame_id, to_frame_id, transform):
    """Build a geometry_msgs TransformStamped from a 4x4 homogeneous matrix.

    The matrix gives the pose of `to_frame_id` in `from_frame_id`; the
    header stamp is left for the caller to set.
    """
    translation = transform[0:3, 3]
    rotation = tf.transformations.quaternion_from_matrix(transform)
    msg = TransformStamped()
    msg.header.frame_id = from_frame_id
    msg.child_frame_id = to_frame_id
    msg.transform.translation.x = float(translation[0])
    msg.transform.translation.y = float(translation[1])
    msg.transform.translation.z = float(translation[2])
    msg.transform.rotation.x = float(rotation[0])
    msg.transform.rotation.y = float(rotation[1])
    msg.transform.rotation.z = float(rotation[2])
    msg.transform.rotation.w = float(rotation[3])
    return msg
288 |
289 |
def inv(transform):
    """Invert a 4x4 rigid-body transform: [R t; 0 1]^-1 = [R^T, -R^T t; 0 1]."""
    rot = transform[0:3, 0:3]
    trans = transform[0:3, 3]
    result = np.eye(4)
    result[0:3, 0:3] = rot.T
    # for a rigid transform the inverse translation is -R^T * t,
    # avoiding a general (and less stable) matrix inversion
    result[0:3, 3] = -1 * rot.T.dot(trans)
    return result
299 |
300 |
def save_static_transforms(bag, transforms, timestamps):
    """Publish the fixed sensor-to-sensor transforms on /tf_static.

    `transforms` is a list of (parent_frame, child_frame, 4x4 matrix)
    tuples; the whole TFMessage is re-stamped and re-written at every
    timestamp in `timestamps`.
    """
    print("Exporting static transformations")
    tfm = TFMessage()
    for parent, child, matrix in transforms:
        tfm.transforms.append(
            get_static_transform(from_frame_id=parent, to_frame_id=child, transform=matrix))
    for timestamp in timestamps:
        stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
        for stamped in tfm.transforms:
            stamped.header.stamp = stamp
        bag.write('/tf_static', tfm, t=stamp)
312 |
313 |
def save_gps_fix_data(bag, kitti, gps_frame_id, topic):
    """Write a sensor_msgs/NavSatFix per OXTS packet (lat/lon/alt)."""
    for stamp_dt, oxts in zip(kitti.timestamps, kitti.oxts):
        fix = NavSatFix()
        fix.header.frame_id = gps_frame_id
        fix.header.stamp = rospy.Time.from_sec(float(stamp_dt.strftime("%s.%f")))
        fix.latitude = oxts.packet.lat
        fix.longitude = oxts.packet.lon
        fix.altitude = oxts.packet.alt
        fix.status.service = 1
        bag.write(topic, fix, t=fix.header.stamp)
324 |
325 |
def save_gps_vel_data(bag, kitti, gps_frame_id, topic):
    """Write a geometry_msgs/TwistStamped per OXTS packet.

    Linear velocity comes from vf/vl/vu and angular velocity from
    wf/wl/wu, as named in the OXTS packet.
    """
    for stamp_dt, oxts in zip(kitti.timestamps, kitti.oxts):
        packet = oxts.packet
        twist = TwistStamped()
        twist.header.frame_id = gps_frame_id
        twist.header.stamp = rospy.Time.from_sec(float(stamp_dt.strftime("%s.%f")))
        twist.twist.linear.x = packet.vf
        twist.twist.linear.y = packet.vl
        twist.twist.linear.z = packet.vu
        twist.twist.angular.x = packet.wf
        twist.twist.angular.y = packet.wl
        twist.twist.angular.z = packet.wu
        bag.write(topic, twist, t=twist.header.stamp)
338 |
339 |
if __name__ == "__main__":
    # Entry point: parse CLI arguments, open the output bag, and dispatch to
    # the raw-dataset or odometry-dataset export path.
    parser = argparse.ArgumentParser(description = "Convert KITTI dataset to ROS bag file the easy way!")
    # Accepted argument values
    kitti_types = ["raw_synced", "odom_color", "odom_gray"]
    odometry_sequences = []
    for s in range(22):
        odometry_sequences.append(str(s).zfill(2))

    parser.add_argument("kitti_type", choices = kitti_types, help = "KITTI dataset type")
    # typo fix in help text: "deafult" -> "default"
    parser.add_argument("dir", nargs = "?", default = os.getcwd(), help = "base directory of the dataset, if no directory passed the default is current working directory")
    parser.add_argument("-t", "--date", help = "date of the raw dataset (i.e. 2011_09_26), option is only for RAW datasets.")
    parser.add_argument("-r", "--drive", help = "drive number of the raw dataset (i.e. 0001), option is only for RAW datasets.")
    parser.add_argument("-s", "--sequence", choices = odometry_sequences, help = "sequence of the odometry dataset (between 00 - 21), option is only for ODOMETRY datasets.")
    args = parser.parse_args()

    bridge = CvBridge()
    compression = rosbag.Compression.NONE
    # compression = rosbag.Compression.BZ2
    # compression = rosbag.Compression.LZ4

    # CAMERAS: (index, frame_id, topic)
    cameras = [
        (0, 'camera_gray_left', '/kitti/camera_gray_left'),
        (1, 'camera_gray_right', '/kitti/camera_gray_right'),
        (2, 'camera_color_left', '/kitti/camera_color_left'),
        (3, 'camera_color_right', '/kitti/camera_color_right')
    ]

    if args.kitti_type.find("raw") != -1:

        # idiom fix: compare against None with "is", not "=="
        if args.date is None:
            print("Date option is not given. It is mandatory for raw dataset.")
            print("Usage for raw dataset: kitti2bag raw_synced [dir] -t -r ")
            sys.exit(1)
        elif args.drive is None:
            print("Drive option is not given. It is mandatory for raw dataset.")
            print("Usage for raw dataset: kitti2bag raw_synced [dir] -t -r ")
            sys.exit(1)

        bag = rosbag.Bag("kitti_{}_drive_{}_{}.bag".format(args.date, args.drive, args.kitti_type[4:]), 'w', compression=compression)
        kitti = pykitti.raw(args.dir, args.date, args.drive)
        if not os.path.exists(kitti.data_path):
            print('Path {} does not exists. Exiting.'.format(kitti.data_path))
            sys.exit(1)

        if len(kitti.timestamps) == 0:
            print('Dataset is empty? Exiting.')
            sys.exit(1)

        try:
            # IMU
            imu_frame_id = 'imu_link'
            imu_topic = '/kitti/oxts/imu'
            imu_raw_topic = '/imu_raw'
            gps_fix_topic = '/gps/fix'
            gps_vel_topic = '/gps/vel'
            velo_frame_id = 'velodyne'
            velo_topic = '/points_raw'

            T_base_link_to_imu = np.eye(4, 4)
            T_base_link_to_imu[0:3, 3] = [-2.71/2.0-0.05, 0.32, 0.93]

            # tf_static
            transforms = [
                ('base_link', imu_frame_id, T_base_link_to_imu),
                (imu_frame_id, velo_frame_id, inv(kitti.calib.T_velo_imu)),
                (imu_frame_id, cameras[0][1], inv(kitti.calib.T_cam0_imu)),
                (imu_frame_id, cameras[1][1], inv(kitti.calib.T_cam1_imu)),
                (imu_frame_id, cameras[2][1], inv(kitti.calib.T_cam2_imu)),
                (imu_frame_id, cameras[3][1], inv(kitti.calib.T_cam3_imu))
            ]

            util = pykitti.utils.read_calib_file(os.path.join(kitti.calib_path, 'calib_cam_to_cam.txt'))

            # Export
            # save_static_transforms(bag, transforms, kitti.timestamps)
            # save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=None)
            # save_imu_data(bag, kitti, imu_frame_id, imu_topic)
            save_imu_data_raw(bag, kitti, imu_frame_id, imu_raw_topic)
            save_gps_fix_data(bag, kitti, imu_frame_id, gps_fix_topic)
            save_gps_vel_data(bag, kitti, imu_frame_id, gps_vel_topic)
            for camera in cameras:
                save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=None)
                # NOTE(review): the break exports only camera 0 (gray left);
                # presumably intentional to keep the bag small -- confirm
                break
            save_velo_data(bag, kitti, velo_frame_id, velo_topic)

        finally:
            # always print a summary and close the bag, even on failure
            print("## OVERVIEW ##")
            print(bag)
            bag.close()

    elif args.kitti_type.find("odom") != -1:

        if args.sequence is None:
            print("Sequence option is not given. It is mandatory for odometry dataset.")
            print("Usage for odometry dataset: kitti2bag {odom_color, odom_gray} [dir] -s ")
            sys.exit(1)

        bag = rosbag.Bag("kitti_data_odometry_{}_sequence_{}.bag".format(args.kitti_type[5:],args.sequence), 'w', compression=compression)

        kitti = pykitti.odometry(args.dir, args.sequence)
        if not os.path.exists(kitti.sequence_path):
            print('Path {} does not exists. Exiting.'.format(kitti.sequence_path))
            sys.exit(1)

        kitti.load_calib()
        kitti.load_timestamps()

        if len(kitti.timestamps) == 0:
            print('Dataset is empty? Exiting.')
            sys.exit(1)

        # sequences 00-10 ship with ground-truth poses
        if args.sequence in odometry_sequences[:11]:
            print("Odometry dataset sequence {} has ground truth information (poses).".format(args.sequence))
            kitti.load_poses()

        try:
            util = pykitti.utils.read_calib_file(os.path.join(args.dir,'sequences',args.sequence, 'calib.txt'))
            current_epoch = (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds()
            # Export
            if args.kitti_type.find("gray") != -1:
                used_cameras = cameras[:2]
            elif args.kitti_type.find("color") != -1:
                used_cameras = cameras[-2:]

            save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=current_epoch)
            for camera in used_cameras:
                save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=current_epoch)

        finally:
            print("## OVERVIEW ##")
            print(bag)
            bag.close()
473 |
474 |
--------------------------------------------------------------------------------
/config/doc/livox-demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/livox-demo.gif
--------------------------------------------------------------------------------
/config/doc/loop-closure-2.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/loop-closure-2.gif
--------------------------------------------------------------------------------
/config/doc/loop-closure.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/loop-closure.gif
--------------------------------------------------------------------------------
/config/doc/ouster-demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/ouster-demo.gif
--------------------------------------------------------------------------------
/config/doc/ouster-device.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/ouster-device.jpg
--------------------------------------------------------------------------------
/config/doc/paper.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/paper.pdf
--------------------------------------------------------------------------------
/config/doc/system.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TixiaoShan/LIO-SAM/0be1fbe6275fb8366d5b800af4fc8c76a885c869/config/doc/system.png
--------------------------------------------------------------------------------
/config/params.yaml:
--------------------------------------------------------------------------------
1 | lio_sam:
2 |
3 | # Topics
4 | pointCloudTopic: "points_raw" # Point cloud data
5 | imuTopic: "imu_raw" # IMU data
  odomTopic: "odometry/imu"                   # IMU pre-integration odometry, same frequency as IMU
7 | gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file
8 |
9 | # Frames
10 | lidarFrame: "base_link"
11 | baselinkFrame: "base_link"
12 | odometryFrame: "odom"
13 | mapFrame: "map"
14 |
15 | # GPS Settings
16 | useImuHeadingInitialization: true # if using GPS data, set to "true"
17 | useGpsElevation: false # if GPS elevation is bad, set to "false"
18 | gpsCovThreshold: 2.0 # m^2, threshold for using GPS data
19 | poseCovThreshold: 25.0 # m^2, threshold for using GPS data
20 |
21 | # Export settings
22 | savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3
23 | savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation
24 |
25 | # Sensor Settings
26 | sensor: velodyne # lidar sensor type, 'velodyne' or 'ouster' or 'livox'
27 | N_SCAN: 16 # number of lidar channel (i.e., Velodyne/Ouster: 16, 32, 64, 128, Livox Horizon: 6)
28 | Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048, Livox Horizon: 4000)
29 | downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1
30 | lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used
31 | lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used
32 |
33 | # IMU Settings
34 | imuAccNoise: 3.9939570888238808e-03
35 | imuGyrNoise: 1.5636343949698187e-03
36 | imuAccBiasN: 6.4356659353532566e-05
37 | imuGyrBiasN: 3.5640318696367613e-05
38 | imuGravity: 9.80511
39 | imuRPYWeight: 0.01
40 |
41 | # Extrinsics: T_lb (lidar -> imu)
42 | extrinsicTrans: [0.0, 0.0, 0.0]
43 | extrinsicRot: [-1, 0, 0,
44 | 0, 1, 0,
45 | 0, 0, -1]
46 | extrinsicRPY: [0, -1, 0,
47 | 1, 0, 0,
48 | 0, 0, 1]
49 | # extrinsicRot: [1, 0, 0,
50 | # 0, 1, 0,
51 | # 0, 0, 1]
52 | # extrinsicRPY: [1, 0, 0,
53 | # 0, 1, 0,
54 | # 0, 0, 1]
55 |
56 | # LOAM feature threshold
57 | edgeThreshold: 1.0
58 | surfThreshold: 0.1
59 | edgeFeatureMinValidNum: 10
60 | surfFeatureMinValidNum: 100
61 |
  # voxel filter params
63 | odometrySurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor
64 | mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor
65 | mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor
66 |
67 | # robot motion constraint (in case you are using a 2D robot)
68 | z_tollerance: 1000 # meters
69 | rotation_tollerance: 1000 # radians
70 |
71 | # CPU Params
72 | numberOfCores: 4 # number of cores for mapping optimization
73 | mappingProcessInterval: 0.15 # seconds, regulate mapping frequency
74 |
75 | # Surrounding map
76 | surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold
77 | surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold
78 | surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses
79 | surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled)
80 |
81 | # Loop closure
82 | loopClosureEnableFlag: true
83 | loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency
84 | surroundingKeyframeSize: 50 # submap size (when loop closure enabled)
  historyKeyframeSearchRadius: 15.0             # meters, key frame that is within n meters from current pose will be considered for loop closure
86 | historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure
  historyKeyframeSearchNum: 25                  # number of history key frames will be fused into a submap for loop closure
88 | historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment
89 |
90 | # Visualization
91 | globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius
92 | globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density
93 | globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density
94 |
95 |
96 |
97 |
98 | # Navsat (convert GPS coordinates to Cartesian)
99 | navsat:
100 | frequency: 50
101 | wait_for_datum: false
102 | delay: 0.0
103 | magnetic_declination_radians: 0
104 | yaw_offset: 0
105 | zero_altitude: true
106 | broadcast_utm_transform: false
107 | broadcast_utm_transform_as_parent_frame: false
108 | publish_filtered_gps: false
109 |
110 | # EKF for Navsat
111 | ekf_gps:
112 | publish_tf: false
113 | map_frame: map
114 | odom_frame: odom
115 | base_link_frame: base_link
116 | world_frame: odom
117 |
118 | frequency: 50
119 | two_d_mode: false
120 | sensor_timeout: 0.01
121 | # -------------------------------------
122 | # External IMU:
123 | # -------------------------------------
124 | imu0: imu_correct
125 | # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. EKF can also transform the data using tf between your imu and base_link
126 | imu0_config: [false, false, false,
127 | true, true, true,
128 | false, false, false,
129 | false, false, true,
130 | true, true, true]
131 | imu0_differential: false
132 | imu0_queue_size: 50
133 | imu0_remove_gravitational_acceleration: true
134 | # -------------------------------------
135 | # Odometry (From Navsat):
136 | # -------------------------------------
137 | odom0: odometry/gps
138 | odom0_config: [true, true, true,
139 | false, false, false,
140 | false, false, false,
141 | false, false, false,
142 | false, false, false]
143 | odom0_differential: false
144 | odom0_queue_size: 10
145 |
146 | # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot
147 | process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
148 | 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
149 | 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
150 | 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
151 | 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
152 | 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
153 | 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0,
154 | 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0,
155 | 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0,
156 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0,
157 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0,
158 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0,
159 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0,
160 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0,
161 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015]
162 |
--------------------------------------------------------------------------------
/include/utility.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #ifndef _UTILITY_LIDAR_ODOMETRY_H_
3 | #define _UTILITY_LIDAR_ODOMETRY_H_
4 | #define PCL_NO_PRECOMPILE
5 |
6 | #include
7 |
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 |
18 | #include
19 |
20 | #include
21 | #include
22 | #include
23 | #include
24 | #include
25 | #include
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 |
34 | #include
35 | #include
36 | #include
37 | #include
38 |
39 | #include
40 | #include
41 | #include
42 | #include
43 | #include
44 | #include
45 | #include
46 | #include
47 | #include
48 | #include
49 | #include
50 | #include
51 | #include
52 | #include
53 | #include
54 | #include
55 | #include
56 |
57 | using namespace std;
58 |
// Point type used throughout LIO-SAM: x, y, z plus an intensity channel.
typedef pcl::PointXYZI PointType;

// Supported lidar drivers; selected via the "lio_sam/sensor" parameter
// parsed in ParamServer below.
enum class SensorType { VELODYNE, OUSTER, LIVOX };
62 |
63 | class ParamServer
64 | {
65 | public:
66 |
67 | ros::NodeHandle nh;
68 |
69 | std::string robot_id;
70 |
71 | //Topics
72 | string pointCloudTopic;
73 | string imuTopic;
74 | string odomTopic;
75 | string gpsTopic;
76 |
77 | //Frames
78 | string lidarFrame;
79 | string baselinkFrame;
80 | string odometryFrame;
81 | string mapFrame;
82 |
83 | // GPS Settings
84 | bool useImuHeadingInitialization;
85 | bool useGpsElevation;
86 | float gpsCovThreshold;
87 | float poseCovThreshold;
88 |
89 | // Save pcd
90 | bool savePCD;
91 | string savePCDDirectory;
92 |
93 | // Lidar Sensor Configuration
94 | SensorType sensor;
95 | int N_SCAN;
96 | int Horizon_SCAN;
97 | int downsampleRate;
98 | float lidarMinRange;
99 | float lidarMaxRange;
100 |
101 | // IMU
102 | float imuAccNoise;
103 | float imuGyrNoise;
104 | float imuAccBiasN;
105 | float imuGyrBiasN;
106 | float imuGravity;
107 | float imuRPYWeight;
108 | vector extRotV;
109 | vector extRPYV;
110 | vector extTransV;
111 | Eigen::Matrix3d extRot;
112 | Eigen::Matrix3d extRPY;
113 | Eigen::Vector3d extTrans;
114 | Eigen::Quaterniond extQRPY;
115 |
116 | // LOAM
117 | float edgeThreshold;
118 | float surfThreshold;
119 | int edgeFeatureMinValidNum;
120 | int surfFeatureMinValidNum;
121 |
122 | // voxel filter paprams
123 | float odometrySurfLeafSize;
124 | float mappingCornerLeafSize;
125 | float mappingSurfLeafSize ;
126 |
127 | float z_tollerance;
128 | float rotation_tollerance;
129 |
130 | // CPU Params
131 | int numberOfCores;
132 | double mappingProcessInterval;
133 |
134 | // Surrounding map
135 | float surroundingkeyframeAddingDistThreshold;
136 | float surroundingkeyframeAddingAngleThreshold;
137 | float surroundingKeyframeDensity;
138 | float surroundingKeyframeSearchRadius;
139 |
140 | // Loop closure
141 | bool loopClosureEnableFlag;
142 | float loopClosureFrequency;
143 | int surroundingKeyframeSize;
144 | float historyKeyframeSearchRadius;
145 | float historyKeyframeSearchTimeDiff;
146 | int historyKeyframeSearchNum;
147 | float historyKeyframeFitnessScore;
148 |
149 | // global map visualization radius
150 | float globalMapVisualizationSearchRadius;
151 | float globalMapVisualizationPoseDensity;
152 | float globalMapVisualizationLeafSize;
153 |
154 | ParamServer()
155 | {
156 | nh.param("/robot_id", robot_id, "roboat");
157 |
158 | nh.param("lio_sam/pointCloudTopic", pointCloudTopic, "points_raw");
159 | nh.param("lio_sam/imuTopic", imuTopic, "imu_correct");
160 | nh.param("lio_sam/odomTopic", odomTopic, "odometry/imu");
161 | nh.param("lio_sam/gpsTopic", gpsTopic, "odometry/gps");
162 |
163 | nh.param("lio_sam/lidarFrame", lidarFrame, "base_link");
164 | nh.param("lio_sam/baselinkFrame", baselinkFrame, "base_link");
165 | nh.param("lio_sam/odometryFrame", odometryFrame, "odom");
166 | nh.param("lio_sam/mapFrame", mapFrame, "map");
167 |
168 | nh.param("lio_sam/useImuHeadingInitialization", useImuHeadingInitialization, false);
169 | nh.param("lio_sam/useGpsElevation", useGpsElevation, false);
170 | nh.param("lio_sam/gpsCovThreshold", gpsCovThreshold, 2.0);
171 | nh.param("lio_sam/poseCovThreshold", poseCovThreshold, 25.0);
172 |
173 | nh.param("lio_sam/savePCD", savePCD, false);
174 | nh.param("lio_sam/savePCDDirectory", savePCDDirectory, "/Downloads/LOAM/");
175 |
176 | std::string sensorStr;
177 | nh.param("lio_sam/sensor", sensorStr, "");
178 | if (sensorStr == "velodyne")
179 | {
180 | sensor = SensorType::VELODYNE;
181 | }
182 | else if (sensorStr == "ouster")
183 | {
184 | sensor = SensorType::OUSTER;
185 | }
186 | else if (sensorStr == "livox")
187 | {
188 | sensor = SensorType::LIVOX;
189 | }
190 | else
191 | {
192 | ROS_ERROR_STREAM(
193 | "Invalid sensor type (must be either 'velodyne' or 'ouster' or 'livox'): " << sensorStr);
194 | ros::shutdown();
195 | }
196 |
197 | nh.param("lio_sam/N_SCAN", N_SCAN, 16);
198 | nh.param("lio_sam/Horizon_SCAN", Horizon_SCAN, 1800);
199 | nh.param("lio_sam/downsampleRate", downsampleRate, 1);
200 | nh.param("lio_sam/lidarMinRange", lidarMinRange, 1.0);
201 | nh.param("lio_sam/lidarMaxRange", lidarMaxRange, 1000.0);
202 |
203 | nh.param("lio_sam/imuAccNoise", imuAccNoise, 0.01);
204 | nh.param("lio_sam/imuGyrNoise", imuGyrNoise, 0.001);
205 | nh.param("lio_sam/imuAccBiasN", imuAccBiasN, 0.0002);
206 | nh.param("lio_sam/imuGyrBiasN", imuGyrBiasN, 0.00003);
207 | nh.param("lio_sam/imuGravity", imuGravity, 9.80511);
208 | nh.param("lio_sam/imuRPYWeight", imuRPYWeight, 0.01);
209 | nh.param>("lio_sam/extrinsicRot", extRotV, vector());
210 | nh.param>("lio_sam/extrinsicRPY", extRPYV, vector());
211 | nh.param>("lio_sam/extrinsicTrans", extTransV, vector());
212 | extRot = Eigen::Map>(extRotV.data(), 3, 3);
213 | extRPY = Eigen::Map>(extRPYV.data(), 3, 3);
214 | extTrans = Eigen::Map>(extTransV.data(), 3, 1);
215 | extQRPY = Eigen::Quaterniond(extRPY).inverse();
216 |
217 | nh.param("lio_sam/edgeThreshold", edgeThreshold, 0.1);
218 | nh.param("lio_sam/surfThreshold", surfThreshold, 0.1);
219 | nh.param("lio_sam/edgeFeatureMinValidNum", edgeFeatureMinValidNum, 10);
220 | nh.param("lio_sam/surfFeatureMinValidNum", surfFeatureMinValidNum, 100);
221 |
222 | nh.param("lio_sam/odometrySurfLeafSize", odometrySurfLeafSize, 0.2);
223 | nh.param("lio_sam/mappingCornerLeafSize", mappingCornerLeafSize, 0.2);
224 | nh.param("lio_sam/mappingSurfLeafSize", mappingSurfLeafSize, 0.2);
225 |
226 | nh.param("lio_sam/z_tollerance", z_tollerance, FLT_MAX);
227 | nh.param("lio_sam/rotation_tollerance", rotation_tollerance, FLT_MAX);
228 |
229 | nh.param("lio_sam/numberOfCores", numberOfCores, 2);
230 | nh.param("lio_sam/mappingProcessInterval", mappingProcessInterval, 0.15);
231 |
232 | nh.param("lio_sam/surroundingkeyframeAddingDistThreshold", surroundingkeyframeAddingDistThreshold, 1.0);
233 | nh.param("lio_sam/surroundingkeyframeAddingAngleThreshold", surroundingkeyframeAddingAngleThreshold, 0.2);
234 | nh.param("lio_sam/surroundingKeyframeDensity", surroundingKeyframeDensity, 1.0);
235 | nh.param("lio_sam/surroundingKeyframeSearchRadius", surroundingKeyframeSearchRadius, 50.0);
236 |
237 | nh.param("lio_sam/loopClosureEnableFlag", loopClosureEnableFlag, false);
238 | nh.param("lio_sam/loopClosureFrequency", loopClosureFrequency, 1.0);
239 | nh.param("lio_sam/surroundingKeyframeSize", surroundingKeyframeSize, 50);
240 | nh.param("lio_sam/historyKeyframeSearchRadius", historyKeyframeSearchRadius, 10.0);
241 | nh.param("lio_sam/historyKeyframeSearchTimeDiff", historyKeyframeSearchTimeDiff, 30.0);
242 | nh.param("lio_sam/historyKeyframeSearchNum", historyKeyframeSearchNum, 25);
243 | nh.param("lio_sam/historyKeyframeFitnessScore", historyKeyframeFitnessScore, 0.3);
244 |
245 | nh.param("lio_sam/globalMapVisualizationSearchRadius", globalMapVisualizationSearchRadius, 1e3);
246 | nh.param("lio_sam/globalMapVisualizationPoseDensity", globalMapVisualizationPoseDensity, 10.0);
247 | nh.param("lio_sam/globalMapVisualizationLeafSize", globalMapVisualizationLeafSize, 1.0);
248 |
249 | usleep(100);
250 | }
251 |
252 | sensor_msgs::Imu imuConverter(const sensor_msgs::Imu& imu_in)
253 | {
254 | sensor_msgs::Imu imu_out = imu_in;
255 | // rotate acceleration
256 | Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z);
257 | acc = extRot * acc;
258 | imu_out.linear_acceleration.x = acc.x();
259 | imu_out.linear_acceleration.y = acc.y();
260 | imu_out.linear_acceleration.z = acc.z();
261 | // rotate gyroscope
262 | Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z);
263 | gyr = extRot * gyr;
264 | imu_out.angular_velocity.x = gyr.x();
265 | imu_out.angular_velocity.y = gyr.y();
266 | imu_out.angular_velocity.z = gyr.z();
267 | // rotate roll pitch yaw
268 | Eigen::Quaterniond q_from(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z);
269 | Eigen::Quaterniond q_final = q_from * extQRPY;
270 | imu_out.orientation.x = q_final.x();
271 | imu_out.orientation.y = q_final.y();
272 | imu_out.orientation.z = q_final.z();
273 | imu_out.orientation.w = q_final.w();
274 |
275 | if (sqrt(q_final.x()*q_final.x() + q_final.y()*q_final.y() + q_final.z()*q_final.z() + q_final.w()*q_final.w()) < 0.1)
276 | {
277 | ROS_ERROR("Invalid quaternion, please use a 9-axis IMU!");
278 | ros::shutdown();
279 | }
280 |
281 | return imu_out;
282 | }
283 | };
284 |
285 | template
286 | sensor_msgs::PointCloud2 publishCloud(const ros::Publisher& thisPub, const T& thisCloud, ros::Time thisStamp, std::string thisFrame)
287 | {
288 | sensor_msgs::PointCloud2 tempCloud;
289 | pcl::toROSMsg(*thisCloud, tempCloud);
290 | tempCloud.header.stamp = thisStamp;
291 | tempCloud.header.frame_id = thisFrame;
292 | if (thisPub.getNumSubscribers() != 0)
293 | thisPub.publish(tempCloud);
294 | return tempCloud;
295 | }
296 |
// Return the header timestamp of any ROS message pointer as seconds.
// Restored the template parameter list that was stripped during extraction.
template<typename T>
double ROS_TIME(T msg)
{
    return msg->header.stamp.toSec();
}
302 |
303 |
304 | template
305 | void imuAngular2rosAngular(sensor_msgs::Imu *thisImuMsg, T *angular_x, T *angular_y, T *angular_z)
306 | {
307 | *angular_x = thisImuMsg->angular_velocity.x;
308 | *angular_y = thisImuMsg->angular_velocity.y;
309 | *angular_z = thisImuMsg->angular_velocity.z;
310 | }
311 |
312 |
313 | template
314 | void imuAccel2rosAccel(sensor_msgs::Imu *thisImuMsg, T *acc_x, T *acc_y, T *acc_z)
315 | {
316 | *acc_x = thisImuMsg->linear_acceleration.x;
317 | *acc_y = thisImuMsg->linear_acceleration.y;
318 | *acc_z = thisImuMsg->linear_acceleration.z;
319 | }
320 |
321 |
322 | template
323 | void imuRPY2rosRPY(sensor_msgs::Imu *thisImuMsg, T *rosRoll, T *rosPitch, T *rosYaw)
324 | {
325 | double imuRoll, imuPitch, imuYaw;
326 | tf::Quaternion orientation;
327 | tf::quaternionMsgToTF(thisImuMsg->orientation, orientation);
328 | tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);
329 |
330 | *rosRoll = imuRoll;
331 | *rosPitch = imuPitch;
332 | *rosYaw = imuYaw;
333 | }
334 |
335 |
336 | float pointDistance(PointType p)
337 | {
338 | return sqrt(p.x*p.x + p.y*p.y + p.z*p.z);
339 | }
340 |
341 |
342 | float pointDistance(PointType p1, PointType p2)
343 | {
344 | return sqrt((p1.x-p2.x)*(p1.x-p2.x) + (p1.y-p2.y)*(p1.y-p2.y) + (p1.z-p2.z)*(p1.z-p2.z));
345 | }
346 |
347 | #endif
348 |
--------------------------------------------------------------------------------
/launch/include/config/robot.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/launch/include/config/rviz.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 191
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /links1
8 | - /Odometry1
9 | - /Point Cloud1
10 | - /Feature Cloud1
11 | - /Mapping1
12 | Splitter Ratio: 0.5600000023841858
13 | Tree Height: 824
14 | - Class: rviz/Selection
15 | Name: Selection
16 | - Class: rviz/Views
17 | Expanded:
18 | - /Current View1
19 | Name: Views
20 | Splitter Ratio: 0.5
21 | - Class: rviz/Tool Properties
22 | Expanded: ~
23 | Name: Tool Properties
24 | Splitter Ratio: 0.5
25 | Preferences:
26 | PromptSaveOnExit: true
27 | Toolbars:
28 | toolButtonStyle: 2
29 | Visualization Manager:
30 | Class: ""
31 | Displays:
32 | - Class: rviz/Group
33 | Displays:
34 | - Class: rviz/TF
35 | Enabled: false
36 | Frame Timeout: 999
37 | Frames:
38 | All Enabled: false
39 | Marker Scale: 3
40 | Name: TF
41 | Show Arrows: true
42 | Show Axes: true
43 | Show Names: true
44 | Tree:
45 | {}
46 | Update Interval: 0
47 | Value: false
48 | - Class: rviz/Axes
49 | Enabled: true
50 | Length: 2
51 | Name: map
52 | Radius: 0.5
53 | Reference Frame: map
54 | Value: true
55 | - Class: rviz/Axes
56 | Enabled: true
57 | Length: 1
58 | Name: base_link
59 | Radius: 0.30000001192092896
60 | Reference Frame: base_link
61 | Value: true
62 | Enabled: true
63 | Name: links
64 | - Class: rviz/Group
65 | Displays:
66 | - Angle Tolerance: 0.10000000149011612
67 | Class: rviz/Odometry
68 | Covariance:
69 | Orientation:
70 | Alpha: 0.5
71 | Color: 255; 255; 127
72 | Color Style: Unique
73 | Frame: Local
74 | Offset: 1
75 | Scale: 1
76 | Value: true
77 | Position:
78 | Alpha: 0.30000001192092896
79 | Color: 204; 51; 204
80 | Scale: 1
81 | Value: true
82 | Value: false
83 | Enabled: true
84 | Keep: 300
85 | Name: Odom IMU
86 | Position Tolerance: 0.10000000149011612
87 | Shape:
88 | Alpha: 1
89 | Axes Length: 0.5
90 | Axes Radius: 0.10000000149011612
91 | Color: 255; 25; 0
92 | Head Length: 0.30000001192092896
93 | Head Radius: 0.10000000149011612
94 | Shaft Length: 1
95 | Shaft Radius: 0.05000000074505806
96 | Value: Axes
97 | Topic: /odometry/imu
98 | Unreliable: false
99 | Value: true
100 | - Angle Tolerance: 0.10000000149011612
101 | Class: rviz/Odometry
102 | Covariance:
103 | Orientation:
104 | Alpha: 0.5
105 | Color: 255; 255; 127
106 | Color Style: Unique
107 | Frame: Local
108 | Offset: 1
109 | Scale: 1
110 | Value: true
111 | Position:
112 | Alpha: 0.30000001192092896
113 | Color: 204; 51; 204
114 | Scale: 1
115 | Value: true
116 | Value: false
117 | Enabled: true
118 | Keep: 300
119 | Name: Odom GPS
120 | Position Tolerance: 0.10000000149011612
121 | Shape:
122 | Alpha: 1
123 | Axes Length: 1
124 | Axes Radius: 0.30000001192092896
125 | Color: 255; 25; 0
126 | Head Length: 0.30000001192092896
127 | Head Radius: 0.10000000149011612
128 | Shaft Length: 1
129 | Shaft Radius: 0.05000000074505806
130 | Value: Axes
131 | Topic: /odometry/gps
132 | Unreliable: false
133 | Value: true
134 | - Angle Tolerance: 0.10000000149011612
135 | Class: rviz/Odometry
136 | Covariance:
137 | Orientation:
138 | Alpha: 0.5
139 | Color: 255; 255; 127
140 | Color Style: Unique
141 | Frame: Local
142 | Offset: 1
143 | Scale: 1
144 | Value: true
145 | Position:
146 | Alpha: 0.30000001192092896
147 | Color: 204; 51; 204
148 | Scale: 1
149 | Value: true
150 | Value: false
151 | Enabled: false
152 | Keep: 300
153 | Name: Odom lidar
154 | Position Tolerance: 0.10000000149011612
155 | Shape:
156 | Alpha: 1
157 | Axes Length: 0.25
158 | Axes Radius: 0.10000000149011612
159 | Color: 255; 25; 0
160 | Head Length: 0.30000001192092896
161 | Head Radius: 0.10000000149011612
162 | Shaft Length: 1
163 | Shaft Radius: 0.05000000074505806
164 | Value: Axes
165 | Topic: /lio_sam/mapping/odometry
166 | Unreliable: false
167 | Value: false
168 | Enabled: false
169 | Name: Odometry
170 | - Class: rviz/Group
171 | Displays:
172 | - Alpha: 1
173 | Autocompute Intensity Bounds: false
174 | Autocompute Value Bounds:
175 | Max Value: 10
176 | Min Value: -10
177 | Value: true
178 | Axis: Z
179 | Channel Name: intensity
180 | Class: rviz/PointCloud2
181 | Color: 255; 255; 255
182 | Color Transformer: Intensity
183 | Decay Time: 0
184 | Enabled: true
185 | Invert Rainbow: false
186 | Max Color: 255; 255; 255
187 | Max Intensity: 128
188 | Min Color: 0; 0; 0
189 | Min Intensity: 0
190 | Name: Velodyne
191 | Position Transformer: XYZ
192 | Queue Size: 10
193 | Selectable: false
194 | Size (Pixels): 2
195 | Size (m): 0.009999999776482582
196 | Style: Points
197 | Topic: /lio_sam/deskew/cloud_deskewed
198 | Unreliable: false
199 | Use Fixed Frame: true
200 | Use rainbow: true
201 | Value: true
202 | - Alpha: 1
203 | Autocompute Intensity Bounds: true
204 | Autocompute Value Bounds:
205 | Max Value: 10
206 | Min Value: -10
207 | Value: true
208 | Axis: Z
209 | Channel Name: intensity
210 | Class: rviz/PointCloud2
211 | Color: 255; 255; 255
212 | Color Transformer: Intensity
213 | Decay Time: 0
214 | Enabled: false
215 | Invert Rainbow: false
216 | Max Color: 255; 255; 255
217 | Max Intensity: 0.7900000214576721
218 | Min Color: 0; 0; 0
219 | Min Intensity: 0
220 | Name: Reg Cloud
221 | Position Transformer: XYZ
222 | Queue Size: 10
223 | Selectable: true
224 | Size (Pixels): 3
225 | Size (m): 0.009999999776482582
226 | Style: Points
227 | Topic: /lio_sam/mapping/cloud_registered
228 | Unreliable: false
229 | Use Fixed Frame: true
230 | Use rainbow: true
231 | Value: false
232 | Enabled: true
233 | Name: Point Cloud
234 | - Class: rviz/Group
235 | Displays:
236 | - Alpha: 1
237 | Autocompute Intensity Bounds: true
238 | Autocompute Value Bounds:
239 | Max Value: 10
240 | Min Value: -10
241 | Value: true
242 | Axis: Z
243 | Channel Name: intensity
244 | Class: rviz/PointCloud2
245 | Color: 0; 255; 0
246 | Color Transformer: FlatColor
247 | Decay Time: 0
248 | Enabled: true
249 | Invert Rainbow: false
250 | Max Color: 255; 255; 255
251 | Max Intensity: 15.077729225158691
252 | Min Color: 0; 0; 0
253 | Min Intensity: 0.019985778257250786
254 | Name: Edge Feature
255 | Position Transformer: XYZ
256 | Queue Size: 10
257 | Selectable: true
258 | Size (Pixels): 5
259 | Size (m): 0.009999999776482582
260 | Style: Points
261 | Topic: /lio_sam/feature/cloud_corner
262 | Unreliable: false
263 | Use Fixed Frame: true
264 | Use rainbow: true
265 | Value: true
266 | - Alpha: 1
267 | Autocompute Intensity Bounds: true
268 | Autocompute Value Bounds:
269 | Max Value: 10
270 | Min Value: -10
271 | Value: true
272 | Axis: Z
273 | Channel Name: intensity
274 | Class: rviz/PointCloud2
275 | Color: 255; 85; 255
276 | Color Transformer: FlatColor
277 | Decay Time: 0
278 | Enabled: true
279 | Invert Rainbow: false
280 | Max Color: 255; 255; 255
281 | Max Intensity: 15.100607872009277
282 | Min Color: 0; 0; 0
283 | Min Intensity: 0.0007188677554950118
284 | Name: Plannar Feature
285 | Position Transformer: XYZ
286 | Queue Size: 10
287 | Selectable: true
288 | Size (Pixels): 3
289 | Size (m): 0.009999999776482582
290 | Style: Points
291 | Topic: /lio_sam/feature/cloud_surface
292 | Unreliable: false
293 | Use Fixed Frame: true
294 | Use rainbow: true
295 | Value: true
296 | Enabled: false
297 | Name: Feature Cloud
298 | - Class: rviz/Group
299 | Displays:
300 | - Alpha: 0.30000001192092896
301 | Autocompute Intensity Bounds: false
302 | Autocompute Value Bounds:
303 | Max Value: 7.4858574867248535
304 | Min Value: -1.2309398651123047
305 | Value: true
306 | Axis: Z
307 | Channel Name: intensity
308 | Class: rviz/PointCloud2
309 | Color: 255; 255; 255
310 | Color Transformer: Intensity
311 | Decay Time: 300
312 | Enabled: true
313 | Invert Rainbow: false
314 | Max Color: 255; 255; 255
315 | Max Intensity: 128
316 | Min Color: 0; 0; 0
317 | Min Intensity: 0
318 | Name: Map (cloud)
319 | Position Transformer: XYZ
320 | Queue Size: 10
321 | Selectable: false
322 | Size (Pixels): 2
323 | Size (m): 0.009999999776482582
324 | Style: Points
325 | Topic: /lio_sam/mapping/cloud_registered
326 | Unreliable: false
327 | Use Fixed Frame: true
328 | Use rainbow: true
329 | Value: true
330 | - Alpha: 1
331 | Autocompute Intensity Bounds: true
332 | Autocompute Value Bounds:
333 | Max Value: 36.61034393310547
334 | Min Value: -2.3476977348327637
335 | Value: true
336 | Axis: Z
337 | Channel Name: intensity
338 | Class: rviz/PointCloud2
339 | Color: 255; 255; 255
340 | Color Transformer: AxisColor
341 | Decay Time: 0
342 | Enabled: false
343 | Invert Rainbow: false
344 | Max Color: 255; 255; 255
345 | Max Intensity: 15.100604057312012
346 | Min Color: 0; 0; 0
347 | Min Intensity: 0.014545874670147896
348 | Name: Map (local)
349 | Position Transformer: XYZ
350 | Queue Size: 10
351 | Selectable: true
352 | Size (Pixels): 2
353 | Size (m): 0.10000000149011612
354 | Style: Flat Squares
355 | Topic: /lio_sam/mapping/map_local
356 | Unreliable: false
357 | Use Fixed Frame: true
358 | Use rainbow: true
359 | Value: false
360 | - Alpha: 1
361 | Autocompute Intensity Bounds: true
362 | Autocompute Value Bounds:
363 | Max Value: 10
364 | Min Value: -10
365 | Value: true
366 | Axis: Z
367 | Channel Name: intensity
368 | Class: rviz/PointCloud2
369 | Color: 255; 255; 255
370 | Color Transformer: Intensity
371 | Decay Time: 0
372 | Enabled: false
373 | Invert Rainbow: false
374 | Max Color: 255; 255; 255
375 | Max Intensity: 15.100096702575684
376 | Min Color: 0; 0; 0
377 | Min Intensity: 0.007923072203993797
378 | Name: Map (global)
379 | Position Transformer: XYZ
380 | Queue Size: 10
381 | Selectable: true
382 | Size (Pixels): 2
383 | Size (m): 0.009999999776482582
384 | Style: Points
385 | Topic: /lio_sam/mapping/map_global
386 | Unreliable: false
387 | Use Fixed Frame: true
388 | Use rainbow: true
389 | Value: false
390 | - Alpha: 1
391 | Buffer Length: 1
392 | Class: rviz/Path
393 | Color: 85; 255; 255
394 | Enabled: true
395 | Head Diameter: 0.30000001192092896
396 | Head Length: 0.20000000298023224
397 | Length: 0.30000001192092896
398 | Line Style: Billboards
399 | Line Width: 0.10000000149011612
400 | Name: Path (global)
401 | Offset:
402 | X: 0
403 | Y: 0
404 | Z: 0
405 | Pose Color: 255; 85; 255
406 | Pose Style: None
407 | Radius: 0.029999999329447746
408 | Shaft Diameter: 0.10000000149011612
409 | Shaft Length: 0.10000000149011612
410 | Topic: /lio_sam/mapping/path
411 | Unreliable: false
412 | Value: true
413 | - Alpha: 1
414 | Buffer Length: 1
415 | Class: rviz/Path
416 | Color: 255; 85; 255
417 | Enabled: true
418 | Head Diameter: 0.30000001192092896
419 | Head Length: 0.20000000298023224
420 | Length: 0.30000001192092896
421 | Line Style: Billboards
422 | Line Width: 0.10000000149011612
423 | Name: Path (local)
424 | Offset:
425 | X: 0
426 | Y: 0
427 | Z: 0
428 | Pose Color: 255; 85; 255
429 | Pose Style: None
430 | Radius: 0.029999999329447746
431 | Shaft Diameter: 0.10000000149011612
432 | Shaft Length: 0.10000000149011612
433 | Topic: /lio_sam/imu/path
434 | Unreliable: false
435 | Value: true
436 | - Alpha: 1
437 | Autocompute Intensity Bounds: true
438 | Autocompute Value Bounds:
439 | Max Value: 20.802837371826172
440 | Min Value: -0.03934507071971893
441 | Value: true
442 | Axis: Z
443 | Channel Name: intensity
444 | Class: rviz/PointCloud2
445 | Color: 255; 255; 255
446 | Color Transformer: AxisColor
447 | Decay Time: 0
448 | Enabled: false
449 | Invert Rainbow: false
450 | Max Color: 255; 255; 255
451 | Max Intensity: 15.100604057312012
452 | Min Color: 0; 0; 0
453 | Min Intensity: 0.014545874670147896
454 | Name: Loop closure
455 | Position Transformer: XYZ
456 | Queue Size: 10
457 | Selectable: true
458 | Size (Pixels): 2
459 | Size (m): 0.30000001192092896
460 | Style: Flat Squares
461 | Topic: /lio_sam/mapping/icp_loop_closure_corrected_cloud
462 | Unreliable: false
463 | Use Fixed Frame: true
464 | Use rainbow: true
465 | Value: false
466 | - Class: rviz/MarkerArray
467 | Enabled: true
468 | Marker Topic: /lio_sam/mapping/loop_closure_constraints
469 | Name: Loop constraint
470 | Namespaces:
471 | {}
472 | Queue Size: 100
473 | Value: true
474 | Enabled: true
475 | Name: Mapping
476 | Enabled: true
477 | Global Options:
478 | Background Color: 0; 0; 0
479 | Default Light: true
480 | Fixed Frame: map
481 | Frame Rate: 30
482 | Name: root
483 | Tools:
484 | - Class: rviz/Interact
485 | Hide Inactive Objects: true
486 | - Class: rviz/FocusCamera
487 | - Class: rviz/Measure
488 | Value: true
489 | Views:
490 | Current:
491 | Class: rviz/Orbit
492 | Distance: 97.43701171875
493 | Enable Stereo Rendering:
494 | Stereo Eye Separation: 0.05999999865889549
495 | Stereo Focal Distance: 1
496 | Swap Stereo Eyes: false
497 | Value: false
498 | Focal Point:
499 | X: 0
500 | Y: 0
501 | Z: 0
502 | Focal Shape Fixed Size: false
503 | Focal Shape Size: 0.05000000074505806
504 | Invert Z Axis: false
505 | Name: Current View
506 | Near Clip Distance: 0.009999999776482582
507 | Pitch: 1.1253981590270996
508 | Target Frame: base_link
509 | Value: Orbit (rviz)
510 | Yaw: 3.3316705226898193
511 | Saved: ~
512 | Window Geometry:
513 | Displays:
514 | collapsed: false
515 | Height: 1190
516 | Hide Left Dock: false
517 | Hide Right Dock: true
518 | QMainWindow State: 000000ff00000000fd0000000400000000000001a30000043efc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006e00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c00610079007301000000470000043e000000f100fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000006e00ffffff000000010000011100000435fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e00000435000000c200fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d00650100000000000004500000000000000000000005f20000043e00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
519 | Selection:
520 | collapsed: false
521 | Tool Properties:
522 | collapsed: false
523 | Views:
524 | collapsed: true
525 | Width: 1948
526 | X: 725
527 | Y: 300
528 |
--------------------------------------------------------------------------------
/launch/include/module_loam.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/launch/include/module_navsat.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/launch/include/module_robot_state_publisher.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/launch/include/module_rviz.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/launch/include/rosconsole/rosconsole_error.conf:
--------------------------------------------------------------------------------
1 | # Set the default ros output to error and higher
2 | log4j.logger.ros = ERROR
--------------------------------------------------------------------------------
/launch/include/rosconsole/rosconsole_info.conf:
--------------------------------------------------------------------------------
1 | # Set the default ros output to info and higher
2 | log4j.logger.ros = INFO
--------------------------------------------------------------------------------
/launch/include/rosconsole/rosconsole_warn.conf:
--------------------------------------------------------------------------------
1 | # Set the default ros output to warning and higher
2 | log4j.logger.ros = WARN
--------------------------------------------------------------------------------
/launch/run.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/msg/cloud_info.msg:
--------------------------------------------------------------------------------
1 | # Cloud Info
2 | Header header
3 |
4 | int32[] startRingIndex
5 | int32[] endRingIndex
6 |
7 | int32[] pointColInd # point column index in range image
8 | float32[] pointRange # point range
9 |
10 | int64 imuAvailable
11 | int64 odomAvailable
12 |
13 | # Attitude for LOAM initialization
14 | float32 imuRollInit
15 | float32 imuPitchInit
16 | float32 imuYawInit
17 |
18 | # Initial guess from imu pre-integration
19 | float32 initialGuessX
20 | float32 initialGuessY
21 | float32 initialGuessZ
22 | float32 initialGuessRoll
23 | float32 initialGuessPitch
24 | float32 initialGuessYaw
25 |
26 | # Point cloud messages
27 | sensor_msgs/PointCloud2 cloud_deskewed # original cloud deskewed
28 | sensor_msgs/PointCloud2 cloud_corner # extracted corner feature
29 | sensor_msgs/PointCloud2 cloud_surface # extracted surface feature
30 |
31 | # 3rd party messages
32 | sensor_msgs/PointCloud2 key_frame_cloud
33 | sensor_msgs/PointCloud2 key_frame_color
34 | sensor_msgs/PointCloud2 key_frame_poses
35 | sensor_msgs/PointCloud2 key_frame_map
36 |
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | lio_sam
4 | 1.0.0
5 | Lidar Odometry
6 |
7 | Tixiao Shan
8 |
9 | TODO
10 |
11 | Tixiao Shan
12 |
13 | catkin
14 |
15 | roscpp
16 | roscpp
17 | rospy
18 | rospy
19 |
20 | tf
21 | tf
22 |
23 | cv_bridge
24 | cv_bridge
25 |
26 | pcl_conversions
27 | pcl_conversions
28 |
29 | std_msgs
30 | std_msgs
31 | sensor_msgs
32 | sensor_msgs
33 | geometry_msgs
34 | geometry_msgs
35 | nav_msgs
36 | nav_msgs
37 | visualization_msgs
38 | visualization_msgs
39 |
40 | message_generation
41 | message_generation
42 | message_runtime
43 | message_runtime
44 |
45 | GTSAM
46 | GTSAM
47 |
48 |
49 |
--------------------------------------------------------------------------------
/src/featureExtraction.cpp:
--------------------------------------------------------------------------------
1 | #include "utility.h"
2 | #include "lio_sam/cloud_info.h"
3 |
// Curvature value paired with the index of the point it belongs to, so
// points can be sorted by smoothness without losing their identity.
struct smoothness_t{
    float value;
    size_t ind;
};
8 |
9 | struct by_value{
10 | bool operator()(smoothness_t const &left, smoothness_t const &right) {
11 | return left.value < right.value;
12 | }
13 | };
14 |
15 | class FeatureExtraction : public ParamServer
16 | {
17 |
18 | public:
19 |
20 | ros::Subscriber subLaserCloudInfo;
21 |
22 | ros::Publisher pubLaserCloudInfo;
23 | ros::Publisher pubCornerPoints;
24 | ros::Publisher pubSurfacePoints;
25 |
26 | pcl::PointCloud::Ptr extractedCloud;
27 | pcl::PointCloud::Ptr cornerCloud;
28 | pcl::PointCloud::Ptr surfaceCloud;
29 |
30 | pcl::VoxelGrid downSizeFilter;
31 |
32 | lio_sam::cloud_info cloudInfo;
33 | std_msgs::Header cloudHeader;
34 |
35 | std::vector cloudSmoothness;
36 | float *cloudCurvature;
37 | int *cloudNeighborPicked;
38 | int *cloudLabel;
39 |
40 | FeatureExtraction()
41 | {
42 | subLaserCloudInfo = nh.subscribe("lio_sam/deskew/cloud_info", 1, &FeatureExtraction::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());
43 |
44 | pubLaserCloudInfo = nh.advertise ("lio_sam/feature/cloud_info", 1);
45 | pubCornerPoints = nh.advertise("lio_sam/feature/cloud_corner", 1);
46 | pubSurfacePoints = nh.advertise("lio_sam/feature/cloud_surface", 1);
47 |
48 | initializationValue();
49 | }
50 |
51 | void initializationValue()
52 | {
53 | cloudSmoothness.resize(N_SCAN*Horizon_SCAN);
54 |
55 | downSizeFilter.setLeafSize(odometrySurfLeafSize, odometrySurfLeafSize, odometrySurfLeafSize);
56 |
57 | extractedCloud.reset(new pcl::PointCloud());
58 | cornerCloud.reset(new pcl::PointCloud());
59 | surfaceCloud.reset(new pcl::PointCloud());
60 |
61 | cloudCurvature = new float[N_SCAN*Horizon_SCAN];
62 | cloudNeighborPicked = new int[N_SCAN*Horizon_SCAN];
63 | cloudLabel = new int[N_SCAN*Horizon_SCAN];
64 | }
65 |
66 | void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
67 | {
68 | cloudInfo = *msgIn; // new cloud info
69 | cloudHeader = msgIn->header; // new cloud header
70 | pcl::fromROSMsg(msgIn->cloud_deskewed, *extractedCloud); // new cloud for extraction
71 |
72 | calculateSmoothness();
73 |
74 | markOccludedPoints();
75 |
76 | extractFeatures();
77 |
78 | publishFeatureCloud();
79 | }
80 |
81 | void calculateSmoothness()
82 | {
83 | int cloudSize = extractedCloud->points.size();
84 | for (int i = 5; i < cloudSize - 5; i++)
85 | {
86 | float diffRange = cloudInfo.pointRange[i-5] + cloudInfo.pointRange[i-4]
87 | + cloudInfo.pointRange[i-3] + cloudInfo.pointRange[i-2]
88 | + cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i] * 10
89 | + cloudInfo.pointRange[i+1] + cloudInfo.pointRange[i+2]
90 | + cloudInfo.pointRange[i+3] + cloudInfo.pointRange[i+4]
91 | + cloudInfo.pointRange[i+5];
92 |
93 | cloudCurvature[i] = diffRange*diffRange;//diffX * diffX + diffY * diffY + diffZ * diffZ;
94 |
95 | cloudNeighborPicked[i] = 0;
96 | cloudLabel[i] = 0;
97 | // cloudSmoothness for sorting
98 | cloudSmoothness[i].value = cloudCurvature[i];
99 | cloudSmoothness[i].ind = i;
100 | }
101 | }
102 |
103 | void markOccludedPoints()
104 | {
105 | int cloudSize = extractedCloud->points.size();
106 | // mark occluded points and parallel beam points
107 | for (int i = 5; i < cloudSize - 6; ++i)
108 | {
109 | // occluded points
110 | float depth1 = cloudInfo.pointRange[i];
111 | float depth2 = cloudInfo.pointRange[i+1];
112 | int columnDiff = std::abs(int(cloudInfo.pointColInd[i+1] - cloudInfo.pointColInd[i]));
113 |
114 | if (columnDiff < 10){
115 | // 10 pixel diff in range image
116 | if (depth1 - depth2 > 0.3){
117 | cloudNeighborPicked[i - 5] = 1;
118 | cloudNeighborPicked[i - 4] = 1;
119 | cloudNeighborPicked[i - 3] = 1;
120 | cloudNeighborPicked[i - 2] = 1;
121 | cloudNeighborPicked[i - 1] = 1;
122 | cloudNeighborPicked[i] = 1;
123 | }else if (depth2 - depth1 > 0.3){
124 | cloudNeighborPicked[i + 1] = 1;
125 | cloudNeighborPicked[i + 2] = 1;
126 | cloudNeighborPicked[i + 3] = 1;
127 | cloudNeighborPicked[i + 4] = 1;
128 | cloudNeighborPicked[i + 5] = 1;
129 | cloudNeighborPicked[i + 6] = 1;
130 | }
131 | }
132 | // parallel beam
133 | float diff1 = std::abs(float(cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i]));
134 | float diff2 = std::abs(float(cloudInfo.pointRange[i+1] - cloudInfo.pointRange[i]));
135 |
136 | if (diff1 > 0.02 * cloudInfo.pointRange[i] && diff2 > 0.02 * cloudInfo.pointRange[i])
137 | cloudNeighborPicked[i] = 1;
138 | }
139 | }
140 |
141 | void extractFeatures()
142 | {
143 | cornerCloud->clear();
144 | surfaceCloud->clear();
145 |
146 | pcl::PointCloud::Ptr surfaceCloudScan(new pcl::PointCloud());
147 | pcl::PointCloud::Ptr surfaceCloudScanDS(new pcl::PointCloud());
148 |
149 | for (int i = 0; i < N_SCAN; i++)
150 | {
151 | surfaceCloudScan->clear();
152 |
153 | for (int j = 0; j < 6; j++)
154 | {
155 |
156 | int sp = (cloudInfo.startRingIndex[i] * (6 - j) + cloudInfo.endRingIndex[i] * j) / 6;
157 | int ep = (cloudInfo.startRingIndex[i] * (5 - j) + cloudInfo.endRingIndex[i] * (j + 1)) / 6 - 1;
158 |
159 | if (sp >= ep)
160 | continue;
161 |
162 | std::sort(cloudSmoothness.begin()+sp, cloudSmoothness.begin()+ep, by_value());
163 |
164 | int largestPickedNum = 0;
165 | for (int k = ep; k >= sp; k--)
166 | {
167 | int ind = cloudSmoothness[k].ind;
168 | if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] > edgeThreshold)
169 | {
170 | largestPickedNum++;
171 | if (largestPickedNum <= 20){
172 | cloudLabel[ind] = 1;
173 | cornerCloud->push_back(extractedCloud->points[ind]);
174 | } else {
175 | break;
176 | }
177 |
178 | cloudNeighborPicked[ind] = 1;
179 | for (int l = 1; l <= 5; l++)
180 | {
181 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));
182 | if (columnDiff > 10)
183 | break;
184 | cloudNeighborPicked[ind + l] = 1;
185 | }
186 | for (int l = -1; l >= -5; l--)
187 | {
188 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));
189 | if (columnDiff > 10)
190 | break;
191 | cloudNeighborPicked[ind + l] = 1;
192 | }
193 | }
194 | }
195 |
196 | for (int k = sp; k <= ep; k++)
197 | {
198 | int ind = cloudSmoothness[k].ind;
199 | if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] < surfThreshold)
200 | {
201 |
202 | cloudLabel[ind] = -1;
203 | cloudNeighborPicked[ind] = 1;
204 |
205 | for (int l = 1; l <= 5; l++) {
206 |
207 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));
208 | if (columnDiff > 10)
209 | break;
210 |
211 | cloudNeighborPicked[ind + l] = 1;
212 | }
213 | for (int l = -1; l >= -5; l--) {
214 |
215 | int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));
216 | if (columnDiff > 10)
217 | break;
218 |
219 | cloudNeighborPicked[ind + l] = 1;
220 | }
221 | }
222 | }
223 |
224 | for (int k = sp; k <= ep; k++)
225 | {
226 | if (cloudLabel[k] <= 0){
227 | surfaceCloudScan->push_back(extractedCloud->points[k]);
228 | }
229 | }
230 | }
231 |
232 | surfaceCloudScanDS->clear();
233 | downSizeFilter.setInputCloud(surfaceCloudScan);
234 | downSizeFilter.filter(*surfaceCloudScanDS);
235 |
236 | *surfaceCloud += *surfaceCloudScanDS;
237 | }
238 | }
239 |
240 | void freeCloudInfoMemory()
241 | {
242 | cloudInfo.startRingIndex.clear();
243 | cloudInfo.endRingIndex.clear();
244 | cloudInfo.pointColInd.clear();
245 | cloudInfo.pointRange.clear();
246 | }
247 |
    // Publish the extracted corner and surface feature clouds and forward the
    // accumulated cloud_info message to the mapping stage.
    void publishFeatureCloud()
    {
        // free cloud info memory (index arrays are no longer needed downstream)
        freeCloudInfoMemory();
        // save newly extracted features; publishCloud also returns the ROS
        // message so it can be embedded in cloudInfo
        cloudInfo.cloud_corner = publishCloud(pubCornerPoints, cornerCloud, cloudHeader.stamp, lidarFrame);
        cloudInfo.cloud_surface = publishCloud(pubSurfacePoints, surfaceCloud, cloudHeader.stamp, lidarFrame);
        // publish to mapOptimization
        pubLaserCloudInfo.publish(cloudInfo);
    }
258 | };
259 |
260 |
261 | int main(int argc, char** argv)
262 | {
263 | ros::init(argc, argv, "lio_sam");
264 |
265 | FeatureExtraction FE;
266 |
267 | ROS_INFO("\033[1;32m----> Feature Extraction Started.\033[0m");
268 |
269 | ros::spin();
270 |
271 | return 0;
272 | }
--------------------------------------------------------------------------------
/src/imageProjection.cpp:
--------------------------------------------------------------------------------
1 | #include "utility.h"
2 | #include "lio_sam/cloud_info.h"
3 |
// Point layout produced by the Velodyne driver: XYZ + intensity, the laser
// ring index, and a per-point time offset. cachePointCloud() adds this offset
// to the scan stamp (timeScanEnd = timeScanCur + points.back().time), so it is
// a relative offset in seconds from the start of the scan.
struct VelodynePointXYZIRT
{
    PCL_ADD_POINT4D
    PCL_ADD_INTENSITY;
    uint16_t ring;  // index of the laser that measured this point
    float time;     // offset from scan start, seconds
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
} EIGEN_ALIGN16;
// Register the layout with PCL so the struct can be used in pcl::fromROSMsg etc.
POINT_CLOUD_REGISTER_POINT_STRUCT (VelodynePointXYZIRT,
    (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
    (uint16_t, ring, ring) (float, time, time)
)
16 |
// Point layout produced by the Ouster driver. The per-point stamp `t` is in
// nanoseconds (cachePointCloud() converts it with `src.t * 1e-9f` when mapping
// to the common Velodyne-style representation).
struct OusterPointXYZIRT {
    PCL_ADD_POINT4D;
    float intensity;
    uint32_t t;             // offset from scan start, nanoseconds
    uint16_t reflectivity;  // not used by this pipeline
    uint8_t ring;           // index of the laser that measured this point
    uint16_t noise;         // not used by this pipeline
    uint32_t range;         // not used by this pipeline
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
} EIGEN_ALIGN16;
// Register the layout with PCL so the struct can be used in pcl::fromROSMsg etc.
POINT_CLOUD_REGISTER_POINT_STRUCT(OusterPointXYZIRT,
    (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
    (uint32_t, t, t) (uint16_t, reflectivity, reflectivity)
    (uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range)
)
32 |
33 | // Use the Velodyne point format as a common representation
34 | using PointXYZIRT = VelodynePointXYZIRT;
35 |
36 | const int queueLength = 2000;
37 |
38 | class ImageProjection : public ParamServer
39 | {
40 | private:
41 |
42 | std::mutex imuLock;
43 | std::mutex odoLock;
44 |
45 | ros::Subscriber subLaserCloud;
46 | ros::Publisher pubLaserCloud;
47 |
48 | ros::Publisher pubExtractedCloud;
49 | ros::Publisher pubLaserCloudInfo;
50 |
51 | ros::Subscriber subImu;
52 | std::deque imuQueue;
53 |
54 | ros::Subscriber subOdom;
55 | std::deque odomQueue;
56 |
57 | std::deque cloudQueue;
58 | sensor_msgs::PointCloud2 currentCloudMsg;
59 |
60 | double *imuTime = new double[queueLength];
61 | double *imuRotX = new double[queueLength];
62 | double *imuRotY = new double[queueLength];
63 | double *imuRotZ = new double[queueLength];
64 |
65 | int imuPointerCur;
66 | bool firstPointFlag;
67 | Eigen::Affine3f transStartInverse;
68 |
69 | pcl::PointCloud::Ptr laserCloudIn;
70 | pcl::PointCloud::Ptr tmpOusterCloudIn;
71 | pcl::PointCloud::Ptr fullCloud;
72 | pcl::PointCloud::Ptr extractedCloud;
73 |
74 | int deskewFlag;
75 | cv::Mat rangeMat;
76 |
77 | bool odomDeskewFlag;
78 | float odomIncreX;
79 | float odomIncreY;
80 | float odomIncreZ;
81 |
82 | lio_sam::cloud_info cloudInfo;
83 | double timeScanCur;
84 | double timeScanEnd;
85 | std_msgs::Header cloudHeader;
86 |
87 | vector columnIdnCountVec;
88 |
89 |
90 | public:
91 | ImageProjection():
92 | deskewFlag(0)
93 | {
94 | subImu = nh.subscribe(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay());
95 | subOdom = nh.subscribe(odomTopic+"_incremental", 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay());
96 | subLaserCloud = nh.subscribe(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay());
97 |
98 | pubExtractedCloud = nh.advertise ("lio_sam/deskew/cloud_deskewed", 1);
99 | pubLaserCloudInfo = nh.advertise ("lio_sam/deskew/cloud_info", 1);
100 |
101 | allocateMemory();
102 | resetParameters();
103 |
104 | pcl::console::setVerbosityLevel(pcl::console::L_ERROR);
105 | }
106 |
107 | void allocateMemory()
108 | {
109 | laserCloudIn.reset(new pcl::PointCloud());
110 | tmpOusterCloudIn.reset(new pcl::PointCloud());
111 | fullCloud.reset(new pcl::PointCloud());
112 | extractedCloud.reset(new pcl::PointCloud());
113 |
114 | fullCloud->points.resize(N_SCAN*Horizon_SCAN);
115 |
116 | cloudInfo.startRingIndex.assign(N_SCAN, 0);
117 | cloudInfo.endRingIndex.assign(N_SCAN, 0);
118 |
119 | cloudInfo.pointColInd.assign(N_SCAN*Horizon_SCAN, 0);
120 | cloudInfo.pointRange.assign(N_SCAN*Horizon_SCAN, 0);
121 |
122 | resetParameters();
123 | }
124 |
125 | void resetParameters()
126 | {
127 | laserCloudIn->clear();
128 | extractedCloud->clear();
129 | // reset range matrix for range image projection
130 | rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX));
131 |
132 | imuPointerCur = 0;
133 | firstPointFlag = true;
134 | odomDeskewFlag = false;
135 |
136 | for (int i = 0; i < queueLength; ++i)
137 | {
138 | imuTime[i] = 0;
139 | imuRotX[i] = 0;
140 | imuRotY[i] = 0;
141 | imuRotZ[i] = 0;
142 | }
143 |
144 | columnIdnCountVec.assign(N_SCAN, 0);
145 | }
146 |
147 | ~ImageProjection(){}
148 |
149 | void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg)
150 | {
151 | sensor_msgs::Imu thisImu = imuConverter(*imuMsg);
152 |
153 | std::lock_guard lock1(imuLock);
154 | imuQueue.push_back(thisImu);
155 |
156 | // debug IMU data
157 | // cout << std::setprecision(6);
158 | // cout << "IMU acc: " << endl;
159 | // cout << "x: " << thisImu.linear_acceleration.x <<
160 | // ", y: " << thisImu.linear_acceleration.y <<
161 | // ", z: " << thisImu.linear_acceleration.z << endl;
162 | // cout << "IMU gyro: " << endl;
163 | // cout << "x: " << thisImu.angular_velocity.x <<
164 | // ", y: " << thisImu.angular_velocity.y <<
165 | // ", z: " << thisImu.angular_velocity.z << endl;
166 | // double imuRoll, imuPitch, imuYaw;
167 | // tf::Quaternion orientation;
168 | // tf::quaternionMsgToTF(thisImu.orientation, orientation);
169 | // tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);
170 | // cout << "IMU roll pitch yaw: " << endl;
171 | // cout << "roll: " << imuRoll << ", pitch: " << imuPitch << ", yaw: " << imuYaw << endl << endl;
172 | }
173 |
174 | void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg)
175 | {
176 | std::lock_guard lock2(odoLock);
177 | odomQueue.push_back(*odometryMsg);
178 | }
179 |
180 | void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
181 | {
182 | if (!cachePointCloud(laserCloudMsg))
183 | return;
184 |
185 | if (!deskewInfo())
186 | return;
187 |
188 | projectPointCloud();
189 |
190 | cloudExtraction();
191 |
192 | publishClouds();
193 |
194 | resetParameters();
195 | }
196 |
197 | bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
198 | {
199 | // cache point cloud
200 | cloudQueue.push_back(*laserCloudMsg);
201 | if (cloudQueue.size() <= 2)
202 | return false;
203 |
204 | // convert cloud
205 | currentCloudMsg = std::move(cloudQueue.front());
206 | cloudQueue.pop_front();
207 | if (sensor == SensorType::VELODYNE || sensor == SensorType::LIVOX)
208 | {
209 | pcl::moveFromROSMsg(currentCloudMsg, *laserCloudIn);
210 | }
211 | else if (sensor == SensorType::OUSTER)
212 | {
213 | // Convert to Velodyne format
214 | pcl::moveFromROSMsg(currentCloudMsg, *tmpOusterCloudIn);
215 | laserCloudIn->points.resize(tmpOusterCloudIn->size());
216 | laserCloudIn->is_dense = tmpOusterCloudIn->is_dense;
217 | for (size_t i = 0; i < tmpOusterCloudIn->size(); i++)
218 | {
219 | auto &src = tmpOusterCloudIn->points[i];
220 | auto &dst = laserCloudIn->points[i];
221 | dst.x = src.x;
222 | dst.y = src.y;
223 | dst.z = src.z;
224 | dst.intensity = src.intensity;
225 | dst.ring = src.ring;
226 | dst.time = src.t * 1e-9f;
227 | }
228 | }
229 | else
230 | {
231 | ROS_ERROR_STREAM("Unknown sensor type: " << int(sensor));
232 | ros::shutdown();
233 | }
234 |
235 | // get timestamp
236 | cloudHeader = currentCloudMsg.header;
237 | timeScanCur = cloudHeader.stamp.toSec();
238 | timeScanEnd = timeScanCur + laserCloudIn->points.back().time;
239 |
240 | // check dense flag
241 | if (laserCloudIn->is_dense == false)
242 | {
243 | ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!");
244 | ros::shutdown();
245 | }
246 |
247 | // check ring channel
248 | static int ringFlag = 0;
249 | if (ringFlag == 0)
250 | {
251 | ringFlag = -1;
252 | for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)
253 | {
254 | if (currentCloudMsg.fields[i].name == "ring")
255 | {
256 | ringFlag = 1;
257 | break;
258 | }
259 | }
260 | if (ringFlag == -1)
261 | {
262 | ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!");
263 | ros::shutdown();
264 | }
265 | }
266 |
267 | // check point time
268 | if (deskewFlag == 0)
269 | {
270 | deskewFlag = -1;
271 | for (auto &field : currentCloudMsg.fields)
272 | {
273 | if (field.name == "time" || field.name == "t")
274 | {
275 | deskewFlag = 1;
276 | break;
277 | }
278 | }
279 | if (deskewFlag == -1)
280 | ROS_WARN("Point cloud timestamp not available, deskew function disabled, system will drift significantly!");
281 | }
282 |
283 | return true;
284 | }
285 |
286 | bool deskewInfo()
287 | {
288 | std::lock_guard lock1(imuLock);
289 | std::lock_guard lock2(odoLock);
290 |
291 | // make sure IMU data available for the scan
292 | if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur || imuQueue.back().header.stamp.toSec() < timeScanEnd)
293 | {
294 | ROS_DEBUG("Waiting for IMU data ...");
295 | return false;
296 | }
297 |
298 | imuDeskewInfo();
299 |
300 | odomDeskewInfo();
301 |
302 | return true;
303 | }
304 |
305 | void imuDeskewInfo()
306 | {
307 | cloudInfo.imuAvailable = false;
308 |
309 | while (!imuQueue.empty())
310 | {
311 | if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
312 | imuQueue.pop_front();
313 | else
314 | break;
315 | }
316 |
317 | if (imuQueue.empty())
318 | return;
319 |
320 | imuPointerCur = 0;
321 |
322 | for (int i = 0; i < (int)imuQueue.size(); ++i)
323 | {
324 | sensor_msgs::Imu thisImuMsg = imuQueue[i];
325 | double currentImuTime = thisImuMsg.header.stamp.toSec();
326 |
327 | // get roll, pitch, and yaw estimation for this scan
328 | if (currentImuTime <= timeScanCur)
329 | imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit);
330 |
331 | if (currentImuTime > timeScanEnd + 0.01)
332 | break;
333 |
334 | if (imuPointerCur == 0){
335 | imuRotX[0] = 0;
336 | imuRotY[0] = 0;
337 | imuRotZ[0] = 0;
338 | imuTime[0] = currentImuTime;
339 | ++imuPointerCur;
340 | continue;
341 | }
342 |
343 | // get angular velocity
344 | double angular_x, angular_y, angular_z;
345 | imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z);
346 |
347 | // integrate rotation
348 | double timeDiff = currentImuTime - imuTime[imuPointerCur-1];
349 | imuRotX[imuPointerCur] = imuRotX[imuPointerCur-1] + angular_x * timeDiff;
350 | imuRotY[imuPointerCur] = imuRotY[imuPointerCur-1] + angular_y * timeDiff;
351 | imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur-1] + angular_z * timeDiff;
352 | imuTime[imuPointerCur] = currentImuTime;
353 | ++imuPointerCur;
354 | }
355 |
356 | --imuPointerCur;
357 |
358 | if (imuPointerCur <= 0)
359 | return;
360 |
361 | cloudInfo.imuAvailable = true;
362 | }
363 |
364 | void odomDeskewInfo()
365 | {
366 | cloudInfo.odomAvailable = false;
367 |
368 | while (!odomQueue.empty())
369 | {
370 | if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
371 | odomQueue.pop_front();
372 | else
373 | break;
374 | }
375 |
376 | if (odomQueue.empty())
377 | return;
378 |
379 | if (odomQueue.front().header.stamp.toSec() > timeScanCur)
380 | return;
381 |
382 | // get start odometry at the beinning of the scan
383 | nav_msgs::Odometry startOdomMsg;
384 |
385 | for (int i = 0; i < (int)odomQueue.size(); ++i)
386 | {
387 | startOdomMsg = odomQueue[i];
388 |
389 | if (ROS_TIME(&startOdomMsg) < timeScanCur)
390 | continue;
391 | else
392 | break;
393 | }
394 |
395 | tf::Quaternion orientation;
396 | tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation);
397 |
398 | double roll, pitch, yaw;
399 | tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
400 |
401 | // Initial guess used in mapOptimization
402 | cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x;
403 | cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y;
404 | cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z;
405 | cloudInfo.initialGuessRoll = roll;
406 | cloudInfo.initialGuessPitch = pitch;
407 | cloudInfo.initialGuessYaw = yaw;
408 |
409 | cloudInfo.odomAvailable = true;
410 |
411 | // get end odometry at the end of the scan
412 | odomDeskewFlag = false;
413 |
414 | if (odomQueue.back().header.stamp.toSec() < timeScanEnd)
415 | return;
416 |
417 | nav_msgs::Odometry endOdomMsg;
418 |
419 | for (int i = 0; i < (int)odomQueue.size(); ++i)
420 | {
421 | endOdomMsg = odomQueue[i];
422 |
423 | if (ROS_TIME(&endOdomMsg) < timeScanEnd)
424 | continue;
425 | else
426 | break;
427 | }
428 |
429 | if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0])))
430 | return;
431 |
432 | Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x, startOdomMsg.pose.pose.position.y, startOdomMsg.pose.pose.position.z, roll, pitch, yaw);
433 |
434 | tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation);
435 | tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
436 | Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x, endOdomMsg.pose.pose.position.y, endOdomMsg.pose.pose.position.z, roll, pitch, yaw);
437 |
438 | Eigen::Affine3f transBt = transBegin.inverse() * transEnd;
439 |
440 | float rollIncre, pitchIncre, yawIncre;
441 | pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre);
442 |
443 | odomDeskewFlag = true;
444 | }
445 |
446 | void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur)
447 | {
448 | *rotXCur = 0; *rotYCur = 0; *rotZCur = 0;
449 |
450 | int imuPointerFront = 0;
451 | while (imuPointerFront < imuPointerCur)
452 | {
453 | if (pointTime < imuTime[imuPointerFront])
454 | break;
455 | ++imuPointerFront;
456 | }
457 |
458 | if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0)
459 | {
460 | *rotXCur = imuRotX[imuPointerFront];
461 | *rotYCur = imuRotY[imuPointerFront];
462 | *rotZCur = imuRotZ[imuPointerFront];
463 | } else {
464 | int imuPointerBack = imuPointerFront - 1;
465 | double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
466 | double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
467 | *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack;
468 | *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack;
469 | *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack;
470 | }
471 | }
472 |
473 | void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur)
474 | {
475 | *posXCur = 0; *posYCur = 0; *posZCur = 0;
476 |
477 | // If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. Thus code below is commented.
478 |
479 | // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false)
480 | // return;
481 |
482 | // float ratio = relTime / (timeScanEnd - timeScanCur);
483 |
484 | // *posXCur = ratio * odomIncreX;
485 | // *posYCur = ratio * odomIncreY;
486 | // *posZCur = ratio * odomIncreZ;
487 | }
488 |
489 | PointType deskewPoint(PointType *point, double relTime)
490 | {
491 | if (deskewFlag == -1 || cloudInfo.imuAvailable == false)
492 | return *point;
493 |
494 | double pointTime = timeScanCur + relTime;
495 |
496 | float rotXCur, rotYCur, rotZCur;
497 | findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur);
498 |
499 | float posXCur, posYCur, posZCur;
500 | findPosition(relTime, &posXCur, &posYCur, &posZCur);
501 |
502 | if (firstPointFlag == true)
503 | {
504 | transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur)).inverse();
505 | firstPointFlag = false;
506 | }
507 |
508 | // transform points to start
509 | Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur);
510 | Eigen::Affine3f transBt = transStartInverse * transFinal;
511 |
512 | PointType newPoint;
513 | newPoint.x = transBt(0,0) * point->x + transBt(0,1) * point->y + transBt(0,2) * point->z + transBt(0,3);
514 | newPoint.y = transBt(1,0) * point->x + transBt(1,1) * point->y + transBt(1,2) * point->z + transBt(1,3);
515 | newPoint.z = transBt(2,0) * point->x + transBt(2,1) * point->y + transBt(2,2) * point->z + transBt(2,3);
516 | newPoint.intensity = point->intensity;
517 |
518 | return newPoint;
519 | }
520 |
521 | void projectPointCloud()
522 | {
523 | int cloudSize = laserCloudIn->points.size();
524 | // range image projection
525 | for (int i = 0; i < cloudSize; ++i)
526 | {
527 | PointType thisPoint;
528 | thisPoint.x = laserCloudIn->points[i].x;
529 | thisPoint.y = laserCloudIn->points[i].y;
530 | thisPoint.z = laserCloudIn->points[i].z;
531 | thisPoint.intensity = laserCloudIn->points[i].intensity;
532 |
533 | float range = pointDistance(thisPoint);
534 | if (range < lidarMinRange || range > lidarMaxRange)
535 | continue;
536 |
537 | int rowIdn = laserCloudIn->points[i].ring;
538 | if (rowIdn < 0 || rowIdn >= N_SCAN)
539 | continue;
540 |
541 | if (rowIdn % downsampleRate != 0)
542 | continue;
543 |
544 | int columnIdn = -1;
545 | if (sensor == SensorType::VELODYNE || sensor == SensorType::OUSTER)
546 | {
547 | float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI;
548 | static float ang_res_x = 360.0/float(Horizon_SCAN);
549 | columnIdn = -round((horizonAngle-90.0)/ang_res_x) + Horizon_SCAN/2;
550 | if (columnIdn >= Horizon_SCAN)
551 | columnIdn -= Horizon_SCAN;
552 | }
553 | else if (sensor == SensorType::LIVOX)
554 | {
555 | columnIdn = columnIdnCountVec[rowIdn];
556 | columnIdnCountVec[rowIdn] += 1;
557 | }
558 |
559 | if (columnIdn < 0 || columnIdn >= Horizon_SCAN)
560 | continue;
561 |
562 | if (rangeMat.at(rowIdn, columnIdn) != FLT_MAX)
563 | continue;
564 |
565 | thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time);
566 |
567 | rangeMat.at(rowIdn, columnIdn) = range;
568 |
569 | int index = columnIdn + rowIdn * Horizon_SCAN;
570 | fullCloud->points[index] = thisPoint;
571 | }
572 | }
573 |
574 | void cloudExtraction()
575 | {
576 | int count = 0;
577 | // extract segmented cloud for lidar odometry
578 | for (int i = 0; i < N_SCAN; ++i)
579 | {
580 | cloudInfo.startRingIndex[i] = count - 1 + 5;
581 |
582 | for (int j = 0; j < Horizon_SCAN; ++j)
583 | {
584 | if (rangeMat.at(i,j) != FLT_MAX)
585 | {
586 | // mark the points' column index for marking occlusion later
587 | cloudInfo.pointColInd[count] = j;
588 | // save range info
589 | cloudInfo.pointRange[count] = rangeMat.at(i,j);
590 | // save extracted cloud
591 | extractedCloud->push_back(fullCloud->points[j + i*Horizon_SCAN]);
592 | // size of extracted cloud
593 | ++count;
594 | }
595 | }
596 | cloudInfo.endRingIndex[i] = count -1 - 5;
597 | }
598 | }
599 |
600 | void publishClouds()
601 | {
602 | cloudInfo.header = cloudHeader;
603 | cloudInfo.cloud_deskewed = publishCloud(pubExtractedCloud, extractedCloud, cloudHeader.stamp, lidarFrame);
604 | pubLaserCloudInfo.publish(cloudInfo);
605 | }
606 | };
607 |
608 | int main(int argc, char** argv)
609 | {
610 | ros::init(argc, argv, "lio_sam");
611 |
612 | ImageProjection IP;
613 |
614 | ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m");
615 |
616 | ros::MultiThreadedSpinner spinner(3);
617 | spinner.spin();
618 |
619 | return 0;
620 | }
621 |
--------------------------------------------------------------------------------
/src/imuPreintegration.cpp:
--------------------------------------------------------------------------------
1 | #include "utility.h"
2 |
3 | #include
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 |
16 | #include
17 | #include
18 |
19 | using gtsam::symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
20 | using gtsam::symbol_shorthand::V; // Vel (xdot,ydot,zdot)
21 | using gtsam::symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz)
22 |
23 | class TransformFusion : public ParamServer
24 | {
25 | public:
26 | std::mutex mtx;
27 |
28 | ros::Subscriber subImuOdometry;
29 | ros::Subscriber subLaserOdometry;
30 |
31 | ros::Publisher pubImuOdometry;
32 | ros::Publisher pubImuPath;
33 |
34 | Eigen::Affine3f lidarOdomAffine;
35 | Eigen::Affine3f imuOdomAffineFront;
36 | Eigen::Affine3f imuOdomAffineBack;
37 |
38 | tf::TransformListener tfListener;
39 | tf::StampedTransform lidar2Baselink;
40 |
41 | double lidarOdomTime = -1;
42 | deque imuOdomQueue;
43 |
44 | TransformFusion()
45 | {
46 | if(lidarFrame != baselinkFrame)
47 | {
48 | try
49 | {
50 | tfListener.waitForTransform(lidarFrame, baselinkFrame, ros::Time(0), ros::Duration(3.0));
51 | tfListener.lookupTransform(lidarFrame, baselinkFrame, ros::Time(0), lidar2Baselink);
52 | }
53 | catch (tf::TransformException ex)
54 | {
55 | ROS_ERROR("%s",ex.what());
56 | }
57 | }
58 |
59 | subLaserOdometry = nh.subscribe("lio_sam/mapping/odometry", 5, &TransformFusion::lidarOdometryHandler, this, ros::TransportHints().tcpNoDelay());
60 | subImuOdometry = nh.subscribe(odomTopic+"_incremental", 2000, &TransformFusion::imuOdometryHandler, this, ros::TransportHints().tcpNoDelay());
61 |
62 | pubImuOdometry = nh.advertise(odomTopic, 2000);
63 | pubImuPath = nh.advertise ("lio_sam/imu/path", 1);
64 | }
65 |
66 | Eigen::Affine3f odom2affine(nav_msgs::Odometry odom)
67 | {
68 | double x, y, z, roll, pitch, yaw;
69 | x = odom.pose.pose.position.x;
70 | y = odom.pose.pose.position.y;
71 | z = odom.pose.pose.position.z;
72 | tf::Quaternion orientation;
73 | tf::quaternionMsgToTF(odom.pose.pose.orientation, orientation);
74 | tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
75 | return pcl::getTransformation(x, y, z, roll, pitch, yaw);
76 | }
77 |
78 | void lidarOdometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
79 | {
80 | std::lock_guard lock(mtx);
81 |
82 | lidarOdomAffine = odom2affine(*odomMsg);
83 |
84 | lidarOdomTime = odomMsg->header.stamp.toSec();
85 | }
86 |
87 | void imuOdometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
88 | {
89 | // static tf
90 | static tf::TransformBroadcaster tfMap2Odom;
91 | static tf::Transform map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0));
92 | tfMap2Odom.sendTransform(tf::StampedTransform(map_to_odom, odomMsg->header.stamp, mapFrame, odometryFrame));
93 |
94 | std::lock_guard lock(mtx);
95 |
96 | imuOdomQueue.push_back(*odomMsg);
97 |
98 | // get latest odometry (at current IMU stamp)
99 | if (lidarOdomTime == -1)
100 | return;
101 | while (!imuOdomQueue.empty())
102 | {
103 | if (imuOdomQueue.front().header.stamp.toSec() <= lidarOdomTime)
104 | imuOdomQueue.pop_front();
105 | else
106 | break;
107 | }
108 | Eigen::Affine3f imuOdomAffineFront = odom2affine(imuOdomQueue.front());
109 | Eigen::Affine3f imuOdomAffineBack = odom2affine(imuOdomQueue.back());
110 | Eigen::Affine3f imuOdomAffineIncre = imuOdomAffineFront.inverse() * imuOdomAffineBack;
111 | Eigen::Affine3f imuOdomAffineLast = lidarOdomAffine * imuOdomAffineIncre;
112 | float x, y, z, roll, pitch, yaw;
113 | pcl::getTranslationAndEulerAngles(imuOdomAffineLast, x, y, z, roll, pitch, yaw);
114 |
115 | // publish latest odometry
116 | nav_msgs::Odometry laserOdometry = imuOdomQueue.back();
117 | laserOdometry.pose.pose.position.x = x;
118 | laserOdometry.pose.pose.position.y = y;
119 | laserOdometry.pose.pose.position.z = z;
120 | laserOdometry.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
121 | pubImuOdometry.publish(laserOdometry);
122 |
123 | // publish tf
124 | static tf::TransformBroadcaster tfOdom2BaseLink;
125 | tf::Transform tCur;
126 | tf::poseMsgToTF(laserOdometry.pose.pose, tCur);
127 | if(lidarFrame != baselinkFrame)
128 | tCur = tCur * lidar2Baselink;
129 | tf::StampedTransform odom_2_baselink = tf::StampedTransform(tCur, odomMsg->header.stamp, odometryFrame, baselinkFrame);
130 | tfOdom2BaseLink.sendTransform(odom_2_baselink);
131 |
132 | // publish IMU path
133 | static nav_msgs::Path imuPath;
134 | static double last_path_time = -1;
135 | double imuTime = imuOdomQueue.back().header.stamp.toSec();
136 | if (imuTime - last_path_time > 0.1)
137 | {
138 | last_path_time = imuTime;
139 | geometry_msgs::PoseStamped pose_stamped;
140 | pose_stamped.header.stamp = imuOdomQueue.back().header.stamp;
141 | pose_stamped.header.frame_id = odometryFrame;
142 | pose_stamped.pose = laserOdometry.pose.pose;
143 | imuPath.poses.push_back(pose_stamped);
144 | while(!imuPath.poses.empty() && imuPath.poses.front().header.stamp.toSec() < lidarOdomTime - 1.0)
145 | imuPath.poses.erase(imuPath.poses.begin());
146 | if (pubImuPath.getNumSubscribers() != 0)
147 | {
148 | imuPath.header.stamp = imuOdomQueue.back().header.stamp;
149 | imuPath.header.frame_id = odometryFrame;
150 | pubImuPath.publish(imuPath);
151 | }
152 | }
153 | }
154 | };
155 |
156 | class IMUPreintegration : public ParamServer
157 | {
158 | public:
159 |
160 | std::mutex mtx;
161 |
162 | ros::Subscriber subImu;
163 | ros::Subscriber subOdometry;
164 | ros::Publisher pubImuOdometry;
165 |
166 | bool systemInitialized = false;
167 |
168 | gtsam::noiseModel::Diagonal::shared_ptr priorPoseNoise;
169 | gtsam::noiseModel::Diagonal::shared_ptr priorVelNoise;
170 | gtsam::noiseModel::Diagonal::shared_ptr priorBiasNoise;
171 | gtsam::noiseModel::Diagonal::shared_ptr correctionNoise;
172 | gtsam::noiseModel::Diagonal::shared_ptr correctionNoise2;
173 | gtsam::Vector noiseModelBetweenBias;
174 |
175 |
176 | gtsam::PreintegratedImuMeasurements *imuIntegratorOpt_;
177 | gtsam::PreintegratedImuMeasurements *imuIntegratorImu_;
178 |
179 | std::deque imuQueOpt;
180 | std::deque imuQueImu;
181 |
182 | gtsam::Pose3 prevPose_;
183 | gtsam::Vector3 prevVel_;
184 | gtsam::NavState prevState_;
185 | gtsam::imuBias::ConstantBias prevBias_;
186 |
187 | gtsam::NavState prevStateOdom;
188 | gtsam::imuBias::ConstantBias prevBiasOdom;
189 |
190 | bool doneFirstOpt = false;
191 | double lastImuT_imu = -1;
192 | double lastImuT_opt = -1;
193 |
194 | gtsam::ISAM2 optimizer;
195 | gtsam::NonlinearFactorGraph graphFactors;
196 | gtsam::Values graphValues;
197 |
198 | const double delta_t = 0;
199 |
200 | int key = 1;
201 |
202 | // T_bl: tramsform points from lidar frame to imu frame
203 | gtsam::Pose3 imu2Lidar = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(-extTrans.x(), -extTrans.y(), -extTrans.z()));
204 | // T_lb: tramsform points from imu frame to lidar frame
205 | gtsam::Pose3 lidar2Imu = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(extTrans.x(), extTrans.y(), extTrans.z()));
206 |
207 | IMUPreintegration()
208 | {
209 | subImu = nh.subscribe (imuTopic, 2000, &IMUPreintegration::imuHandler, this, ros::TransportHints().tcpNoDelay());
210 | subOdometry = nh.subscribe("lio_sam/mapping/odometry_incremental", 5, &IMUPreintegration::odometryHandler, this, ros::TransportHints().tcpNoDelay());
211 |
212 | pubImuOdometry = nh.advertise (odomTopic+"_incremental", 2000);
213 |
214 | boost::shared_ptr p = gtsam::PreintegrationParams::MakeSharedU(imuGravity);
215 | p->accelerometerCovariance = gtsam::Matrix33::Identity(3,3) * pow(imuAccNoise, 2); // acc white noise in continuous
216 | p->gyroscopeCovariance = gtsam::Matrix33::Identity(3,3) * pow(imuGyrNoise, 2); // gyro white noise in continuous
217 | p->integrationCovariance = gtsam::Matrix33::Identity(3,3) * pow(1e-4, 2); // error committed in integrating position from velocities
218 | gtsam::imuBias::ConstantBias prior_imu_bias((gtsam::Vector(6) << 0, 0, 0, 0, 0, 0).finished());; // assume zero initial bias
219 |
220 | priorPoseNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-2, 1e-2, 1e-2).finished()); // rad,rad,rad,m, m, m
221 | priorVelNoise = gtsam::noiseModel::Isotropic::Sigma(3, 1e4); // m/s
222 | priorBiasNoise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-3); // 1e-2 ~ 1e-3 seems to be good
223 | correctionNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 0.05, 0.05, 0.05, 0.1, 0.1, 0.1).finished()); // rad,rad,rad,m, m, m
224 | correctionNoise2 = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1, 1, 1, 1, 1, 1).finished()); // rad,rad,rad,m, m, m
225 | noiseModelBetweenBias = (gtsam::Vector(6) << imuAccBiasN, imuAccBiasN, imuAccBiasN, imuGyrBiasN, imuGyrBiasN, imuGyrBiasN).finished();
226 |
227 | imuIntegratorImu_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for IMU message thread
228 | imuIntegratorOpt_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for optimization
229 | }
230 |
231 | void resetOptimization()
232 | {
233 | gtsam::ISAM2Params optParameters;
234 | optParameters.relinearizeThreshold = 0.1;
235 | optParameters.relinearizeSkip = 1;
236 | optimizer = gtsam::ISAM2(optParameters);
237 |
238 | gtsam::NonlinearFactorGraph newGraphFactors;
239 | graphFactors = newGraphFactors;
240 |
241 | gtsam::Values NewGraphValues;
242 | graphValues = NewGraphValues;
243 | }
244 |
245 | void resetParams()
246 | {
247 | lastImuT_imu = -1;
248 | doneFirstOpt = false;
249 | systemInitialized = false;
250 | }
251 |
252 | void odometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
253 | {
254 | std::lock_guard lock(mtx);
255 |
256 | double currentCorrectionTime = ROS_TIME(odomMsg);
257 |
258 | // make sure we have imu data to integrate
259 | if (imuQueOpt.empty())
260 | return;
261 |
262 | float p_x = odomMsg->pose.pose.position.x;
263 | float p_y = odomMsg->pose.pose.position.y;
264 | float p_z = odomMsg->pose.pose.position.z;
265 | float r_x = odomMsg->pose.pose.orientation.x;
266 | float r_y = odomMsg->pose.pose.orientation.y;
267 | float r_z = odomMsg->pose.pose.orientation.z;
268 | float r_w = odomMsg->pose.pose.orientation.w;
269 | bool degenerate = (int)odomMsg->pose.covariance[0] == 1 ? true : false;
270 | gtsam::Pose3 lidarPose = gtsam::Pose3(gtsam::Rot3::Quaternion(r_w, r_x, r_y, r_z), gtsam::Point3(p_x, p_y, p_z));
271 |
272 |
273 | // 0. initialize system
274 | if (systemInitialized == false)
275 | {
276 | resetOptimization();
277 |
278 | // pop old IMU message
279 | while (!imuQueOpt.empty())
280 | {
281 | if (ROS_TIME(&imuQueOpt.front()) < currentCorrectionTime - delta_t)
282 | {
283 | lastImuT_opt = ROS_TIME(&imuQueOpt.front());
284 | imuQueOpt.pop_front();
285 | }
286 | else
287 | break;
288 | }
289 | // initial pose
290 | prevPose_ = lidarPose.compose(lidar2Imu);
291 | gtsam::PriorFactor priorPose(X(0), prevPose_, priorPoseNoise);
292 | graphFactors.add(priorPose);
293 | // initial velocity
294 | prevVel_ = gtsam::Vector3(0, 0, 0);
295 | gtsam::PriorFactor priorVel(V(0), prevVel_, priorVelNoise);
296 | graphFactors.add(priorVel);
297 | // initial bias
298 | prevBias_ = gtsam::imuBias::ConstantBias();
299 | gtsam::PriorFactor priorBias(B(0), prevBias_, priorBiasNoise);
300 | graphFactors.add(priorBias);
301 | // add values
302 | graphValues.insert(X(0), prevPose_);
303 | graphValues.insert(V(0), prevVel_);
304 | graphValues.insert(B(0), prevBias_);
305 | // optimize once
306 | optimizer.update(graphFactors, graphValues);
307 | graphFactors.resize(0);
308 | graphValues.clear();
309 |
310 | imuIntegratorImu_->resetIntegrationAndSetBias(prevBias_);
311 | imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
312 |
313 | key = 1;
314 | systemInitialized = true;
315 | return;
316 | }
317 |
318 |
319 | // reset graph for speed
320 | if (key == 100)
321 | {
322 | // get updated noise before reset
323 | gtsam::noiseModel::Gaussian::shared_ptr updatedPoseNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(X(key-1)));
324 | gtsam::noiseModel::Gaussian::shared_ptr updatedVelNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(V(key-1)));
325 | gtsam::noiseModel::Gaussian::shared_ptr updatedBiasNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(B(key-1)));
326 | // reset graph
327 | resetOptimization();
328 | // add pose
329 | gtsam::PriorFactor priorPose(X(0), prevPose_, updatedPoseNoise);
330 | graphFactors.add(priorPose);
331 | // add velocity
332 | gtsam::PriorFactor priorVel(V(0), prevVel_, updatedVelNoise);
333 | graphFactors.add(priorVel);
334 | // add bias
335 | gtsam::PriorFactor priorBias(B(0), prevBias_, updatedBiasNoise);
336 | graphFactors.add(priorBias);
337 | // add values
338 | graphValues.insert(X(0), prevPose_);
339 | graphValues.insert(V(0), prevVel_);
340 | graphValues.insert(B(0), prevBias_);
341 | // optimize once
342 | optimizer.update(graphFactors, graphValues);
343 | graphFactors.resize(0);
344 | graphValues.clear();
345 |
346 | key = 1;
347 | }
348 |
349 |
350 | // 1. integrate imu data and optimize
351 | while (!imuQueOpt.empty())
352 | {
353 | // pop and integrate imu data that is between two optimizations
354 | sensor_msgs::Imu *thisImu = &imuQueOpt.front();
355 | double imuTime = ROS_TIME(thisImu);
356 | if (imuTime < currentCorrectionTime - delta_t)
357 | {
358 | double dt = (lastImuT_opt < 0) ? (1.0 / 500.0) : (imuTime - lastImuT_opt);
359 | imuIntegratorOpt_->integrateMeasurement(
360 | gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z),
361 | gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt);
362 |
363 | lastImuT_opt = imuTime;
364 | imuQueOpt.pop_front();
365 | }
366 | else
367 | break;
368 | }
369 | // add imu factor to graph
370 | const gtsam::PreintegratedImuMeasurements& preint_imu = dynamic_cast(*imuIntegratorOpt_);
371 | gtsam::ImuFactor imu_factor(X(key - 1), V(key - 1), X(key), V(key), B(key - 1), preint_imu);
372 | graphFactors.add(imu_factor);
373 | // add imu bias between factor
374 | graphFactors.add(gtsam::BetweenFactor(B(key - 1), B(key), gtsam::imuBias::ConstantBias(),
375 | gtsam::noiseModel::Diagonal::Sigmas(sqrt(imuIntegratorOpt_->deltaTij()) * noiseModelBetweenBias)));
376 | // add pose factor
377 | gtsam::Pose3 curPose = lidarPose.compose(lidar2Imu);
378 | gtsam::PriorFactor pose_factor(X(key), curPose, degenerate ? correctionNoise2 : correctionNoise);
379 | graphFactors.add(pose_factor);
380 | // insert predicted values
381 | gtsam::NavState propState_ = imuIntegratorOpt_->predict(prevState_, prevBias_);
382 | graphValues.insert(X(key), propState_.pose());
383 | graphValues.insert(V(key), propState_.v());
384 | graphValues.insert(B(key), prevBias_);
385 | // optimize
386 | optimizer.update(graphFactors, graphValues);
387 | optimizer.update();
388 | graphFactors.resize(0);
389 | graphValues.clear();
390 | // Overwrite the beginning of the preintegration for the next step.
391 | gtsam::Values result = optimizer.calculateEstimate();
392 | prevPose_ = result.at(X(key));
393 | prevVel_ = result.at(V(key));
394 | prevState_ = gtsam::NavState(prevPose_, prevVel_);
395 | prevBias_ = result.at(B(key));
396 | // Reset the optimization preintegration object.
397 | imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
398 | // check optimization
399 | if (failureDetection(prevVel_, prevBias_))
400 | {
401 | resetParams();
402 | return;
403 | }
404 |
405 |
406 | // 2. after optiization, re-propagate imu odometry preintegration
407 | prevStateOdom = prevState_;
408 | prevBiasOdom = prevBias_;
409 | // first pop imu message older than current correction data
410 | double lastImuQT = -1;
411 | while (!imuQueImu.empty() && ROS_TIME(&imuQueImu.front()) < currentCorrectionTime - delta_t)
412 | {
413 | lastImuQT = ROS_TIME(&imuQueImu.front());
414 | imuQueImu.pop_front();
415 | }
416 | // repropogate
417 | if (!imuQueImu.empty())
418 | {
419 | // reset bias use the newly optimized bias
420 | imuIntegratorImu_->resetIntegrationAndSetBias(prevBiasOdom);
421 | // integrate imu message from the beginning of this optimization
422 | for (int i = 0; i < (int)imuQueImu.size(); ++i)
423 | {
424 | sensor_msgs::Imu *thisImu = &imuQueImu[i];
425 | double imuTime = ROS_TIME(thisImu);
426 | double dt = (lastImuQT < 0) ? (1.0 / 500.0) :(imuTime - lastImuQT);
427 |
428 | imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z),
429 | gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt);
430 | lastImuQT = imuTime;
431 | }
432 | }
433 |
434 | ++key;
435 | doneFirstOpt = true;
436 | }
437 |
438 | bool failureDetection(const gtsam::Vector3& velCur, const gtsam::imuBias::ConstantBias& biasCur)
439 | {
440 | Eigen::Vector3f vel(velCur.x(), velCur.y(), velCur.z());
441 | if (vel.norm() > 30)
442 | {
443 | ROS_WARN("Large velocity, reset IMU-preintegration!");
444 | return true;
445 | }
446 |
447 | Eigen::Vector3f ba(biasCur.accelerometer().x(), biasCur.accelerometer().y(), biasCur.accelerometer().z());
448 | Eigen::Vector3f bg(biasCur.gyroscope().x(), biasCur.gyroscope().y(), biasCur.gyroscope().z());
449 | if (ba.norm() > 1.0 || bg.norm() > 1.0)
450 | {
451 | ROS_WARN("Large bias, reset IMU-preintegration!");
452 | return true;
453 | }
454 |
455 | return false;
456 | }
457 |
458 | void imuHandler(const sensor_msgs::Imu::ConstPtr& imu_raw)
459 | {
460 | std::lock_guard lock(mtx);
461 |
462 | sensor_msgs::Imu thisImu = imuConverter(*imu_raw);
463 |
464 | imuQueOpt.push_back(thisImu);
465 | imuQueImu.push_back(thisImu);
466 |
467 | if (doneFirstOpt == false)
468 | return;
469 |
470 | double imuTime = ROS_TIME(&thisImu);
471 | double dt = (lastImuT_imu < 0) ? (1.0 / 500.0) : (imuTime - lastImuT_imu);
472 | lastImuT_imu = imuTime;
473 |
474 | // integrate this single imu message
475 | imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu.linear_acceleration.x, thisImu.linear_acceleration.y, thisImu.linear_acceleration.z),
476 | gtsam::Vector3(thisImu.angular_velocity.x, thisImu.angular_velocity.y, thisImu.angular_velocity.z), dt);
477 |
478 | // predict odometry
479 | gtsam::NavState currentState = imuIntegratorImu_->predict(prevStateOdom, prevBiasOdom);
480 |
481 | // publish odometry
482 | nav_msgs::Odometry odometry;
483 | odometry.header.stamp = thisImu.header.stamp;
484 | odometry.header.frame_id = odometryFrame;
485 | odometry.child_frame_id = "odom_imu";
486 |
487 | // transform imu pose to ldiar
488 | gtsam::Pose3 imuPose = gtsam::Pose3(currentState.quaternion(), currentState.position());
489 | gtsam::Pose3 lidarPose = imuPose.compose(imu2Lidar);
490 |
491 | odometry.pose.pose.position.x = lidarPose.translation().x();
492 | odometry.pose.pose.position.y = lidarPose.translation().y();
493 | odometry.pose.pose.position.z = lidarPose.translation().z();
494 | odometry.pose.pose.orientation.x = lidarPose.rotation().toQuaternion().x();
495 | odometry.pose.pose.orientation.y = lidarPose.rotation().toQuaternion().y();
496 | odometry.pose.pose.orientation.z = lidarPose.rotation().toQuaternion().z();
497 | odometry.pose.pose.orientation.w = lidarPose.rotation().toQuaternion().w();
498 |
499 | odometry.twist.twist.linear.x = currentState.velocity().x();
500 | odometry.twist.twist.linear.y = currentState.velocity().y();
501 | odometry.twist.twist.linear.z = currentState.velocity().z();
502 | odometry.twist.twist.angular.x = thisImu.angular_velocity.x + prevBiasOdom.gyroscope().x();
503 | odometry.twist.twist.angular.y = thisImu.angular_velocity.y + prevBiasOdom.gyroscope().y();
504 | odometry.twist.twist.angular.z = thisImu.angular_velocity.z + prevBiasOdom.gyroscope().z();
505 | pubImuOdometry.publish(odometry);
506 | }
507 | };
508 |
509 |
510 | int main(int argc, char** argv)
511 | {
512 | ros::init(argc, argv, "roboat_loam");
513 |
514 | IMUPreintegration ImuP;
515 |
516 | TransformFusion TF;
517 |
518 | ROS_INFO("\033[1;32m----> IMU Preintegration Started.\033[0m");
519 |
520 | ros::MultiThreadedSpinner spinner(4);
521 | spinner.spin();
522 |
523 | return 0;
524 | }
525 |
--------------------------------------------------------------------------------
/srv/save_map.srv:
--------------------------------------------------------------------------------
1 | float32 resolution
2 | string destination
3 | ---
4 | bool success
5 |
--------------------------------------------------------------------------------