├── .gitignore ├── LICENSE ├── README.md ├── r4d_1_single_marker ├── CMakeLists.txt ├── include │ └── r4d_1_single_marker │ │ └── SingleMarkerLocalizer.h ├── launch │ └── eval.launch ├── package.xml ├── scripts │ └── plot.py └── src │ └── single_marker_node.cpp ├── r4d_2_multimarker ├── CMakeLists.txt ├── include │ └── r4d_2_multimarker │ │ └── MultimarkerLocalizer.h ├── launch │ └── eval.launch ├── package.xml ├── scripts │ ├── plot.py │ └── plot_solve_time.py └── src │ └── multimarker_node.cpp ├── r4d_3_imu ├── CMakeLists.txt ├── include │ └── r4d_3_imu │ │ └── ImuLocalizer.h ├── launch │ └── eval.launch ├── package.xml ├── scripts │ └── plot.py └── src │ └── imu_node.cpp ├── r4d_common ├── CMakeLists.txt ├── include │ └── r4d_common │ │ ├── BagRunner.h │ │ ├── MarkerFactor.h │ │ ├── gtsam_opencv_interop.h │ │ ├── ros_gtsam_interop.h │ │ └── ros_opencv_interop.h ├── package.xml ├── setup.py └── src │ └── r4d_common │ ├── __init__.py │ └── plot_tools.py └── rcars_detector_msgs ├── CMakeLists.txt ├── README.md ├── msg ├── Tag.msg ├── TagArray.msg └── TagPoses.msg └── package.xml /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Nicolo Valigi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Robotics for developers - tutorial code 2 | ======================================= 3 | 4 | This is the source code for my [tutorial on Robotics for developers](https://nicolovaligi.com/pages/robotics-for-developers-tutorial.html). It implements a SLAM (Simultaneous Localization And Mapping) system using fiducial markers and IMU data. The code is mostly C++ with ROS and OpenCV, plus some Python (using *pandas* and *plotly*) for analyzing and plotting the results. 5 | 6 | Packages 7 | -------- 8 | 9 | Each step of the tutorial roughly corresponds to one *catkin* package. You'll also need the `gtsam_catkin` package for non-linear optimization. 10 | 11 | More information 12 | ---------------- 13 | 14 | Refer to the blog linked above for more information on the project. 
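Getting started with GTSAM
--------------------------

Every package follows the same GTSAM pattern: build a `NonlinearFactorGraph`, give each variable an initial guess in a `Values` container, and hand both to an optimizer. The snippet below is a minimal, self-contained sketch of that pattern (it is not part of the tutorial packages, and the variable names are illustrative):

```cpp
#include <gtsam/geometry/Pose3.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/PriorFactor.h>

int main() {
    gtsam::NonlinearFactorGraph graph;
    gtsam::Values initial;

    // One Pose3 variable held at the origin by a prior. The tutorial builds the
    // same structure, then adds marker observation and IMU factors on top.
    auto prior_noise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-2);
    graph.push_back(gtsam::PriorFactor<gtsam::Pose3>(
        gtsam::Symbol('x', 0), gtsam::Pose3(), prior_noise));

    // Start the optimization away from the solution on purpose.
    initial.insert(gtsam::Symbol('x', 0),
                   gtsam::Pose3(gtsam::Rot3(), gtsam::Point3(0.1, 0.0, 0.0)));

    auto result = gtsam::GaussNewtonOptimizer(graph, initial).optimize();
    result.print("Optimized values:\n");
    return 0;
}
```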
15 | -------------------------------------------------------------------------------- /r4d_1_single_marker/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(r4d_1_single_marker) 3 | 4 | add_definitions(-std=c++11) 5 | 6 | find_package(catkin REQUIRED COMPONENTS 7 | r4d_common 8 | ) 9 | 10 | catkin_package( 11 | # INCLUDE_DIRS include 12 | # LIBRARIES r4d_1_single_marker 13 | # CATKIN_DEPENDS geometry_msgs nav_msgs r4d_common rcars_detector_msgs std_msgs tf 14 | # DEPENDS system_lib 15 | ) 16 | 17 | include_directories( 18 | include 19 | ${catkin_INCLUDE_DIRS} 20 | ) 21 | 22 | add_executable(single_marker_node src/single_marker_node.cpp) 23 | 24 | target_link_libraries(single_marker_node 25 | ${catkin_LIBRARIES} 26 | ) 27 | -------------------------------------------------------------------------------- /r4d_1_single_marker/include/r4d_1_single_marker/SingleMarkerLocalizer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file SingleMarkerLocalizer.h 3 | * @brief Camera localization using a single marker 4 | */ 5 | 6 | #pragma once 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | 21 | #include 22 | #include 23 | #include 24 | #include 25 | 26 | namespace fiducial_slam { 27 | 28 | /// Robot localization using a single marker 29 | struct SingleMarkerLocalizer { 30 | SingleMarkerLocalizer(ros::NodeHandle &nh, ros::NodeHandle& nh_p) : 31 | nh_(nh), 32 | nh_p_(nh_p), 33 | tag_size_(0.16), 34 | pub_cv_(nh_.advertise("/p_cv", 10)), 35 | pub_gtsam_(nh_.advertise("/p_gtsam", 10)) { 36 | } 37 | 38 | void image_info_cb(const sensor_msgs::CameraInfoConstPtr &m) { 39 | if (!K_) { 40 | K_ = fiducial_slam::caminfo_to_gtsam_calib(m); 41 | K_->print("Calibration from camera_info msg:\n"); 42 | } 43 | } 44 | 45 | void handle_external_pose(const geometry_msgs::Pose& ext_pose, const std_msgs::Header& header, std::string label) { 46 | auto pVC = gmsgs_pose_to_gtsam(ext_pose).inverse(); 47 | 48 | // This is the first pair of measurements: determine the calibration 49 | // between ground truth and marker 50 | if (!ground_truth_calib_ && first_cam_pose_) { 51 | // MV = MC * VC^-1 52 | ground_truth_calib_ = (*first_cam_pose_) * pVC.inverse(); 53 | } 54 | 55 | if (ground_truth_calib_) { 56 | // MC = MV * VC 57 | auto pMC = (*ground_truth_calib_) * pVC; 58 | 59 | tf::Transform tf_transf; 60 | tf::poseMsgToTF(gtsam_pose_to_gmsgs(pMC), tf_transf); 61 | tf_br_.sendTransform( 62 | tf::StampedTransform(tf_transf, header.stamp, "world", label)); 63 | } 64 | } 65 | 66 | void filter_output_cb(const nav_msgs::OdometryConstPtr &m) { 67 | handle_external_pose(m->pose.pose, m->header, "ekf"); 68 | } 69 | 70 | void ground_truth_cb(const geometry_msgs::TransformStampedConstPtr &m) { 71 | geometry_msgs::Pose pose; 72 | Eigen::Vector3d t; 73 | tf::vectorMsgToEigen(m->transform.translation, t); 74 | tf::pointEigenToMsg(t, pose.position); 75 | pose.orientation = m->transform.rotation; 76 | 77 | //handle_external_pose(pose, m->header, "vicon"); 78 | } 79 | 80 | geometry_msgs::Pose pose_using_cv(const rcars_detector_msgs::Tag &tag) { 81 | auto cv_calib = fiducial_slam::gtsam_calib_to_cv(K_); 82 | auto cv_corners = fiducial_slam::tag_to_cv_points(tag, tag_size_); 83 | return fiducial_slam::cv_marker_pose(cv_corners, cv_calib); 84 | } 85 | 86 | geometry_msgs::Pose 
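    // Same pose estimation problem as pose_using_cv() above, but solved with a small
    // factor graph: two Pose3 variables (camera 'C0' and marker 'M<id>'), one
    // MarkerFactor carrying the 8-dimensional corner reprojection error, and a tight
    // prior pinning the marker at the origin. The initial camera guess is the
    // detector's pose, perturbed on purpose, and Gauss-Newton pulls it back; the
    // result is inverted to match the detector's convention (see the note below).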
pose_using_gtsam(const rcars_detector_msgs::Tag &tag) { 87 | gtsam::NonlinearFactorGraph graph; 88 | gtsam::Values estimate; 89 | 90 | auto pixel_noise = gtsam::noiseModel::Isotropic::shared_ptr( 91 | gtsam::noiseModel::Isotropic::Sigma(8, 1)); 92 | auto origin_noise = gtsam::noiseModel::Isotropic::shared_ptr( 93 | gtsam::noiseModel::Isotropic::Sigma(6, 1e-3)); 94 | 95 | auto s_ic = gtsam::Symbol('C', 0); 96 | auto s_marker = gtsam::Symbol('M', tag.id); 97 | 98 | estimate.insert(s_marker, gtsam::Pose3()); 99 | auto guess = fiducial_slam::gmsgs_pose_to_gtsam(tag.pose).inverse(); 100 | // Initialize off the "right" value 101 | auto guess_distorsion = gtsam::Pose3( 102 | gtsam::Rot3::Ypr(0.2, -0.2, 0.1), 103 | gtsam::Point3(0.1, -0.05, 0)); 104 | estimate.insert(s_ic, guess * guess_distorsion); 105 | 106 | auto corners = fiducial_slam::tag_to_gtsam_points(tag); 107 | graph.push_back(fiducial_slam::MarkerFactor(corners, pixel_noise, s_ic, s_marker, K_, tag_size_)); 108 | graph.push_back(gtsam::PriorFactor(s_marker, gtsam::Pose3(), origin_noise)); 109 | 110 | auto result = gtsam::GaussNewtonOptimizer(graph, estimate).optimize(); 111 | auto result_p_ic = result.at(s_ic); 112 | 113 | // NOTE: inverse here to account for the passive/active interpretation 114 | return fiducial_slam::gtsam_pose_to_gmsgs(result_p_ic.inverse()); 115 | } 116 | 117 | void tags_cb(const rcars_detector_msgs::TagArrayConstPtr msg) { 118 | // A couple of utility closures to publish data 119 | auto send_tf = [&](const geometry_msgs::Pose& p, std::string frame) { 120 | tf::Transform tf_transf; 121 | tf::poseMsgToTF(p, tf_transf); 122 | tf_br_.sendTransform( 123 | tf::StampedTransform(tf_transf, msg->header.stamp, "world", frame)); 124 | }; 125 | 126 | auto pose_to_odometry = [&](const geometry_msgs::Pose& p) { 127 | nav_msgs::Odometry odom; 128 | odom.header = msg->header; 129 | odom.pose.pose = p; 130 | return odom; 131 | }; 132 | 133 | if (msg->tags.size()) { 134 | if (!tracked_tag_id_) { 135 | // Let's just start tracking the first tag we've ever seen 136 | tracked_tag_id_ = msg->tags[0].id; 137 | ROS_WARN_STREAM("Starting to track marker with id " << static_cast(*tracked_tag_id_)); 138 | } 139 | 140 | // Look for the tag we have been tracking 141 | auto tracked_it = std::find_if(msg->tags.begin(), msg->tags.end(), [&](const rcars_detector_msgs::Tag &tag) { 142 | return tag.id == *tracked_tag_id_; 143 | }); 144 | 145 | if (tracked_it != msg->tags.end()) { 146 | auto &tracked_tag = *tracked_it; 147 | auto corners = fiducial_slam::tag_to_gtsam_points(tracked_tag); 148 | 149 | { 150 | std::cout << "Pose by rcars_detector\n" << tracked_tag.pose << "\n"; 151 | send_tf(tracked_tag.pose, "dtct"); 152 | } 153 | 154 | { 155 | auto cv_pose = pose_using_cv(tracked_tag); 156 | std::cout << "Pose by OpenCV\n" << cv_pose << "\n"; 157 | 158 | pub_cv_.publish(pose_to_odometry(cv_pose)); 159 | send_tf(cv_pose, "cv"); 160 | } 161 | 162 | { 163 | auto gtsam_pose = pose_using_gtsam(tracked_tag); 164 | std::cout << "Pose by GTSAM\n" << gtsam_pose << "\n"; 165 | 166 | pub_gtsam_.publish(pose_to_odometry(gtsam_pose)); 167 | send_tf(gtsam_pose, "gtsam"); 168 | 169 | // If needed, set the first camera pose that will used to 170 | // find the calibration w.r.t. the ground truth meas. 
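                        // (handle_external_pose() later composes this calibration with each
                        // incoming EKF/Vicon pose, so the external estimates are reported in
                        // the same marker-anchored world frame as the GTSAM output.)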
171 | if (!first_cam_pose_) { 172 | first_cam_pose_ = gmsgs_pose_to_gtsam(gtsam_pose); 173 | } 174 | } 175 | std::cout << "\n\n"; 176 | } else { 177 | ROS_WARN_THROTTLE(1, "Tracked marker not found"); 178 | } 179 | } 180 | } 181 | 182 | void imu_cb(const sensor_msgs::ImuConstPtr &m) { 183 | } 184 | 185 | boost::shared_ptr K_; 186 | double tag_size_; 187 | boost::optional tracked_tag_id_; // Id of the (single) tag we are tracking 188 | boost::optional first_cam_pose_; 189 | boost::optional ground_truth_calib_; 190 | 191 | ros::NodeHandle nh_, nh_p_; 192 | tf::TransformBroadcaster tf_br_; 193 | 194 | ros::Publisher pub_cv_, pub_gtsam_; 195 | }; 196 | } -------------------------------------------------------------------------------- /r4d_1_single_marker/launch/eval.launch: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 10 | 11 | 12 | 13 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /r4d_1_single_marker/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | r4d_1_single_marker 4 | 0.0.0 5 | The r4d_1_single_marker package 6 | 7 | nicolov 8 | 9 | BSD 10 | 11 | catkin 12 | r4d_common 13 | r4d_common 14 | 15 | 16 | -------------------------------------------------------------------------------- /r4d_1_single_marker/scripts/plot.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from IPython import embed 4 | import plotly.plotly as py 5 | from r4d_common.plot_tools import get_data, simple_plot, odom_pos 6 | 7 | def position_comparison(): 8 | s = [ 9 | ['cv', '~/fiducial_slam/out_singlemarker_table.bag', '/p_cv', odom_pos('z')], 10 | ['gtsam', '~/fiducial_slam/out_singlemarker_table.bag', '/p_gtsam', odom_pos('z')], 11 | ] 12 | 13 | df = get_data(s).iloc[:100:2] 14 | 15 | fig = { 16 | 'data': [ 17 | { 18 | 'x': df.index - df.index[0], 19 | 'y': df[who], 20 | 'name': who, 'mode': 'markers', 21 | } for who in ('cv', 'gtsam') 22 | ], 23 | 'layout': { 24 | 'xaxis': {'title': 'Time [s]'}, 25 | 'yaxis': {'title': "Camera Z position [m]"} 26 | } 27 | } 28 | 29 | url = py.plot(fig, filename='r4d_1_camera_position') 30 | 31 | if __name__ == "__main__": 32 | position_comparison() -------------------------------------------------------------------------------- /r4d_1_single_marker/src/single_marker_node.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @file single_marker_test.cpp 3 | * @brief Bag loading code for the SingleMarkerLocalizer class 4 | */ 5 | 6 | #include 7 | 8 | #include 9 | #include 10 | 11 | int main(int argc, char *argv[]) { 12 | ros::init(argc, argv, "single_marker_test"); 13 | ros::NodeHandle nh; 14 | ros::NodeHandle nh_p("~"); 15 | 16 | fiducial_slam::BagRunner runner(nh, nh_p); 17 | runner.run(); 18 | } -------------------------------------------------------------------------------- /r4d_2_multimarker/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(r4d_2_multimarker) 3 | 4 | add_definitions(-std=c++11) 5 | 6 | find_package(catkin REQUIRED COMPONENTS 7 | r4d_common 8 | ) 9 | 10 | catkin_package( 11 | # INCLUDE_DIRS include 12 | # LIBRARIES r4d_2_tracking 13 | # CATKIN_DEPENDS r4d_common 14 | # DEPENDS system_lib 15 | ) 16 | 17 | include_directories( 18 | include 19 | ${catkin_INCLUDE_DIRS} 20 | ) 21 | 22 | 
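# All of the localizer logic lives in the MultimarkerLocalizer.h header; this target
# only builds the thin node wrapper and links the libraries pulled in through r4d_common.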
add_executable(multimarker_node src/multimarker_node.cpp) 23 | 24 | target_link_libraries(multimarker_node 25 | ${catkin_LIBRARIES} 26 | ) 27 | -------------------------------------------------------------------------------- /r4d_2_multimarker/include/r4d_2_multimarker/MultimarkerLocalizer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file MultimarkerLocalizer.h 3 | * @brief Camera localization using multiple markers 4 | */ 5 | 6 | #pragma once 7 | 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | 28 | #include 29 | 30 | #include 31 | #include 32 | #include 33 | #include 34 | 35 | namespace fiducial_slam { 36 | 37 | /// Robot localization using a single marker 38 | struct MultimarkerLocalizer { 39 | MultimarkerLocalizer(ros::NodeHandle &nh, ros::NodeHandle &nh_p) : 40 | nh_(nh), 41 | nh_p_(nh_p), 42 | pub_ic_(nh_.advertise("/p_ic", 10)), 43 | pub_map_(nh_.advertise("/markers_map", 10)), 44 | pub_extpose_(nh_.advertise("/ext_pose", 10)), 45 | pub_origin_marker_(nh_.advertise("/origin_marker", 10)), 46 | pub_solve_time_(nh_.advertise("/solve_time", 10)), 47 | 48 | initialized_(false), 49 | pixel_noise_(gtsam::noiseModel::Isotropic::Sigma(8, 0.5)), 50 | landmark_noise_(gtsam::noiseModel::Isotropic::Sigma(6, 1e-6)), 51 | marker_prior_noise_(gtsam::noiseModel::Isotropic::Sigma(6, 1e-4)), 52 | motion_noise_(), 53 | tag_size_(0.16), 54 | motion_sigma_t_(1.0), 55 | motion_sigma_r_(1.0), 56 | ic_counter_(1) { 57 | 58 | nh_p_.param("motion_sigma_t", motion_sigma_t_, motion_sigma_t_); 59 | nh_p_.param("motion_sigma_r", motion_sigma_r_, motion_sigma_r_); 60 | ROS_WARN_STREAM("Using sigmas " << motion_sigma_r_ << " " << motion_sigma_t_); 61 | motion_noise_ = gtsam::noiseModel::Diagonal::Sigmas( 62 | (gtsam::Vector(6) << 63 | motion_sigma_r_, 64 | motion_sigma_r_, 65 | motion_sigma_r_, 66 | motion_sigma_t_, 67 | motion_sigma_t_, 68 | motion_sigma_t_).finished()); 69 | } 70 | 71 | void image_info_cb(const sensor_msgs::CameraInfoConstPtr &m) { 72 | if (!K_) { 73 | K_ = fiducial_slam::caminfo_to_gtsam_calib(m); 74 | K_->print("Calibration from camera_info msg:\n"); 75 | } 76 | } 77 | 78 | void handle_external_pose(const geometry_msgs::Pose& ext_pose, const std_msgs::Header& header, std::string label) { 79 | auto pVC = gmsgs_pose_to_gtsam(ext_pose).inverse(); 80 | 81 | // This is the first pair of measurements: determine the calibration 82 | // between ground truth and marker 83 | if (!ground_truth_calib_ && first_cam_pose_) { 84 | // MV = MC * VC^-1 85 | ground_truth_calib_ = (*first_cam_pose_) * pVC.inverse(); 86 | } 87 | 88 | if (ground_truth_calib_) { 89 | // MC = MV * VC 90 | auto pMC = (*ground_truth_calib_) * pVC; 91 | 92 | tf::Transform tf_transf; 93 | tf::poseMsgToTF(gtsam_pose_to_gmsgs(pMC), tf_transf); 94 | tf_br_.sendTransform( 95 | tf::StampedTransform(tf_transf, header.stamp, "world", label)); 96 | nav_msgs::Odometry odom; 97 | odom.header = header; 98 | odom.pose.pose = gtsam_pose_to_gmsgs(pMC); 99 | pub_extpose_.publish(odom); 100 | } 101 | } 102 | 103 | void filter_output_cb(const nav_msgs::OdometryConstPtr &m) { 104 | handle_external_pose(m->pose.pose, m->header, "ekf"); 105 | } 106 | 107 | void ground_truth_cb(const geometry_msgs::TransformStampedConstPtr &m) { 108 | geometry_msgs::Pose pose; 109 | Eigen::Vector3d t; 110 | 
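        // Repack the TransformStamped into a Pose message so handle_external_pose()
        // can treat Vicon like any other external pose source (the call below is
        // currently commented out).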
tf::vectorMsgToEigen(m->transform.translation, t); 111 | tf::pointEigenToMsg(t, pose.position); 112 | pose.orientation = m->transform.rotation; 113 | 114 | //handle_external_pose(pose, m->header, "vicon"); 115 | } 116 | 117 | void tags_cb(const rcars_detector_msgs::TagArrayConstPtr msg) { 118 | // Symbol for the current camera pose 119 | gtsam::Symbol s_ic('x', ic_counter_); 120 | 121 | if (!K_) { 122 | ROS_WARN_THROTTLE(1, "Uncalibrated camera!"); 123 | return; 124 | } 125 | 126 | if (!msg->tags.size()) { 127 | ROS_WARN_THROTTLE(1, "Skipping frame without markers."); 128 | return; 129 | } 130 | 131 | auto origin_it = std::find_if(msg->tags.begin(), msg->tags.end(), [&](const rcars_detector_msgs::Tag &tag) { 132 | return tag.id == 0; 133 | }); 134 | if (origin_it == msg->tags.end()) { 135 | // return; 136 | } 137 | 138 | if (!initialized_) { 139 | initialized_ = true; 140 | 141 | // TODO: allow setting the marker used as origin 142 | auto &origin_tag = msg->tags[0]; 143 | origin_id_ = origin_tag.id; 144 | 145 | ROS_INFO_STREAM("Using marker " << static_cast(*origin_id_) << " as origin."); 146 | 147 | // Add a prior on the "origin" marker to set the origin 148 | gtsam::Symbol symb_origin('M', origin_tag.id); 149 | gtsam::PriorFactor origin_prior(symb_origin, 150 | gtsam::Pose3(), 151 | landmark_noise_); 152 | graph_.push_back(origin_prior); 153 | estimate_.insert(symb_origin, gtsam::Pose3()); 154 | 155 | // Add the first (and only) estimate on the camera pose 156 | // based on the detector's estimate 157 | auto guess = gmsgs_pose_to_gtsam(origin_tag.pose).inverse(); 158 | estimate_.insert(s_ic, guess); 159 | } 160 | 161 | gtsam::Symbol s_prev_ic('x', ic_counter_ - 1); 162 | if (ic_counter_ > 0 && estimate_.exists(s_prev_ic)) { 163 | estimate_.insert(s_ic, estimate_.at(s_prev_ic)); 164 | 165 | // Add motion prior (zero velocity in this case) 166 | graph_.push_back(gtsam::BetweenFactor(s_prev_ic, s_ic, gtsam::Pose3(), motion_noise_)); 167 | } 168 | 169 | std::cout << ic_counter_ << " "; 170 | for (auto const &tag : msg->tags) { 171 | // TODO: change me! 
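            // (This filter is currently a no-op: the `continue` below is commented out,
            // so corners from every visible marker are added to the graph.)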
172 | if (tag.id != 0) { 173 | // continue; 174 | } 175 | std::cout << static_cast(tag.id) << " | "; 176 | gtsam::Symbol s_marker('M', tag.id); 177 | auto corners = tag_to_gtsam_points(tag); 178 | 179 | if (!estimate_.exists(s_marker)) { 180 | // It's the first time we see this new marker: add an estimate 181 | // based on the detector pose 182 | auto init_pose = estimate_.at(s_ic) * gmsgs_pose_to_gtsam(tag.pose); 183 | // estimate_.insert(s_marker, init_pose); 184 | 185 | gtsam::NonlinearFactorGraph g; 186 | gtsam::Values v; 187 | auto current_ic = estimate_.at(s_ic); 188 | g.push_back(gtsam::PriorFactor(s_ic, current_ic, landmark_noise_)); 189 | v.insert(s_ic, current_ic); 190 | g.push_back(MarkerFactor(corners, pixel_noise_, s_ic, s_marker, K_, tag_size_)); 191 | v.insert(s_marker, init_pose); 192 | auto r = gtsam::GaussNewtonOptimizer(g, v).optimize(); 193 | 194 | auto bproj = r.at(s_marker); 195 | estimate_.insert(s_marker, bproj); 196 | 197 | // graph_.push_back( 198 | // gtsam::PriorFactor(s_marker, init_pose, marker_prior_noise_)); 199 | 200 | init_pose.print("From message was\n"); 201 | } 202 | 203 | graph_.push_back(MarkerFactor(corners, pixel_noise_, s_ic, s_marker, K_, tag_size_)); 204 | } 205 | std::cout << "\n"; 206 | 207 | // estimate_ = gtsam::GaussNewtonOptimizer(graph_, estimate_).optimize(); 208 | auto time_begin = ros::Time::now(); 209 | estimate_ = gtsam::LevenbergMarquardtOptimizer(graph_, estimate_).optimize(); 210 | auto time_end = ros::Time::now(); 211 | 212 | // Publish time needed for solution 213 | { 214 | std_msgs::Duration duration_msg; 215 | duration_msg.data = time_end - time_begin; 216 | pub_solve_time_.publish(duration_msg); 217 | } 218 | 219 | auto gtsam_ic = estimate_.at(s_ic); 220 | 221 | // A couple of utility closures to publish data 222 | auto send_tf = [&](const geometry_msgs::Pose& p, std::string frame) { 223 | tf::Transform tf_transf; 224 | tf::poseMsgToTF(p, tf_transf); 225 | tf_br_.sendTransform( 226 | tf::StampedTransform(tf_transf, msg->header.stamp, "world", frame)); 227 | }; 228 | 229 | auto pose_to_odometry = [&](const geometry_msgs::Pose& p) { 230 | nav_msgs::Odometry odom; 231 | odom.header = msg->header; 232 | odom.pose.pose = p; 233 | return odom; 234 | }; 235 | 236 | // Publish GTSAM TF and odometry 237 | { 238 | send_tf( 239 | gtsam_pose_to_gmsgs(gtsam_ic.inverse()), "gtsam"); 240 | 241 | pub_ic_.publish( 242 | pose_to_odometry( 243 | gtsam_pose_to_gmsgs(gtsam_ic.inverse()))); 244 | 245 | if (!first_cam_pose_) { 246 | first_cam_pose_ = gtsam_ic.inverse(); 247 | } 248 | } 249 | 250 | // While the origin marker is visible, also publish the pose estimate by the detector 251 | { 252 | auto origin_it = std::find_if(msg->tags.begin(), msg->tags.end(), [&](const rcars_detector_msgs::Tag &tag) { 253 | return tag.id == *origin_id_; 254 | }); 255 | 256 | if (origin_it != msg->tags.end()) { 257 | send_tf(origin_it->pose, "dtct"); 258 | 259 | pub_origin_marker_.publish( 260 | pose_to_odometry( 261 | origin_it->pose)); 262 | } 263 | } 264 | 265 | // Publish the "map", i.e. 
the absolute position of all the markers we have ever seen 266 | { 267 | rcars_detector_msgs::TagArray map; 268 | map.header = msg->header; 269 | 270 | auto tag_filter = [](const gtsam::Key& k) { 271 | return (k >= gtsam::Symbol('M', 0)) && (k < gtsam::Symbol('N', 0)); 272 | }; 273 | for (const auto& kv : estimate_.filter(tag_filter)) { 274 | rcars_detector_msgs::Tag elem; 275 | // The tag id was stored as the index in the node's gtsam::Symbol 276 | gtsam::Symbol s(kv.key); 277 | elem.id = s.index(); 278 | auto this_tag_pose = estimate_.at(s); 279 | elem.pose = gtsam_pose_to_gmsgs(this_tag_pose); 280 | map.tags.push_back(elem); 281 | } 282 | pub_map_.publish(map); 283 | } 284 | 285 | if (ic_counter_ == 1) { 286 | std::ofstream os("/Users/niko/graph_2.dot"); 287 | graph_.saveGraph(os); 288 | } 289 | 290 | ic_counter_ += 1; 291 | } 292 | 293 | void imu_cb(const sensor_msgs::ImuConstPtr &m) { 294 | 295 | } 296 | 297 | boost::shared_ptr K_; 298 | gtsam::NonlinearFactorGraph graph_; 299 | gtsam::Values estimate_; 300 | size_t ic_counter_; // Counter for the last camera pose id 301 | bool initialized_; 302 | boost::optional origin_id_; // id of the marker used as origin 303 | boost::optional first_cam_pose_; 304 | boost::optional ground_truth_calib_; 305 | 306 | gtsam::noiseModel::Isotropic::shared_ptr pixel_noise_; 307 | gtsam::noiseModel::Isotropic::shared_ptr landmark_noise_; 308 | gtsam::noiseModel::Isotropic::shared_ptr marker_prior_noise_; 309 | gtsam::noiseModel::Diagonal::shared_ptr motion_noise_; 310 | 311 | double tag_size_; 312 | double motion_sigma_t_, motion_sigma_r_; // Sigma for the motion noise model 313 | 314 | ros::NodeHandle nh_, nh_p_; 315 | tf::TransformBroadcaster tf_br_; 316 | ros::Publisher pub_ic_, pub_map_, pub_extpose_, pub_origin_marker_, pub_solve_time_; 317 | }; 318 | } -------------------------------------------------------------------------------- /r4d_2_multimarker/launch/eval.launch: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 10 | 11 | 12 | 13 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /r4d_2_multimarker/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | r4d_2_multimarker 4 | 0.0.0 5 | The r4d_2_multimarker package 6 | 7 | nicolov 8 | 9 | BSD 10 | 11 | catkin 12 | r4d_common 13 | r4d_common 14 | 15 | 16 | -------------------------------------------------------------------------------- /r4d_2_multimarker/scripts/plot.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from IPython import embed 4 | import plotly.plotly as py 5 | import plotly.graph_objs as go 6 | from r4d_common.plot_tools import * 7 | 8 | def position_comparison(): 9 | s = [ 10 | ['single', '~/fiducial_slam/out_multimarker_table.bag', '/origin_marker', odom_pos('x')], 11 | ['multi', '~/fiducial_slam/out_multimarker_table.bag', '/p_ic', odom_pos('x')], 12 | ['has_m0', '~/fiducial_slam/markers_table.bag', '/rcars/detector/tags', has_marker(0)], 13 | ['has_m1', '~/fiducial_slam/markers_table.bag', '/rcars/detector/tags', has_marker(1)], 14 | ] 15 | 16 | df = get_data(s) 17 | start_time = df.index[0] 18 | df = df.iloc[140:250] 19 | 20 | data = [go.Bar(x=df.index - start_time, 21 | y=df.has_m0, 22 | name='Marker 0', 23 | yaxis='y2', 24 | opacity=0.3)] \ 25 | + [go.Bar(x=df.index - start_time, 26 | y=df.has_m1, 27 | name='Marker 1', 28 | yaxis='y2', 29 | opacity=0.3)] \ 30 | 
+ [go.Scatter(x=df.index - start_time, 31 | y=df[who], 32 | mode='markers', 33 | name=who.title(), 34 | yaxis='y1') for who in ('single', 'multi')] 35 | 36 | layout = go.Layout( 37 | barmode='stack', 38 | title='', 39 | xaxis={'title': 'Time [s]'}, 40 | yaxis={'title': 'Camera X position [m]'}, 41 | yaxis2={'title': 'Marker visibility', 'overlaying': 'y', 'side': 'right', 'range': (0, 12), 'showticklabels': False}) 42 | 43 | 44 | fig = go.Figure(data=data, layout=layout) 45 | url = py.plot(fig, filename='r4d_2_single_vs_multi') 46 | 47 | 48 | def map_building(): 49 | s = [ 50 | ['has_m0', '~/fiducial_slam/markers_table.bag', '/rcars/detector/tags', has_marker(0)], 51 | ['has_m1', '~/fiducial_slam/markers_table.bag', '/rcars/detector/tags', has_marker(1)], 52 | ['m1', '~/fiducial_slam/out_multimarker_table.bag', '/markers_map', marker_pos(1, 'z')], 53 | ] 54 | 55 | df = get_data(s) 56 | 57 | embed() 58 | 59 | start_time = df.index[0] 60 | df = df.iloc[140:440] 61 | 62 | data = [ 63 | go.Scatter(x=df.index - start_time, 64 | y=df.m1, 65 | name='Position'), 66 | go.Bar(x=df.index - start_time, 67 | y=df.has_m0, 68 | name='Marker 0', 69 | yaxis='y2', 70 | opacity=0.3), 71 | go.Bar(x=df.index - start_time, 72 | y=df.has_m1, 73 | name='Marker 1', 74 | yaxis='y2', 75 | opacity=0.3) 76 | ] 77 | 78 | layout = go.Layout( 79 | barmode='stack', 80 | xaxis={'title': 'Time [s]'}, 81 | yaxis={'title': 'Relative marker Z position [m]'}, 82 | yaxis2={'overlaying': 'y', 'side': 'right', 'range': (0, 6), 'showticklabels': False}) 83 | 84 | fig = go.Figure(data=data, layout=layout) 85 | url = py.plot(fig, filename='r4d_2_map') 86 | 87 | 88 | if __name__ == "__main__": 89 | # position_comparison() 90 | map_building() -------------------------------------------------------------------------------- /r4d_2_multimarker/scripts/plot_solve_time.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from IPython import embed 4 | import plotly.plotly as py 5 | import plotly.graph_objs as go 6 | from r4d_common.plot_tools import get_data, simple_plot, odom_pos 7 | 8 | def solve_time(): 9 | s = [ 10 | ['elapsed_time', '~/fiducial_slam/out_multimarker_table.bag', '/solve_time', lambda m: m.data.to_sec()], 11 | ] 12 | 13 | df = get_data(s).iloc[:1000:50] 14 | 15 | embed() 16 | 17 | fig = { 18 | 'data': [go.Bar(x=range(1, 1000, 50), 19 | y=df.elapsed_time, 20 | name='Solve time',)], 21 | 'layout': { 22 | 'xaxis': {'title': 'Frame number'}, 23 | 'yaxis': {'title': "Solve time [s]"} 24 | } 25 | } 26 | 27 | url = py.plot(fig, filename='r4d_2_multimarker_solve_time') 28 | 29 | if __name__ == "__main__": 30 | solve_time() -------------------------------------------------------------------------------- /r4d_2_multimarker/src/multimarker_node.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @file multi_marker_test.cpp 3 | * @brief Bag loading code for the MultimarkerLocalizer class 4 | */ 5 | 6 | #include 7 | 8 | #include 9 | #include 10 | 11 | int main(int argc, char *argv[]) { 12 | ros::init(argc, argv, "multi_marker_node"); 13 | ros::NodeHandle nh; 14 | ros::NodeHandle nh_p("~"); 15 | 16 | fiducial_slam::BagRunner runner(nh, nh_p); 17 | runner.run(); 18 | } -------------------------------------------------------------------------------- /r4d_3_imu/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(r4d_3_imu) 3 | 4 | 
add_definitions(-std=c++11) 5 | 6 | find_package(catkin REQUIRED COMPONENTS 7 | r4d_common 8 | ) 9 | 10 | catkin_package( 11 | # INCLUDE_DIRS include 12 | # LIBRARIES r4d_2_tracking 13 | # CATKIN_DEPENDS r4d_common 14 | # DEPENDS system_lib 15 | ) 16 | 17 | include_directories( 18 | include 19 | ${catkin_INCLUDE_DIRS} 20 | ) 21 | 22 | add_executable(imu_node src/imu_node.cpp) 23 | 24 | target_link_libraries(imu_node 25 | ${catkin_LIBRARIES} 26 | ) 27 | -------------------------------------------------------------------------------- /r4d_3_imu/include/r4d_3_imu/ImuLocalizer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file ImuLocalizer.h 3 | * @brief Camera localization using multiple markers 4 | */ 5 | 6 | #pragma once 7 | 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | 29 | #include 30 | #include 31 | #include 32 | #include 33 | 34 | namespace fiducial_slam { 35 | using gtsam::symbol_shorthand::X; // Body pose 36 | using gtsam::symbol_shorthand::V; // Velocity 37 | using gtsam::symbol_shorthand::B; // Biases 38 | using gtsam::symbol_shorthand::M; // Marker pose 39 | 40 | /// Robot localization using a single marker 41 | struct ImuLocalizer { 42 | ImuLocalizer(ros::NodeHandle &nh, ros::NodeHandle &nh_p) : 43 | nh_(nh), 44 | nh_p_(nh_p), 45 | pub_ic_(nh_.advertise("/p_ic", 10)), 46 | pub_map_(nh_.advertise("/markers_map", 10)), 47 | pub_extpose_(nh_.advertise("/ext_pose", 10)), 48 | pub_origin_marker_(nh_.advertise("/origin_marker", 10)), 49 | pub_biases_(nh_.advertise("/imu_biases", 10)), 50 | pub_prediction_(nh_.advertise("/p_ic_pred", 10)), 51 | 52 | landmark_noise_(gtsam::noiseModel::Isotropic::Sigma(6, 1e-6)), 53 | vel_init_noise_(gtsam::noiseModel::Isotropic::Sigma(3, 1e-2)), 54 | tag_size_(0.16), 55 | accel_init_done_(false), 56 | pose_init_done_(false), 57 | gravity_magn_(9.8), 58 | ic_counter_(1) { 59 | 60 | nh_p.param("accel_sigma", accel_sigma_, 1e-6); 61 | nh_p.param("gyro_sigma", gyro_sigma_, 4e-6); 62 | nh_p.param("accel_bias_sigma", accel_bias_sigma_, 1e-3); 63 | nh_p.param("gyro_bias_sigma", gyro_bias_sigma_, 1e-3); 64 | nh_p.param("bias_prior_sigma", bias_prior_sigma_, 1e-3); 65 | nh_p.param("pixel_sigma", pixel_sigma_, 0.5); 66 | 67 | ROS_WARN_STREAM( 68 | "\naccel_sigma = " << accel_sigma_ << 69 | "\ngyro_sigma = " << gyro_sigma_ << 70 | "\naccel_bias_sigma = " << accel_bias_sigma_ << 71 | "\ngyro_bias_sigma = " << gyro_bias_sigma_ << 72 | "\nbias_prior_sigma = " << bias_prior_sigma_ << 73 | "\npixel_sigma = " << pixel_sigma_); 74 | 75 | // If/when to pause marker-based correction for evaluation 76 | { 77 | int p; 78 | nh_p.param("pause_correction_start", p, 0); 79 | if (p > 0) { 80 | pause_correction_start_ = p; 81 | nh_p.param("pause_correction_duration", pause_correction_duration_, 0); 82 | ROS_WARN_STREAM( 83 | "Eval: Disabling correction at frame " << p << " for " << pause_correction_duration_ << 84 | " frames"); 85 | } 86 | } 87 | 88 | nh_p.param("correction_skip", correction_skip_, 1); 89 | 90 | /* 91 | vea_x 0.0083632 92 | vea_y -0.00390811 93 | vea_z 0.712747 94 | vea_w 0.70136 95 | vep_0 -0.0116603 96 | vep_1 -0.0570516 97 | vep_2 -0.00124568 98 | */ 99 | 100 | extrinsics_ = gtsam::Pose3(gtsam::Rot3::Quaternion(0.7, 0, 0, 0.7), gtsam::Point3(0.05, 0, 0)); 101 | bias_prior_noise_ = 
gtsam::noiseModel::Isotropic::Sigma(6, bias_prior_sigma_); 102 | pixel_noise_ = gtsam::noiseModel::Isotropic::Sigma(8, pixel_sigma_); 103 | 104 | preint_params_ = gtsam::PreintegrationParams::MakeSharedU(gravity_magn_); 105 | preint_params_->accelerometerCovariance = accel_sigma_ * gtsam::I_3x3; 106 | preint_params_->gyroscopeCovariance = gyro_sigma_ * gtsam::I_3x3; 107 | preint_params_->integrationCovariance = 1e-8 * gtsam::I_3x3; 108 | 109 | bias_between_sigmas_vec_ = (gtsam::Vector(6) << gtsam::Vector3::Constant(accel_bias_sigma_), 110 | gtsam::Vector3::Constant(gyro_bias_sigma_)).finished(); 111 | } 112 | 113 | void image_info_cb(const sensor_msgs::CameraInfoConstPtr &m) { 114 | if (!K_) { 115 | K_ = fiducial_slam::caminfo_to_gtsam_calib(m); 116 | K_->print("Calibration from camera_info msg:\n"); 117 | } 118 | } 119 | 120 | void handle_external_pose(const geometry_msgs::Pose &ext_pose, const std_msgs::Header &header, 121 | std::string label) { 122 | auto pVC = gmsgs_pose_to_gtsam(ext_pose).inverse(); 123 | 124 | // This is the first pair of measurements: determine the calibration 125 | // between ground truth and marker 126 | if (!ground_truth_calib_ && first_cam_pose_) { 127 | // MV = MC * VC^-1 128 | ground_truth_calib_ = (*first_cam_pose_) * pVC.inverse(); 129 | } 130 | 131 | if (ground_truth_calib_) { 132 | // MC = MV * VC 133 | auto pMC = (*ground_truth_calib_) * pVC; 134 | 135 | tf::Transform tf_transf; 136 | tf::poseMsgToTF(gtsam_pose_to_gmsgs(pMC), tf_transf); 137 | tf_br_.sendTransform( 138 | tf::StampedTransform(tf_transf, header.stamp, "world", label)); 139 | nav_msgs::Odometry odom; 140 | odom.header = header; 141 | odom.pose.pose = gtsam_pose_to_gmsgs(pMC); 142 | pub_extpose_.publish(odom); 143 | } 144 | } 145 | 146 | void filter_output_cb(const nav_msgs::OdometryConstPtr &m) { 147 | handle_external_pose(m->pose.pose, m->header, "ekf"); 148 | } 149 | 150 | void ground_truth_cb(const geometry_msgs::TransformStampedConstPtr &m) { 151 | geometry_msgs::Pose pose; 152 | Eigen::Vector3d t; 153 | tf::vectorMsgToEigen(m->transform.translation, t); 154 | tf::pointEigenToMsg(t, pose.position); 155 | pose.orientation = m->transform.rotation; 156 | 157 | //handle_external_pose(pose, m->header, "vicon"); 158 | } 159 | 160 | void tags_cb(const rcars_detector_msgs::TagArrayConstPtr &msg) { 161 | // Symbol for the current camera pose 162 | auto s_ic = X(ic_counter_); 163 | auto s_vel = V(ic_counter_); 164 | auto s_bias = B(ic_counter_); 165 | 166 | if (!K_) { 167 | ROS_WARN_THROTTLE(1, "Uncalibrated camera!"); 168 | return; 169 | } 170 | 171 | if (!accel_init_done_) { 172 | ROS_WARN_STREAM( 173 | "Received markers, but ignoring them until IMU data is available to initialize"); 174 | return; 175 | } 176 | 177 | // While the origin marker is visible, also publish the pose estimate by the detector 178 | // We do it here so it's available even when artificially dropping markers 179 | if (origin_id_) { 180 | publish_marker_by_id(*origin_id_, msg); 181 | } 182 | 183 | // For eval purposes, disable corrections and within a certain window 184 | if (pause_correction_start_ && ic_counter_ == *pause_correction_start_ && pause_correction_duration_ > 0) { 185 | ROS_WARN_STREAM("Skipping frame (" << pause_correction_duration_ << ") remaining"); 186 | pause_correction_duration_ -= 1; 187 | return; 188 | } 189 | 190 | if (!pose_init_done_) { 191 | pose_init_done_ = true; 192 | 193 | // Add estimate and prior for initial body pose 194 | graph_.push_back( 195 | gtsam::PriorFactor(s_ic, initial_pose_, 
landmark_noise_)); 196 | estimate_.insert(s_ic, initial_pose_); 197 | 198 | // Add an estimate and prior for zero initial velocity 199 | graph_.push_back( 200 | gtsam::PriorFactor(s_vel, gtsam::Vector3(), vel_init_noise_)); 201 | estimate_.insert(s_vel, gtsam::Vector3()); 202 | 203 | // Add an estimate and prior for zero initial bias 204 | graph_.push_back( 205 | gtsam::PriorFactor( 206 | s_bias, gtsam::imuBias::ConstantBias(), bias_prior_noise_)); 207 | estimate_.insert(s_bias, gtsam::imuBias::ConstantBias()); 208 | } 209 | 210 | auto s_prev_ic = X(ic_counter_ - 1); 211 | auto s_prev_vel = V(ic_counter_ - 1); 212 | auto s_prev_bias = B(ic_counter_ - 1); 213 | if (ic_counter_ > 0 && estimate_.exists(s_prev_ic)) { 214 | // TODO: use the imu data to predict the new pose/velocity 215 | estimate_.insert(s_ic, estimate_.at(s_prev_ic)); 216 | estimate_.insert(s_vel, estimate_.at(s_prev_vel)); 217 | estimate_.insert(s_bias, estimate_.at(s_prev_bias)); 218 | 219 | auto prev_bias = estimate_.at(s_prev_bias); 220 | gtsam::PreintegratedImuMeasurements pre_integr_data(preint_params_, prev_bias); 221 | 222 | int num_imus_found = 0; 223 | for (const auto &kv : imu_meas_) { 224 | if (kv.first > last_image_stamp_ && kv.first <= msg->header.stamp) { 225 | num_imus_found += 1; 226 | pre_integr_data.integrateMeasurement(kv.second.first, kv.second.second, 0.005); 227 | } 228 | } 229 | 230 | // Output IMU prediction 231 | { 232 | gtsam::NavState nav_begin(estimate_.at(s_prev_ic), 233 | estimate_.at(s_prev_vel)); 234 | auto nav_predicted = pre_integr_data.predict(nav_begin, estimate_.at( 235 | s_prev_bias)); 236 | auto new_pose_ros = gtsam_pose_to_gmsgs(nav_predicted.pose().inverse()); 237 | 238 | nav_msgs::Odometry odom; 239 | odom.header.stamp = msg->header.stamp; 240 | odom.pose.pose = new_pose_ros; 241 | pub_prediction_.publish(odom); 242 | } 243 | last_image_stamp_ = msg->header.stamp; 244 | 245 | // IMU factor 246 | graph_.add( 247 | gtsam::ImuFactor(s_prev_ic, s_prev_vel, s_ic, s_vel, s_bias, pre_integr_data)); 248 | // Bias evolution factor 249 | auto bias_between_noise = gtsam::noiseModel::Diagonal::Sigmas( 250 | sqrt(pre_integr_data.deltaTij()) * bias_between_sigmas_vec_); 251 | graph_.add( 252 | gtsam::BetweenFactor(s_prev_bias, 253 | s_bias, 254 | gtsam::imuBias::ConstantBias(), 255 | bias_between_noise)); 256 | } 257 | 258 | bool use_this_correction = (ic_counter_-1) % correction_skip_ == 0; 259 | 260 | std::cout << ic_counter_ << ": "; 261 | for (auto const &tag : msg->tags) { 262 | gtsam::Symbol s_marker('M', tag.id); 263 | 264 | std::cout << static_cast(tag.id) << " | "; 265 | 266 | if (!estimate_.exists(s_marker)) { 267 | // It's the first time we see this new marker: add an estimate 268 | // based on the detector pose 269 | auto detector_pose = gmsgs_pose_to_gtsam(tag.pose); 270 | auto init_pose = estimate_.at(s_ic) * detector_pose; 271 | estimate_.insert(s_marker, init_pose); 272 | 273 | if (!origin_id_) { 274 | // This tag will also be used as reference when comparing to the detector output 275 | origin_id_ = tag.id; 276 | ROS_INFO_STREAM("Using marker " << static_cast(*origin_id_) << " as origin."); 277 | first_origin_marker_pose_ = detector_pose; 278 | } 279 | } 280 | 281 | if (use_this_correction) { 282 | auto corners = tag_to_gtsam_points(tag); 283 | graph_.push_back( 284 | MarkerFactor(corners, pixel_noise_, s_ic, s_marker, K_, tag_size_, extrinsics_)); 285 | } 286 | } 287 | if (!use_this_correction) { 288 | std::cout << "(s)"; 289 | } 290 | 291 | std::cout << "\n"; 292 | 293 | 
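        // Full batch re-optimization: graph_ keeps every pose/velocity/bias variable and
        // every factor ever added, so each call re-solves the whole history. This keeps
        // the code simple, but solve time grows with trajectory length; an incremental
        // solver such as gtsam::ISAM2 would be the usual way to bound the per-frame cost
        // (not done here).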
estimate_ = gtsam::GaussNewtonOptimizer(graph_, estimate_).optimize(); 294 | 295 | if (!first_cam_pose_) { 296 | auto gtsam_ic = estimate_.at(s_ic); 297 | first_cam_pose_ = gtsam_ic.inverse(); 298 | } 299 | 300 | publish_gtsam_navstate(estimate_, msg->header, ic_counter_); 301 | publish_map(estimate_, msg->header); 302 | 303 | ic_counter_ += 1; 304 | } 305 | 306 | void do_accel_init() { 307 | gtsam::Vector3 acc_avg; 308 | for (auto &kv : imu_meas_) { 309 | acc_avg += kv.second.first; 310 | } 311 | acc_avg /= imu_meas_.size(); 312 | ROS_WARN_STREAM("Gravity-aligning with accel. vector:\n" << acc_avg); 313 | 314 | gtsam::Vector3 gravity_vec; 315 | gravity_vec << 0.0, 0.0, gravity_magn_; 316 | auto initial_att = gtsam::Rot3(Eigen::Quaterniond().setFromTwoVectors(acc_avg, gravity_vec)); 317 | initial_pose_ = gtsam::Pose3(initial_att, gtsam::Point3()); 318 | 319 | (initial_pose_ * acc_avg).print("Gravity vector after alignment:\n"); 320 | 321 | accel_init_done_ = true; 322 | } 323 | 324 | void imu_cb(const sensor_msgs::ImuConstPtr &m) { 325 | auto acc = gtsam::Vector3(m->linear_acceleration.x, m->linear_acceleration.y, m->linear_acceleration.z); 326 | auto gyro = gtsam::Vector3(m->angular_velocity.x, m->angular_velocity.y, m->angular_velocity.z); 327 | imu_meas_[m->header.stamp] = std::make_pair(acc, gyro); 328 | 329 | // Wait for 10 accelerometer measurements before aligning to gravity 330 | if (!accel_init_done_ && imu_meas_.size() > 10) { 331 | do_accel_init(); 332 | } 333 | } 334 | 335 | void publish_gtsam_navstate(const gtsam::Values &values, 336 | const std_msgs::Header &header, 337 | size_t step) { 338 | auto gtsam_ic = values.at(X(step)); 339 | auto gtsam_vel = values.at(V(step)); 340 | auto gtsam_bias = values.at(B(step)); 341 | 342 | tf::Transform tf_transf; 343 | tf::poseMsgToTF(gtsam_pose_to_gmsgs(gtsam_ic.inverse()), tf_transf); 344 | tf_br_.sendTransform(tf::StampedTransform(tf_transf, header.stamp, "world", "gtsam")); 345 | 346 | nav_msgs::Odometry odom; 347 | odom.header.stamp = header.stamp; 348 | odom.pose.pose = gtsam_pose_to_gmsgs(gtsam_ic.inverse()); 349 | odom.twist.twist.linear = gtsam_vector_to_gmsgs(gtsam_vel); 350 | pub_ic_.publish(odom); 351 | 352 | sensor_msgs::Imu imu; 353 | imu.header.stamp = header.stamp; 354 | imu.linear_acceleration.x = gtsam_bias.accelerometer()(0); 355 | imu.linear_acceleration.y = gtsam_bias.accelerometer()(1); 356 | imu.linear_acceleration.z = gtsam_bias.accelerometer()(2); 357 | imu.angular_velocity.x = gtsam_bias.gyroscope()(0); 358 | imu.angular_velocity.y = gtsam_bias.gyroscope()(1); 359 | imu.angular_velocity.z = gtsam_bias.gyroscope()(2); 360 | pub_biases_.publish(imu); 361 | } 362 | 363 | void publish_marker_by_id(unsigned char id, const rcars_detector_msgs::TagArrayConstPtr &msg) { 364 | auto origin_it = std::find_if(msg->tags.begin(), msg->tags.end(), 365 | [&](const rcars_detector_msgs::Tag &tag) { 366 | return tag.id == *origin_id_; 367 | }); 368 | if (origin_it != msg->tags.end()) { 369 | tf::Transform tf_transf; 370 | // Compose the detector pose with the initial origin marker pose and extrinsics 371 | // to align with gtsam output 372 | auto detector_pose = gmsgs_pose_to_gtsam(origin_it->pose); 373 | // Without extrinsics: 374 | // auto new_pose = detector_pose * first_origin_marker_pose_.inverse(); 375 | // With extrinsics and initial gravity alignment: 376 | // WB1 = WB0 * BC * C0M * (C1M)^-1 * (BC)^-1 377 | auto new_pose = 378 | extrinsics_ * detector_pose * first_origin_marker_pose_.inverse() * extrinsics_.inverse() * 379 | 
initial_pose_.inverse(); 380 | auto new_pose_ros = gtsam_pose_to_gmsgs(new_pose); 381 | 382 | tf::poseMsgToTF(new_pose_ros, tf_transf); 383 | tf_br_.sendTransform(tf::StampedTransform(tf_transf, msg->header.stamp, "world", "dtct")); 384 | 385 | nav_msgs::Odometry odom; 386 | odom.header.stamp = msg->header.stamp; 387 | odom.pose.pose = new_pose_ros; 388 | pub_origin_marker_.publish(odom); 389 | } 390 | } 391 | 392 | void publish_map(const gtsam::Values &values, const std_msgs::Header &header) { 393 | // Publish the "map", i.e. the absolute position of all the markers we have ever seen 394 | rcars_detector_msgs::TagArray map; 395 | map.header = header; 396 | 397 | auto tag_filter = [](const gtsam::Key &k) { 398 | return (k >= gtsam::Symbol('M', 0)) && (k < gtsam::Symbol('N', 0)); 399 | }; 400 | for (const auto &kv : estimate_.filter(tag_filter)) { 401 | rcars_detector_msgs::Tag elem; 402 | // The tag id was stored as the index in the node's gtsam::Symbol 403 | gtsam::Symbol s(kv.key); 404 | elem.id = s.index(); 405 | auto this_tag_pose = estimate_.at(s); 406 | elem.pose = gtsam_pose_to_gmsgs(this_tag_pose); 407 | map.tags.push_back(elem); 408 | } 409 | pub_map_.publish(map); 410 | } 411 | 412 | boost::shared_ptr K_; 413 | gtsam::NonlinearFactorGraph graph_; 414 | gtsam::Values estimate_; 415 | 416 | // Coordinate frames stuff 417 | size_t ic_counter_; // Counter for the last camera pose id 418 | bool pose_init_done_; 419 | boost::optional origin_id_; // id of the marker used as reference 420 | gtsam::Pose3 first_origin_marker_pose_; 421 | boost::optional first_cam_pose_; 422 | boost::optional ground_truth_calib_; 423 | bool accel_init_done_; 424 | gtsam::Pose3 initial_pose_; // Gets reset by IMU measurements to be 0, 0, 0 and gravity-aligned 425 | 426 | // IMU stuff 427 | double gravity_magn_; 428 | boost::shared_ptr preint_params_; 429 | double accel_sigma_, gyro_sigma_; 430 | double accel_bias_sigma_, gyro_bias_sigma_; 431 | gtsam::Pose3 extrinsics_; // Camera <-> IMU extrinsics 432 | gtsam::Vector6 bias_between_sigmas_vec_; // Vector of 6 sigmas for bias evolution 433 | 434 | // Measurements stuff 435 | std::map> imu_meas_; 436 | ros::Time last_image_stamp_; 437 | double tag_size_; 438 | 439 | // Noise models 440 | double bias_prior_sigma_, pixel_sigma_; 441 | gtsam::noiseModel::Isotropic::shared_ptr pixel_noise_, landmark_noise_, vel_init_noise_, vel_integration_noise_; 442 | gtsam::noiseModel::Diagonal::shared_ptr bias_prior_noise_; 443 | 444 | // Evaluation stuff 445 | boost::optional pause_correction_start_; 446 | int pause_correction_duration_; 447 | int correction_skip_; 448 | 449 | ros::NodeHandle nh_, nh_p_; 450 | tf::TransformBroadcaster tf_br_; 451 | ros::Publisher pub_ic_, pub_map_, pub_extpose_, pub_origin_marker_, pub_biases_, pub_prediction_; 452 | }; 453 | } 454 | -------------------------------------------------------------------------------- /r4d_3_imu/launch/eval.launch: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /r4d_3_imu/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | r4d_3_imu 4 | 0.0.0 5 | The r4d_2_multimarker package 6 | 7 | nicolov 8 | 9 | BSD 10 | 11 | catkin 12 | r4d_common 13 | r4d_common 14 | 15 | 16 | 
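The `ImuLocalizer` above accumulates the IMU samples that arrive between two camera frames into a GTSAM preintegrated measurement and ties consecutive pose/velocity/bias variables together with an `ImuFactor`. The following is a minimal sketch of that pattern with the ROS plumbing stripped out; the function name and the single hard-coded sample are illustrative only, and it assumes the same GTSAM version the tutorial builds against:

```cpp
#include <gtsam/inference/Symbol.h>
#include <gtsam/navigation/ImuBias.h>
#include <gtsam/navigation/ImuFactor.h>
#include <gtsam/navigation/PreintegrationParams.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

// Sketch: preintegrate the IMU samples seen between two keyframes, then add a
// single ImuFactor linking pose X, velocity V and bias B of frames 0 and 1.
void add_imu_factor_sketch(gtsam::NonlinearFactorGraph &graph,
                           const gtsam::imuBias::ConstantBias &prev_bias) {
    using gtsam::symbol_shorthand::X;
    using gtsam::symbol_shorthand::V;
    using gtsam::symbol_shorthand::B;

    // Gravity along -Z ("U" = world Z up), same noise figures as the node's defaults.
    auto params = gtsam::PreintegrationParams::MakeSharedU(9.8);
    params->accelerometerCovariance = 1e-6 * gtsam::I_3x3;
    params->gyroscopeCovariance = 4e-6 * gtsam::I_3x3;
    params->integrationCovariance = 1e-8 * gtsam::I_3x3;

    gtsam::PreintegratedImuMeasurements pim(params, prev_bias);
    // In ImuLocalizer::tags_cb this loop runs over imu_meas_ between two image stamps.
    pim.integrateMeasurement(gtsam::Vector3(0.0, 0.0, 9.8),  // accelerometer [m/s^2]
                             gtsam::Vector3::Zero(),         // gyroscope [rad/s]
                             0.005);                         // dt [s]

    graph.add(gtsam::ImuFactor(X(0), V(0), X(1), V(1), B(0), pim));
}
```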
-------------------------------------------------------------------------------- /r4d_3_imu/scripts/plot.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from IPython import embed 4 | import plotly.plotly as py 5 | import plotly.graph_objs as go 6 | from r4d_common.plot_tools import * 7 | 8 | 9 | def pause_biases(): 10 | bag = '~/fiducial_slam/out_imu_table_pause_200_40_skip_1.bag' 11 | s = [ 12 | ['accel_bias', bag, '/imu_biases', gyro_bias('y')], 13 | ] 14 | 15 | df = get_data(s) 16 | start_time = df.index[0] 17 | df = df.iloc[:1020] 18 | 19 | data = [] \ 20 | + [go.Scatter(x=df.index - start_time, 21 | y=df[who], 22 | mode='lines+markers', 23 | name=who.title()) for who in ('accel_bias',)] 24 | 25 | layout = go.Layout( 26 | # barmode='stack', 27 | title='', 28 | xaxis={'title': 'Time [s]'}, 29 | yaxis={'title': 'Gyro Y bias [rad/s]'}, 30 | ) 31 | 32 | 33 | fig = go.Figure(data=data, layout=layout) 34 | url = py.plot(fig, filename='r4d_3_imu_bias_pause') 35 | 36 | 37 | def pause_position_comparison(): 38 | bag = '~/fiducial_slam/out_imu_table_pause_200_40_skip_1.bag' 39 | s = [ 40 | ['Estimate', bag, '/p_ic', odom_pos('x')], 41 | ['Prediction', bag, '/p_ic_pred', odom_pos('x')], 42 | ] 43 | 44 | df = get_data(s) 45 | start_time = df.index[0] 46 | df = df.iloc[130:230] 47 | 48 | data = [] \ 49 | + [go.Scatter(x=df.index - start_time, 50 | y=df[who], 51 | mode='lines+markers', 52 | name=who.title()) for who in ('Estimate', 'Prediction')] 53 | 54 | layout = go.Layout( 55 | # barmode='stack', 56 | title='', 57 | xaxis={'title': 'Time [s]'}, 58 | yaxis={'title': 'Camera X position [m]'}, 59 | ) 60 | 61 | 62 | fig = go.Figure(data=data, layout=layout) 63 | url = py.plot(fig, filename='r4d_3_imu_prediction_pause_pos') 64 | 65 | 66 | def pause_orientation_comparison(): 67 | bag = '~/fiducial_slam/out_imu_table_pause_200_40_skip_1.bag' 68 | s = [ 69 | ['Estimate', bag, '/p_ic', odom_euler(1)], 70 | ['Prediction', bag, '/p_ic_pred', odom_euler(1)], 71 | ] 72 | 73 | df = get_data(s) 74 | start_time = df.index[0] 75 | df = df.iloc[130:230] 76 | 77 | data = [] \ 78 | + [go.Scatter(x=df.index - start_time, 79 | y=df[who], 80 | mode='lines+markers', 81 | name=who.title()) for who in ('Estimate', 'Prediction')] 82 | 83 | layout = go.Layout( 84 | # barmode='stack', 85 | title='', 86 | xaxis={'title': 'Time [s]'}, 87 | yaxis={'title': 'Pitch angle [rad]'}, 88 | ) 89 | 90 | 91 | fig = go.Figure(data=data, layout=layout) 92 | url = py.plot(fig, filename='r4d_3_imu_prediction_pause_orient') 93 | 94 | 95 | def skip_position_comparison(): 96 | bag = '~/fiducial_slam/out_imu_table_pause_0_0_skip_5.bag' 97 | s = [ 98 | ['Estimate', bag, '/p_ic', odom_pos('x')], 99 | ['Prediction', bag, '/p_ic_pred', odom_pos('x')], 100 | ] 101 | 102 | df = get_data(s) 103 | start_time = df.index[0] 104 | df = df.iloc[100:600] 105 | 106 | data = [] \ 107 | + [go.Scatter(x=df.index - start_time, 108 | y=df[who], 109 | mode='lines+markers', 110 | name=who.title()) for who in ('Estimate', 'Prediction')] 111 | 112 | layout = go.Layout( 113 | # barmode='stack', 114 | title='', 115 | xaxis={'title': 'Time [s]'}, 116 | yaxis={'title': 'Camera X position [m]'}, 117 | ) 118 | 119 | 120 | fig = go.Figure(data=data, layout=layout) 121 | url = py.plot(fig, filename='r4d_3_imu_prediction_skip_pos') 122 | 123 | 124 | def skip_orientation_comparison(): 125 | bag = '~/fiducial_slam/out_imu_table_pause_0_0_skip_5.bag' 126 | s = [ 127 | ['Estimate', bag, '/p_ic', odom_euler(1)], 
128 | ['Prediction', bag, '/p_ic_pred', odom_euler(1)], 129 | ] 130 | 131 | df = get_data(s) 132 | start_time = df.index[0] 133 | df = df.iloc[100:600] 134 | 135 | data = [] \ 136 | + [go.Scatter(x=df.index - start_time, 137 | y=df[who], 138 | mode='lines+markers', 139 | name=who.title()) for who in ('Estimate', 'Prediction')] 140 | 141 | layout = go.Layout( 142 | # barmode='stack', 143 | title='', 144 | xaxis={'title': 'Time [s]'}, 145 | yaxis={'title': 'Pitch angle [rad]'}, 146 | ) 147 | 148 | 149 | fig = go.Figure(data=data, layout=layout) 150 | url = py.plot(fig, filename='r4d_3_imu_prediction_skip_orient') 151 | 152 | 153 | if __name__ == "__main__": 154 | pause_biases() 155 | # pause_position_comparison() 156 | # pause_orientation_comparison() 157 | # skip_position_comparison() 158 | # skip_orientation_comparison() -------------------------------------------------------------------------------- /r4d_3_imu/src/imu_node.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @file multi_marker_test.cpp 3 | * @brief Bag loading code for the ImuLocalizer class 4 | */ 5 | 6 | #include 7 | 8 | #include 9 | #include 10 | 11 | int main(int argc, char *argv[]) { 12 | ros::init(argc, argv, "multi_marker_node"); 13 | ros::NodeHandle nh; 14 | ros::NodeHandle nh_p("~"); 15 | 16 | fiducial_slam::BagRunner runner(nh, nh_p); 17 | runner.run(); 18 | } -------------------------------------------------------------------------------- /r4d_common/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(r4d_common) 3 | 4 | add_definitions(-std=c++11) 5 | 6 | find_package(catkin REQUIRED COMPONENTS 7 | geometry_msgs 8 | nav_msgs 9 | std_msgs 10 | rosbag 11 | tf 12 | eigen_conversions 13 | rcars_detector_msgs 14 | gtsam_catkin 15 | ) 16 | 17 | find_package(Eigen3 REQUIRED) 18 | find_package(OpenCV REQUIRED) 19 | 20 | catkin_python_setup() 21 | 22 | catkin_package( 23 | INCLUDE_DIRS include 24 | # LIBRARIES r4d_common 25 | CATKIN_DEPENDS geometry_msgs nav_msgs rcars_detector_msgs std_msgs rosbag tf eigen_conversions gtsam_catkin 26 | DEPENDS OpenCV Eigen3 27 | ) 28 | 29 | include_directories( 30 | include 31 | ${OpenCV_INCLUDE_DIRS} 32 | ${catkin_INCLUDE_DIRS} 33 | ) 34 | -------------------------------------------------------------------------------- /r4d_common/include/r4d_common/BagRunner.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file BagRunner.h 3 | * @brief Templated helper class to load messages from bags 4 | */ 5 | 6 | #include 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | namespace fiducial_slam { 17 | 18 | template 19 | struct BagRunner { 20 | BagRunner (ros::NodeHandle& nh, ros::NodeHandle& nh_p) : 21 | nh_(nh), 22 | nh_p_(nh_p), 23 | localizer_(nh_, nh_p_) { 24 | std::string bag_fname("/Users/niko/rcars_table_markers.bag"); 25 | nh_p_.param("bag_fname", bag_fname, bag_fname); 26 | 27 | ROS_WARN_STREAM("Opening bag " << bag_fname); 28 | bag_.open(bag_fname, rosbag::bagmode::Read); 29 | } 30 | 31 | void run() { 32 | rosbag::View view(bag_); 33 | ros::Rate r(10); 34 | 35 | for (auto const &msg : view) { 36 | if (msg.getTopic() == "/cam0/camera_info") { 37 | auto caminfo_msg = msg.instantiate(); 38 | localizer_.image_info_cb(caminfo_msg); 39 | } else if (msg.getTopic() == "/rcars/detector/tags") { 40 | auto tags_msg = msg.instantiate(); 41 | 
localizer_.tags_cb(tags_msg); 42 | } else if (msg.getTopic() == "/vicon/auk/auk") { 43 | auto gtruth_msg = msg.instantiate(); 44 | localizer_.ground_truth_cb(gtruth_msg); 45 | } else if (msg.getTopic() == "/rcars/estimator/filterPose") { 46 | auto odom_msg = msg.instantiate(); 47 | localizer_.filter_output_cb(odom_msg); 48 | } else if (msg.getTopic() == "/imu0") { 49 | auto imu_msg = msg.instantiate(); 50 | localizer_.imu_cb(imu_msg); 51 | } 52 | 53 | ros::spinOnce(); 54 | if (!ros::ok()) { 55 | break; 56 | } 57 | } 58 | 59 | bag_.close(); 60 | } 61 | 62 | ros::NodeHandle nh_, nh_p_; 63 | Localizer localizer_; 64 | rosbag::Bag bag_; 65 | }; 66 | 67 | } -------------------------------------------------------------------------------- /r4d_common/include/r4d_common/MarkerFactor.h: -------------------------------------------------------------------------------- 1 | /** 2 | * @file MarkerFactor.h 3 | * @brief Factors for square fiducial marker observation 4 | */ 5 | 6 | #pragma once 7 | 8 | #include 9 | #include 10 | #include 11 | 12 | namespace fiducial_slam { 13 | using MARKER_POSE = gtsam::Pose3; 14 | 15 | /* 16 | * Factor for a constraint derived when observing a square fiducial marker. 17 | */ 18 | class MarkerFactor : public gtsam::NoiseModelFactor2 { 19 | std::array corners_; 20 | boost::shared_ptr K_; 21 | double half_tag_size_; 22 | gtsam::Pose3 body_P_sensor_; 23 | 24 | public: 25 | typedef gtsam::NoiseModelFactor2 Base; 26 | typedef fiducial_slam::MarkerFactor This; 27 | typedef boost::shared_ptr shared_ptr; 28 | 29 | /* 30 | * Constructor 31 | * @param corners are the image positions of the corner of the observed marker 32 | * @param model is the noise model 33 | * @param K shared pointer to the constant camera calibration 34 | * @param tag_size edge length of the marker 35 | */ 36 | MarkerFactor(const std::array &corners, 37 | const gtsam::SharedNoiseModel &model, 38 | gtsam::Key cam_key, 39 | gtsam::Key marker_key, 40 | const boost::shared_ptr &K, 41 | double tag_size, 42 | gtsam::Pose3 body_P_sensor = gtsam::Pose3()) : 43 | Base(model, cam_key, marker_key), 44 | corners_(corners), 45 | K_(K), 46 | half_tag_size_(0.5 * tag_size), 47 | body_P_sensor_(body_P_sensor) { } 48 | 49 | virtual ~MarkerFactor() { } 50 | 51 | virtual gtsam::NonlinearFactor::shared_ptr clone() const { 52 | return boost::static_pointer_cast( 53 | gtsam::NonlinearFactor::shared_ptr(new This(*this)) 54 | ); 55 | } 56 | 57 | /// Evaluate error h(x)-z and optionally derivatives 58 | gtsam::Vector evaluateError(const gtsam::Pose3 &body_pose, 59 | const MARKER_POSE &marker_pose, 60 | boost::optional H1 = boost::none, 61 | boost::optional H2 = boost::none) const { 62 | // Jacobian d(camera in world)/d(body in world) 63 | gtsam::Matrix Hbody; 64 | auto cam_pose = body_pose.compose(body_P_sensor_, Hbody); 65 | 66 | gtsam::PinholeCamera camera (cam_pose, *K_); 67 | 68 | // Jacobians have one row for each component of the residual vector 69 | if (H1) { 70 | *H1 = Eigen::Matrix::Zero(); 71 | } 72 | if (H2) { 73 | *H2 = Eigen::Matrix::Zero(); 74 | } 75 | 76 | gtsam::Matrix Hcorner, Hcamera; 77 | 78 | gtsam::Matrix residuals; 79 | residuals = Eigen::Matrix::Ones() * 2.0 * K_->fx(); 80 | 81 | for (int i = 0; i < 4; i++) { 82 | // Jacobian d(point in world) / d(marker pose) 83 | gtsam::Matrix H0; 84 | // Coordinates of the current corner in the marker frame 85 | gtsam::Point3 marker_corner(i % 2 == 1 ? half_tag_size_ : -half_tag_size_, 86 | i < 2 ? 
half_tag_size_ : -half_tag_size_, 87 | 0); 88 | // Use the marker pose to transform into world coordinates 89 | auto world_corner = marker_pose.transform_from(marker_corner, H0); 90 | 91 | try { 92 | gtsam::Point2 reproj_error( 93 | camera.project(world_corner, Hcamera, Hcorner, boost::none) - corners_[i]); 94 | 95 | // Jacobian w.r.t. body ( dres/dcam * dcam/dbody) 96 | if (H1) { 97 | (*H1).block<2, 6>(2 * i, 0) = Hcamera * Hbody; 98 | } 99 | 100 | // Jacobian w.r.t. marker pose 101 | if (H2) { 102 | // Chain Jacobians of marker to corner transformation and camera projection 103 | (*H2).block<2, 6>(2 * i, 0) = Hcorner * H0; 104 | } 105 | 106 | // Copy in the right block of the complete residual vector 107 | residuals.block<2, 1>(2 * i, 0) = reproj_error.vector(); 108 | } catch (gtsam::CheiralityException &e) { 109 | // One of the corners has moved behind the camera 110 | std::cout << "CExc\n"; 111 | } 112 | } 113 | 114 | return residuals; 115 | } 116 | }; 117 | } -------------------------------------------------------------------------------- /r4d_common/include/r4d_common/gtsam_opencv_interop.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file gtsam_opencv_interop.h 3 | * @brief Utility functions for GTSAM/OpenCV interop 4 | */ 5 | 6 | #pragma once 7 | 8 | #include 9 | 10 | #include 11 | #include 12 | 13 | #include 14 | 15 | namespace fiducial_slam { 16 | 17 | /// GTSAM-style camera calibration to OpenCV projection and distorsion matrices 18 | std::pair gtsam_calib_to_cv(const boost::shared_ptr K_) { 19 | cv::Mat proj; 20 | cv::eigen2cv(K_->K(), proj); 21 | cv::Mat dist; 22 | cv::eigen2cv(K_->k(), dist); 23 | return std::make_pair(proj, dist); 24 | }; 25 | 26 | } -------------------------------------------------------------------------------- /r4d_common/include/r4d_common/ros_gtsam_interop.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file ros_gtsam_interop.h 3 | * @brief Utility functions for GTSAM/ROS interop 4 | */ 5 | 6 | #pragma once 7 | 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | #include 16 | 17 | namespace fiducial_slam { 18 | 19 | /// Convert a ROS camera info message to a GTSAM calibration 20 | boost::shared_ptr caminfo_to_gtsam_calib(const sensor_msgs::CameraInfoConstPtr &m, 21 | const bool use_distortion = false) { 22 | /* rcars_detector uses the rectified image, so we should ignore the distorsion coefficients 23 | * in the CameraInfo message. 
24 | */ 25 | boost::shared_ptr<gtsam::Cal3DS2> K; 26 | if (use_distortion) { 27 | K.reset(new gtsam::Cal3DS2(m->K[0], m->K[4], 0, m->K[2], m->K[5], 28 | m->D[0], m->D[1], m->D[2], m->D[3])); 29 | } else { 30 | K.reset(new gtsam::Cal3DS2(m->K[0], m->K[4], 0, m->K[2], m->K[5], 31 | 0.0, 0.0, 0.0, 0.0)); 32 | } 33 | return K; 34 | } 35 | 36 | std::array<gtsam::Point2, 4> tag_to_gtsam_points(const rcars_detector_msgs::Tag &m) { 37 | std::array<gtsam::Point2, 4> corners; 38 | for (int i = 0; i < 4; ++i) { 39 | corners[i] = gtsam::Point2(m.corners[i].x, m.corners[i].y); 40 | } 41 | 42 | return corners; 43 | }; 44 | 45 | /// Convert a ROS geometry_msgs/Pose message to a GTSAM pose 46 | gtsam::Pose3 gmsgs_pose_to_gtsam(const geometry_msgs::Pose &p) { 47 | gtsam::Point3 point(p.position.x, p.position.y, p.position.z); 48 | auto rot = gtsam::Rot3::Quaternion(p.orientation.w, p.orientation.x, p.orientation.y, p.orientation.z); 49 | return gtsam::Pose3(rot, point); 50 | } 51 | 52 | geometry_msgs::Pose gtsam_pose_to_gmsgs(const gtsam::Pose3 &pose) { 53 | geometry_msgs::Pose p; 54 | tf::pointEigenToMsg(pose.translation().vector(), p.position); 55 | tf::quaternionEigenToMsg(pose.rotation().toQuaternion(), p.orientation); 56 | return p; 57 | } 58 | 59 | geometry_msgs::Vector3 gtsam_vector_to_gmsgs(const gtsam::Vector3 &vec) { 60 | geometry_msgs::Vector3 v; 61 | tf::vectorEigenToMsg(vec, v); 62 | return v; 63 | } 64 | 65 | /// Convert a ROS geometry_msgs/Transform message to a GTSAM pose 66 | gtsam::Pose3 gmsgs_transf_to_gtsam(const geometry_msgs::Transform &transf) { 67 | geometry_msgs::Pose pose; 68 | pose.orientation = transf.rotation; 69 | Eigen::Vector3d t; 70 | tf::vectorMsgToEigen(transf.translation, t); 71 | tf::pointEigenToMsg(t, pose.position); 72 | 73 | return gmsgs_pose_to_gtsam(pose); 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /r4d_common/include/r4d_common/ros_opencv_interop.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @file ros_opencv_interop.h 3 | * @brief Utility functions for ROS/OpenCV interop 4 | */ 5 | 6 | #pragma once 7 | 8 | #include <opencv2/opencv.hpp> 9 | #include <eigen_conversions/eigen_msg.h> 10 | #include <geometry_msgs/Pose.h> 11 | #include <rcars_detector_msgs/Tag.h> 12 | 13 | namespace fiducial_slam { 14 | using cv_corners_t = std::pair<std::vector<cv::Point2f>, std::vector<cv::Point3f>>; 15 | 16 | /// Convert a Tag message to OpenCV points ready for solvePnP 17 | cv_corners_t tag_to_cv_points(const rcars_detector_msgs::Tag &m, 18 | double tag_size) { 19 | std::vector<cv::Point2f> image_points; 20 | std::vector<cv::Point3f> world_points; 21 | 22 | const double half = 0.5 * tag_size; 23 | 24 | for (int i = 0; i < 4; ++i) { 25 | image_points.emplace_back(m.corners[i].x, m.corners[i].y); 26 | } 27 | 28 | world_points.emplace_back(-half, half, 0); // top left 29 | world_points.emplace_back( half, half, 0); // top right 30 | world_points.emplace_back(-half, -half, 0); // bottom left 31 | world_points.emplace_back( half, -half, 0); // bottom right 32 | 33 | auto ret = std::make_pair(image_points, world_points); 34 | return ret; 35 | } 36 | 37 | /// Find camera-marker pose using OpenCV 38 | geometry_msgs::Pose cv_marker_pose(cv_corners_t& corners, 39 | std::pair<cv::Mat, cv::Mat>& calibration) { 40 | auto& cv_proj = calibration.first; 41 | auto& cv_dist = calibration.second; 42 | 43 | cv::Mat cv_r, cv_t; 44 | cv::solvePnP(corners.second, corners.first, cv_proj, cv_dist, cv_r, cv_t); 45 | 46 | cv::Mat cv_r_mat; 47 | cv::Rodrigues(cv_r, cv_r_mat); 48 | 49 | Eigen::Vector3d translation; 50 | translation << cv_t.at<double>(0), cv_t.at<double>(1), cv_t.at<double>(2); 51 | Eigen::Map<Eigen::Matrix<double, 3, 3, Eigen::RowMajor>> cv_rot_mat_eig(cv_r_mat.ptr<double>(), 3, 3); 52 | 
Eigen::Quaterniond rotation(cv_rot_mat_eig); 53 | 54 | geometry_msgs::Pose result; 55 | tf::pointEigenToMsg(translation, result.position); 56 | tf::quaternionEigenToMsg(rotation, result.orientation); 57 | 58 | return result; 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /r4d_common/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | r4d_common 4 | 0.0.0 5 | The r4d_common package 6 | 7 | nicolov 8 | 9 | BSD 10 | 11 | catkin 12 | geometry_msgs 13 | nav_msgs 14 | rcars_detector_msgs 15 | std_msgs 16 | rosbag 17 | tf 18 | eigen_conversions 19 | gtsam_catkin 20 | geometry_msgs 21 | nav_msgs 22 | rcars_detector_msgs 23 | std_msgs 24 | rosbag 25 | tf 26 | eigen_conversions 27 | gtsam_catkin 28 | 29 | 30 | -------------------------------------------------------------------------------- /r4d_common/setup.py: -------------------------------------------------------------------------------- 1 | ## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD 2 | 3 | from distutils.core import setup 4 | from catkin_pkg.python_setup import generate_distutils_setup 5 | 6 | # fetch values from package.xml 7 | setup_args = generate_distutils_setup( 8 | packages=['r4d_common'], 9 | package_dir={'':'src'}) 10 | 11 | setup(**setup_args) -------------------------------------------------------------------------------- /r4d_common/src/r4d_common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nicolov/robotics_for_developers/25b7c97382ce0110040c854c5b0fc1bff3fe7a5e/r4d_common/src/r4d_common/__init__.py -------------------------------------------------------------------------------- /r4d_common/src/r4d_common/plot_tools.py: -------------------------------------------------------------------------------- 1 | import os 2 | import rosbag 3 | import pandas as pd 4 | from tf.transformations import euler_from_quaternion 5 | from itertools import groupby 6 | import matplotlib.pyplot as plt 7 | import pprint 8 | 9 | def get_data(s): 10 | data = {k: [[], []] for k in [x[0] for x in s]} 11 | 12 | for bag_file, by_bag_group in groupby(sorted(s, key=lambda x: x[1] + x[2]), lambda x: x[1]): 13 | by_bag_group = list(by_bag_group) 14 | bag = rosbag.Bag(os.path.expanduser(bag_file)) 15 | 16 | # for this bag, build a mapping topic name -> [spec, spec] 17 | mapping = {x[2]: [] for x in by_bag_group} 18 | for topic, by_topic_group in groupby(by_bag_group, key=lambda x: x[2]): 19 | by_topic_group = list(by_topic_group) 20 | for spec in by_topic_group: 21 | mapping[topic].append(spec) 22 | 23 | print "{}\n".format(bag_file) + "="*20 24 | pprint.pprint(mapping) 25 | print 26 | 27 | for topic, msg, t in bag.read_messages(mapping.keys()): 28 | for spec in mapping[topic]: 29 | label, _, _, plot_fn = spec 30 | if hasattr(msg, 'header'): 31 | data[label][0].append(msg.header.stamp.to_sec()) 32 | else: 33 | data[label][0].append(t) 34 | data[label][1].append(plot_fn(msg)) 35 | 36 | return pd.DataFrame.from_dict({k: pd.Series(v[1], index=v[0]) for k, v in data.items()}) 37 | 38 | def simple_plot(s): 39 | df = get_data(s) 40 | df.plot(marker='.') 41 | plt.show() 42 | 43 | # 44 | # A couple of utility functions to extract data from common types of messages 45 | 46 | odom_pos = lambda c: lambda m: getattr(m.pose.pose.position, c) 47 | odom_vel = lambda c: lambda m: getattr(m.twist.twist.linear, c) 48 | odom_quat = lambda c: lambda m: 
getattr(m.pose.pose.orientation, c) 49 | 50 | def odom_euler(i): 51 | def f(m): 52 | q = [getattr(m.pose.pose.orientation, c) for c in 'xyzw'] 53 | angles = euler_from_quaternion(q, axes='szyx') 54 | return angles[i] 55 | return f 56 | 57 | def markers_count(): 58 | def f(msg): 59 | return len(msg.tags) 60 | return f 61 | 62 | def has_marker(id): 63 | def f(msg): 64 | this_marker = [marker for marker in msg.tags if marker.id == id] 65 | return len(this_marker) 66 | return f 67 | 68 | def marker_pos(id, coord): 69 | def f(msg): 70 | this_marker = [marker for marker in msg.tags if marker.id == id] 71 | if this_marker: 72 | return getattr(this_marker[0].pose.position, coord) 73 | else: 74 | return None 75 | return f 76 | 77 | def marker_quat(id, coord): 78 | def f(msg): 79 | this_marker = [marker for marker in msg.tags if marker.id == id] 80 | if this_marker: 81 | return getattr(this_marker[0].pose.orientation, coord) 82 | else: 83 | return None 84 | return f 85 | 86 | acc_bias = lambda c: lambda m: getattr(m.linear_acceleration, c) 87 | gyro_bias = lambda c: lambda m: getattr(m.angular_velocity, c) -------------------------------------------------------------------------------- /rcars_detector_msgs/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(rcars_detector_msgs) 3 | 4 | #get_cmake_property(_variableNames VARIABLES) 5 | #foreach (_variableName ${_variableNames}) 6 | # message(STATUS "${_variableName}=${${_variableName}}") 7 | #endforeach() 8 | 9 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -O3 -march=native") 10 | 11 | set(${CMAKE_BUILD_TYPE} RELEASE) 12 | 13 | find_package(catkin REQUIRED COMPONENTS 14 | message_generation 15 | std_msgs 16 | geometry_msgs 17 | ) 18 | 19 | add_message_files(FILES 20 | Tag.msg 21 | TagArray.msg 22 | TagPoses.msg 23 | ) 24 | 25 | generate_messages( 26 | DEPENDENCIES 27 | std_msgs 28 | geometry_msgs 29 | ) 30 | 31 | catkin_package( 32 | INCLUDE_DIRS 33 | CATKIN_DEPENDS geometry_msgs 34 | CATKIN_DEPENDS message_runtime 35 | ) 36 | 37 | 38 | include_directories( 39 | ${catkin_INCLUDE_DIRS} 40 | ) 41 | -------------------------------------------------------------------------------- /rcars_detector_msgs/README.md: -------------------------------------------------------------------------------- 1 | rcars_detector_msgs 2 | =================== 3 | 4 | Message definitions extracted from the [RCARS project](https://bitbucket.org/adrlab/rcars/wiki/Home). 
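5 | 
6 | For reference, a minimal sketch of consuming these messages from a ROS node. The `/rcars/detector/tags` topic name matches the one read back in `r4d_common/BagRunner.h`; the node and callback names below are only illustrative and not part of this package:
7 | 
8 | ```python
9 | import rospy
10 | from rcars_detector_msgs.msg import TagArray
11 | 
12 | 
13 | def tags_cb(msg):
14 |     # Each Tag carries its id, the four corner pixels, and a pose estimate.
15 |     for tag in msg.tags:
16 |         corners = [(c.x, c.y) for c in tag.corners]
17 |         rospy.loginfo("tag %d seen at %s", tag.id, corners)
18 | 
19 | 
20 | if __name__ == '__main__':
21 |     rospy.init_node('tag_listener')
22 |     rospy.Subscriber('/rcars/detector/tags', TagArray, tags_cb)
23 |     rospy.spin()
24 | ```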
-------------------------------------------------------------------------------- /rcars_detector_msgs/msg/Tag.msg: -------------------------------------------------------------------------------- 1 | # Tag Id 2 | uint8 id 3 | 4 | # Corner indices 5 | uint8 TOP_LEFT = 0 6 | uint8 TOP_RIGHT = 1 7 | uint8 BOTTOM_LEFT = 2 8 | uint8 BOTTOM_RIGHT = 3 9 | 10 | # Tag corners in image coordinates 11 | geometry_msgs/Point[4] corners 12 | 13 | # Tag pose in camera frame 14 | geometry_msgs/Pose pose -------------------------------------------------------------------------------- /rcars_detector_msgs/msg/TagArray.msg: -------------------------------------------------------------------------------- 1 | # timestamp / sequence, usually copied over from corresponding camera image 2 | Header header 3 | 4 | # vector of all detected tags 5 | Tag[] tags 6 | -------------------------------------------------------------------------------- /rcars_detector_msgs/msg/TagPoses.msg: -------------------------------------------------------------------------------- 1 | Header header 2 | 3 | int64[] tagIds 4 | geometry_msgs/Pose[] poses -------------------------------------------------------------------------------- /rcars_detector_msgs/package.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0"?> 2 | <package> 3 | <name>rcars_detector_msgs</name> 4 | <version>0.0.1</version> 5 | <description>Package for detecting AprilTags in an image</description> 6 | 7 | 8 | 9 | 10 | <maintainer>Michael Neunert</maintainer> 11 | 12 | 13 | 14 | 15 | 16 | <license>TODO</license> 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | <author>Michael Neunert</author> 30 | 31 | 32 | 33 | 34 | 35 | 36 | <buildtool_depend>catkin</buildtool_depend> 37 | 38 | <build_depend>message_generation</build_depend> 39 | <build_depend>sensor_msgs</build_depend> 40 | <build_depend>std_msgs</build_depend> 41 | <build_depend>geometry_msgs</build_depend> 42 | 43 | <run_depend>message_runtime</run_depend> 44 | <run_depend>sensor_msgs</run_depend> 45 | <run_depend>std_msgs</run_depend> 46 | <run_depend>geometry_msgs</run_depend> 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | </package> --------------------------------------------------------------------------------