├── dataset
├── PI_3030.png
├── PI_3058.png
├── PI_S1_R1.png
├── PI_S1_X1.png
├── PI_S1_X2.png
├── PI_S1_Y1.png
├── PI_S1_Y2.png
├── PI_S1_Z1.png
├── PI_S1_Z2.png
└── robot-arm-capture-setup.png
├── package.xml
├── launch
├── display.launch
├── slam.launch
├── depth_camera.launch
└── raw_camera.launch
├── CMakeLists.txt
├── init
├── pirvs_ros_init.py
├── convert_rect_to_yaml.py
└── convert_raw_to_yaml.py
├── README.md
├── src
├── stereo_camera_node.cpp
├── depth_camera_node.cpp
└── slam_node.cpp
└── rviz
└── ironsides.rviz
/dataset/PI_3030.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_3030.png
--------------------------------------------------------------------------------
/dataset/PI_3058.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_3058.png
--------------------------------------------------------------------------------
/dataset/PI_S1_R1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_R1.png
--------------------------------------------------------------------------------
/dataset/PI_S1_X1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_X1.png
--------------------------------------------------------------------------------
/dataset/PI_S1_X2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_X2.png
--------------------------------------------------------------------------------
/dataset/PI_S1_Y1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_Y1.png
--------------------------------------------------------------------------------
/dataset/PI_S1_Y2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_Y2.png
--------------------------------------------------------------------------------
/dataset/PI_S1_Z1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_Z1.png
--------------------------------------------------------------------------------
/dataset/PI_S1_Z2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/PI_S1_Z2.png
--------------------------------------------------------------------------------
/dataset/robot-arm-capture-setup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PerceptIn/IRONSIDES/HEAD/dataset/robot-arm-capture-setup.png
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | pirvs_ros
4 | 0.0.0
5 | The pirvs_ros package
6 |
7 | frozen
8 | EULA
9 | catkin
10 | roscpp
11 | rospy
12 | std_msgs
13 | image_transport
14 |
15 | roscpp
16 | rospy
17 | std_msgs
18 | image_transport
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/launch/display.launch:
--------------------------------------------------------------------------------
1 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/launch/slam.launch:
--------------------------------------------------------------------------------
1 |
24 |
25 |
26 |
27 |
28 |
29 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/launch/depth_camera.launch:
--------------------------------------------------------------------------------
1 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/launch/raw_camera.launch:
--------------------------------------------------------------------------------
1 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 3.0.2)
project(pirvs_ros)

## Compile as C++11, supported in ROS Kinetic and newer.
add_compile_options(-std=c++11)

find_package(catkin REQUIRED COMPONENTS
  roscpp
  rospy
  std_msgs
  tf2_ros
  image_transport
  cv_bridge
  camera_calibration_parsers
)

### 3rd party ###
# OpenCV: OpenCV_LIBS holds full library paths, so it is linked directly to
# the node targets below.  (The old file passed ${OpenCV_LIBS} to
# link_directories(), which expects directories, and never actually linked
# OpenCV to anything.)
find_package(OpenCV REQUIRED)

# Portable thread dependency instead of a hard-coded -lpthread.
find_package(Threads REQUIRED)

# libusb-1.0 and GTK2, both located via pkg-config.
# (One find_package(PkgConfig) call; the old file called it twice.)
find_package(PkgConfig REQUIRED)
pkg_check_modules(LIBUSB REQUIRED libusb-1.0)
pkg_check_modules(GTK2 REQUIRED gtk+-2.0)

catkin_package(
# INCLUDE_DIRS include
# LIBRARIES pirvs_ros
# CATKIN_DEPENDS roscpp rospy std_msgs
# DEPENDS system_lib
)

# Closed-source PerceptIn PIRVS SDK wrapped as an INTERFACE target: every
# consumer inherits its include paths, compile options and link dependencies.
add_library(pirvs INTERFACE)
target_include_directories(pirvs INTERFACE
  ${CMAKE_CURRENT_SOURCE_DIR}/pirvs/include
  ${LIBUSB_INCLUDE_DIRS}
  ${GTK2_INCLUDE_DIRS}
)
# GTK extra flags plus OpenMP (libPerceptInPIRVS.a needs the OpenMP runtime).
# -fPIC was dropped: it is unnecessary when building plain executables.
target_compile_options(pirvs INTERFACE ${GTK2_CFLAGS_OTHER} -fopenmp)
target_link_libraries(pirvs INTERFACE
  ${CMAKE_CURRENT_SOURCE_DIR}/pirvs/lib/libPerceptInPIRVS.a
  ${LIBUSB_LIBRARIES}
  ${GTK2_LIBRARIES}
  Threads::Threads
  ${CMAKE_DL_LIBS}
  png
  z
  jpeg
  -fopenmp
)

# The three ROS nodes share identical include and link requirements.
foreach(node stereo_camera_node depth_camera_node slam_node)
  add_executable(${node} src/${node}.cpp)
  target_include_directories(${node} PRIVATE ${catkin_INCLUDE_DIRS})
  target_link_libraries(${node} ${catkin_LIBRARIES} ${OpenCV_LIBS} pirvs)
endforeach()
--------------------------------------------------------------------------------
/init/pirvs_ros_init.py:
--------------------------------------------------------------------------------
import json, yaml, sys, os, shutil
import convert_raw_to_yaml as convert_raw
import convert_rect_to_yaml as convert_rect

if __name__ == "__main__":

    # Usage: pirvs_ros_init.py [SDK folder] [calib_raw JSON] [calib_rectified JSON]
    # Three arguments are required (argv[1..3]).  The original test was
    # `len(sys.argv) < 3`, which let a missing third argument through and
    # then crashed on sys.argv[3]; it also never exited after the message.
    if len(sys.argv) < 4:
        print("Not enough input arguments.")
        print("[SDK folder] [calib_raw JSON] [calib_rectified JSON]")
        sys.exit(1)

    sdk_folder = sys.argv[1]
    calib_raw = sys.argv[2]
    calib_rectified = sys.argv[3]

    # Import the PIRVS SDK (libraries, headers, vocabulary) into ../pirvs.
    sdk_path = '../pirvs'
    if not os.path.exists(sdk_path):
        os.makedirs(sdk_path)
        print("Creating " + sdk_path)
        shutil.copytree(sdk_folder + '/lib', '../pirvs/lib')
        shutil.copytree(sdk_folder + '/include', '../pirvs/include')
        shutil.copyfile(sdk_folder + '/voc_ironsides.json', '../pirvs/voc_ironsides.json')
    else:
        # Refuse to overwrite an existing SDK import; exit non-zero on error
        # (the original sys.exit() reported success status 0).
        print("Error: Please remove pirvs folder.")
        sys.exit(1)

    calib_path = '../pirvs/calib'
    if not os.path.exists(calib_path):
        os.makedirs(calib_path)
    else:
        print("Error: Please remove pirvs/calib folder.")
        sys.exit(1)

    # Raw calibration: copy the JSON and emit the per-sensor YAML files.
    shutil.copyfile(calib_raw, calib_path + '/calib_PerceptIn_Ironsides.json')
    with open(calib_raw) as ff:
        data = json.load(ff)
    print("Read from file: " + calib_raw)
    convert_raw.CreateCalib4LeftCam(data, calib_path + "/calib_PerceptIn_Ironsides_left_raw.yaml")
    convert_raw.CreateCalib4RightCam(data, calib_path + "/calib_PerceptIn_Ironsides_right_raw.yaml")
    convert_raw.CreateCalib4IMU(data, calib_path + "/calib_PerceptIn_Ironsides_imu.yaml")
    convert_raw.CreateCalib4Extrinsic(data, calib_path + "/calib_PerceptIn_Ironsides_extrinsic.yaml")

    # Rectified calibration: same procedure with the rectified converter.
    shutil.copyfile(calib_rectified, calib_path + '/calib_PerceptIn_Ironsides_rectified.json')
    with open(calib_rectified) as ff:
        data = json.load(ff)
    print("Read from file: " + calib_rectified)
    convert_rect.CreateCalib4LeftCam(data, calib_path + "/calib_PerceptIn_Ironsides_rectified_left_raw.yaml")
    convert_rect.CreateCalib4RightCam(data, calib_path + "/calib_PerceptIn_Ironsides_rectified_right_raw.yaml")
    convert_rect.CreateCalib4IMU(data, calib_path + "/calib_PerceptIn_Ironsides_rectified_imu.yaml")
    convert_rect.CreateCalib4Extrinsic(data, calib_path + "/calib_PerceptIn_Ironsides_rectified_extrinsic.yaml")
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Ironsides - ROS Integration
2 |
3 | This package lets you use the Ironsides visual inertial computing module with ROS.
4 |
5 | ## Getting started
6 |
7 | - Install ROS (Kinetic)
8 | - http://wiki.ros.org/kinetic/Installation/Ubuntu
9 | - Create a ROS Workspace
10 | - mkdir -p ~/catkin_ws/src
11 | - cd ~/catkin_ws/
12 | - catkin_make
13 | - source ~/catkin_ws/devel/setup.bash
14 | - Import and make our code
15 | - place pirvs_ros folder in ~/catkin_ws/src
16 | - run init python script to import PIRVS SDK
17 | - cd ~/catkin_ws/src/pirvs_ros/init
18 | - python pirvs_ros_init.py [PIRVS SDK folder] [calib_raw JSON] [calib_rectified JSON]
19 | - cd ~/catkin_ws
20 | - catkin_make
21 |
22 | ## Quick Launch
23 | - Try Stereo_camera_node
24 | - switch Ironsides to Raw Mode
25 | - roslaunch pirvs_ros raw_camera.launch
26 | - Try Depth_node
27 | - switch Ironsides to Depth Mode
28 | - roslaunch pirvs_ros depth_camera.launch
29 | - Try Slam_node
30 | - switch Ironsides to Raw Mode
31 | - roslaunch pirvs_ros display.launch
32 |
33 | ### Test Stereo_camera_node
34 | - Published topic:
35 | - StereoImage/left: left image
36 | - StereoImage/right: right image
37 | - IMU/data_raw: IMU
38 | - StereoImage/left/cameraInfo: left cam calibration
39 | - StereoImage/right/cameraInfo: right cam calibration
40 | - Run Stereo_camera_node:
41 | - rosrun pirvs_ros stereo_camera_node [cam_calib_left.yaml] [cam_calib_right.yaml]
42 | - Check image:
43 | - rosrun image_view image_view image:=[topic name]
44 | - e.g., rosrun image_view image_view image:=/pirvs_cam_raw/StereoImage/left
45 | - Check IMU,cameraInfo:
46 | - rostopic echo [topic name]
47 | - e.g., rostopic echo IMU/data_raw
48 |
49 | ### Test Depth_camera_node
50 | - Published topic:
51 | - StereoImage/rect_left: rectified left image
52 | - StereoImage/rect_right: rectified right image
53 | - StereoImage/depth: depth image
54 | - Run Depth_camera_node:
55 | - rosrun pirvs_ros depth_camera_node
56 | - Check image:
57 | - rosrun image_view image_view image:=[topic name]
58 | - e.g., rosrun image_view image_view image:=/pirvs_cam_depth/StereoImage/rect_left
59 |
60 | ### Test Slam_node
61 | - Published topic:
62 | - published topics:
63 | - StereoImage/left: left image
64 | - StereoImage/right: right image
65 | - Pose: camera pose
66 | - odom: odometry information for rviz
67 | - Run slam_node:
68 | - rosrun pirvs_ros slam_node [calib.json] [vocab.json]
69 | - Check image:
70 | - rosrun image_view image_view image:=[topic name]
71 | - e.g., rosrun image_view image_view image:=/pirvs_slam/StereoImage/left
72 | - Check Pose:
73 | - rostopic echo [topic name]
74 | - e.g., rostopic echo Pose
75 | - Check Pose using RVIZ:
76 | - rosrun rviz rviz
77 | - add odometry, listen topic "odom"
78 | - set the Fixed Frame (under Global Options) to the frame referenced by the odom messages
79 |
80 | For more information visit [PerceptIn ROS Wiki](https://wiki.ros.org/perceptin)
81 |
--------------------------------------------------------------------------------
/init/convert_rect_to_yaml.py:
--------------------------------------------------------------------------------
1 | import json, yaml, sys
# Create the extrinsic calibration YAML (left_T_right, imu_T_left, root_T_imu).
def CreateCalib4Extrinsic(data, output):
    """Extract the three 4x4 extrinsic transforms from the calibration JSON
    tree and write them to `output` as YAML.

    data: parsed calibration JSON (nested children/child nodes, each
          carrying a 4x4 "parent_T_this" matrix stored as strings).
    output: path of the YAML file to create.
    """
    def _mat44(p_T_t):
        # Flatten the first four rows of a 4x4 matrix-of-strings into a
        # row-major float list (same layout the original produced).
        return {"rows": 4,
                "cols": 4,
                "data": [float(i) for row in p_T_t[:4] for i in row]}

    # Tree levels: root -> IMU -> left camera -> right camera, identified by
    # which transform each level's parent_T_this represents below.
    imu_node = data["children"]["child"]
    left_node = imu_node["children"]["child"]
    right_node = left_node["children"]["child"]
    calib_extrinsic = {"left_T_right": _mat44(right_node["parent_T_this"]),
                       "imu_T_left": _mat44(left_node["parent_T_this"]),
                       "root_T_imu": _mat44(imu_node["parent_T_this"])}
    with open(output, 'w') as yaml_file:
        yaml.safe_dump(calib_extrinsic, yaml_file, default_flow_style=False)
    # print(...) works under both Python 2 and 3; the original bare print
    # statement is a SyntaxError under Python 3.
    print(" ---created " + output)
34 |
# Create the IMU calibration YAML (accel/gyro bias vectors and TK matrices).
def CreateCalib4IMU(data, output):
    """Write accelerometer and gyroscope calibration from the JSON to YAML.

    data: parsed calibration JSON; the IMU model lives under
          data["children"]["child"]["model"].
    output: path of the YAML file to create.
    """
    model = data["children"]["child"]["model"]

    def _sensor(entry):
        # bias: 3-vector of floats; TK: 3x3 matrix flattened row-major.
        return {"bias": [float(i) for i in entry["bias"]],
                "TK": [float(i) for row in entry["TK"][:3] for i in row]}

    calib_IMU = {"accel": _sensor(model["accel"]),
                 "gyro": _sensor(model["gyro"])}
    with open(output, 'w') as yaml_file:
        yaml.safe_dump(calib_IMU, yaml_file, default_flow_style=False)
    # print(...) is Python 2/3 compatible, unlike the original print statement.
    print(" ---created " + output)
53 |
# Create the left camera calibration YAML.
def CreateCalib4LeftCam(data, output):
    """Write the left camera intrinsics to YAML: image size, the 3x3
    camera matrix K, and a 3x4 projection matrix [K | 0].

    data: parsed calibration JSON; the left camera model lives two
          children deep in the tree.
    output: path of the YAML file to create.
    """
    model = data["children"]["child"]["children"]["child"]["model"]
    K = model["camera_matrix"]
    camera_matrix = {"rows": 3,
                     "cols": 3,
                     "data": [float(i) for row in K[:3] for i in row]}
    # Projection rows are the rows of K each padded with a trailing 0
    # (zero translation), as in the original.
    projection_matrix = {"rows": 3,
                         "cols": 4,
                         "data": [v for row in K[:3]
                                  for v in [float(i) for i in row] + [0]]}
    calib_left = {"image_width": int(model["image_size"]["width"]),
                  "image_height": int(model["image_size"]["height"]),
                  "camera_name": "stereo_left",
                  "camera_matrix": camera_matrix,
                  "projection_matrix": projection_matrix}
    with open(output, 'w') as yaml_file:
        yaml.safe_dump(calib_left, yaml_file, default_flow_style=False)
    # Parenthesized print: Python 2/3 compatible (original was py2-only).
    print(" ---created " + output)
77 |
# Create the right camera calibration YAML.
def CreateCalib4RightCam(data, output):
    """Write the right camera intrinsics to YAML: image size, the 3x3
    camera matrix K, and a 3x4 projection matrix [K | 0].

    data: parsed calibration JSON; the right camera model lives three
          children deep in the tree (one level below the left camera).
    output: path of the YAML file to create.
    """
    model = data["children"]["child"]["children"]["child"]["children"]["child"]["model"]
    K = model["camera_matrix"]
    camera_matrix = {"rows": 3,
                     "cols": 3,
                     "data": [float(i) for row in K[:3] for i in row]}
    # Projection rows are the rows of K each padded with a trailing 0
    # (zero translation), as in the original.
    projection_matrix = {"rows": 3,
                         "cols": 4,
                         "data": [v for row in K[:3]
                                  for v in [float(i) for i in row] + [0]]}
    calib_right = {"image_width": int(model["image_size"]["width"]),
                   "image_height": int(model["image_size"]["height"]),
                   "camera_name": "stereo_right",
                   "camera_matrix": camera_matrix,
                   "projection_matrix": projection_matrix}
    with open(output, 'w') as yaml_file:
        yaml.safe_dump(calib_right, yaml_file, default_flow_style=False)
    # Parenthesized print: Python 2/3 compatible (original was py2-only).
    print(" ---created " + output)
if __name__ == "__main__":
    # Usage: convert_rect_to_yaml.py [calib_input JSON] [calib_output YAML]
    if len(sys.argv) < 3:
        print("Not enough input arguments.")
        print("[calib_input JSON] [calib_output YAML]")
        # The original printed the usage but fell through and crashed on
        # sys.argv[1]; exit explicitly instead.
        sys.exit(1)
    calib_file = sys.argv[1]
    output_file = sys.argv[2]
    with open(calib_file) as ff:
        data = json.load(ff)
    print("Read from file: " + calib_file)
    base = output_file[:-5]  # strip the trailing ".yaml"
    CreateCalib4LeftCam(data, base + "_left_raw.yaml")
    CreateCalib4RightCam(data, base + "_right_raw.yaml")
    CreateCalib4IMU(data, base + "_imu.yaml")
    CreateCalib4Extrinsic(data, base + "_extrinsic.yaml")
--------------------------------------------------------------------------------
/src/stereo_camera_node.cpp:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 PerceptIn
3 | *
4 | * This End-User License Agreement (EULA) is a legal agreement between you
5 | * (the purchaser) and PerceptIn regarding your use of
6 | * PerceptIn Robotics Vision System (PIRVS), including PIRVS SDK and
7 | * associated documentation (the "Software").
8 | *
9 | * IF YOU DO NOT AGREE TO ALL OF THE TERMS OF THIS EULA, DO NOT INSTALL,
10 | * USE OR COPY THE SOFTWARE.
11 | *
12 | * :+++++; ++'
13 | * ++++++++; ++'
14 | * ++` .++ .'''''` '''', :': `''''', ;''': '''''''`++' '` ''
15 | * ++` ++ ++''''.;+'''++, ;++'++, ++'''',,+'''++:+''++''.++',+' ++
16 | * ++` :+++ ++ ;+. ++ ++ ++ ++ ,+, ++ ++ ++',+++ ++
17 | * +++++++` ++ ;+. .++ ++ ++ ,+, .++ ++ ++',+++. ++
18 | * +++:` +++++ ;+.;++' ++ +++++ ,+++++; ++ ++',+,++ ++
19 | * ++` ++``` ;+`++ ++ ++``` ,+'. ++ ++',+,,+'++
20 | * ++` ++ ;+.,++ ++ ++ ,+, ++ ++',+, ++++
21 | * ++` ++ ;+. :++ ++. ,++ ++ ,+, ++ ++',+, +++
22 | * '+` ++''''.:+. ;++ '+'+' ++'''',,+, '+ ++',+, ;+'
23 | *
24 | */
25 |
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 | #include
34 | #include
35 | #include
36 | #include
37 |
38 | #define Q_SIZE_STEREOIMAGE 1
39 | #define Q_SIZE_CAMERAINFO 1
40 | #define Q_SIZE_IMU 1000
41 |
42 | std::shared_ptr gDevice = NULL;
43 |
44 | /**
45 |  * Gracefully exit when CTRL-C is hit: stop the PIRVS device, then terminate.
46 |  */
47 |
48 | void exit_handler(int s) {
49 | if (gDevice != NULL) {
50 | gDevice->StopDevice();  // release the camera before the process dies
51 | }
52 | ros::shutdown;  // NOTE(review): missing (): this evaluates the function address and does nothing; should be ros::shutdown();
53 | exit(1);  // NOTE(review): exits with failure status even on a clean CTRL-C shutdown
54 | }
55 | /**
56 |  * Create IMU Message: sets the stamp and frame_id "IMU", then copies angular velocity and linear acceleration from the PIRVS sample. Orientation is left untouched (not set here).
57 |  */
58 | void CreateIMUMsg(sensor_msgs::Imu *imu_msg,
59 | std::shared_ptr imu_data,  // NOTE(review): shared_ptr template argument was lost in extraction -- confirm the PIRVS IMU sample type against the SDK header
60 | ros::Time timestamp) {
61 | if (!imu_data) {  // null sample: leave imu_msg unchanged
62 | return;
63 | }
64 | imu_msg->header.stamp = timestamp;
65 | imu_msg->header.frame_id = "IMU";
66 | imu_msg->angular_velocity.x = imu_data->ang_v.x;
67 | imu_msg->angular_velocity.y = imu_data->ang_v.y;
68 | imu_msg->angular_velocity.z = imu_data->ang_v.z;
69 | imu_msg->linear_acceleration.x = imu_data->accel.x;
70 | imu_msg->linear_acceleration.y = imu_data->accel.y;
71 | imu_msg->linear_acceleration.z = imu_data->accel.z;
72 | }
73 | /**
74 |  * Create Image Message: wraps a cv::Mat as a ROS image with the given frame_id and timestamp, encoded as BGR8 (callers build CV_8UC3 mats).
75 |  */
76 | void CreateImageMsg(sensor_msgs::ImagePtr *image_msg, cv::Mat cv_image,
77 | std::string frame_id, ros::Time timestamp) {
78 | std_msgs::Header image_header;
79 | image_header.stamp = timestamp;
80 | image_header.frame_id = frame_id;
81 | *image_msg = cv_bridge::CvImage(image_header,
82 | sensor_msgs::image_encodings::BGR8, cv_image)  // BGR8 matches the 3-channel mats built in main
83 | .toImageMsg();
84 | }
85 | int main(int argc, char **argv) {
86 | // install SIGNAL handler
87 |
88 | if (argc < 3) {  // need two YAML paths: left and right camera calibration
89 | ROS_ERROR(
90 | "Not enough input argument.\n"
91 | "Usage:\n%s [calib_left YAML] [calib_right YAML]\n",
92 | argv[0]);
93 | return -1;
94 | }
95 | const std::string file_calib_left(argv[1]);
96 | const std::string file_calib_right(argv[2]);
97 |
98 | struct sigaction sigIntHandler;  // route CTRL-C to exit_handler so the device is stopped
99 | sigIntHandler.sa_handler = exit_handler;
100 | sigemptyset(&sigIntHandler.sa_mask);
101 | sigIntHandler.sa_flags = 0;
102 | sigaction(SIGINT, &sigIntHandler, NULL);
103 | if (!PIRVS::CreatePerceptInDevice(&gDevice, PIRVS::PerceptInDevice::RAW_MODE,
104 | false) ||
105 | !gDevice) {
106 | ROS_ERROR("Failed to create device.\n");  // NOTE(review): ROS_ERROR used before ros::init below -- verify logging works this early
107 | return -1;
108 | }
109 | if (!gDevice->StartDevice()) {
110 | ROS_ERROR("Failed to start device.\n");
111 | return -1;
112 | }
113 | // create Publisher and set topic names.
114 | ros::init(argc, argv, "ImagePublisher", ros::init_options::NoSigintHandler);
115 | ros::NodeHandle nh;
116 | image_transport::ImageTransport it(nh);
117 | image_transport::Publisher pub_left =
118 | it.advertise("StereoImage/left", Q_SIZE_STEREOIMAGE);
119 | image_transport::Publisher pub_right =
120 | it.advertise("StereoImage/right", Q_SIZE_STEREOIMAGE);
121 | ros::Publisher pub_imu =
122 | nh.advertise("IMU/data_raw", Q_SIZE_IMU);  // NOTE(review): advertise's message-type template argument (presumably sensor_msgs::Imu) was lost in extraction
123 | ros::Publisher pub_left_cam_info;
124 | pub_left_cam_info = nh.advertise(
125 | "StereoImage/left/camera_info", Q_SIZE_CAMERAINFO);
126 | sensor_msgs::CameraInfo camera_calib_msg_left;
127 | std::string cam_name_left;
128 | if (!camera_calibration_parsers::readCalibration(
129 | file_calib_left, cam_name_left, camera_calib_msg_left)) {
130 | ROS_ERROR("Error while loading left camera calib infor.");
131 | return -1;
132 | }
133 |
134 | ros::Publisher pub_right_cam_info;
135 | pub_right_cam_info = nh.advertise(
136 | "StereoImage/right/camera_info", Q_SIZE_CAMERAINFO);
137 | sensor_msgs::CameraInfo camera_calib_msg_right;
138 | std::string cam_name_right;
139 | if (!camera_calibration_parsers::readCalibration(
140 | file_calib_right, cam_name_right, camera_calib_msg_right)) {
141 | ROS_ERROR("Error while loading right camera calib infor.");
142 | return -1;
143 | }
144 |
145 | bool stereo_data_available = false;  // NOTE(review): never read or written below -- dead variable
146 | while (ros::ok) {  // NOTE(review): ros::ok without () is the function address, always truthy; should be ros::ok() so the loop can exit
147 | std::shared_ptr data;
148 | if (!gDevice->GetData(&data)) {  // no sample ready; poll again
149 | continue;
150 | }
151 | std::shared_ptr stereo_data =
152 | std::dynamic_pointer_cast(data);  // NOTE(review): cast target type lost in extraction; non-null only for stereo frames
153 | if (stereo_data) {
154 | ros::Time timestamp((double)stereo_data->timestamp / 1000.0f);  // /1000 -> seconds (device stamp presumably in ms -- confirm)
155 | cv::Mat cv_img_left =
156 | cv::Mat(stereo_data->img_l.height, stereo_data->img_l.width, CV_8UC3);
157 | memcpy(cv_img_left.data, stereo_data->img_l.data,
158 | cv_img_left.total() * cv_img_left.elemSize());
159 | cv::Mat cv_img_right =
160 | cv::Mat(stereo_data->img_r.height, stereo_data->img_r.width, CV_8UC3);
161 | memcpy(cv_img_right.data, stereo_data->img_r.data,
162 | cv_img_right.total() * cv_img_right.elemSize());
163 |
164 | sensor_msgs::ImagePtr image_msg_left;
165 | sensor_msgs::ImagePtr image_msg_right;
166 | CreateImageMsg(&image_msg_left, cv_img_left, "StereoRectLeft", timestamp);
167 | CreateImageMsg(&image_msg_right, cv_img_right, "StereoRectRight",
168 | timestamp);
169 | pub_left.publish(image_msg_left);
170 | pub_right.publish(image_msg_right);
171 | pub_left_cam_info.publish(camera_calib_msg_left);  // calibration republished alongside every frame
172 | pub_right_cam_info.publish(camera_calib_msg_right);
173 | }
174 |
175 | std::shared_ptr imu_data =
176 | std::dynamic_pointer_cast(data);
177 | if (imu_data) {
178 | sensor_msgs::Imu imu_msg;
179 | ros::Time t((double)imu_data->timestamp / 1000.0f);
180 | CreateIMUMsg(&imu_msg, imu_data, t);
181 | pub_imu.publish(imu_msg);
182 | }
183 | }
184 | if (gDevice != NULL) {
185 | gDevice->StopDevice();
186 | }
187 | if (ros::ok) {  // NOTE(review): same missing-() bug as the loop condition; should be ros::ok()
188 | ros::shutdown();
189 | }
190 | return 0;
191 | }
192 |
--------------------------------------------------------------------------------
/src/depth_camera_node.cpp:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 PerceptIn
3 | *
4 | * This End-User License Agreement (EULA) is a legal agreement between you
5 | * (the purchaser) and PerceptIn regarding your use of
6 | * PerceptIn Robotics Vision System (PIRVS), including PIRVS SDK and
7 | * associated documentation (the "Software").
8 | *
9 | * IF YOU DO NOT AGREE TO ALL OF THE TERMS OF THIS EULA, DO NOT INSTALL,
10 | * USE OR COPY THE SOFTWARE.
11 | *
12 | * :+++++; ++'
13 | * ++++++++; ++'
14 | * ++` .++ .'''''` '''', :': `''''', ;''': '''''''`++' '` ''
15 | * ++` ++ ++''''.;+'''++, ;++'++, ++'''',,+'''++:+''++''.++',+' ++
16 | * ++` :+++ ++ ;+. ++ ++ ++ ++ ,+, ++ ++ ++',+++ ++
17 | * +++++++` ++ ;+. .++ ++ ++ ,+, .++ ++ ++',+++. ++
18 | * +++:` +++++ ;+.;++' ++ +++++ ,+++++; ++ ++',+,++ ++
19 | * ++` ++``` ;+`++ ++ ++``` ,+'. ++ ++',+,,+'++
20 | * ++` ++ ;+.,++ ++ ++ ,+, ++ ++',+, ++++
21 | * ++` ++ ;+. :++ ++. ,++ ++ ,+, ++ ++',+, +++
22 | * '+` ++''''.:+. ;++ '+'+' ++'''',,+, '+ ++',+, ;+'
23 | *
24 | */
25 |
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 | #include
34 | #include
35 | #include
36 | #include
37 | #include
38 | #include
39 |
40 | #define Q_SIZE_STEREOIMAGE 1
41 | #define Q_SIZE_POINTCLOUD 5
42 | #define Q_SIZE_IMU 1000
43 |
44 | std::shared_ptr gDevice = NULL;
45 |
46 | /**
47 |  * Gracefully exit when CTRL-C is hit: stop the PIRVS device, then terminate.
48 |  */
49 |
50 | void exit_handler(int s) {
51 | if (gDevice != NULL) {
52 | gDevice->StopDevice();  // release the camera before the process dies
53 | }
54 | ros::shutdown;  // NOTE(review): missing (): this evaluates the function address and does nothing; should be ros::shutdown();
55 | exit(1);  // NOTE(review): exits with failure status even on a clean CTRL-C shutdown
56 | }
57 | /**
58 |  * Convert from Ironsides coordinate to rviz coordinate.
59 |  * Rotation Matrix = [-1, 0, 0]
60 |  * [ 0, 0,-1]
61 |  * [ 0,-1, 0]
62 |  * Quaternion = [ 0, -0.7071068, 0.7071068, 0 ]
63 |  */
64 | void CreateRvizTranslateToInitFrame(geometry_msgs::TransformStamped *trans,
65 | std::string child_frame_id) {  // NOTE(review): defined but not referenced anywhere in the visible part of this file
66 | trans->header.frame_id = "init_frame";  // fixed parent frame; zero translation, pure rotation
67 | trans->child_frame_id = child_frame_id;
68 | trans->transform.translation.x = 0;
69 | trans->transform.translation.y = 0;
70 | trans->transform.translation.z = 0;
71 | trans->transform.rotation.x = 0;
72 | trans->transform.rotation.y = -0.7071068;  // quaternion (x,y,z,w) = (0, -0.7071068, 0.7071068, 0), matching the matrix above
73 | trans->transform.rotation.z = 0.7071068;
74 | trans->transform.rotation.w = 0;
75 | }
76 | /**
77 |  * Create IMU Message: sets the stamp and frame_id "IMU", then copies angular velocity and linear acceleration from the PIRVS sample. Orientation is left untouched (not set here).
78 |  */
79 | void CreateIMUMsg(sensor_msgs::Imu *imu_msg,
80 | std::shared_ptr imu_data,  // NOTE(review): shared_ptr template argument was lost in extraction -- confirm the PIRVS IMU sample type against the SDK header
81 | ros::Time timestamp) {
82 | if (!imu_data) {  // null sample: leave imu_msg unchanged
83 | return;
84 | }
85 | imu_msg->header.stamp = timestamp;
86 | imu_msg->header.frame_id = "IMU";
87 | imu_msg->angular_velocity.x = imu_data->ang_v.x;
88 | imu_msg->angular_velocity.y = imu_data->ang_v.y;
89 | imu_msg->angular_velocity.z = imu_data->ang_v.z;
90 | imu_msg->linear_acceleration.x = imu_data->accel.x;
91 | imu_msg->linear_acceleration.y = imu_data->accel.y;
92 | imu_msg->linear_acceleration.z = imu_data->accel.z;
93 | }
94 | /**
95 |  * Create Image Message: wraps a cv::Mat as a ROS image with the given frame_id and timestamp, encoded as MONO8 (callers build CV_8UC1 mats).
96 |  */
97 | void CreateImageMsg(sensor_msgs::ImagePtr *image_msg, cv::Mat cv_image,
98 | std::string frame_id, ros::Time timestamp) {
99 | std_msgs::Header image_header;
100 | image_header.stamp = timestamp;
101 | image_header.frame_id = frame_id;
102 | *image_msg = cv_bridge::CvImage(image_header,
103 | sensor_msgs::image_encodings::MONO8, cv_image)  // MONO8 matches the single-channel mats built in main
104 | .toImageMsg();
105 | }
106 | int main(int argc, char **argv) {
107 | // install SIGNAL handler
108 | struct sigaction sigIntHandler;  // route CTRL-C to exit_handler so the device is stopped
109 | sigIntHandler.sa_handler = exit_handler;
110 | sigemptyset(&sigIntHandler.sa_mask);
111 | sigIntHandler.sa_flags = 0;
112 | sigaction(SIGINT, &sigIntHandler, NULL);
113 | if (!PIRVS::CreatePerceptInDevice(&gDevice,
114 | PIRVS::PerceptInDevice::DEPTH_MODE) ||
115 | !gDevice) {
116 | ROS_ERROR("Failed to create device.\n");  // NOTE(review): ROS_ERROR used before ros::init below -- verify logging works this early
117 | return -1;
118 | }
119 | if (!gDevice->StartDevice()) {
120 | ROS_ERROR("Failed to start device.\n");
121 | return -1;
122 | }
123 |
124 | // create Publisher and set topic names.
125 | ros::init(argc, argv, "DepthImagePublisher",
126 | ros::init_options::NoSigintHandler);
127 | ros::NodeHandle nh;
128 | image_transport::ImageTransport it(nh);
129 | image_transport::Publisher pub_left =
130 | it.advertise("StereoImage/rect_left", Q_SIZE_STEREOIMAGE);
131 | image_transport::Publisher pub_right =
132 | it.advertise("StereoImage/rect_right", Q_SIZE_STEREOIMAGE);
133 | image_transport::Publisher pub_depth =
134 | it.advertise("StereoImage/depth", Q_SIZE_STEREOIMAGE);
135 | ros::Publisher pub_imu =
136 | nh.advertise("IMU/data_raw", Q_SIZE_IMU);  // NOTE(review): advertise's message-type template argument (presumably sensor_msgs::Imu) was lost in extraction
137 |
138 | bool stereo_data_available = false;  // NOTE(review): never read or written below -- dead variable
139 | while (ros::ok) {  // NOTE(review): ros::ok without () is the function address, always truthy; should be ros::ok() so the loop can exit
140 | std::shared_ptr data;
141 | if (!gDevice->GetData(&data)) {  // no sample ready; poll again
142 | continue;
143 | }
144 | std::shared_ptr stereo_depth_data =
145 | std::dynamic_pointer_cast(data);  // NOTE(review): cast target type lost in extraction; non-null only for depth frames
146 | if (stereo_depth_data) {
147 | ros::Time timestamp((double)stereo_depth_data->timestamp / 1000.0f);  // /1000 -> seconds (device stamp presumably in ms -- confirm)
148 | cv::Mat cv_img_left = cv::Mat(stereo_depth_data->img_l.height,
149 | stereo_depth_data->img_l.width, CV_8UC1);
150 | memcpy(cv_img_left.data, stereo_depth_data->img_l.data,
151 | cv_img_left.total() * cv_img_left.elemSize());
152 | cv::Mat cv_img_right = cv::Mat(stereo_depth_data->img_r.height,
153 | stereo_depth_data->img_r.width, CV_8UC1);
154 | memcpy(cv_img_right.data, stereo_depth_data->img_r.data,
155 | cv_img_right.total() * cv_img_right.elemSize());
156 | cv::Mat cv_img_depth =
157 | cv::Mat_(stereo_depth_data->img_depth.height,  // NOTE(review): cv::Mat_'s element type was lost in extraction; the MONO16 encoding below suggests a 16-bit type -- confirm
158 | stereo_depth_data->img_depth.width);
159 | memcpy(cv_img_depth.data, stereo_depth_data->img_depth.data,
160 | cv_img_depth.total() * cv_img_depth.elemSize());
161 |
162 | sensor_msgs::ImagePtr image_msg_left;
163 | sensor_msgs::ImagePtr image_msg_right;
164 | CreateImageMsg(&image_msg_left, cv_img_left, "StereoRectLeft", timestamp);
165 | CreateImageMsg(&image_msg_right, cv_img_right, "StereoRectRight",
166 | timestamp);
167 | pub_left.publish(image_msg_left);
168 | pub_right.publish(image_msg_right);
169 | // publish depth image (16-bit mono, frame_id "StereoDepth")
170 | std_msgs::Header image_header_depth;
171 | image_header_depth.stamp = timestamp;
172 | image_header_depth.frame_id = "StereoDepth";
173 | sensor_msgs::ImagePtr msg_depth =
174 | cv_bridge::CvImage(image_header_depth,
175 | sensor_msgs::image_encodings::MONO16, cv_img_depth)
176 | .toImageMsg();
177 | pub_depth.publish(msg_depth);
178 | }
179 | std::shared_ptr imu_data =
180 | std::dynamic_pointer_cast(data);
181 | if (imu_data) {
182 | sensor_msgs::Imu imu_msg;
183 | ros::Time t((double)imu_data->timestamp / 1000.0f);
184 | CreateIMUMsg(&imu_msg, imu_data, t);
185 | pub_imu.publish(imu_msg);
186 | }
187 | }
188 | if (gDevice != NULL) {
189 | gDevice->StopDevice();
190 | }
191 | if (ros::ok) {  // NOTE(review): same missing-() bug as the loop condition; should be ros::ok()
192 | ros::shutdown();
193 | }
194 | return 0;
195 | }
196 |
--------------------------------------------------------------------------------
/rviz/ironsides.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 78
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Odometry1
9 | - /Odometry1/Shape1
10 | - /PointCloud1
11 | Splitter Ratio: 0.51522249
12 | Tree Height: 1716
13 | - Class: rviz/Selection
14 | Name: Selection
15 | - Class: rviz/Tool Properties
16 | Expanded:
17 | - /2D Pose Estimate1
18 | - /2D Nav Goal1
19 | - /Publish Point1
20 | Name: Tool Properties
21 | Splitter Ratio: 0.588679016
22 | - Class: rviz/Views
23 | Expanded:
24 | - /Current View1
25 | Name: Views
26 | Splitter Ratio: 0.5
27 | - Class: rviz/Time
28 | Experimental: false
29 | Name: Time
30 | SyncMode: 0
31 | SyncSource: Image
32 | Visualization Manager:
33 | Class: ""
34 | Displays:
35 | - Alpha: 0.5
36 | Cell Size: 1
37 | Class: rviz/Grid
38 | Color: 160; 160; 164
39 | Enabled: true
40 | Line Style:
41 | Line Width: 0.0299999993
42 | Value: Lines
43 | Name: Grid
44 | Normal Cell Count: 0
45 | Offset:
46 | X: 0
47 | Y: 0
48 | Z: 0
49 | Plane: XY
50 | Plane Cell Count: 10
51 | Reference Frame:
52 | Value: true
53 | - Alpha: 1
54 | Axes Length: 0.00999999978
55 | Axes Radius: 0.00999999978
56 | Class: rviz/Pose
57 | Color: 255; 25; 0
58 | Enabled: true
59 | Head Length: 0.300000012
60 | Head Radius: 0.100000001
61 | Name: Pose
62 | Shaft Length: 1
63 | Shaft Radius: 0.0500000007
64 | Shape: Axes
65 | Topic: /Pose
66 | Unreliable: false
67 | Value: true
68 | - Angle Tolerance: 0.00999999978
69 | Class: rviz/Odometry
70 | Covariance:
71 | Orientation:
72 | Alpha: 0.5
73 | Color: 255; 255; 127
74 | Color Style: Unique
75 | Frame: Local
76 | Offset: 1
77 | Scale: 1
78 | Value: true
79 | Position:
80 | Alpha: 0.300000012
81 | Color: 204; 51; 204
82 | Scale: 1
83 | Value: true
84 | Value: false
85 | Enabled: true
86 | Keep: 1
87 | Name: Odometry
88 | Position Tolerance: 0.00999999978
89 | Shape:
90 | Alpha: 1
91 | Axes Length: 0.0500000007
92 | Axes Radius: 0.00999999978
93 | Color: 255; 25; 0
94 | Head Length: 0.0299999993
95 | Head Radius: 0.00999999978
96 | Shaft Length: 0.100000001
97 | Shaft Radius: 0.00499999989
98 | Value: Axes
99 | Topic: /pirvs_slam/odom
100 | Unreliable: false
101 | Value: true
102 | - Class: rviz/Axes
103 | Enabled: true
104 | Length: 0.100000001
105 | Name: Axes
106 | Radius: 0.00999999978
107 | Reference Frame:
108 | Value: true
109 | - Class: rviz/Image
110 | Enabled: true
111 | Image Topic: /pirvs_slam/StereoImage/left
112 | Max Value: 1
113 | Median window: 5
114 | Min Value: 0
115 | Name: Image
116 | Normalize Range: true
117 | Queue Size: 2
118 | Transport Hint: raw
119 | Unreliable: false
120 | Value: true
121 | - Alpha: 1
122 | Autocompute Intensity Bounds: true
123 | Autocompute Value Bounds:
124 | Max Value: 10
125 | Min Value: -10
126 | Value: true
127 | Axis: Z
128 | Channel Name: intensity
129 | Class: rviz/PointCloud
130 | Color: 255; 255; 255
131 | Color Transformer: Intensity
132 | Decay Time: 0
133 | Enabled: true
134 | Invert Rainbow: false
135 | Max Color: 255; 255; 255
136 | Max Intensity: 4096
137 | Min Color: 0; 0; 0
138 | Min Intensity: 0
139 | Name: PointCloud
140 | Position Transformer: XYZ
141 | Queue Size: 10
142 | Selectable: true
143 | Size (Pixels): 3
144 | Size (m): 0.00999999978
145 | Style: Spheres
146 | Topic: /pirvs_slam/PointCloud
147 | Unreliable: false
148 | Use Fixed Frame: true
149 | Use rainbow: true
150 | Value: true
151 | Enabled: true
152 | Global Options:
153 | Background Color: 48; 48; 48
154 | Fixed Frame: init_frame
155 | Frame Rate: 30
156 | Name: root
157 | Tools:
158 | - Class: rviz/Interact
159 | Hide Inactive Objects: true
160 | - Class: rviz/MoveCamera
161 | - Class: rviz/Select
162 | - Class: rviz/FocusCamera
163 | - Class: rviz/Measure
164 | - Class: rviz/SetInitialPose
165 | Topic: /initialpose
166 | - Class: rviz/SetGoal
167 | Topic: /move_base_simple/goal
168 | - Class: rviz/PublishPoint
169 | Single click: true
170 | Topic: /clicked_point
171 | Value: true
172 | Views:
173 | Current:
174 | Class: rviz/Orbit
175 | Distance: 2.43775749
176 | Enable Stereo Rendering:
177 | Stereo Eye Separation: 0.0599999987
178 | Stereo Focal Distance: 1
179 | Swap Stereo Eyes: false
180 | Value: false
181 | Focal Point:
182 | X: 0
183 | Y: 0
184 | Z: 0
185 | Focal Shape Fixed Size: true
186 | Focal Shape Size: 0.0500000007
187 | Invert Z Axis: false
188 | Name: Current View
189 | Near Clip Distance: 0.00999999978
190 | Pitch: 0.590201616
191 | Target Frame:
192 | Value: Orbit (rviz)
193 | Yaw: 0.738587439
194 | Saved:
195 | - Class: rviz/Orbit
196 | Distance: 1.68391693
197 | Enable Stereo Rendering:
198 | Stereo Eye Separation: 0.0599999987
199 | Stereo Focal Distance: 1
200 | Swap Stereo Eyes: false
201 | Value: false
202 | Focal Point:
203 | X: 0
204 | Y: 0
205 | Z: 0
206 | Focal Shape Fixed Size: true
207 | Focal Shape Size: 0.0500000007
208 | Invert Z Axis: false
209 | Name: Orbit
210 | Near Clip Distance: 0.00999999978
211 | Pitch: 0.450202972
212 | Target Frame:
213 | Value: Orbit (rviz)
214 | Yaw: 0.800391972
215 | Window Geometry:
216 | Displays:
217 | collapsed: false
218 | Height: 2062
219 | Hide Left Dock: false
220 | Hide Right Dock: false
221 | Image:
222 | collapsed: false
223 | QMainWindow State: 000000ff00000000fd0000000400000000000001f900000755fc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006b00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003400000755000000ef00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006101000006ca0000001d0000000000000000fb0000000a0049006d00610067006503000009ae000004a2000003f80000034b000000010000016200000755fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003400000755000000cd00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e1000001970000000300000e8600000056fc0100000002fb0000000800540069006d0065010000000000000e860000043900fffffffb0000000800540069006d0065010000000000000450000000000000000000000b190000075500000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
224 | Selection:
225 | collapsed: false
226 | Time:
227 | collapsed: false
228 | Tool Properties:
229 | collapsed: false
230 | Views:
231 | collapsed: false
232 | Width: 3718
233 | X: 122
234 | Y: 45
235 |
--------------------------------------------------------------------------------
/src/slam_node.cpp:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 PerceptIn
3 | *
4 | * This End-User License Agreement (EULA) is a legal agreement between you
5 | * (the purchaser) and PerceptIn regarding your use of
6 | * PerceptIn Robotics Vision System (PIRVS), including PIRVS SDK and
7 | * associated documentation (the "Software").
8 | *
9 | * IF YOU DO NOT AGREE TO ALL OF THE TERMS OF THIS EULA, DO NOT INSTALL,
10 | * USE OR COPY THE SOFTWARE.
11 | *
12 | * :+++++; ++'
13 | * ++++++++; ++'
14 | * ++` .++ .'''''` '''', :': `''''', ;''': '''''''`++' '` ''
15 | * ++` ++ ++''''.;+'''++, ;++'++, ++'''',,+'''++:+''++''.++',+' ++
16 | * ++` :+++ ++ ;+. ++ ++ ++ ++ ,+, ++ ++ ++',+++ ++
17 | * +++++++` ++ ;+. .++ ++ ++ ,+, .++ ++ ++',+++. ++
18 | * +++:` +++++ ;+.;++' ++ +++++ ,+++++; ++ ++',+,++ ++
19 | * ++` ++``` ;+`++ ++ ++``` ,+'. ++ ++',+,,+'++
20 | * ++` ++ ;+.,++ ++ ++ ,+, ++ ++',+, ++++
21 | * ++` ++ ;+. :++ ++. ,++ ++ ,+, ++ ++',+, +++
22 | * '+` ++''''.:+. ;++ '+'+' ++'''',,+, '+ ++',+, ;+'
23 | *
24 | */
25 |
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 | #include
34 | #include
35 | #include
36 | #include
37 | #include
38 | #include
39 | #include
40 | #include
41 | #include
42 | #include
43 | #include
44 |
45 | #define Q_SIZE_STEREOIMAGE 1
46 | #define Q_SIZE_POSESTAMPED 30
47 | #define Q_SIZE_ODOMETRY 50
48 | #define Q_SIZE_POINTCLOUD 5
49 |
50 | std::shared_ptr gDevice = NULL;
51 |
52 | /**
53 | * Gracefully exit when CTRL-C is hit
54 | */
55 | void exit_handler(int s) {
56 | if (gDevice != NULL) {
57 | gDevice->StopDevice();
58 | }
59 | exit(1);
60 | }
61 | /**
62 | * Convert from Ironsides coordinate to rviz corrdinate.
63 | * Rotation Matrix = [-1, 0, 0]
64 | * [ 0, 0,-1]
65 | * [ 0,-1, 0]
66 | * Quaternion = [ 0, -0.7071068, 0.7071068, 0 ]
67 | */
68 | void CreateRvizTranslateToInitFrame(geometry_msgs::TransformStamped *trans,
69 | std::string child_frame_id) {
70 | trans->header.frame_id = "init_frame";
71 | trans->child_frame_id = child_frame_id;
72 | trans->transform.translation.x = 0;
73 | trans->transform.translation.y = 0;
74 | trans->transform.translation.z = 0;
75 | trans->transform.rotation.x = 0;
76 | trans->transform.rotation.y = -0.7071068;
77 | trans->transform.rotation.z = 0.7071068;
78 | trans->transform.rotation.w = 0;
79 | }
80 | /**
81 | * Create Map Point Cloud Message From MAP
82 | */
83 | void CreateMapPointCloudMsg(sensor_msgs::PointCloud *pointCloud,
84 | std::shared_ptr map,
85 | ros::Time timestamp) {
86 | if (!map) {
87 | return;
88 | }
89 | std::vector map_points;
90 | map->GetPoints(&map_points);
91 | size_t t = map_points.size();
92 | int idx = 0;
93 | for (auto point : map_points) {
94 | geometry_msgs::Point32 point32;
95 | point32.x = point.x;
96 | point32.y = point.y;
97 | point32.z = point.z;
98 | pointCloud->points.push_back(point32);
99 | }
100 | pointCloud->header.stamp = timestamp;
101 | pointCloud->header.frame_id = "pointcloud";
102 | }
103 | /**
104 | * Create Pose Message
105 | */
106 | void CreatePoseMsg(geometry_msgs::PoseStamped *msg_poseStamped,
107 | PIRVS::PoseInQuaternion pose_in_quaternion,
108 | ros::Time timestamp) {
109 | msg_poseStamped->header.stamp = timestamp;
110 | msg_poseStamped->header.frame_id = "pose";
111 | msg_poseStamped->pose.position.x = pose_in_quaternion.tx;
112 | msg_poseStamped->pose.position.y = pose_in_quaternion.ty;
113 | msg_poseStamped->pose.position.z = pose_in_quaternion.tz;
114 | msg_poseStamped->pose.orientation.x = pose_in_quaternion.qx;
115 | msg_poseStamped->pose.orientation.y = pose_in_quaternion.qy;
116 | msg_poseStamped->pose.orientation.z = pose_in_quaternion.qz;
117 | msg_poseStamped->pose.orientation.w = pose_in_quaternion.qw;
118 | }
119 | /**
120 | * Create Odometry Message
121 | */
122 | void CreateOdometryMsg(nav_msgs::Odometry *odom,
123 | PIRVS::PoseInQuaternion pose_in_quaternion,
124 | ros::Time timestamp) {
125 | odom->header.stamp = timestamp;
126 | odom->header.frame_id = "odom";
127 | // set odom the position
128 | odom->pose.pose.position.x = pose_in_quaternion.tx;
129 | odom->pose.pose.position.y = pose_in_quaternion.ty;
130 | odom->pose.pose.position.z = pose_in_quaternion.tz;
131 | odom->pose.pose.orientation.x = pose_in_quaternion.qx;
132 | odom->pose.pose.orientation.y = pose_in_quaternion.qy;
133 | odom->pose.pose.orientation.z = pose_in_quaternion.qz;
134 | odom->pose.pose.orientation.w = pose_in_quaternion.qw;
135 | // set odom the velocity
136 | odom->child_frame_id = "base_link";
137 | odom->twist.twist.linear.x = 0;
138 | odom->twist.twist.linear.y = 0;
139 | odom->twist.twist.angular.z = 0;
140 | }
141 | /**
142 | * Create Image Message
143 | */
144 | void CreateImageMsg(sensor_msgs::ImagePtr *image_msg, cv::Mat cv_image,
145 | std::string frame_id, ros::Time timestamp) {
146 | std_msgs::Header image_header;
147 | image_header.stamp = timestamp;
148 | image_header.frame_id = frame_id;
149 | *image_msg = cv_bridge::CvImage(image_header,
150 | sensor_msgs::image_encodings::MONO8, cv_image)
151 | .toImageMsg();
152 | }
153 |
154 | int main(int argc, char **argv) {
155 | if (argc < 3) {
156 | ROS_ERROR(
157 | "Not enough input argument.\n"
158 | "Usage:\n%s [calib JSON] [voc JSON]\n",
159 | argv[0]);
160 | return -1;
161 | }
162 | const std::string file_calib(argv[1]);
163 | const std::string file_voc(argv[2]);
164 | // install SIGNAL handler
165 | struct sigaction sigIntHandler;
166 | sigIntHandler.sa_handler = exit_handler;
167 | sigemptyset(&sigIntHandler.sa_mask);
168 | sigIntHandler.sa_flags = 0;
169 | sigaction(SIGINT, &sigIntHandler, NULL);
170 | std::shared_ptr slam_state;
171 | if (!PIRVS::InitState(file_calib, &slam_state)) {
172 | ROS_ERROR("Failed to InitState.\n");
173 | return -1;
174 | }
175 | std::shared_ptr map;
176 | if (!PIRVS::InitMap(file_calib, file_voc, &map)) {
177 | ROS_ERROR("Failed to InitMap.\n");
178 | return -1;
179 | }
180 | PIRVS::TrajectoryDrawer drawer;
181 | PIRVS::Mat img_draw;
182 | // Create an interface to stream the PerceptIn V1 device.
183 | if (!PIRVS::CreatePerceptInDevice(&gDevice, PIRVS::PerceptInDevice::RAW_MODE,
184 | true) ||
185 | !gDevice) {
186 | ROS_ERROR("Failed to create device.\n");
187 | return -1;
188 | }
189 | // Start streaming from the device.
190 | if (!gDevice->StartDevice()) {
191 | ROS_ERROR("Failed to start device.\n");
192 | return -1;
193 | }
194 | // create Publisher and set topic names.
195 | ros::init(argc, argv, "SLAMPublisher", ros::init_options::NoSigintHandler);
196 | ros::NodeHandle nh;
197 | image_transport::ImageTransport it(nh);
198 | image_transport::Publisher pub_left =
199 | it.advertise("StereoImage/left", Q_SIZE_STEREOIMAGE);
200 | image_transport::Publisher pub_right =
201 | it.advertise("StereoImage/right", Q_SIZE_STEREOIMAGE);
202 | ros::Publisher pub_pose =
203 | nh.advertise("Pose", Q_SIZE_POSESTAMPED);
204 | ros::Publisher odom_pub =
205 | nh.advertise("odom", Q_SIZE_ODOMETRY);
206 | ros::Publisher pub_pointcloud =
207 | nh.advertise("PointCloud", Q_SIZE_POINTCLOUD);
208 |
209 | tf2_ros::TransformBroadcaster odom_broadcaster;
210 | geometry_msgs::TransformStamped odom_trans;
211 | tf2_ros::TransformBroadcaster pointcloud_broadcaster;
212 | geometry_msgs::TransformStamped pointcloud_trans;
213 |
214 | CreateRvizTranslateToInitFrame(&odom_trans, "odom");
215 | CreateRvizTranslateToInitFrame(&pointcloud_trans, "pointcloud");
216 |
217 | bool stereo_data_available = false;
218 | while (ros::ok) {
219 | std::shared_ptr data;
220 | if (!gDevice->GetData(&data)) {
221 | continue;
222 | }
223 | std::shared_ptr stereo_data =
224 | std::dynamic_pointer_cast(data);
225 | if (stereo_data) {
226 | stereo_data_available = true;
227 | }
228 | // wait for the first image
229 | if (!stereo_data_available) {
230 | continue;
231 | }
232 | if (!PIRVS::RunSlam(data, map, slam_state)) {
233 | ROS_ERROR("SLAM failed.\n");
234 | break;
235 | }
236 | if (stereo_data) {
237 | ros::Time timestamp((double)stereo_data->timestamp / 1000.0f);
238 | // publish odom tranform
239 | odom_trans.header.stamp = timestamp;
240 | odom_broadcaster.sendTransform(odom_trans);
241 | if (map) {
242 | sensor_msgs::PointCloud pointCloud;
243 | CreateMapPointCloudMsg(&pointCloud, map, timestamp);
244 | pub_pointcloud.publish(pointCloud);
245 | pointcloud_trans.header.stamp = timestamp;
246 | pointcloud_broadcaster.sendTransform(pointcloud_trans);
247 | }
248 | // publish pose
249 | PIRVS::PoseInQuaternion pose_in_quaternion;
250 | if (slam_state->GetPoseInQuaternion(&pose_in_quaternion)) {
251 | geometry_msgs::PoseStamped msg_poseStamped;
252 | CreatePoseMsg(&msg_poseStamped, pose_in_quaternion, timestamp);
253 | pub_pose.publish(msg_poseStamped);
254 |
255 | nav_msgs::Odometry msg_odom;
256 | CreateOdometryMsg(&msg_odom, pose_in_quaternion, timestamp);
257 | odom_pub.publish(msg_odom);
258 | }
259 | // Decode and publish stereo images
260 | cv::Mat cv_img_left =
261 | cv::Mat(stereo_data->img_l.height, stereo_data->img_l.width, CV_8UC1);
262 | memcpy(cv_img_left.data, stereo_data->img_l.data,
263 | cv_img_left.total() * cv_img_left.elemSize());
264 | cv::Mat cv_img_right =
265 | cv::Mat(stereo_data->img_r.height, stereo_data->img_r.width, CV_8UC1);
266 | memcpy(cv_img_right.data, stereo_data->img_r.data,
267 | cv_img_right.total() * cv_img_right.elemSize());
268 |
269 | sensor_msgs::ImagePtr image_msg_left;
270 | sensor_msgs::ImagePtr image_msg_right;
271 | CreateImageMsg(&image_msg_left, cv_img_left, "StereoLeft", timestamp);
272 | CreateImageMsg(&image_msg_right, cv_img_right, "StereoRight", timestamp);
273 | pub_left.publish(image_msg_left);
274 | pub_right.publish(image_msg_right);
275 | }
276 | }
277 | if (gDevice != NULL) {
278 | gDevice->StopDevice();
279 | }
280 | if (ros::ok) {
281 | ros::shutdown();
282 | }
283 | return 0;
284 | }
285 |
--------------------------------------------------------------------------------
/init/convert_raw_to_yaml.py:
--------------------------------------------------------------------------------
1 | import json, yaml, sys, math
2 | #create extrinsic calib file
3 | def CreateCalib4Extrinsic(data, output):
4 | p_T_t = data["children"]["child"]["children"]["child"]["children"]["child"]["parent_T_this"]
5 | left_T_right = { "rows": 4,
6 | "cols": 4,
7 | "data": [float(i) for i in p_T_t[0]] + \
8 | [float(i) for i in p_T_t[1]] + \
9 | [float(i) for i in p_T_t[2]] + \
10 | [float(i) for i in p_T_t[3]]}
11 |
12 | p_T_t = data["children"]["child"]["children"]["child"]["parent_T_this"]
13 | IMU_T_left = { "rows": 4,
14 | "cols": 4,
15 | "data": [float(i) for i in p_T_t[0]] + \
16 | [float(i) for i in p_T_t[1]] + \
17 | [float(i) for i in p_T_t[2]] + \
18 | [float(i) for i in p_T_t[3]]}
19 |
20 | p_T_t = data["children"]["child"]["parent_T_this"]
21 | root_T_IMU = { "rows": 4,
22 | "cols": 4,
23 | "data": [float(i) for i in p_T_t[0]] + \
24 | [float(i) for i in p_T_t[1]] + \
25 | [float(i) for i in p_T_t[2]] + \
26 | [float(i) for i in p_T_t[3]]}
27 | calib_extrinsic = { "left_T_right": left_T_right,
28 | "imu_T_left" : IMU_T_left,
29 | "root_T_imu" : root_T_IMU}
30 | with open(output, 'w') as yaml_file:
31 | # ymal_file.write('# ')
32 | yaml.safe_dump(calib_extrinsic, yaml_file, default_flow_style=False)
33 | print " ---created " + output
34 |
35 | #create IMU calib file
36 | def CreateCalib4IMU(data, output):
37 | model = data["children"]["child"]["model"]
38 | fs = 200.0
39 | max_acc_noise = 0
40 | for i in model["accel"]["wideband_noise_var"]:
41 | if (max_acc_noise < float(i)):
42 | max_acc_noise = float(i)
43 | max_acc_rand_walk = 0
44 | for i in model["accel"]["bias_driving_noise_var"]:
45 | if (max_acc_rand_walk < float(i)):
46 | max_acc_rand_walk = float(i)
47 | max_gyro_noise = 0
48 | for i in model["gyro"]["wideband_noise_var"]:
49 | if (max_gyro_noise < float(i)):
50 | max_gyro_noise = float(i)
51 | max_gyro_rand_walk = 0
52 | for i in model["gyro"]["bias_driving_noise_var"]:
53 | if (max_gyro_rand_walk < float(i)):
54 | max_gyro_rand_walk = float(i)
55 | calib_IMU = { "Accelerometer": {
56 | "accelerometer_noise_density" : math.sqrt(max_acc_noise / (fs/2)),
57 | "accelerometer_random_walk" : math.sqrt(max_acc_rand_walk / (fs/2))
58 | },
59 | "Gyroscope": {
60 | "gyroscope_noise_density" : math.sqrt(max_gyro_noise / (fs/2)),
61 | "gyroscope_random_walk" : math.sqrt(max_gyro_rand_walk / (fs/2))
62 | },
63 | "update_rate" : fs
64 | }
65 | with open(output, 'w') as yaml_file:
66 | yaml.safe_dump(calib_IMU, yaml_file, default_flow_style=False)
67 | print " ---created " + output
68 |
69 | #create left camera calib file
70 | def CreateCalib4LeftCam(data, output):
71 | model = data["children"]["child"]["children"]["child"]["model"]
72 | camera_model = model["type"]
73 | if camera_model == "fisheye_camera":
74 | camera_matrix = {"rows" : 3,
75 | "cols" : 3}
76 | camera_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + \
77 | [float(i) for i in model["camera_matrix"][1]] + \
78 | [float(i) for i in model["camera_matrix"][2]]
79 | distortion_coefficients = {"rows" : 1,
80 | "cols" : 4}
81 | distortion_coefficients["data"] = [float(i) for i in model["distortion"][0]] + \
82 | [float(i) for i in model["distortion"][1]] + \
83 | [float(i) for i in model["distortion"][2]] + \
84 | [float(i) for i in model["distortion"][3]]
85 | rectification_matrix = { "rows" : 3,
86 | "cols" : 3,
87 | "data" : [1, 0, 0, 0, 1, 0, 0, 0, 1]}
88 | projection_matrix = { "rows" : 3,
89 | "cols" : 4}
90 | projection_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + [0] +\
91 | [float(i) for i in model["camera_matrix"][1]] + [0] +\
92 | [float(i) for i in model["camera_matrix"][2]] + [0]
93 | calib_left = { "image_width" : int(model["image_size"]["width"]),
94 | "image_height" : int(model["image_size"]["height"]),
95 | "camera_name" : "stereo_left",
96 | "camera_matrix" : camera_matrix,
97 | "distortion_model" :"fisheye",
98 | "distortion_coefficients": distortion_coefficients,
99 | "rectification_matrix" : rectification_matrix,
100 | "projection_matrix" : projection_matrix
101 | }
102 | with open(output, 'w') as yaml_file:
103 | yaml.safe_dump(calib_left, yaml_file, default_flow_style=False)
104 | print " ---created " + output + " distortion_model: fisheye"
105 | elif camera_model == "catadioptric_camera":
106 | camera_matrix = {"rows" : 3,
107 | "cols" : 3}
108 | camera_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + \
109 | [float(i) for i in model["camera_matrix"][1]] + \
110 | [float(i) for i in model["camera_matrix"][2]]
111 | distortion_coefficients = {"rows" : 1,
112 | "cols" : 4}
113 | distortion_coefficients["data"] = [float(model["distortion"]["k1"])] + \
114 | [float(model["distortion"]["k2"])] + \
115 | [float(model["distortion"]["k3"])] + \
116 | [float(model["distortion"]["k4"])]
117 | rectification_matrix = { "rows" : 3,
118 | "cols" : 3,
119 | "data" : [1, 0, 0, 0, 1, 0, 0, 0, 1]}
120 | projection_matrix = { "rows" : 3,
121 | "cols" : 4}
122 | projection_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + [0] + \
123 | [float(i) for i in model["camera_matrix"][1]] + [0] + \
124 | [float(i) for i in model["camera_matrix"][2]] + [0]
125 | unified_model_params = {
126 | "eta" : float(model["unified_model_params"]["eta"]),
127 | "xi" : float(model["unified_model_params"]["xi"])
128 | }
129 | calib_left = { "image_width" : int(model["image_size"]["width"]),
130 | "image_height" : int(model["image_size"]["height"]),
131 | "camera_name" : "stereo_left",
132 | "camera_matrix" : camera_matrix,
133 | "unified_model_params" : unified_model_params,
134 | "distortion_model" :"catadioptric",
135 | "distortion_coefficients": distortion_coefficients,
136 | "rectification_matrix" : rectification_matrix,
137 | "projection_matrix" : projection_matrix
138 | }
139 | with open(output, 'w') as yaml_file:
140 | yaml.safe_dump(calib_left, yaml_file, default_flow_style=False)
141 | print " ---created " + output + " distortion_model: catadioptric"
142 |
143 | #create right camera calib file
144 | def CreateCalib4RightCam(data, output):
145 | model = data["children"]["child"]["children"]["child"]["children"]["child"]["model"]
146 | camera_model = model["type"]
147 | if camera_model == "fisheye_camera":
148 | camera_matrix = {"rows" : 3,
149 | "cols" : 3}
150 | camera_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + \
151 | [float(i) for i in model["camera_matrix"][1]] + \
152 | [float(i) for i in model["camera_matrix"][2]]
153 | distortion_coefficients = {"rows" : 1,
154 | "cols" : 4}
155 | distortion_coefficients["data"] = [float(i) for i in model["distortion"][0]] + \
156 | [float(i) for i in model["distortion"][1]] + \
157 | [float(i) for i in model["distortion"][2]] + \
158 | [float(i) for i in model["distortion"][3]]
159 | rectification_matrix = { "rows" : 3,
160 | "cols" : 3,
161 | "data" : [1, 0, 0, 0, 1, 0, 0, 0, 1]}
162 | projection_matrix = { "rows" : 3,
163 | "cols" : 4}
164 | projection_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + [0] + \
165 | [float(i) for i in model["camera_matrix"][1]] + [0] + \
166 | [float(i) for i in model["camera_matrix"][2]] + [0]
167 | calib_right= { "image_width" : int(model["image_size"]["width"]),
168 | "image_height" : int(model["image_size"]["height"]),
169 | "camera_name" : "stereo_right",
170 | "camera_matrix" : camera_matrix,
171 | "distortion_model" :"fisheye",
172 | "distortion_coefficients": distortion_coefficients,
173 | "rectification_matrix" : rectification_matrix,
174 | "projection_matrix" : projection_matrix
175 | }
176 |
177 | with open(output, 'w') as yaml_file:
178 | yaml.safe_dump(calib_right, yaml_file, default_flow_style=False)
179 | print " ---created " + output + " distortion_model: fisheye"
180 | elif camera_model == "catadioptric_camera":
181 | camera_matrix = {"rows" : 3,
182 | "cols" : 3}
183 | camera_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + \
184 | [float(i) for i in model["camera_matrix"][1]] + \
185 | [float(i) for i in model["camera_matrix"][2]]
186 | distortion_coefficients = {"rows" : 1,
187 | "cols" : 4}
188 | distortion_coefficients["data"] = [float(model["distortion"]["k1"])] + \
189 | [float(model["distortion"]["k2"])] + \
190 | [float(model["distortion"]["k3"])] + \
191 | [float(model["distortion"]["k4"])]
192 | rectification_matrix = { "rows" : 3,
193 | "cols" : 3,
194 | "data" : [1, 0, 0, 0, 1, 0, 0, 0, 1]}
195 | projection_matrix = { "rows" : 3,
196 | "cols" : 4}
197 | projection_matrix["data"] = [float(i) for i in model["camera_matrix"][0]] + [0] + \
198 | [float(i) for i in model["camera_matrix"][1]] + [0] + \
199 | [float(i) for i in model["camera_matrix"][2]] + [0]
200 | unified_model_params = {
201 | "eta" : float(model["unified_model_params"]["eta"]),
202 | "xi" : float(model["unified_model_params"]["xi"])
203 | }
204 | calib_left = { "image_width" : int(model["image_size"]["width"]),
205 | "image_height" : int(model["image_size"]["height"]),
206 | "camera_name" : "stereo_left",
207 | "camera_matrix" : camera_matrix,
208 | "unified_model_params" : unified_model_params,
209 | "distortion_model" :"catadioptric",
210 | "distortion_coefficients": distortion_coefficients,
211 | "rectification_matrix" : rectification_matrix,
212 | "projection_matrix" : projection_matrix
213 | }
214 | with open(output, 'w') as yaml_file:
215 | yaml.safe_dump(calib_left, yaml_file, default_flow_style=False)
216 | print " ---created " + output + " distortion_model: catadioptric"
217 | if __name__ == "__main__":
218 | if len(sys.argv)<3:
219 | print "Not enough input arguments."
220 | print "[calib_input JSON]" "[calib_output YAML]"
221 | calib_file = sys.argv[1]
222 | output_file = sys.argv[2]
223 | with open(calib_file) as ff:
224 | data = json.load(ff)
225 | print "Read from file:", calib_file
226 | CreateCalib4LeftCam(data,output_file[:-5]+"_left_raw.yaml")
227 | CreateCalib4RightCam(data,output_file[:-5]+"_right_raw.yaml")
228 | CreateCalib4IMU(data,output_file[:-5]+"_imu.yaml")
229 | CreateCalib4Extrinsic(data,output_file[:-5]+"_extrinsic.yaml")
230 |
--------------------------------------------------------------------------------