├── .gitignore
├── CMakeLists.txt
├── LICENSE
├── README.md
├── cfg
│   ├── d4xx-default.json
│   ├── d4xx-high-accuracy.json
│   ├── d4xx-high-confidence.json
│   └── t265.yaml
├── launch
│   ├── apriltags_to_mavros.launch
│   ├── realsense_t265_d4xx.launch
│   ├── t265_all_nodes.launch
│   ├── t265_downfacing_tf_to_mavros.launch
│   ├── t265_fisheye_undistort.launch
│   ├── t265_precland.launch
│   ├── t265_tf_to_mavros.launch
│   └── tf_to_mavros.launch
├── package.xml
├── scripts
│   ├── AP_test_obstacle_distances.py
│   ├── __pycache__
│   │   └── set_origin.cpython-35.pyc
│   ├── apriltags3.py
│   ├── calibrate_extrinsics.py
│   ├── calibration_results
│   │   ├── H.txt
│   │   └── intrinsics.json
│   ├── d4xx_to_mavlink.py
│   ├── mavlink_control.py
│   ├── mavros_control1.py
│   ├── mavros_control2.py
│   ├── opencv_depth_filtering.py
│   ├── rs_depth.py
│   ├── rs_list_info.py
│   ├── rs_to_mavlink.py
│   ├── set_origin.py
│   ├── t265.sh
│   ├── t265_precland_apriltags.py
│   ├── t265_test_streams.py
│   └── t265_to_mavlink.py
└── src
    ├── t265_fisheye_undistort.cpp
    └── vision_to_mavros.cpp
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode/*
2 | *.log
3 | *.parm
4 | *.tlog*
5 | __pycache__/
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(vision_to_mavros)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | add_compile_options(-std=c++11)
6 |
7 | ## Find catkin macros and libraries
8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
9 | ## is used, also find other catkin packages
10 | find_package(catkin REQUIRED COMPONENTS
11 | roscpp
12 | rospy
13 | tf
14 | sensor_msgs
15 | cv_bridge
16 | std_msgs
17 | image_transport
18 | mavros_msgs
19 | )
20 |
21 | find_package(OpenCV REQUIRED COMPONENTS core calib3d)
22 |
23 | ## System dependencies are found with CMake's conventions
24 | # find_package(Boost REQUIRED COMPONENTS system)
25 |
26 |
27 | ## Uncomment this if the package has a setup.py. This macro ensures
28 | ## modules and global scripts declared therein get installed
29 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
30 | # catkin_python_setup()
31 |
32 | ################################################
33 | ## Declare ROS messages, services and actions ##
34 | ################################################
35 |
36 | ## To declare and build messages, services or actions from within this
37 | ## package, follow these steps:
38 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
39 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
40 | ## * In the file package.xml:
41 | ## * add a build_depend tag for "message_generation"
42 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
43 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
44 | ## but can be declared for certainty nonetheless:
45 | ## * add a exec_depend tag for "message_runtime"
46 | ## * In this file (CMakeLists.txt):
47 | ## * add "message_generation" and every package in MSG_DEP_SET to
48 | ## find_package(catkin REQUIRED COMPONENTS ...)
49 | ## * add "message_runtime" and every package in MSG_DEP_SET to
50 | ## catkin_package(CATKIN_DEPENDS ...)
51 | ## * uncomment the add_*_files sections below as needed
52 | ## and list every .msg/.srv/.action file to be processed
53 | ## * uncomment the generate_messages entry below
54 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
55 |
56 | ## Generate messages in the 'msg' folder
57 | # add_message_files(
58 | # FILES
59 | # Message1.msg
60 | # Message2.msg
61 | # )
62 |
63 | ## Generate services in the 'srv' folder
64 | # add_service_files(
65 | # FILES
66 | # Service1.srv
67 | # Service2.srv
68 | # )
69 |
70 | ## Generate actions in the 'action' folder
71 | # add_action_files(
72 | # FILES
73 | # Action1.action
74 | # Action2.action
75 | # )
76 |
77 | ## Generate added messages and services with any dependencies listed here
78 | # generate_messages(
79 | # DEPENDENCIES
80 | # std_msgs # Or other packages containing msgs
81 | # )
82 |
83 | ################################################
84 | ## Declare ROS dynamic reconfigure parameters ##
85 | ################################################
86 |
87 | ## To declare and build dynamic reconfigure parameters within this
88 | ## package, follow these steps:
89 | ## * In the file package.xml:
90 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
91 | ## * In this file (CMakeLists.txt):
92 | ## * add "dynamic_reconfigure" to
93 | ## find_package(catkin REQUIRED COMPONENTS ...)
94 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
95 | ## and list every .cfg file to be processed
96 |
97 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
98 | # generate_dynamic_reconfigure_options(
99 | # cfg/DynReconf1.cfg
100 | # cfg/DynReconf2.cfg
101 | # )
102 |
103 | ###################################
104 | ## catkin specific configuration ##
105 | ###################################
106 | ## The catkin_package macro generates cmake config files for your package
107 | ## Declare things to be passed to dependent projects
108 | ## INCLUDE_DIRS: uncomment this if your package contains header files
109 | ## LIBRARIES: libraries you create in this project that dependent projects also need
110 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
111 | ## DEPENDS: system dependencies of this project that dependent projects also need
112 | catkin_package(
113 | # INCLUDE_DIRS include
114 | # LIBRARIES vision_to_mavros
115 | # CATKIN_DEPENDS roscpp rospy tf
116 | # DEPENDS system_lib
117 | )
118 |
119 | ###########
120 | ## Build ##
121 | ###########
122 |
123 | ## Specify additional locations of header files
124 | ## Your package locations should be listed before other locations
125 | include_directories(
126 | # include
127 | ${catkin_INCLUDE_DIRS}
128 | )
129 |
130 | ## Declare a C++ library
131 | # add_library(${PROJECT_NAME}
132 | # src/${PROJECT_NAME}/vision_to_mavros.cpp
133 | # )
134 |
135 | ## Add cmake target dependencies of the library
136 | ## as an example, code may need to be generated before libraries
137 | ## either from message generation or dynamic reconfigure
138 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
139 |
140 | ## Declare a C++ executable
141 | ## With catkin_make all packages are built within a single CMake context
142 | ## The recommended prefix ensures that target names across packages don't collide
143 | ## Specify libraries to link a library or executable target against
144 | add_executable(${PROJECT_NAME}_node src/vision_to_mavros.cpp)
145 | target_link_libraries(${PROJECT_NAME}_node
146 | ${catkin_LIBRARIES}
147 | )
148 |
149 | add_executable(t265_fisheye_undistort_node src/t265_fisheye_undistort.cpp)
150 | target_link_libraries(t265_fisheye_undistort_node
151 | ${catkin_LIBRARIES} ${OpenCV_LIBRARIES}
152 | )
153 |
154 | #############
155 | ## Install ##
156 | #############
157 |
158 | # all install targets should use catkin DESTINATION variables
159 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
160 |
161 | ## Mark executable scripts (Python etc.) for installation
162 | ## in contrast to setup.py, you can choose the destination
163 | # install(PROGRAMS
164 | # scripts/my_python_script
165 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
166 | # )
167 |
168 | ## Mark executables and/or libraries for installation
169 | install(TARGETS ${PROJECT_NAME}_node
170 | ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
171 | LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
172 | RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
173 | )
174 |
175 | install(DIRECTORY
176 | launch
177 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
178 | )
179 |
180 | ## Mark cpp header files for installation
181 | # install(DIRECTORY include/${PROJECT_NAME}/
182 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
183 | # FILES_MATCHING PATTERN "*.h"
184 | # PATTERN ".svn" EXCLUDE
185 | # )
186 |
187 | ## Mark other files for installation (e.g. launch and bag files, etc.)
188 | # install(FILES
189 | # # myfile1
190 | # # myfile2
191 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
192 | # )
193 |
194 | #############
195 | ## Testing ##
196 | #############
197 |
198 | ## Add gtest based cpp test target and link libraries
199 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_vision_to_mavros.cpp)
200 | # if(TARGET ${PROJECT_NAME}-test)
201 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
202 | # endif()
203 |
204 | ## Add folders to be run by python nosetests
205 | # catkin_add_nosetests(test)
206 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | A collection of [ROS](https://www.ros.org/) and non-ROS (Python, C++) code that converts data from vision-based systems (external localization sources such as fiducial tags, VIO, SLAM, or depth images) into the corresponding [MAVROS](http://wiki.ros.org/mavros) topics or [MAVLink](https://mavlink.io/en/) messages that a flight control stack can consume for precise localization and navigation tasks.
2 |
3 | The code has been tested with, and comes with instructions for, [ArduPilot](https://ardupilot.org/). The main sensor is the [Intel Realsense Tracking Camera T265](https://www.intelrealsense.com/tracking-camera-t265/).
4 |
5 | --------------------------------------------------------------------------
6 | # Installation and setup with ArduPilot:
7 |
8 | ## ROS:
9 | Follow this wiki page: https://ardupilot.org/dev/docs/ros-vio-tracking-camera.html or [this blog post](https://discuss.ardupilot.org/t/integration-of-ardupilot-and-vio-tracking-camera-part-1-getting-started-with-the-intel-realsense-t265-on-rasberry-pi-3b/43162/68)
10 |
11 | ## non-ROS:
12 | Follow this wiki page: https://ardupilot.org/copter/docs/common-vio-tracking-camera.html
13 |
14 | --------------------------------------------------------------------------
15 | # What's included (the main components):
16 |
17 | ## ROS nodes:
18 |
19 | * **[`vision_to_mavros_node`](#vision_to_mavros_node)**: Transformation of `tf` pose data to NED frame for vision-related MAVROS topics. A full explanation and usage with the Realsense T265 can be found [in this blog post](https://discuss.ardupilot.org/t/integration-of-ardupilot-and-vio-tracking-camera-part-2-complete-installation-and-indoor-non-gps-flights/43405), and [in this one](https://discuss.ardupilot.org/t/indoor-non-gps-flight-using-apriltags-ros-based/42878) with [AprilTags](https://github.com/AprilRobotics/apriltag).
20 | * **[`t265_fisheye_undistort_node`](#t265_fisheye_undistort_node)**: Undistorts and rectifies fisheye images from the Realsense T265 for other packages to consume. A full explanation and usage can be found [in this blog post](https://discuss.ardupilot.org/t/precision-landing-with-ros-realsense-t265-camera-and-apriltag-3-part-2-2/51493).
21 |
22 | ## non-ROS scripts:
23 |
24 | * **[`t265_to_mavlink.py`](#t265_to_mavlink)**: a more elaborate version of [`vision_to_mavros_node`](#vision_to_mavros_node), written in Python; this is where most new development goes.
25 | * **[`t265_precland_apriltags.py`](#t265_precland_apriltags)**: uses the T265 images for precision landing (while also using the pose data), as reported in [this blog post](https://discuss.ardupilot.org/t/precision-landing-with-realsense-t265-camera-and-apriltag-part-1-2/48978/17).
26 | * **[`t265_test_streams.py`](#t265_test_streams)**: tests whether the T265 is connected and [`librealsense`](https://github.com/IntelRealSense/librealsense) is working properly; extracted from [here](https://github.com/IntelRealSense/librealsense/blob/master/wrappers/python/examples/t265_example.py).
27 |
28 | --------------------------------------------------------------------------
29 | # ROS nodes
30 |
31 | ## `vision_to_mavros_node`
32 |
33 |
34 |
35 | ROS node that listens to `/tf`, transforms the pose of `source_frame_id` into `target_frame_id`, then rotates the frame to match `body_frame` according to the [ENU convention](https://dev.px4.io/en/ros/external_position_estimation.html#ros_reference_frames), using user-supplied roll, pitch, yaw and gamma angles.
36 |
37 | ### How it works
38 | - Suppose we have a frame named `source_frame_id` that is measured in a frame named `target_frame_id`. Let `target_frame_id` be the `world {W}` frame; we want to transform `source_frame_id` into the `body {B}` frame so that `{B}` and `{W}` conform to the `ENU` convention (x pointing East, y pointing North, z pointing up).
39 |
40 |
41 |
42 | - Now assume we already have a default `{B}` and `{W}` that are correct in `ENU`. We will rotate `{B}` in `{W}` by an angle `gamma_world`, following the right-hand rule. For example, `gamma_world` equal to `-1.5707963 (-PI/2)` will align `{B}`'s x axis with `{W}`'s y axis.
43 |
44 | - `source_frame_id` will be aligned with that default `{B}` by rotating around its own x, y, z axis by angles defined by `roll_cam`, `pitch_cam`, `yaw_cam`, in that order.
45 |
46 | ### Parameters:
47 |
48 | * `target_frame_id`: id of target frame (world/map/base_link)
49 | * `source_frame_id`: id of source frame (camera/imu/body_link)
50 | * `output_rate`: the output rate at which the pose data will be published.
51 | * `roll_cam`, `pitch_cam`, `yaw_cam`, `gamma_world`: angles (in radians) that will convert pose received from `source_frame_id` to body frame, according to ENU conventions.
52 |
53 | ### Subscribed topic:
54 | * `/tf` containing pose/odometry data.
55 |
56 | ### Published topic:
57 | * `/vision_pose` of type [geometry_msgs/PoseStamped](http://docs.ros.org/api/geometry_msgs/html/msg/PoseStamped.html) - single pose to be sent to the FCU autopilot (ArduPilot / PX4), published at a frequency defined by `output_rate`.
58 | * `/body_frame/path` of type [nav_msgs/Path](http://docs.ros.org/api/nav_msgs/html/msg/Path.html) - visualize trajectory of body frame in rviz.
59 |
60 | ### Example applications
61 |
62 | ### Autonomous flight with [Intel® RealSense™ Tracking Camera T265](https://www.intelrealsense.com/tracking-camera-t265/) and [ArduPilot](http://ardupilot.org/):
63 |
64 |
65 |
66 | * A complete guide, including installation, configuration and flight tests, can be found in the following [blog posts](https://discuss.ardupilot.org/t/gsoc-2019-integration-of-ardupilot-and-vio-tracking-camera-for-gps-less-localization-and-navigation/42394).
67 |
68 | There are 3 nodes running in this setup. In 3 separate terminals on the RPi:
69 |
70 | * T265 node: `roslaunch realsense2_camera rs_t265.launch`. The topics `/camera/odom/sample` and `/tf` should be published.
71 |
72 | * MAVROS node: `roslaunch mavros apm.launch` (with `fcu_url` and other parameters in `apm.launch` modified accordingly).
73 |
74 | `rostopic echo /mavros/state` should show that the FCU is connected.
75 |
76 | `rostopic echo /mavros/vision_pose/pose` should not show any data yet, since nothing is publishing to this topic at this stage.
77 |
78 | * vision_to_mavros node: `roslaunch vision_to_mavros t265_tf_to_mavros.launch`
79 |
80 | `rostopic echo /mavros/vision_pose/pose` should now show pose data from the T265.
81 |
82 | `rostopic hz /mavros/vision_pose/pose` should show that the topic is being published at 30Hz.
83 |
84 | Once you have verified each node can run successfully, next time you can launch all 3 nodes at once with: `roslaunch vision_to_mavros t265_all_nodes.launch`, with:
85 |
86 | * `rs_t265.launch` as originally provided by `realsense-ros`.
87 | * `apm.launch` modified with your own configuration.
88 | * `t265_tf_to_mavros.launch` as is.
89 |
90 | ### View trajectory on rviz
91 | After running `roslaunch vision_to_mavros t265_all_nodes.launch`, here's how to view the trajectory of the T265 in rviz:
92 | 1. On host computer, open up rviz: `rosrun rviz rviz`.
93 | 2. Add [`Path`](http://docs.ros.org/api/nav_msgs/html/msg/Path.html), topic name: `/body_frame/path` to rviz.
94 | 3. Change `Fixed Frame` to `target_frame_id`, in the case of Realsense T265: `camera_odom_frame`.
95 |
96 |
97 |
98 | ### Usage with [AprilTag](https://github.com/AprilRobotics/apriltag):
99 |
100 | ```
101 | roslaunch vision_to_mavros apriltags_to_mavros.launch
102 | ```
103 | This will launch `usb_cam` to capture raw images, perform rectification through `image_proc`, use `apriltag_ros` to obtain the pose of the tag in the camera frame, and finally run `vision_to_mavros` to get the pose of the camera in the tag frame, transform it to the body frame using the camera orientation, and publish the body pose to the `/mavros/vision_pose/pose` topic. Note that `mavros` should be launched separately since it produces a lot of terminal output.
104 |
105 | ## `t265_fisheye_undistort_node`
106 | The image stream from one of the T265’s cameras is undistorted and processed to detect an [AprilTag](https://april.eecs.umich.edu/software/apriltag.html) visual marker; MAVLink’s [Landing Target Protocol](https://mavlink.io/en/services/landing_target.html), which is supported by ArduPilot, is then followed to perform precision landing.
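
As a rough sketch of the undistortion step only (not this node's actual implementation, which lives in `src/t265_fisheye_undistort.cpp` and also handles rectification of the stereo pair), the calibration stored in `cfg/t265.yaml` could be used with OpenCV's fisheye model along these lines:

```python
import cv2
import numpy as np

# Left-camera intrinsics and distortion copied from cfg/t265.yaml (K1 / D1).
K1 = np.array([[284.0513916015625, 0.0, 427.4079895019531],
               [0.0, 285.19189453125, 401.1722106933594],
               [0.0, 0.0, 1.0]])
D1 = np.array([-0.0015451919753104448, 0.04006154090166092,
               -0.03794296830892563, 0.006493294145911932])
input_size, output_size = (848, 800), (800, 800)

# Precompute the undistort/rectify maps once, then remap every incoming frame.
P = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(
    K1, D1, input_size, np.eye(3), balance=0.0, new_size=output_size)
map1, map2 = cv2.fisheye.initUndistortRectifyMap(
    K1, D1, np.eye(3), P, output_size, cv2.CV_16SC2)

def undistort(fisheye_img):
    """Return an undistorted 800x800 view of a raw 848x800 fisheye frame."""
    return cv2.remap(fisheye_img, map1, map2, interpolation=cv2.INTER_LINEAR)
```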
107 |
108 | --------------------------------------------------------------------------
109 |
110 | # non-ROS scripts
111 |
112 | ## `t265_test_streams`
113 | Testing the installation of `librealsense` and USB connection with the Realsense T265. Extracted from [here](https://github.com/IntelRealSense/librealsense/tree/master/examples).
114 |
115 | ## `t265_to_mavlink`
116 | The main Python script that integrates the T265 with ArduPilot. Its usage is documented in the following blog posts and wiki pages (a stripped-down sketch of the core loop follows the list):
117 | - Wiki for [non-ROS](https://ardupilot.org/copter/docs/common-vio-tracking-camera.html).
118 | - [Overall introduction](https://discuss.ardupilot.org/t/gsoc-2019-integration-of-ardupilot-and-vio-tracking-camera-for-gps-less-localization-and-navigation/42394).
119 | - [Installation](https://discuss.ardupilot.org/t/integration-of-ardupilot-and-vio-tracking-camera-part-1-getting-started-with-the-intel-realsense-t265-on-rasberry-pi-3b/43162/1) and [setup](https://discuss.ardupilot.org/t/integration-of-ardupilot-and-vio-tracking-camera-part-4-non-ros-bridge-to-mavlink-in-python/44001).
120 | - [Flight test](https://discuss.ardupilot.org/t/integration-of-ardupilot-and-vio-tracking-camera-part-3-indoor-autonomous-flights-and-performance-tests/43626) and [advanced usage](https://discuss.ardupilot.org/t/integration-of-ardupilot-and-vio-tracking-camera-part-5-camera-position-offsets-compensation-scale-calibration-and-compass-north-alignment-beta/44984).
121 |
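At its core, the script reads the T265 pose through `pyrealsense2` and forwards it to the flight controller as a MAVLink `VISION_POSITION_ESTIMATE` message. A minimal sketch of that idea, assuming a forward-facing camera and a serial link on `/dev/ttyUSB0` (the real script adds the full orientation/frame transforms, camera offset compensation, confidence reporting and more):

```python
import time
import pyrealsense2 as rs
from pymavlink import mavutil

conn = mavutil.mavlink_connection('/dev/ttyUSB0', baud=921600)  # placeholder port/baud

pipe = rs.pipeline()
cfg = rs.config()
cfg.enable_stream(rs.stream.pose)
pipe.start(cfg)

try:
    while True:
        frames = pipe.wait_for_frames()
        pose = frames.get_pose_frame()
        if not pose:
            continue
        d = pose.get_pose_data()
        # T265 pose frame: x right, y up, z backward. For a forward-facing camera,
        # a simple NED mapping is north = -z, east = x, down = -y (offsets ignored).
        x, y, z = -d.translation.z, d.translation.x, -d.translation.y
        # The real script also converts the T265 quaternion to NED roll/pitch/yaw;
        # that transform is omitted here and placeholders are sent instead.
        conn.mav.vision_position_estimate_send(
            int(time.time() * 1e6),   # usec
            x, y, z,                  # position (m), NED
            0.0, 0.0, 0.0)            # roll, pitch, yaw placeholders (rad)
        time.sleep(1.0 / 30)          # ~30 Hz
finally:
    pipe.stop()
```
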
122 | ## `t265_precland_apriltags`
123 | Same as [`t265_fisheye_undistort_node`](#t265_fisheye_undistort_node), but in Python instead of ROS.
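
For reference, a rough sketch of the landing-target side, using the bundled `scripts/apriltags3.py` wrapper on an already undistorted grayscale frame (the intrinsics, tag size and connection string below are placeholders, and the real script adds filtering, frame handling and status reporting):

```python
import math
import time
from pymavlink import mavutil
from apriltags3 import Detector   # scripts/apriltags3.py in this repo

conn = mavutil.mavlink_connection('/dev/ttyUSB0', baud=921600)  # placeholder port/baud
detector = Detector(families='tag36h11', searchpath=['apriltags'])

# Placeholder pinhole intrinsics of the undistorted image and tag edge length (m).
fx, fy, cx, cy = 285.0, 285.0, 400.0, 400.0
tag_size_m = 0.144

def send_landing_target(gray_image):
    """Detect an AprilTag and report it via the MAVLink Landing Target Protocol."""
    tags = detector.detect(gray_image, estimate_tag_pose=True,
                           camera_params=(fx, fy, cx, cy), tag_size=tag_size_m)
    if not tags:
        return
    # Tag position in the camera frame (x right, y down, z forward), meters.
    x_m, y_m, z_m = tags[0].pose_t.flatten()
    conn.mav.landing_target_send(
        int(time.time() * 1e6),               # time_usec
        0,                                    # target_num
        mavutil.mavlink.MAV_FRAME_BODY_FRD,   # frame (an assumption in this sketch)
        math.atan2(x_m, z_m),                 # angle_x: offset from image center (rad)
        math.atan2(y_m, z_m),                 # angle_y (rad)
        math.sqrt(x_m**2 + y_m**2 + z_m**2),  # distance to the tag (m)
        0.0, 0.0)                             # size_x, size_y (not used here)
```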
124 |
--------------------------------------------------------------------------------
/cfg/d4xx-default.json:
--------------------------------------------------------------------------------
1 | {
2 | "aux-param-autoexposure-setpoint": "1536",
3 | "aux-param-colorcorrection1": "0.298828",
4 | "aux-param-colorcorrection10": "-0",
5 | "aux-param-colorcorrection11": "-0",
6 | "aux-param-colorcorrection12": "-0",
7 | "aux-param-colorcorrection2": "0.293945",
8 | "aux-param-colorcorrection3": "0.293945",
9 | "aux-param-colorcorrection4": "0.114258",
10 | "aux-param-colorcorrection5": "-0",
11 | "aux-param-colorcorrection6": "-0",
12 | "aux-param-colorcorrection7": "-0",
13 | "aux-param-colorcorrection8": "-0",
14 | "aux-param-colorcorrection9": "-0",
15 | "aux-param-depthclampmax": "65536",
16 | "aux-param-depthclampmin": "0",
17 | "aux-param-disparityshift": "0",
18 | "controls-autoexposure-auto": "True",
19 | "controls-autoexposure-manual": "8500",
20 | "controls-color-autoexposure-auto": "True",
21 | "controls-color-autoexposure-manual": "166",
22 | "controls-color-backlight-compensation": "0",
23 | "controls-color-brightness": "0",
24 | "controls-color-contrast": "50",
25 | "controls-color-gain": "64",
26 | "controls-color-gamma": "300",
27 | "controls-color-hue": "0",
28 | "controls-color-power-line-frequency": "3",
29 | "controls-color-saturation": "64",
30 | "controls-color-sharpness": "50",
31 | "controls-color-white-balance-auto": "True",
32 | "controls-color-white-balance-manual": "4600",
33 | "controls-depth-gain": "16",
34 | "controls-laserpower": "150",
35 | "controls-laserstate": "on",
36 | "ignoreSAD": "0",
37 | "param-amplitude-factor": "0",
38 | "param-autoexposure-setpoint": "1536",
39 | "param-censusenablereg-udiameter": "9",
40 | "param-censusenablereg-vdiameter": "9",
41 | "param-censususize": "9",
42 | "param-censusvsize": "9",
43 | "param-depthclampmax": "65536",
44 | "param-depthclampmin": "0",
45 | "param-depthunits": "1000",
46 | "param-disableraucolor": "0",
47 | "param-disablesadcolor": "0",
48 | "param-disablesadnormalize": "0",
49 | "param-disablesloleftcolor": "0",
50 | "param-disableslorightcolor": "0",
51 | "param-disparitymode": "0",
52 | "param-disparityshift": "0",
53 | "param-lambdaad": "800",
54 | "param-lambdacensus": "26",
55 | "param-leftrightthreshold": "24",
56 | "param-maxscorethreshb": "2047",
57 | "param-medianthreshold": "500",
58 | "param-minscorethresha": "1",
59 | "param-neighborthresh": "7",
60 | "param-raumine": "1",
61 | "param-rauminn": "1",
62 | "param-rauminnssum": "3",
63 | "param-raumins": "1",
64 | "param-rauminw": "1",
65 | "param-rauminwesum": "3",
66 | "param-regioncolorthresholdb": "0.0499022",
67 | "param-regioncolorthresholdg": "0.0499022",
68 | "param-regioncolorthresholdr": "0.0499022",
69 | "param-regionshrinku": "3",
70 | "param-regionshrinkv": "1",
71 | "param-robbinsmonrodecrement": "10",
72 | "param-robbinsmonroincrement": "10",
73 | "param-rsmdiffthreshold": "4",
74 | "param-rsmrauslodiffthreshold": "1",
75 | "param-rsmremovethreshold": "0.375",
76 | "param-scanlineedgetaub": "72",
77 | "param-scanlineedgetaug": "72",
78 | "param-scanlineedgetaur": "72",
79 | "param-scanlinep1": "60",
80 | "param-scanlinep1onediscon": "105",
81 | "param-scanlinep1twodiscon": "70",
82 | "param-scanlinep2": "342",
83 | "param-scanlinep2onediscon": "190",
84 | "param-scanlinep2twodiscon": "130",
85 | "param-secondpeakdelta": "325",
86 | "param-texturecountthresh": "0",
87 | "param-texturedifferencethresh": "0",
88 | "param-usersm": "1",
89 | "param-zunits": "1000",
90 | "stream-depth-format": "Z16",
91 | "stream-fps": "30",
92 | "stream-height": "480",
93 | "stream-width": "848"
94 | }
--------------------------------------------------------------------------------
/cfg/d4xx-high-accuracy.json:
--------------------------------------------------------------------------------
1 | {
2 | "aux-param-autoexposure-setpoint": "1536",
3 | "aux-param-colorcorrection1": "0.298828",
4 | "aux-param-colorcorrection10": "-0",
5 | "aux-param-colorcorrection11": "-0",
6 | "aux-param-colorcorrection12": "-0",
7 | "aux-param-colorcorrection2": "0.293945",
8 | "aux-param-colorcorrection3": "0.293945",
9 | "aux-param-colorcorrection4": "0.114258",
10 | "aux-param-colorcorrection5": "-0",
11 | "aux-param-colorcorrection6": "-0",
12 | "aux-param-colorcorrection7": "-0",
13 | "aux-param-colorcorrection8": "-0",
14 | "aux-param-colorcorrection9": "-0",
15 | "aux-param-depthclampmax": "65536",
16 | "aux-param-depthclampmin": "0",
17 | "aux-param-disparityshift": "0",
18 | "controls-autoexposure-auto": "True",
19 | "controls-autoexposure-manual": "8500",
20 | "controls-color-autoexposure-auto": "True",
21 | "controls-color-autoexposure-manual": "166",
22 | "controls-color-backlight-compensation": "0",
23 | "controls-color-brightness": "0",
24 | "controls-color-contrast": "50",
25 | "controls-color-gain": "64",
26 | "controls-color-gamma": "300",
27 | "controls-color-hue": "0",
28 | "controls-color-power-line-frequency": "3",
29 | "controls-color-saturation": "64",
30 | "controls-color-sharpness": "50",
31 | "controls-color-white-balance-auto": "True",
32 | "controls-color-white-balance-manual": "4600",
33 | "controls-depth-gain": "16",
34 | "controls-laserpower": "150",
35 | "controls-laserstate": "on",
36 | "ignoreSAD": "0",
37 | "param-amplitude-factor": "0",
38 | "param-autoexposure-setpoint": "1536",
39 | "param-censusenablereg-udiameter": "9",
40 | "param-censusenablereg-vdiameter": "9",
41 | "param-censususize": "9",
42 | "param-censusvsize": "9",
43 | "param-depthclampmax": "65536",
44 | "param-depthclampmin": "0",
45 | "param-depthunits": "1000",
46 | "param-disableraucolor": "0",
47 | "param-disablesadcolor": "0",
48 | "param-disablesadnormalize": "0",
49 | "param-disablesloleftcolor": "0",
50 | "param-disableslorightcolor": "1",
51 | "param-disparitymode": "0",
52 | "param-disparityshift": "0",
53 | "param-lambdaad": "751",
54 | "param-lambdacensus": "6",
55 | "param-leftrightthreshold": "10",
56 | "param-maxscorethreshb": "2893",
57 | "param-medianthreshold": "796",
58 | "param-minscorethresha": "4",
59 | "param-neighborthresh": "108",
60 | "param-raumine": "6",
61 | "param-rauminn": "3",
62 | "param-rauminnssum": "7",
63 | "param-raumins": "2",
64 | "param-rauminw": "2",
65 | "param-rauminwesum": "12",
66 | "param-regioncolorthresholdb": "0.785714",
67 | "param-regioncolorthresholdg": "0.565558",
68 | "param-regioncolorthresholdr": "0.985323",
69 | "param-regionshrinku": "3",
70 | "param-regionshrinkv": "0",
71 | "param-robbinsmonrodecrement": "25",
72 | "param-robbinsmonroincrement": "2",
73 | "param-rsmdiffthreshold": "1.65625",
74 | "param-rsmrauslodiffthreshold": "0.71875",
75 | "param-rsmremovethreshold": "0.809524",
76 | "param-scanlineedgetaub": "13",
77 | "param-scanlineedgetaug": "15",
78 | "param-scanlineedgetaur": "30",
79 | "param-scanlinep1": "155",
80 | "param-scanlinep1onediscon": "160",
81 | "param-scanlinep1twodiscon": "59",
82 | "param-scanlinep2": "190",
83 | "param-scanlinep2onediscon": "507",
84 | "param-scanlinep2twodiscon": "493",
85 | "param-secondpeakdelta": "647",
86 | "param-texturecountthresh": "0",
87 | "param-texturedifferencethresh": "1722",
88 | "param-usersm": "1",
89 | "param-zunits": "1000",
90 | "stream-depth-format": "Z16",
91 | "stream-fps": "30",
92 | "stream-height": "480",
93 | "stream-width": "848"
94 | }
--------------------------------------------------------------------------------
/cfg/d4xx-high-confidence.json:
--------------------------------------------------------------------------------
1 | {
2 | "aux-param-autoexposure-setpoint": "2000",
3 | "aux-param-colorcorrection1": "0.298828",
4 | "aux-param-colorcorrection10": "0",
5 | "aux-param-colorcorrection11": "0",
6 | "aux-param-colorcorrection12": "0",
7 | "aux-param-colorcorrection2": "0.293945",
8 | "aux-param-colorcorrection3": "0.293945",
9 | "aux-param-colorcorrection4": "0.114258",
10 | "aux-param-colorcorrection5": "0",
11 | "aux-param-colorcorrection6": "0",
12 | "aux-param-colorcorrection7": "0",
13 | "aux-param-colorcorrection8": "0",
14 | "aux-param-colorcorrection9": "0",
15 | "aux-param-depthclampmax": "65536",
16 | "aux-param-depthclampmin": "0",
17 | "aux-param-disparityshift": "0",
18 | "controls-autoexposure-auto": "True",
19 | "controls-autoexposure-manual": "8500",
20 | "controls-depth-gain": "16",
21 | "controls-laserpower": "0",
22 | "controls-laserstate": "on",
23 | "ignoreSAD": "0",
24 | "param-autoexposure-setpoint": "2000",
25 | "param-censusenablereg-udiameter": "9",
26 | "param-censusenablereg-vdiameter": "9",
27 | "param-censususize": "9",
28 | "param-censusvsize": "9",
29 | "param-depthclampmax": "65536",
30 | "param-depthclampmin": "0",
31 | "param-depthunits": "1000",
32 | "param-disableraucolor": "0",
33 | "param-disablesadcolor": "0",
34 | "param-disablesadnormalize": "0",
35 | "param-disablesloleftcolor": "0",
36 | "param-disableslorightcolor": "1",
37 | "param-disparitymode": "0",
38 | "param-disparityshift": "0",
39 | "param-lambdaad": "751",
40 | "param-lambdacensus": "6",
41 | "param-leftrightthreshold": "10",
42 | "param-maxscorethreshb": "1423",
43 | "param-medianthreshold": "625",
44 | "param-minscorethresha": "4",
45 | "param-neighborthresh": "108",
46 | "param-raumine": "6",
47 | "param-rauminn": "3",
48 | "param-rauminnssum": "7",
49 | "param-raumins": "2",
50 | "param-rauminw": "2",
51 | "param-rauminwesum": "12",
52 | "param-regioncolorthresholdb": "0.784736",
53 | "param-regioncolorthresholdg": "0.565558",
54 | "param-regioncolorthresholdr": "0.985323",
55 | "param-regionshrinku": "3",
56 | "param-regionshrinkv": "0",
57 | "param-robbinsmonrodecrement": "5",
58 | "param-robbinsmonroincrement": "5",
59 | "param-rsmdiffthreshold": "1.65625",
60 | "param-rsmrauslodiffthreshold": "0.71875",
61 | "param-rsmremovethreshold": "0.809524",
62 | "param-scanlineedgetaub": "13",
63 | "param-scanlineedgetaug": "15",
64 | "param-scanlineedgetaur": "30",
65 | "param-scanlinep1": "30",
66 | "param-scanlinep1onediscon": "76",
67 | "param-scanlinep1twodiscon": "86",
68 | "param-scanlinep2": "98",
69 | "param-scanlinep2onediscon": "105",
70 | "param-scanlinep2twodiscon": "33",
71 | "param-secondpeakdelta": "775",
72 | "param-texturecountthresh": "4",
73 | "param-texturedifferencethresh": "50",
74 | "param-usersm": "1",
75 | "param-zunits": "1000"
76 | }
--------------------------------------------------------------------------------
/cfg/t265.yaml:
--------------------------------------------------------------------------------
1 | %YAML:1.0
2 | # Input-output configuration
3 | input: [848, 800]
4 | output: [800, 800]
5 |
6 | # Camera calibration parameter
7 | K1: !!opencv-matrix
8 | rows: 3
9 | cols: 3
10 | dt: d
11 | data: [ 284.0513916015625, 0.0, 427.4079895019531,
12 | 0.0, 285.19189453125, 401.1722106933594,
13 | 0.0, 0.0, 1.0 ]
14 | D1: !!opencv-matrix
15 | rows: 1
16 | cols: 4
17 | dt: d
18 | data: [ -0.0015451919753104448, 0.04006154090166092, -0.03794296830892563, 0.006493294145911932]
19 | K2: !!opencv-matrix
20 | rows: 3
21 | cols: 3
22 | dt: d
23 | data: [ 284.285400390625, 0.0, 431.7026062011719,
24 | 0.0, 285.2674865722656, 385.2261962890625,
25 | 0.0, 0.0, 1.0 ]
26 | D2: !!opencv-matrix
27 | rows: 1
28 | cols: 4
29 | dt: d
30 | data: [ 0.0007516997284255922, 0.032092008739709854, -0.03002322092652321, 0.004072860814630985]
31 | R: !!opencv-matrix
32 | rows: 3
33 | cols: 3
34 | dt: d
35 | data: [ 9.99994218e-01, -8.26635107e-04, -3.29655036e-03,
36 | 8.28813005e-04, 9.99999523e-01, 6.59199432e-04,
37 | 3.29600368e-03, -6.61927043e-04, 9.99994397e-01]
38 | T: [ -6.42428249e-02, -1.58874827e-05, 3.78768891e-06 ]
39 |
--------------------------------------------------------------------------------
/launch/apriltags_to_mavros.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/realsense_t265_d4xx.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/t265_all_nodes.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/t265_downfacing_tf_to_mavros.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/t265_fisheye_undistort.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/t265_precland.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/t265_tf_to_mavros.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/launch/tf_to_mavros.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | vision_to_mavros
4 | 0.0.0
5 | The vision_to_mavros package
6 |
7 |
8 |
9 |
10 | ubuntu
11 |
12 |
13 |
14 |
15 |
16 | TODO
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 | catkin
52 | roscpp
53 | rospy
54 | tf
55 | sensor_msgs
56 | cv_bridge
57 | std_msgs
58 | image_transport
59 |
60 | mavros_msgs
61 | roscpp
62 | rospy
63 | tf
64 | sensor_msgs
65 | cv_bridge
66 | std_msgs
67 | image_transport
68 |
69 | roscpp
70 | rospy
71 | tf
72 | sensor_msgs
73 | cv_bridge
74 | std_msgs
75 | image_transport
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
--------------------------------------------------------------------------------
/scripts/AP_test_obstacle_distances.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | ######################################################
4 | ##
5 | ## Testing OBSTACLE_DISTANCE messages with ArduPilot and Mission Planner
6 | ##
7 | ######################################################
8 |
9 | # Set MAVLink protocol to 2.
10 | import os
11 | os.environ["MAVLINK20"] = "1"
12 |
13 | # Import the libraries
14 | import sys
15 | import numpy as np
16 | import time
17 | import argparse
18 | import threading
19 |
20 | from time import sleep
21 | from apscheduler.schedulers.background import BackgroundScheduler
22 | from dronekit import connect
23 |
24 | ######################################################
25 | ## Reconfigurable parameters ##
26 | ######################################################
27 |
28 | # Default configurations for connection to the FCU
29 | connection_string_default = '/dev/ttyUSB0'
30 | connection_baudrate_default = 921600
31 |
32 | # Enable/disable each message/function individually
33 | enable_msg_obstacle_distance = True
34 | enable_msg_distance_sensor = False
35 | obstacle_distance_msg_hz = 15.0
36 |
37 | # lock for thread synchronization
38 | lock = threading.Lock()
39 |
40 | # FCU connection variables
41 | vehicle = None
42 | is_vehicle_connected = False
43 |
44 | ######################################################
45 | ## Parsing user inputs ##
46 | ######################################################
47 |
48 | parser = argparse.ArgumentParser(description='Test OBSTACLE_DISTANCE and DISTANCE_SENSOR messages with ArduPilot')
49 | parser.add_argument('--connect',
50 | help="Vehicle connection target string. If not specified, a default string will be used.")
51 | parser.add_argument('--baudrate', type=float,
52 | help="Vehicle connection baudrate. If not specified, a default value will be used.")
53 |
54 | args = parser.parse_args()
55 |
56 | connection_string = args.connect
57 | connection_baudrate = args.baudrate
58 |
59 | # Using default values if no specified inputs
60 | if not connection_string:
61 | connection_string = connection_string_default
62 | print("INFO: Using default connection_string", connection_string)
63 | else:
64 | print("INFO: Using connection_string", connection_string)
65 |
66 | if not connection_baudrate:
67 | connection_baudrate = connection_baudrate_default
68 | print("INFO: Using default connection_baudrate", connection_baudrate)
69 | else:
70 | print("INFO: Using connection_baudrate", connection_baudrate)
71 |
72 | ######################################################
73 | ## Functions - MAVLink ##
74 | ######################################################
75 |
76 | # https://mavlink.io/en/messages/common.html#OBSTACLE_DISTANCE
77 | def send_obstacle_distance_message():
78 | #
79 | # Set the parameters for obstacle distance here
80 | #
81 | # [0] [35] [71] <- Output: distances[72]
82 | # | | | <- step = width / 72
83 | # --------------- <- horizontal line
84 | # \ | /
85 | # \ | /
86 | # \ | /
87 | # ^ \ | / ^
88 | # | \ | / |
89 | #start \ | / end
90 | # Camera <- Input: depth image, obtained from depth camera (implemented in d4xx_to_mavlink.py)
91 | #
92 |
93 | angle_start = -39.5 # -FOV/2
94 | angle_end = 39.5 # 39.5 - real camera (2 arcs), <= 69.0: 2 arcs, > 70.0: 3 arcs
95 |
96 | FOV = angle_end - angle_start
97 | angle_offset = angle_start
98 | distances_array_length = 72
99 | increment_f = FOV / distances_array_length
100 |
101 | min_dist_cm = 10
102 | max_dist_cm = 800
103 | cur_dist_cm = 200
104 |
105 | # Setup the distances array with the same value (cur_dist_cm)
106 | distances = np.ones((distances_array_length,), dtype=np.uint16) * cur_dist_cm
107 |
108 | current_time_us = int(round(time.time() * 1000000))
109 |
110 | msg = vehicle.message_factory.obstacle_distance_encode(
111 | current_time_us, # us Timestamp (UNIX time or time since system boot)
112 | 0, # sensor_type, defined here: https://mavlink.io/en/messages/common.html#MAV_DISTANCE_SENSOR
113 | distances, # distances, uint16_t[72], cm
114 | 0, # increment, uint8_t, deg
115 | min_dist_cm, # min_distance, uint16_t, cm
116 | max_dist_cm, # max_distance, uint16_t, cm
117 | increment_f, # increment_f, float, deg
118 | angle_offset, # angle_offset, float, deg
119 | 12 # MAV_FRAME, vehicle-front aligned: https://mavlink.io/en/messages/common.html#MAV_FRAME_BODY_FRD
120 | )
121 |
122 | vehicle.send_mavlink(msg)
123 | vehicle.flush()
124 |
125 |
126 | # https://mavlink.io/en/messages/common.html#DISTANCE_SENSOR
127 | def send_distance_sensor_message():
128 | # Use this to rotate all processed data
129 | camera_facing_angle_degree = 0
130 | orientation = int(camera_facing_angle_degree / 45)
131 |
132 | min_dist_cm = 10
133 | max_dist_cm = 800
134 | curr_dist_cm = 100
135 | current_time_ms = int(round(time.time() * 1000))
136 |
137 | msg = vehicle.message_factory.distance_sensor_encode(
138 | current_time_ms,# ms Timestamp (UNIX time or time since system boot) (ignored)
139 | min_dist_cm, # min_distance, uint16_t, cm
140 | max_dist_cm, # max_distance, uint16_t, cm
141 | curr_dist_cm, # current_distance, uint16_t, cm
142 | 0, # type : 0 (ignored)
143 | 0, # id : 0 (ignored)
144 | orientation, # orientation
145 | 0 # covariance : 0 (ignored)
146 | )
147 |
148 | vehicle.send_mavlink(msg)
149 | vehicle.flush()
150 |
151 | def send_msg_to_gcs(text_to_be_sent):
152 | # MAV_SEVERITY: 0=EMERGENCY 1=ALERT 2=CRITICAL 3=ERROR, 4=WARNING, 5=NOTICE, 6=INFO, 7=DEBUG, 8=ENUM_END
153 | # Defined here: https://mavlink.io/en/messages/common.html#MAV_SEVERITY
154 | # MAV_SEVERITY = 3 will let the message be displayed on Mission Planner HUD, but 6 is ok for QGroundControl
155 | if is_vehicle_connected == True:
156 | text_msg = 'OA: ' + text_to_be_sent
157 | status_msg = vehicle.message_factory.statustext_encode(
158 | 6, # MAV_SEVERITY
159 | text_msg.encode() # max size is char[50]
160 | )
161 | vehicle.send_mavlink(status_msg)
162 | vehicle.flush()
163 | print("INFO: " + text_to_be_sent)
164 | else:
165 | print("INFO: Vehicle not connected. Cannot send text message to Ground Control Station (GCS)")
166 |
167 | # Request a timesync update from the flight controller, for future work.
168 | # TODO: Inspect the usage of timesync_update
169 | def update_timesync(ts=0, tc=0):
170 | if ts == 0:
171 | ts = int(round(time.time() * 1000))
172 | msg = vehicle.message_factory.timesync_encode(
173 | tc, # tc1
174 | ts # ts1
175 | )
176 | vehicle.send_mavlink(msg)
177 | vehicle.flush()
178 |
179 | # Establish connection to the FCU
180 | def vehicle_connect():
181 | global vehicle, is_vehicle_connected
182 |
183 | if vehicle == None:
184 | try:
185 | vehicle = connect(connection_string, wait_ready = True, baud = connection_baudrate, source_system = 1)
186 | except Exception as e:
187 | print(e)
188 | sleep(1)
189 | except:
190 | print('Connection error! Retrying...')
191 | sleep(1)
192 |
193 | if vehicle == None:
194 | is_vehicle_connected = False
195 | return False
196 | else:
197 | is_vehicle_connected = True
198 | return True
199 |
200 | ######################################################
201 | ## Main code starts here ##
202 | ######################################################
203 |
204 | print("INFO: Connecting to vehicle.")
205 | while (not vehicle_connect()):
206 | pass
207 | print("INFO: Vehicle connected.")
208 |
209 | # Send MAVlink messages in the background at pre-determined frequencies
210 | sched = BackgroundScheduler()
211 |
212 | if enable_msg_obstacle_distance:
213 | sched.add_job(send_obstacle_distance_message, 'interval', seconds = 1/obstacle_distance_msg_hz)
214 | send_msg_to_gcs('Sending obstacle distance messages to FCU')
215 | elif enable_msg_distance_sensor:
216 | sched.add_job(send_distance_sensor_message, 'interval', seconds = 1/obstacle_distance_msg_hz)
217 | send_msg_to_gcs('Sending distance sensor messages to FCU')
218 | else:
219 | send_msg_to_gcs('Nothing to do. Check params to enable something')
220 | vehicle.close()
221 | print("INFO: Realsense pipe and vehicle object closed.")
222 | sys.exit()
223 |
224 | sched.start()
225 |
226 | try:
227 | while True:
228 | pass
229 |
230 | except KeyboardInterrupt:
231 | send_msg_to_gcs('Closing the script...')
232 |
233 | except Exception as e:
234 | print(e)
235 | pass
236 |
237 | except:
238 | send_msg_to_gcs('ERROR: Depth camera disconnected')
239 |
240 | finally:
241 | vehicle.close()
242 | sys.exit()
243 |
--------------------------------------------------------------------------------
/scripts/__pycache__/set_origin.cpython-35.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thien94/vision_to_mavros/139f199fcc82199e357279b11b94e651b07c284e/scripts/__pycache__/set_origin.cpython-35.pyc
--------------------------------------------------------------------------------
/scripts/apriltags3.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''Python wrapper for C version of apriltags. This program creates two
4 | classes that are used to detect apriltags and extract information from
5 | them. Using this module, you can identify all apriltags visible in an
6 | image, and get information about the location and orientation of the
7 | tags.
8 |
9 | Original author: Isaac Dulin, Spring 2016
10 | Updates: Matt Zucker, Fall 2016
11 | Apriltags 3 version: Aleksandar Petrov, Spring 2019
12 |
13 | '''
14 | from __future__ import division
15 | from __future__ import print_function
16 |
17 | import ctypes
18 | import os
19 | import numpy
20 |
21 | ######################################################################
22 |
23 | # pylint: disable=R0903
24 |
25 | class _ImageU8(ctypes.Structure):
26 | '''Wraps image_u8 C struct.'''
27 | _fields_ = [
28 | ('width', ctypes.c_int),
29 | ('height', ctypes.c_int),
30 | ('stride', ctypes.c_int),
31 | ('buf', ctypes.POINTER(ctypes.c_uint8))
32 | ]
33 |
34 | class _Matd(ctypes.Structure):
35 | '''Wraps matd C struct.'''
36 | _fields_ = [
37 | ('nrows', ctypes.c_int),
38 | ('ncols', ctypes.c_int),
39 | ('data', ctypes.c_double*1),
40 | ]
41 |
42 | class _ZArray(ctypes.Structure):
43 | '''Wraps zarray C struct.'''
44 | _fields_ = [
45 | ('el_sz', ctypes.c_size_t),
46 | ('size', ctypes.c_int),
47 | ('alloc', ctypes.c_int),
48 | ('data', ctypes.c_void_p)
49 | ]
50 |
51 | class _ApriltagFamily(ctypes.Structure):
52 | '''Wraps apriltag_family C struct.'''
53 | _fields_ = [
54 | ('ncodes', ctypes.c_uint32),
55 | ('codes', ctypes.POINTER(ctypes.c_uint64)),
56 | ('width_at_border', ctypes.c_int),
57 | ('total_width', ctypes.c_int),
58 | ('reversed_border', ctypes.c_bool),
59 | ('nbits', ctypes.c_uint32),
60 | ('bit_x', ctypes.POINTER(ctypes.c_uint32)),
61 | ('bit_y', ctypes.POINTER(ctypes.c_uint32)),
62 | ('h', ctypes.c_int32),
63 | ('name', ctypes.c_char_p),
64 | ]
65 |
66 | class _ApriltagDetection(ctypes.Structure):
67 | '''Wraps apriltag_detection C struct.'''
68 | _fields_ = [
69 | ('family', ctypes.POINTER(_ApriltagFamily)),
70 | ('id', ctypes.c_int),
71 | ('hamming', ctypes.c_int),
72 | ('decision_margin', ctypes.c_float),
73 | ('H', ctypes.POINTER(_Matd)),
74 | ('c', ctypes.c_double*2),
75 | ('p', (ctypes.c_double*2)*4)
76 | ]
77 |
78 | class _ApriltagDetector(ctypes.Structure):
79 | '''Wraps apriltag_detector C struct.'''
80 | _fields_ = [
81 | ('nthreads', ctypes.c_int),
82 | ('quad_decimate', ctypes.c_float),
83 | ('quad_sigma', ctypes.c_float),
84 | ('refine_edges', ctypes.c_int),
85 | ('decode_sharpening', ctypes.c_double),
86 | ('debug', ctypes.c_int)
87 | ]
88 |
89 | class _ApriltagDetectionInfo(ctypes.Structure):
90 | '''Wraps apriltag_detection_info C struct.'''
91 | _fields_ = [
92 | ('det', ctypes.POINTER(_ApriltagDetection)),
93 | ('tagsize', ctypes.c_double),
94 | ('fx', ctypes.c_double),
95 | ('fy', ctypes.c_double),
96 | ('cx', ctypes.c_double),
97 | ('cy', ctypes.c_double)
98 | ]
99 |
100 | class _ApriltagPose(ctypes.Structure):
101 | '''Wraps apriltag_pose C struct.'''
102 | _fields_ = [
103 | ('R', ctypes.POINTER(_Matd)),
104 | ('t', ctypes.POINTER(_Matd))
105 | ]
106 |
107 | ######################################################################
108 |
109 | def _ptr_to_array2d(datatype, ptr, rows, cols):
110 | array_type = (datatype*cols)*rows
111 | array_buf = array_type.from_address(ctypes.addressof(ptr))
112 | return numpy.ctypeslib.as_array(array_buf, shape=(rows, cols))
113 |
114 | def _image_u8_get_array(img_ptr):
115 | return _ptr_to_array2d(ctypes.c_uint8,
116 | img_ptr.contents.buf.contents,
117 | img_ptr.contents.height,
118 | img_ptr.contents.stride)
119 |
120 | def _matd_get_array(mat_ptr):
121 | return _ptr_to_array2d(ctypes.c_double,
122 | mat_ptr.contents.data,
123 | int(mat_ptr.contents.nrows),
124 | int(mat_ptr.contents.ncols))
125 |
126 | def zarray_get(za, idx, ptr):
127 |
128 | # memcpy(p, &za->data[idx*za->el_sz], za->el_sz);
129 | #
130 | # p = ptr
131 | # za->el_sz = za.contents.el_sz
132 | # &za->data[idx*za->el_sz] = za.contents.data+idx*za.contents.el_sz
133 |
134 |
135 | ctypes.memmove(ptr, za.contents.data+idx*za.contents.el_sz, za.contents.el_sz)
136 |
137 |
138 | ######################################################################
139 |
140 | class Detection():
141 |
142 | '''Combined pythonic wrapper for apriltag_detection and apriltag_pose'''
143 |
144 | def __init__(self):
145 | self.tag_family = None
146 | self.tag_id = None
147 | self.hamming = None
148 | self.decision_margin = None
149 | self.homography = None
150 | self.center = None
151 | self.corners = None
152 | self.pose_R = None
153 | self.pose_t = None
154 | self.pose_err = None
155 |
156 | def __str__(self):
157 | return('Detection object:'+
158 | '\ntag_family = ' + str(self.tag_family)+
159 | '\ntag_id = ' + str(self.tag_id)+
160 | '\nhamming = ' + str(self.hamming)+
161 | '\ndecision_margin = ' + str(self.decision_margin)+
162 | '\nhomography = ' + str(self.homography)+
163 | '\ncenter = ' + str(self.center)+
164 | '\ncorners = ' + str(self.corners)+
165 | '\npose_R = ' + str(self.pose_R)+
166 | '\npose_t = ' + str(self.pose_t)+
167 | '\npose_err = ' + str(self.pose_err)+'\n')
168 |
169 | def __repr__(self):
170 | return self.__str__()
171 |
172 |
173 | ######################################################################
174 |
175 | class Detector(object):
176 |
177 | '''Pythonic wrapper for apriltag_detector.
178 |
179 | families: Tag families, separated with a space, default: tag36h11
180 |
181 | nthreads: Number of threads, default: 1
182 |
183 | quad_decimate: Detection of quads can be done on a lower-resolution image, improving speed at a cost of pose accuracy and a slight decrease in detection rate. Decoding the binary payload is still done at full resolution, default: 2.0
184 |
185 | quad_sigma: What Gaussian blur should be applied to the segmented image (used for quad detection?) Parameter is the standard deviation in pixels. Very noisy images benefit from non-zero values (e.g. 0.8), default: 0.0
186 |
187 | refine_edges: When non-zero, the edges of the each quad are adjusted to "snap to" strong gradients nearby. This is useful when decimation is employed, as it can increase the quality of the initial quad estimate substantially. Generally recommended to be on (1). Very computationally inexpensive. Option is ignored if quad_decimate = 1, default: 1
188 |
189 | decode_sharpening: How much sharpening should be done to decoded images? This can help decode small tags but may or may not help in odd lighting conditions or low light conditions, default = 0.25
190 |
191 | searchpath: Where to look for the Apriltag 3 library, must be a list, default: ['apriltags']
192 |
193 | debug: If 1, will save debug images. Runs very slow, default: 0
194 | '''
195 |
196 | def __init__(self,
197 | families='tag36h11',
198 | nthreads=1,
199 | quad_decimate=2.0,
200 | quad_sigma=0.0,
201 | refine_edges=1,
202 | decode_sharpening=0.25,
203 | debug=0,
204 | searchpath=['apriltags']):
205 |
206 | # Parse the parameters
207 | self.params = dict()
208 | self.params['families'] = families.split()
209 | self.params['nthreads'] = nthreads
210 | self.params['quad_decimate'] = quad_decimate
211 | self.params['quad_sigma'] = quad_sigma
212 | self.params['refine_edges'] = refine_edges
213 | self.params['decode_sharpening'] = decode_sharpening
214 | self.params['debug'] = debug
215 |
216 | # detect OS to get extension for DLL
217 | uname0 = os.uname()[0]
218 | if uname0 == 'Darwin':
219 | extension = '.dylib'
220 | else:
221 | extension = '.so'
222 |
223 | filename = 'libapriltag'+extension
224 |
225 | self.libc = None
226 | self.tag_detector = None
227 | self.tag_detector_ptr = None
228 |
229 | for path in searchpath:
230 | relpath = os.path.join(path, filename)
231 | if os.path.exists(relpath):
232 | self.libc = ctypes.CDLL(relpath)
233 | break
234 |
235 | # if full path not found just try opening the raw filename;
236 | # this should search whatever paths dlopen is supposed to
237 | # search.
238 | if self.libc is None:
239 | self.libc = ctypes.CDLL(filename)
240 |
241 | if self.libc is None:
242 | raise RuntimeError('could not find DLL named ' + filename)
243 |
244 |
245 | # create the c-_apriltag_detector object
246 | self.libc.apriltag_detector_create.restype = ctypes.POINTER(_ApriltagDetector)
247 | self.tag_detector_ptr = self.libc.apriltag_detector_create()
248 |
249 | # create the family
250 | self.libc.apriltag_detector_add_family_bits.restype = None
251 | self.tag_families = dict()
252 | if 'tag16h5' in self.params['families']:
253 | self.libc.tag16h5_create.restype = ctypes.POINTER(_ApriltagFamily)
254 | self.tag_families['tag16h5']=self.libc.tag16h5_create()
255 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tag16h5'], 2)
256 | elif 'tag25h9' in self.params['families']:
257 | self.libc.tag25h9_create.restype = ctypes.POINTER(_ApriltagFamily)
258 | self.tag_families['tag25h9']=self.libc.tag25h9_create()
259 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tag25h9'], 2)
260 | elif 'tag36h11' in self.params['families']:
261 | self.libc.tag36h11_create.restype = ctypes.POINTER(_ApriltagFamily)
262 | self.tag_families['tag36h11']=self.libc.tag36h11_create()
263 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tag36h11'], 2)
264 | elif 'tagCircle21h7' in self.params['families']:
265 | self.libc.tagCircle21h7_create.restype = ctypes.POINTER(_ApriltagFamily)
266 | self.tag_families['tagCircle21h7']=self.libc.tagCircle21h7_create()
267 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tagCircle21h7'], 2)
268 | elif 'tagCircle49h12' in self.params['families']:
269 | self.libc.tagCircle49h12_create.restype = ctypes.POINTER(_ApriltagFamily)
270 | self.tag_families['tagCircle49h12']=self.libc.tagCircle49h12_create()
271 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tagCircle49h12'], 2)
272 | elif 'tagCustom48h12' in self.params['families']:
273 | self.libc.tagCustom48h12_create.restype = ctypes.POINTER(_ApriltagFamily)
274 | self.tag_families['tagCustom48h12']=self.libc.tagCustom48h12_create()
275 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tagCustom48h12'], 2)
276 | elif 'tagStandard41h12' in self.params['families']:
277 | self.libc.tagStandard41h12_create.restype = ctypes.POINTER(_ApriltagFamily)
278 | self.tag_families['tagStandard41h12']=self.libc.tagStandard41h12_create()
279 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tagStandard41h12'], 2)
280 | elif 'tagStandard52h13' in self.params['families']:
281 | self.libc.tagStandard52h13_create.restype = ctypes.POINTER(_ApriltagFamily)
282 | self.tag_families['tagStandard52h13']=self.libc.tagStandard52h13_create()
283 | self.libc.apriltag_detector_add_family_bits(self.tag_detector_ptr, self.tag_families['tagStandard52h13'], 2)
284 | else:
285 | raise Exception('Unrecognized tag family name. Use e.g. \'tag36h11\'.\n')
286 |
287 | # configure the parameters of the detector
288 | self.tag_detector_ptr.contents.nthreads = int(self.params['nthreads'])
289 | self.tag_detector_ptr.contents.quad_decimate = float(self.params['quad_decimate'])
290 | self.tag_detector_ptr.contents.quad_sigma = float(self.params['quad_sigma'])
291 | self.tag_detector_ptr.contents.refine_edges = int(self.params['refine_edges'])
292 | self.tag_detector_ptr.contents.decode_sharpening = float(self.params['decode_sharpening'])
293 | self.tag_detector_ptr.contents.debug = int(self.params['debug'])
294 |
295 |
296 |
297 | def __del__(self):
298 | if self.tag_detector_ptr is not None:
299 | # destroy the tag families
300 | for family, tf in self.tag_families.items():
301 | if 'tag16h5' == family:
302 | self.libc.tag16h5_destroy.restype = None
303 | self.libc.tag16h5_destroy(tf)
304 | elif 'tag25h9' == family:
305 | self.libc.tag25h9_destroy.restype = None
306 | self.libc.tag25h9_destroy(tf)
307 | elif 'tag36h11' == family:
308 | self.libc.tag36h11_destroy.restype = None
309 | self.libc.tag36h11_destroy(tf)
310 | elif 'tagCircle21h7' == family:
311 | self.libc.tagCircle21h7_destroy.restype = None
312 | self.libc.tagCircle21h7_destroy(tf)
313 | elif 'tagCircle49h12' == family:
314 | self.libc.tagCircle49h12_destroy.restype = None
315 | self.libc.tagCircle49h12_destroy(tf)
316 | elif 'tagCustom48h12' == family:
317 | self.libc.tagCustom48h12_destroy.restype = None
318 | self.libc.tagCustom48h12_destroy(tf)
319 | elif 'tagStandard41h12' == family:
320 | self.libc.tagStandard41h12_destroy.restype = None
321 | self.libc.tagStandard41h12_destroy(tf)
322 | elif 'tagStandard52h13' == family:
323 | self.libc.tagStandard52h13_destroy.restype = None
324 | self.libc.tagStandard52h13_destroy(tf)
325 |
326 | # destroy the detector
327 | self.libc.apriltag_detector_destroy.restype = None
328 | self.libc.apriltag_detector_destroy(self.tag_detector_ptr)
329 |
330 | def detect(self, img, estimate_tag_pose=False, camera_params=None, tag_size=None):
331 |
332 | '''Run detections on the provided image. The image must be a grayscale
333 | image of type numpy.uint8.'''
334 |
335 | assert len(img.shape) == 2
336 | assert img.dtype == numpy.uint8
337 |
338 | c_img = self._convert_image(img)
339 |
340 | return_info = []
341 |
342 | #detect apriltags in the image
343 | self.libc.apriltag_detector_detect.restype = ctypes.POINTER(_ZArray)
344 | detections = self.libc.apriltag_detector_detect(self.tag_detector_ptr, c_img)
345 |
346 | apriltag = ctypes.POINTER(_ApriltagDetection)()
347 |
348 |
349 | for i in range(0, detections.contents.size):
350 |
351 | #extract the data for each apriltag that was identified
352 | zarray_get(detections, i, ctypes.byref(apriltag))
353 |
354 | tag = apriltag.contents
355 |
356 | homography = _matd_get_array(tag.H).copy() # numpy.zeros((3,3)) # Don't ask questions, move on with your life
357 | center = numpy.ctypeslib.as_array(tag.c, shape=(2,)).copy()
358 | corners = numpy.ctypeslib.as_array(tag.p, shape=(4, 2)).copy()
359 |
360 | detection = Detection()
361 | detection.tag_family = ctypes.string_at(tag.family.contents.name)
362 | detection.tag_id = tag.id
363 | detection.hamming = tag.hamming
364 | detection.decision_margin = tag.decision_margin
365 | detection.homography = homography
366 | detection.center = center
367 | detection.corners = corners
368 |
369 | if estimate_tag_pose:
370 |                 if camera_params is None:
371 | raise Exception('camera_params must be provided to detect if estimate_tag_pose is set to True')
372 |                 if tag_size is None:
373 | raise Exception('tag_size must be provided to detect if estimate_tag_pose is set to True')
374 |
375 | camera_fx, camera_fy, camera_cx, camera_cy = [ c for c in camera_params ]
376 |
377 | info = _ApriltagDetectionInfo(det=apriltag,
378 | tagsize=tag_size,
379 | fx=camera_fx,
380 | fy=camera_fy,
381 | cx=camera_cx,
382 | cy=camera_cy)
383 | pose = _ApriltagPose()
384 |
385 | self.libc.estimate_tag_pose.restype = ctypes.c_double
386 | err = self.libc.estimate_tag_pose(ctypes.byref(info), ctypes.byref(pose))
387 |
388 | detection.pose_R = _matd_get_array(pose.R).copy()
389 | detection.pose_t = _matd_get_array(pose.t).copy()
390 | detection.pose_err = err
391 |
392 |
393 | #Append this dict to the tag data array
394 | return_info.append(detection)
395 |
396 | self.libc.image_u8_destroy.restype = None
397 | self.libc.image_u8_destroy(c_img)
398 |
399 | self.libc.apriltag_detections_destroy.restype = None
400 | self.libc.apriltag_detections_destroy(detections)
401 |
402 | return return_info
403 |
404 |
405 | def _convert_image(self, img):
406 |
407 | height = img.shape[0]
408 | width = img.shape[1]
409 |
410 | self.libc.image_u8_create.restype = ctypes.POINTER(_ImageU8)
411 | c_img = self.libc.image_u8_create(width, height)
412 |
413 | tmp = _image_u8_get_array(c_img)
414 |
415 | # copy the opencv image into the destination array, accounting for the
416 | # difference between stride & width.
417 | tmp[:, :width] = img
418 |
419 | # tmp goes out of scope here but we don't care because
420 | # the underlying data is still in c_img.
421 | return c_img
422 |
423 |
424 | if __name__ == '__main__':
425 |
426 | test_images_path = 'test'
427 |
428 | visualization = True
429 | try:
430 | import cv2
431 | except:
432 | raise Exception('You need cv2 in order to run the demo. However, you can still use the library without it.')
433 |
434 | try:
435 | from cv2 import imshow
436 | except:
437 | print("The function imshow was not implemented in this installation. Rebuild OpenCV from source to use it")
438 |         print("Visualization will be disabled.")
439 | visualization = False
440 |
441 | try:
442 | import yaml
443 | except:
444 | raise Exception('You need yaml in order to run the tests. However, you can still use the library without it.')
445 |
446 | at_detector = Detector(searchpath=['apriltags/lib', 'apriltags/lib64'],
447 | families='tag36h11',
448 | nthreads=1,
449 | quad_decimate=1.0,
450 | quad_sigma=0.0,
451 | refine_edges=1,
452 | decode_sharpening=0.25,
453 | debug=0)
454 |
455 | with open(test_images_path + '/test_info.yaml', 'r') as stream:
456 |         parameters = yaml.safe_load(stream)
457 |
458 | #### TEST WITH THE SAMPLE IMAGE ####
459 |
460 | print("\n\nTESTING WITH A SAMPLE IMAGE")
461 |
462 | img = cv2.imread(test_images_path+'/'+parameters['sample_test']['file'], cv2.IMREAD_GRAYSCALE)
463 | cameraMatrix = numpy.array(parameters['sample_test']['K']).reshape((3,3))
464 | camera_params = ( cameraMatrix[0,0], cameraMatrix[1,1], cameraMatrix[0,2], cameraMatrix[1,2] )
465 |
466 | if visualization:
467 | cv2.imshow('Original image',img)
468 |
469 | tags = at_detector.detect(img, True, camera_params, parameters['sample_test']['tag_size'])
470 | print(tags)
471 |
472 | color_img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
473 |
474 | for tag in tags:
475 | for idx in range(len(tag.corners)):
476 | cv2.line(color_img, tuple(tag.corners[idx-1, :].astype(int)), tuple(tag.corners[idx, :].astype(int)), (0, 255, 0))
477 |
478 | cv2.putText(color_img, str(tag.tag_id),
479 | org=(tag.corners[0, 0].astype(int)+10,tag.corners[0, 1].astype(int)+10),
480 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,
481 | fontScale=0.8,
482 | color=(0, 0, 255))
483 |
484 | if visualization:
485 | cv2.imshow('Detected tags', color_img)
486 |
487 | k = cv2.waitKey(0)
488 | if k == 27: # wait for ESC key to exit
489 | cv2.destroyAllWindows()
490 |
491 |
492 | #### TEST WITH THE ROTATION IMAGES ####
493 |
494 | import time
495 |
496 | print("\n\nTESTING WITH ROTATION IMAGES")
497 |
498 | time_num = 0
499 | time_sum = 0
500 |
501 | test_images_path = 'test'
502 | image_names = parameters['rotation_test']['files']
503 |
504 | for image_name in image_names:
505 | print("Testing image ", image_name)
506 | ab_path = test_images_path + '/' + image_name
507 | if(not os.path.isfile(ab_path)):
508 | continue
509 | groundtruth = float(image_name.split('_')[-1].split('.')[0]) # name of test image should be set to its groundtruth
510 |
511 | parameters['rotation_test']['rotz'] = groundtruth
512 | cameraMatrix = numpy.array(parameters['rotation_test']['K']).reshape((3,3))
513 | camera_params = ( cameraMatrix[0,0], cameraMatrix[1,1], cameraMatrix[0,2], cameraMatrix[1,2] )
514 |
515 | img = cv2.imread(ab_path, cv2.IMREAD_GRAYSCALE)
516 |
517 | start = time.time()
518 | tags = at_detector.detect(img, True, camera_params, parameters['rotation_test']['tag_size'])
519 |
520 | time_sum+=time.time()-start
521 | time_num+=1
522 |
523 | print(tags[0].pose_t, parameters['rotation_test']['posx'], parameters['rotation_test']['posy'], parameters['rotation_test']['posz'])
524 | print(tags[0].pose_R, parameters['rotation_test']['rotx'], parameters['rotation_test']['roty'], parameters['rotation_test']['rotz'])
525 |
526 | print("AVG time per detection: ", time_sum/time_num)
527 |
528 | #### TEST WITH MULTIPLE TAGS IMAGES ####
529 |
530 | print("\n\nTESTING WITH MULTIPLE TAGS IMAGES")
531 |
532 | time_num = 0
533 | time_sum = 0
534 |
535 | image_names = parameters['multiple_tags_test']['files']
536 |
537 | for image_name in image_names:
538 | print("Testing image ", image_name)
539 | ab_path = test_images_path + '/' + image_name
540 | if(not os.path.isfile(ab_path)):
541 | continue
542 |
543 | cameraMatrix = numpy.array(parameters['multiple_tags_test']['K']).reshape((3,3))
544 | camera_params = ( cameraMatrix[0,0], cameraMatrix[1,1], cameraMatrix[0,2], cameraMatrix[1,2] )
545 |
546 | img = cv2.imread(ab_path, cv2.IMREAD_GRAYSCALE)
547 |
548 | start = time.time()
549 | tags = at_detector.detect(img, True, camera_params, parameters['multiple_tags_test']['tag_size'])
550 | time_sum+=time.time()-start
551 | time_num+=1
552 |
553 | tag_ids = [tag.tag_id for tag in tags]
554 | print(len(tags), " tags found: ", tag_ids)
555 |
556 |
557 | color_img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
558 |
559 | for tag in tags:
560 | for idx in range(len(tag.corners)):
561 | cv2.line(color_img, tuple(tag.corners[idx-1, :].astype(int)), tuple(tag.corners[idx, :].astype(int)), (0, 255, 0))
562 |
563 | cv2.putText(color_img, str(tag.tag_id),
564 | org=(tag.corners[0, 0].astype(int)+10,tag.corners[0, 1].astype(int)+10),
565 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,
566 | fontScale=0.8,
567 | color=(0, 0, 255))
568 |
569 | if visualization:
570 | cv2.imshow('Detected tags for ' + image_name , color_img)
571 |
572 | k = cv2.waitKey(0)
573 | if k == 27: # wait for ESC key to exit
574 | cv2.destroyAllWindows()
575 |
576 | print("AVG time per detection: ", time_sum/time_num)
577 |
--------------------------------------------------------------------------------
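For downstream use (e.g. precision landing), the Detector above returns each tag's pose as a 3x3 rotation matrix `pose_R` and a translation `pose_t`, expressed in the camera frame and scaled by the `tag_size` passed to detect(). A minimal sketch, assuming tag_size was given in metres and using hypothetical helper names, of packing a Detection into a 4x4 homogeneous transform and reading the tag's yaw about the camera z-axis:

    import numpy as np

    def tag_pose_to_matrix(detection):
        """Pack an apriltags3 Detection pose into a 4x4 homogeneous transform (tag frame -> camera frame)."""
        T = np.eye(4)
        T[:3, :3] = detection.pose_R                        # 3x3 rotation
        T[:3, 3] = np.asarray(detection.pose_t).flatten()   # translation, in the units of tag_size
        return T

    def tag_yaw_rad(detection):
        """In-plane rotation of the tag about the camera z-axis."""
        r = detection.pose_R
        return np.arctan2(r[1, 0], r[0, 0])

These helpers only reshape what detect() already returns; the pose itself comes entirely from the apriltag library's estimate_tag_pose().
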
/scripts/calibrate_extrinsics.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | ######################################################
3 | ## Calibrating the extrinsics between T265 and D4xx ##
4 | ## Based on this example: https://github.com/IntelRealSense/librealsense/pull/4355
5 | ## with changes and modifications.
6 | ######################################################
7 |
8 | ######################################################
9 | #
10 | # General steps:
11 | # 1. Mount the two cameras rigidly
12 | # 2. Print any one of the checkerboards from: https://markhedleyjones.com/projects/calibration-checkerboard-collection
13 | # - The default settings in this script are for: https://markhedleyjones.com/storage/checkerboards/Checkerboard-A4-25mm-8x6.pdf
14 | #    - Measure the actual printed side length of the squares and change the size parameter accordingly.
15 | # 3. Modify the script:
16 | # - Change grid_H, grid_W and size according to the actual printed checkerboard.
17 | # - Change the path and file_name if necessary (ex: use this script as standalone).
18 | # 4. Run the script online:
19 | # - python calibrate_extrinsics.py
20 | # 5. The results are the intrinsics (saved to file) and the extrinsics (printed to the terminal and saved to H.txt)
21 | #
22 | ######################################################
23 |
24 | from __future__ import print_function
25 |
26 | import pyrealsense2 as rs
27 | import numpy as np
28 | np.set_printoptions(suppress=True,precision=5)
29 | import cv2
30 | assert cv2.__version__[0] >= '3', 'The fisheye module requires opencv version >= 3.0.0'
31 | import os
32 | import shutil
33 | import json
34 | import argparse
35 | import glob
36 | from collections import OrderedDict
37 |
38 |
39 | parser = argparse.ArgumentParser()
40 | parser.add_argument('--SN_T265', help='serial number of T265')
41 | parser.add_argument('--SN_D4xx', help='serial number of D4xx')
42 | parser.add_argument('--path', default="calibration_results", help='image path')
43 | parser.add_argument('--file_name', default="/intrinsics.json", help='intrinsics calibration file name')
44 | parser.add_argument('--save_tmp', default=False, help='save the temporary files of this program, useful for debugging purposes')
45 | parser.add_argument('--grid_H', default=8, type=int, help='grid height (inner corners)')
46 | parser.add_argument('--grid_W', default=6, type=int, help='grid width (inner corners)')
47 | parser.add_argument('--size', default=0.0282, type=float, help='grid square side length in metres')
48 | parser.add_argument('--calibrate', default=False, help='run calibration (only)', action='store_true')
49 | parser.add_argument('--visualize', default=True, help='with GUI', action='store_true')
50 | args = parser.parse_args()
51 | CHECKERBOARD = (args.grid_H, args.grid_W)
52 | SIDE_LENGTH = args.size
53 |
54 | tmp_folder = args.path + "/tmp"
55 |
56 | def add_camera_calibration(intrinsics, streams = None):
57 | cam = {}
58 | cam['center_px'] = [intrinsics.ppx, intrinsics.ppy]
59 | cam['focal_length_px'] = [intrinsics.fx, intrinsics.fy]
60 | cam['distortion'] = {}
61 | cam['distortion']['type'] = 'kannalabrandt4'
62 | cam['distortion']['k'] = intrinsics.coeffs[:4]
63 | if streams:
64 |         ext = streams["cam1"].get_extrinsics_to(streams["pose"]) # extrinsics of cam1 w.r.t. the pose frame
65 | #print(ext)
66 | cam["extrinsics"] = {}
67 | cam["extrinsics"]["T"] = ext.translation
68 | #print(ext.rotation)
69 | cam["extrinsics"]["R"] = ext.rotation
70 | return cam
71 |
72 | def save_intrinsics(directory, file_name, intrinsics, streams):
73 | D = OrderedDict() # in order (cam1,cam2)
74 | D['cameras'] = []
75 | D['cameras'].append(add_camera_calibration(intrinsics["cam1"], streams))
76 | D['cameras'].append(add_camera_calibration(intrinsics["cam2"]))
77 |
78 | if not os.path.exists(directory):
79 | os.mkdir(directory)
80 | with open(directory + file_name, 'w') as f:
81 | json.dump(D, f, indent=4)
82 | print("Intrinsics output written to " + directory + file_name)
83 |
84 |
85 | def read_calibration(cam, extrinsics = False):
86 | #print("read_calibration")
87 | # intrinsics
88 | K = np.array([[cam['focal_length_px'][0], 0, cam['center_px'][0]],
89 | [ 0, cam['focal_length_px'][1], cam['center_px'][1]],
90 | [ 0, 0, 1]])
91 | D = np.array(cam['distortion']['k'])
92 |
93 | if extrinsics:
94 | H = np.eye(4)
95 | H[:3,:3] = np.reshape(cam["extrinsics"]["R"],(3,3))
96 | H[:3,3] = cam["extrinsics"]["T"]
97 | #print(H)
98 | return (K, D, H)
99 | return (K, D)
100 |
101 | def load_calibration(directory, file_name):
102 | with open(directory + file_name, 'r') as f:
103 | D = json.load(f)
104 |
105 | (K1, D1, H1) = read_calibration(D['cameras'][0], True)
106 | (K2, D2) = read_calibration(D['cameras'][1])
107 | return (K1, D1, K2, D2, H1)
108 |
109 | def find_realsense_serial_no(type):
110 |
111 | camera_name = ['Intel RealSense T265', 'Intel RealSense D435']
112 |
113 | # Get realsense pipeline handle
114 | pipe = rs.pipeline()
115 |
116 | # Find the T265
117 | devices = rs.context().devices
118 | for i in range(len(devices)):
119 | if (devices[i].get_info(rs.camera_info.name) == camera_name[type]):
120 | print('Found one connected ' + camera_name[type] + ' with serial no:', devices[i].get_info(rs.camera_info.serial_number))
121 | return devices[i].get_info(rs.camera_info.serial_number)
122 |
123 | print('No ' + camera_name[type] + ' found, please check connection or input serial manually')
124 | return None
125 |
126 | if not args.calibrate:
127 | # Obtain the serial number of the cameras, either automatically or from user's input
128 | print("Trying to connect devices...")
129 | serial_t265 = None
130 | serial_d4xx = None
131 |
132 | if (not args.SN_T265):
133 | serial_t265 = find_realsense_serial_no(0)
134 | else:
135 | serial_t265 = args.SN_T265
136 |
137 | if (not args.SN_D4xx):
138 | serial_d4xx = find_realsense_serial_no(1)
139 | else:
140 | serial_d4xx = args.SN_D4xx
141 |
142 | if (not serial_t265) or (not serial_d4xx):
143 |         print("Specify serial numbers with --SN_T265 and --SN_D4xx for online calibration, or use --calibrate together with --path pointing to a folder of prerecorded images")
144 | exit()
145 |
146 | # cam 1
147 | pipe1 = rs.pipeline()
148 | cfg1 = rs.config()
149 | cfg1.enable_device(serial_t265)
150 | pipe1.start(cfg1)
151 |
152 | # cam 2
153 | pipe2 = rs.pipeline()
154 | cfg2 = rs.config()
155 | cfg2.enable_device(serial_d4xx)
156 | cfg2.enable_all_streams()
157 | pipe2_profile = pipe2.start(cfg2)
158 | sensor_depth = pipe2_profile.get_device().first_depth_sensor()
159 | sensor_depth.set_option(rs.option.emitter_enabled, 0) # turn OFF projector
160 |
161 | try:
162 |         # Retrieve the stream and intrinsic properties for both cameras
163 | profile1 = pipe1.get_active_profile()
164 | profile2 = pipe2.get_active_profile()
165 |         # future improvements: make both streams configurable
166 | streams = {"cam1" : profile1.get_stream(rs.stream.fisheye, 1).as_video_stream_profile(),
167 | "pose" : profile1.get_stream(rs.stream.pose),
168 | "cam2" : profile2.get_stream(rs.stream.infrared, 1).as_video_stream_profile()} # IR1
169 | #"cam2" : profile1.get_stream(rs.stream.fisheye, 2).as_video_stream_profile()} # test
170 | intrinsics = {"cam1" : streams["cam1"].get_intrinsics(),
171 | "cam2" : streams["cam2"].get_intrinsics()}
172 | #print("cam1:", intrinsics["cam1"])
173 |         #print("cam2:", intrinsics["cam2"])
174 |
175 | save_intrinsics(args.path, args.file_name, intrinsics, streams)
176 |
177 | # capture images
178 | i = 0
179 | print("Press 's' to save image.\nPress 'q' or 'c' to quit recording and start the calibration.")
180 | while True:
181 | # cam 1
182 | frames1 = pipe1.wait_for_frames()
183 | f_fe1 = frames1.get_fisheye_frame(1) # left fisheye
184 | f_fe2 = frames1.get_fisheye_frame(2) # right fisheye
185 | if not f_fe1 or not f_fe2:
186 | continue
187 | img_fe1 = np.asanyarray(f_fe1.get_data())
188 | img_fe2 = np.asanyarray(f_fe2.get_data())
189 |
190 | # cam 2
191 | frames2 = pipe2.wait_for_frames()
192 | f_ir1 = frames2.get_infrared_frame(1) # left infrared
193 | f_ir2 = frames2.get_infrared_frame(2) # right infrared
194 | f_color = frames2.get_color_frame()
195 | if not f_ir1 or not f_ir2 or not f_color:
196 | continue
197 | img_ir1 = np.asanyarray(f_ir1.get_data())
198 | img_ir2 = np.asanyarray(f_ir2.get_data())
199 | img_color = np.asanyarray(f_color.get_data())
200 |
201 | # TODO: configure streams
202 | img1 = img_fe1
203 | img2 = img_ir1
204 |
205 | # display
206 | cv2.imshow('cam1', img1)
207 | cv2.imshow('cam2', img2)
208 |
209 | # save or quit
210 | k = cv2.waitKey(1)
211 | if k == ord('s'):
212 | print("'s' key pressed. Saving temp images..")
213 | if not os.path.exists(tmp_folder):
214 | os.mkdir(tmp_folder)
215 | cv2.imwrite(tmp_folder + '/fe1_' + str(i) + '.png', img_fe1)
216 | cv2.imwrite(tmp_folder + '/fe2_' + str(i) + '.png', img_fe2)
217 | cv2.imwrite(tmp_folder + '/ir1_' + str(i) + '.png', img_ir1)
218 | # cv2.imwrite(tmp_folder+ '/ir2_' + str(i) + '.png', img_ir2)
219 | cv2.imwrite(tmp_folder + '/color_' + str(i) + '.png', img_color)
220 | print("Saved temp images in temp folder " + tmp_folder)
221 | i = i+1
222 |
223 | if k == ord('q') or k == ord('c'):
224 | break
225 |
226 | finally:
227 | pipe1.stop()
228 | pipe2.stop()
229 |
230 |
231 | # calibrate
232 | print("Calibrate extrinsics now...")
233 |
234 | # arrays to store detections
235 | P3 = [] # w.r.t. target frame
236 | P2_1 = [] # in image #1
237 | P2_2 = [] # in image #2
238 |
239 | # TODO: configure streams
240 | images1 = glob.glob(tmp_folder + '/fe1_*')
241 | #images2 = glob.glob(tmp_folder + '/fe2_*') # test
242 | images2 = glob.glob(tmp_folder + '/ir1_*')
243 | images1.sort()
244 | images2.sort()
245 | #print(images1)
246 | #print(images2)
247 |
248 | if len(images1) == len(images2) == 0:
249 | print("No images found. Exit.")
250 | exit(0)
251 |
252 |
253 | try:
254 | for i, fname in enumerate(images1):
255 | img1 = cv2.imread(images1[i])
256 | img2 = cv2.imread(images2[i])
257 |
258 | gray1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
259 | gray2 = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
260 |
261 | # detect
262 | ret1, corners1 = cv2.findChessboardCorners(gray1, CHECKERBOARD, None)
263 | ret2, corners2 = cv2.findChessboardCorners(gray2, CHECKERBOARD, None)
264 |
265 | if ret1 and ret2:
266 | # subpixel refinement
267 | criteria_sub = (cv2.TermCriteria_COUNT + cv2.TERM_CRITERIA_EPS, 10, 1e-1)
268 | rt = cv2.cornerSubPix(gray1, corners1, (7, 7), (-1, -1), criteria_sub)
269 | P2_1.append(corners1)
270 | if args.visualize:
271 | ret1 = cv2.drawChessboardCorners(img1, CHECKERBOARD, corners1, ret1)
272 | cv2.imshow("img1", img1)
273 | cv2.waitKey(200)
274 |
275 | rt = cv2.cornerSubPix(gray2, corners2, (7, 7), (-1, -1), criteria_sub)
276 | P2_2.append(corners2)
277 | if args.visualize:
278 | ret2 = cv2.drawChessboardCorners(img2, CHECKERBOARD, corners2, ret2)
279 | cv2.imshow("img2", img2)
280 | cv2.waitKey(200)
281 | except cv2.error as e:
282 | print("Error: ", e)
283 |
284 | # calibration (stereo extrinsics)
285 | R = np.zeros((1, 1, 3), dtype=np.float64)
286 | T = np.zeros((1, 1, 3), dtype=np.float64)
287 |
288 | N = len(P2_1) # number of successful detections
289 |
290 | p3d = np.zeros( (CHECKERBOARD[0]*CHECKERBOARD[1], 1, 3) , np.float64)
291 | p3d[:,0, :2] = np.mgrid[0:CHECKERBOARD[0], 0:CHECKERBOARD[1]].T.reshape(-1, 2)
292 |
293 | # fisheye.stereoCalibrate needs different data structures/dimensions than cv2.stereoCalibrate, i.e. (N, 1, CHECKERBOARD[0]*CHECKERBOARD[1], 2/3)!
294 | P3 = np.array([p3d]*N, dtype=np.float64)
295 | P2_1 = np.asarray(P2_1, dtype=np.float64)
296 | P2_2 = np.asarray(P2_2, dtype=np.float64)
297 |
298 | P3 = np.reshape(P3, (N, 1, CHECKERBOARD[0]*CHECKERBOARD[1], 3))*SIDE_LENGTH
299 | P2_1 = np.reshape(P2_1, (N, 1, CHECKERBOARD[0]*CHECKERBOARD[1], 2))
300 | P2_2 = np.reshape(P2_2, (N, 1, CHECKERBOARD[0]*CHECKERBOARD[1], 2))
301 |
302 | (K1, D1, K2, D2, H1) = load_calibration(args.path, args.file_name)
303 |
304 | try:
305 | (rms, _, _, _, _, R, T) = \
306 | cv2.fisheye.stereoCalibrate(
307 | P3,
308 | P2_1,
309 | P2_2,
310 | K1,
311 | D1,
312 | K2,
313 | D2,
314 | (0,0), # only used to initialize intrinsics when no intrinsics provided
315 | R,
316 | T,
317 | cv2.fisheye.CALIB_FIX_INTRINSIC # extrinsics only
318 | )
319 | except cv2.error as e:
320 | print("Error: ", e)
321 | print("Please make sure that the checkerboard exists in the images. See tmp images in " + tmp_folder + " to debug.")
322 | exit()
323 |
324 | print("RMS:", rms)
325 |
326 | H_cam2_cam1 = np.eye(4)
327 | H_cam2_cam1[:3,:3] = R
328 | H_cam2_cam1[:3,3] = T.flatten()
329 |
330 | # w.r.t. pose
331 | H_ir1_fe1 = H_cam2_cam1 # TODO: configure
332 | H_pose_fe1 = H1
333 |
334 | H_pose_ir1 = H_pose_fe1.dot( np.linalg.inv(H_ir1_fe1) )
335 | print("H (ir1 wrt pose) =", H_pose_ir1)
336 |
337 | fn = args.path + "/H.txt"
338 | np.savetxt(fn, H_pose_ir1, fmt='%.9f')
339 | print("Extrinsic output written to", fn)
340 |
341 | if not args.save_tmp:
342 | if os.path.isdir(tmp_folder):
343 | shutil.rmtree(tmp_folder, ignore_errors=True)
344 | print("Temporary files deleted. If you wish to keep the tmp files, use --save_tmp True.")
--------------------------------------------------------------------------------
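A small, hypothetical sanity check on the result: the rotation block of the saved H should be a proper rotation (orthonormal, determinant +1), the translation should be on the order of the physical offset between the two cameras, and applying H to a point expressed in the IR1/depth frame yields that point in the T265 pose frame, matching the "ir1 wrt pose" naming above. The file path is an assumption:

    import numpy as np

    H_pose_ir1 = np.loadtxt("calibration_results/H.txt")   # 4x4 transform written by calibrate_extrinsics.py
    R, t = H_pose_ir1[:3, :3], H_pose_ir1[:3, 3]

    # R should be orthonormal with determinant +1
    print("R orthonormal:", np.allclose(R @ R.T, np.eye(3), atol=1e-6))
    print("det(R):", np.linalg.det(R))
    print("|t| (m):", np.linalg.norm(t))          # expect a few centimetres for a rigid T265/D4xx mount

    # Map a point 1 m straight ahead of the depth camera into the pose frame
    p_ir1 = np.array([0.0, 0.0, 1.0, 1.0])        # homogeneous coordinates
    print("in pose frame:", (H_pose_ir1 @ p_ir1)[:3])
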
/scripts/calibration_results/H.txt:
--------------------------------------------------------------------------------
1 | 0.999761668 -0.005254881 -0.021186741 0.004522556
2 | -0.002735781 -0.993102196 0.117219799 -0.044872356
3 | -0.021656584 -0.117133906 -0.992879941 -0.003134181
4 | 0.000000000 0.000000000 0.000000000 1.000000000
5 |
--------------------------------------------------------------------------------
/scripts/calibration_results/intrinsics.json:
--------------------------------------------------------------------------------
1 | {
2 | "cameras": [
3 | {
4 | "center_px": [
5 | 424.61468505859375,
6 | 395.1351013183594
7 | ],
8 | "focal_length_px": [
9 | 285.5497131347656,
10 | 286.5690002441406
11 | ],
12 | "distortion": {
13 | "type": "kannalabrandt4",
14 | "k": [
15 | -0.0017655419651418924,
16 | 0.03551755100488663,
17 | -0.03320972993969917,
18 | 0.004888659808784723
19 | ]
20 | },
21 | "extrinsics": {
22 | "T": [
23 | -0.031984999775886536,
24 | -0.00011117507528979331,
25 | 0.0003567125531844795
26 | ],
27 | "R": [
28 | 0.9998769760131836,
29 | -0.00043032190296798944,
30 | -0.015675829723477364,
31 | -0.0004210202896501869,
32 | -0.9999997019767761,
33 | 0.0005966732278466225,
34 | -0.015676090493798256,
35 | -0.000590000010561198,
36 | -0.9998769164085388
37 | ]
38 | }
39 | },
40 | {
41 | "center_px": [
42 | 420.3105163574219,
43 | 241.55503845214844
44 | ],
45 | "focal_length_px": [
46 | 432.4413757324219,
47 | 432.4413757324219
48 | ],
49 | "distortion": {
50 | "type": "kannalabrandt4",
51 | "k": [
52 | 0.0,
53 | 0.0,
54 | 0.0,
55 | 0.0
56 | ]
57 | }
58 | }
59 | ]
60 | }
--------------------------------------------------------------------------------
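For reference, a minimal sketch of turning the first camera entry of this file into the pinhole matrix K and the four Kannala-Brandt coefficients D that cv2.fisheye expects; it mirrors read_calibration() in calibrate_extrinsics.py, and the file path is an assumption:

    import json
    import numpy as np

    with open("scripts/calibration_results/intrinsics.json") as f:
        cam = json.load(f)["cameras"][0]          # first entry: the T265 fisheye stream

    fx, fy = cam["focal_length_px"]
    cx, cy = cam["center_px"]
    K = np.array([[fx, 0.0, cx],
                  [0.0, fy, cy],
                  [0.0, 0.0, 1.0]])
    D = np.array(cam["distortion"]["k"])          # 4 Kannala-Brandt distortion coefficients

    print(K)
    print(D)
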
/scripts/mavlink_control.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | ######################################################
5 | ## Sending control commands to AP via MAVLink ##
6 | ## Based on set_attitude_target.py: https://github.com/dronekit/dronekit-python/blob/master/examples/set_attitude_target/set_attitude_target.py
7 | ######################################################
8 |
9 | ## Additional installation for SITL:
10 | ## pip3 install dronekit-sitl -UI
11 |
12 | from dronekit import connect, VehicleMode, LocationGlobal, LocationGlobalRelative
13 | from pymavlink import mavutil # Needed for command message definitions
14 | import time
15 | import math
16 |
17 | # Set MAVLink protocol to 2.
18 | import os
19 | os.environ["MAVLINK20"] = "1"
20 |
21 | import sys
22 |
23 | #######################################
24 | # Parameters
25 | #######################################
26 |
27 | rc_control_channel = 6 # Channel to check value, start at 0 == chan1_raw
28 | rc_control_thres = 2000        # Threshold value: autonomous control starts when the channel value exceeds this
29 |
30 | #######################################
31 | # Global variables
32 | #######################################
33 |
34 | rc_channel_value = 0
35 | vehicle = None
36 |
37 | #######################################
38 | # User input
39 | #######################################
40 |
41 | # Set up option parsing to get connection string
42 | import argparse
43 | parser = argparse.ArgumentParser(description='Example showing how to send control commands to the vehicle via MAVLink.')
44 | parser.add_argument('--connect',
45 | help="vehicle connection target string. If not specified, SITL automatically started and used.")
46 | args = parser.parse_args()
47 |
48 | #######################################
49 | # Functions
50 | #######################################
51 |
52 | connection_string = args.connect
53 | sitl = None
54 |
55 | # Start SITL if no connection string specified
56 | if not connection_string:
57 | import dronekit_sitl
58 | sitl = dronekit_sitl.start_default()
59 | connection_string = sitl.connection_string()
60 |
61 | print('Connecting to vehicle on: %s' % connection_string)
62 | vehicle = connect(connection_string, wait_ready=True)
63 |
64 | @vehicle.on_message('RC_CHANNELS')
65 | def RC_CHANNEL_listener(vehicle, name, message):
66 | global rc_channel_value, rc_control_channel
67 |
68 | # TO-DO: find a less hard-coded solution
69 | curr_channels_values = [message.chan1_raw, message.chan2_raw, message.chan3_raw, message.chan4_raw, message.chan5_raw, message.chan6_raw, message.chan7_raw, message.chan8_raw]
70 |
71 | rc_channel_value = curr_channels_values[rc_control_channel]
72 |
73 | # # Print out the values to debug
74 | # print('%s attribute is: %s' % (name, message)) # Print all info from the messages
75 | # os.system('clear') # This helps in displaying the messages to be more readable
76 | # for channel in range(8):
77 | # print("Number of RC channels: ", message.chancount, ". Individual RC channel value:")
78 | # print(" CH", channel, curr_channels_values[channel])
79 |
80 |
81 | def arm_and_takeoff_nogps(aTargetAltitude):
82 | """
83 |     Arms the vehicle and flies to aTargetAltitude without GPS data.
84 | """
85 |
86 | print("Basic pre-arm checks")
87 |
88 | # Don't let the user try to arm until autopilot is ready
89 | # If you need to disable the arming check,
90 |     # comment it out at your own risk.
91 | while not vehicle.is_armable:
92 | print("- Waiting for vehicle to initialise...")
93 | time.sleep(1)
94 |
95 | print("Arming motors")
96 | # Copter should arm in GUIDED_NOGPS mode
97 | vehicle.mode = VehicleMode("LOITER")
98 | vehicle.armed = True
99 | while not vehicle.armed:
100 | print("- Waiting for arming...")
101 | time.sleep(1)
102 |
103 | print("Taking off!")
104 | vehicle.simple_takeoff(aTargetAltitude) # Take off to target altitude
105 |
106 | # Wait until the vehicle reaches a safe height before processing the goto
107 | # (otherwise the command after Vehicle.simple_takeoff will execute
108 | # immediately).
109 | while True:
110 | print(" Altitude: ", vehicle.location.global_relative_frame.alt)
111 | # Break and return from function just below target altitude.
112 | if vehicle.location.global_relative_frame.alt >= aTargetAltitude * 0.95:
113 | print("Reached target altitude")
114 | break
115 | time.sleep(1)
116 |
117 |
118 | def send_attitude_target(roll_angle = 0.0, pitch_angle = 0.0,
119 | yaw_angle = None, yaw_rate = 0.0, use_yaw_rate = False,
120 | thrust = 0.5):
121 | """
122 | use_yaw_rate: the yaw can be controlled using yaw_angle OR yaw_rate.
123 | When one is used, the other is ignored by Ardupilot.
124 | thrust: 0 <= thrust <= 1, as a fraction of maximum vertical thrust.
125 | Note that as of Copter 3.5, thrust = 0.5 triggers a special case in
126 | the code for maintaining current altitude.
127 | """
128 | if yaw_angle is None:
129 | # this value may be unused by the vehicle, depending on use_yaw_rate
130 | yaw_angle = vehicle.attitude.yaw
131 | # Thrust > 0.5: Ascend
132 | # Thrust == 0.5: Hold the altitude
133 | # Thrust < 0.5: Descend
134 | msg = vehicle.message_factory.set_attitude_target_encode(
135 | 0, # time_boot_ms
136 | 1, # Target system
137 | 1, # Target component
138 | 0b00000000 if use_yaw_rate else 0b00000100,
139 | to_quaternion(roll_angle, pitch_angle, yaw_angle), # Quaternion
140 |         0, # Body roll rate in radians/second
141 |         0, # Body pitch rate in radians/second
142 |         math.radians(yaw_rate), # Body yaw rate in radians/second
143 | thrust # Thrust
144 | )
145 | vehicle.send_mavlink(msg)
146 |
147 |
148 | def send_ned_velocity(velocity_x, velocity_y, velocity_z, duration):
149 | """
150 | Move vehicle in direction based on specified velocity vectors.
151 | """
152 | msg = vehicle.message_factory.set_position_target_local_ned_encode(
153 | 0, # time_boot_ms (not used)
154 | 0, 0, # target system, target component
155 | mavutil.mavlink.MAV_FRAME_LOCAL_NED, # frame
156 | 0b0000111111000111, # type_mask (only speeds enabled)
157 | 0, 0, 0, # x, y, z positions (not used)
158 | velocity_x, velocity_y, velocity_z, # x, y, z velocity in m/s
159 | 0, 0, 0, # x, y, z acceleration (not supported yet, ignored in GCS_Mavlink)
160 | 0, 0) # yaw, yaw_rate (not supported yet, ignored in GCS_Mavlink)
161 |
162 |
163 | # send command to vehicle on 1 Hz cycle
164 | for x in range(0,duration):
165 | vehicle.send_mavlink(msg)
166 | time.sleep(1)
167 |
168 |
169 | def goto_position_target_local_ned(north, east, down):
170 | """
171 | Send SET_POSITION_TARGET_LOCAL_NED command to request the vehicle fly to a specified
172 | location in the North, East, Down frame.
173 | """
174 | msg = vehicle.message_factory.set_position_target_local_ned_encode(
175 | 0, # time_boot_ms (not used)
176 | 0, 0, # target system, target component
177 | mavutil.mavlink.MAV_FRAME_LOCAL_NED, # frame
178 | 0b0000111111111000, # type_mask (only positions enabled)
179 | north, east, down,
180 | 0, 0, 0, # x, y, z velocity in m/s (not used)
181 | 0, 0, 0, # x, y, z acceleration (not supported yet, ignored in GCS_Mavlink)
182 | 0, 0) # yaw, yaw_rate (not supported yet, ignored in GCS_Mavlink)
183 | # send command to vehicle
184 | vehicle.send_mavlink(msg)
185 |
186 |
187 | def set_attitude(roll_angle = 0.0, pitch_angle = 0.0,
188 | yaw_angle = None, yaw_rate = 0.0, use_yaw_rate = False,
189 | thrust = 0.5, duration = 0):
190 | """
191 | Note that from AC3.3 the message should be re-sent more often than every
192 | second, as an ATTITUDE_TARGET order has a timeout of 1s.
193 | In AC3.2.1 and earlier the specified attitude persists until it is canceled.
194 | The code below should work on either version.
195 | Sending the message multiple times is the recommended way.
196 | """
197 | send_attitude_target(roll_angle, pitch_angle,
198 | yaw_angle, yaw_rate, False,
199 | thrust)
200 | start = time.time()
201 | while time.time() - start < duration:
202 | send_attitude_target(roll_angle, pitch_angle,
203 | yaw_angle, yaw_rate, False,
204 | thrust)
205 | time.sleep(0.1)
206 | # Reset attitude, or it will persist for 1s more due to the timeout
207 | send_attitude_target(0, 0,
208 | 0, 0, True,
209 | thrust)
210 |
211 |
212 | def to_quaternion(roll = 0.0, pitch = 0.0, yaw = 0.0):
213 | """
214 |     Convert roll, pitch, yaw angles (in degrees) to a quaternion [w, x, y, z].
215 | """
216 | t0 = math.cos(math.radians(yaw * 0.5))
217 | t1 = math.sin(math.radians(yaw * 0.5))
218 | t2 = math.cos(math.radians(roll * 0.5))
219 | t3 = math.sin(math.radians(roll * 0.5))
220 | t4 = math.cos(math.radians(pitch * 0.5))
221 | t5 = math.sin(math.radians(pitch * 0.5))
222 |
223 | w = t0 * t2 * t4 + t1 * t3 * t5
224 | x = t0 * t3 * t4 - t1 * t2 * t5
225 | y = t0 * t2 * t5 + t1 * t3 * t4
226 | z = t1 * t2 * t4 - t0 * t3 * t5
227 |
228 | return [w, x, y, z]
229 |
230 |
231 | """
232 | Convenience functions for sending immediate/guided mode commands to control the Copter.
233 | Commands of this type include:
234 | * MAV_CMD_CONDITION_YAW - set direction of the front of the Copter (latitude, longitude)
235 | * MAV_CMD_DO_SET_ROI - set direction where the camera gimbal is aimed (latitude, longitude, altitude)
236 | * MAV_CMD_DO_CHANGE_SPEED - set target speed in metres/second.
237 | The full set of available commands are listed here:
238 | http://dev.ardupilot.com/wiki/copter-commands-in-guided-mode/
239 | """
240 |
241 | def condition_yaw(heading, relative=False):
242 | """
243 | Send MAV_CMD_CONDITION_YAW message to point vehicle at a specified heading (in degrees).
244 | This method sets an absolute heading by default, but you can set the `relative` parameter
245 | to `True` to set yaw relative to the current yaw heading.
246 | By default the yaw of the vehicle will follow the direction of travel. After setting
247 | the yaw using this function there is no way to return to the default yaw "follow direction
248 | of travel" behaviour (https://github.com/diydrones/ardupilot/issues/2427)
249 | For more information see:
250 | http://copter.ardupilot.com/wiki/common-mavlink-mission-command-messages-mav_cmd/#mav_cmd_condition_yaw
251 | """
252 | if relative:
253 | is_relative = 1 #yaw relative to direction of travel
254 | else:
255 | is_relative = 0 #yaw is an absolute angle
256 | # create the CONDITION_YAW command using command_long_encode()
257 | msg = vehicle.message_factory.command_long_encode(
258 | 0, 0, # target system, target component
259 | mavutil.mavlink.MAV_CMD_CONDITION_YAW, #command
260 | 0, #confirmation
261 | heading, # param 1, yaw in degrees
262 | 0, # param 2, yaw speed deg/s
263 | 1, # param 3, direction -1 ccw, 1 cw
264 | is_relative, # param 4, relative offset 1, absolute angle 0
265 | 0, 0, 0) # param 5 ~ 7 not used
266 | # send command to vehicle
267 | vehicle.send_mavlink(msg)
268 |
269 |
270 | def pos_control_align_north_and_move_square():
271 |
272 | print("SQUARE path using SET_POSITION_TARGET_LOCAL_NED and position parameters")
273 | DURATION_SEC = 2 #Set duration for each segment.
274 | HEIGHT_M = 2
275 | SIZE_M = 2
276 |
277 | """
278 | Fly the vehicle in a SIZE_M meter square path, using the SET_POSITION_TARGET_LOCAL_NED command
279 | and specifying a target position (rather than controlling movement using velocity vectors).
280 |     The command is sent from goto_position_target_local_ned().
281 |
282 | The position is specified in terms of the NED (North East Down) relative to the Home location.
283 |
284 | WARNING: The "D" in NED means "Down". Using a positive D value will drive the vehicle into the ground!
285 |
286 | The code sleeps for a time (DURATION_SEC) to give the vehicle time to reach each position (rather than
287 | sending commands based on proximity).
288 |
289 |     Yaw is set explicitly with condition_yaw() before each leg of the square, so the front of the vehicle
290 |     points in the direction of travel.
291 | """
292 |
293 | print("Yaw 0 absolute (North)")
294 | condition_yaw(0)
295 | print("North (m): ", SIZE_M, ", East (m): 0m, Height (m):", HEIGHT_M," for", DURATION_SEC, "seconds")
296 | goto_position_target_local_ned(SIZE_M, 0, -HEIGHT_M)
297 | time.sleep(DURATION_SEC)
298 |
299 | print("Yaw 90 absolute (East)")
300 | condition_yaw(90)
301 | print("North (m): ", SIZE_M, ", East (m): ", SIZE_M, " Height (m):", HEIGHT_M," for", DURATION_SEC, "seconds")
302 | goto_position_target_local_ned(SIZE_M, SIZE_M, -HEIGHT_M)
303 | time.sleep(DURATION_SEC)
304 |
305 | print("Yaw 180 absolute (South)")
306 | condition_yaw(180)
307 | print("North (m): 0m, East (m): ", SIZE_M, ", Height (m):", HEIGHT_M," for", DURATION_SEC, "seconds")
308 | goto_position_target_local_ned(0, SIZE_M, -HEIGHT_M)
309 | time.sleep(DURATION_SEC)
310 |
311 | print("Yaw 270 absolute (West)")
312 | condition_yaw(270)
313 | print("North (m): 0m, East (m): 0m, Height (m):", HEIGHT_M," for", DURATION_SEC, "seconds")
314 | goto_position_target_local_ned(0, 0, -HEIGHT_M)
315 | time.sleep(DURATION_SEC)
316 |
317 |
318 | def vel_control_align_north_and_move_square():
319 | """
320 | Fly the vehicle in a path using velocity vectors (the underlying code calls the
321 | SET_POSITION_TARGET_LOCAL_NED command with the velocity parameters enabled).
322 |     The code sleeps for a fixed duration per segment (DURATION_*_SEC), which together with the speed defines the distance travelled.
323 |     The code also sets the yaw (MAV_CMD_CONDITION_YAW) using condition_yaw() in each segment
324 | so that the front of the vehicle points in the direction of travel
325 | """
326 | #Set up velocity vector to map to each direction.
327 | # vx > 0 => fly North
328 | # vx < 0 => fly South
329 | NORTH = 0.5
330 | SOUTH = -0.5
331 |
332 | # Note for vy:
333 | # vy > 0 => fly East
334 | # vy < 0 => fly West
335 | EAST = 0.5
336 | WEST = -0.5
337 |
338 | # Note for vz:
339 | # vz < 0 => ascend
340 | # vz > 0 => descend
341 | UP = -0.5
342 | DOWN = 0.5
343 |
344 | # Set duration for each segment.
345 | DURATION_NORTH_SEC = 4
346 | DURATION_SOUTH_SEC = 4
347 | DURATION_EAST_SEC = 4
348 | DURATION_WEST_SEC = 4
349 |
350 | # Control path using velocity commands
351 |     print("Point the vehicle in a specific direction, then move using SET_POSITION_TARGET_LOCAL_NED and velocity parameters")
352 |
353 | print("Yaw 0 absolute (North)")
354 | condition_yaw(0)
355 | send_ned_velocity(0, 0, 0, 1)
356 | print("Velocity North")
357 | send_ned_velocity(NORTH, 0, 0, DURATION_NORTH_SEC)
358 | send_ned_velocity(0, 0, 0, 1)
359 |
360 | print("Yaw 90 absolute (East)")
361 | condition_yaw(90)
362 | print("Velocity East")
363 | send_ned_velocity(0, EAST, 0, DURATION_EAST_SEC)
364 | send_ned_velocity(0, 0, 0, 1)
365 |
366 | print("Yaw 180 absolute (South)")
367 | condition_yaw(180)
368 | print("Velocity South")
369 | send_ned_velocity(SOUTH, 0, 0, DURATION_SOUTH_SEC)
370 | send_ned_velocity(0, 0, 0, 1)
371 |
372 | print("Yaw 270 absolute (West)")
373 | condition_yaw(270)
374 | print("Velocity West")
375 | send_ned_velocity(0, WEST, 0, DURATION_WEST_SEC)
376 | send_ned_velocity(0, 0, 0, 1)
377 |
378 |
379 | #######################################
380 | # Main program starts here
381 | #######################################
382 |
383 | try:
384 | # If using SITL: Take off in GUIDED_NOGPS mode.
385 | if sitl is not None:
386 | arm_and_takeoff_nogps(20)
387 | print("Hold position for 3 seconds")
388 | set_attitude(duration = 3)
389 |
390 |     # Wait until the RC transmitter is on and the corresponding channel switch is flipped (value above the threshold)
391 | print("Starting autonomous control...")
392 | while True:
393 | if (vehicle.mode.name == "LOITER") and (rc_channel_value > rc_control_thres):
394 | pos_control_align_north_and_move_square()
395 | elif (vehicle.mode.name == "GUIDED") and (rc_channel_value > rc_control_thres):
396 | vel_control_align_north_and_move_square()
397 | else:
398 | print("Checking rc channel:", rc_control_channel, ", current value:", rc_channel_value, ", threshold to start: ", rc_control_thres)
399 | time.sleep(1)
400 |
401 | # print("Setting LAND mode...")
402 | # vehicle.mode = VehicleMode("LAND")
403 | # time.sleep(1)
404 |
405 | # Close vehicle object before exiting script
406 | print("Close vehicle object")
407 | vehicle.close()
408 |
409 | # Shut down simulator if it was started.
410 | if sitl is not None:
411 | sitl.stop()
412 |
413 | print("Completed")
414 |
415 | except KeyboardInterrupt:
416 | vehicle.close()
417 | print("Vehicle object closed.")
418 | sys.exit()
419 |
--------------------------------------------------------------------------------
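The type_mask literals used in send_ned_velocity() (0b0000111111000111) and goto_position_target_local_ned() (0b0000111111111000) are MAVLink POSITION_TARGET_TYPEMASK ignore flags OR'd together. A minimal sketch that rebuilds them from named bits (plain integers used here; equivalent named constants also exist in pymavlink dialects):

    # POSITION_TARGET_TYPEMASK flags: a set bit means "ignore this field"
    IGNORE_PX, IGNORE_PY, IGNORE_PZ = 1, 2, 4
    IGNORE_VX, IGNORE_VY, IGNORE_VZ = 8, 16, 32
    IGNORE_AX, IGNORE_AY, IGNORE_AZ = 64, 128, 256
    FORCE_SET = 512
    IGNORE_YAW, IGNORE_YAW_RATE = 1024, 2048

    # Velocity-only setpoint: everything ignored except vx, vy, vz
    VEL_ONLY = (IGNORE_PX | IGNORE_PY | IGNORE_PZ |
                IGNORE_AX | IGNORE_AY | IGNORE_AZ |
                FORCE_SET | IGNORE_YAW | IGNORE_YAW_RATE)
    assert VEL_ONLY == 0b0000111111000111

    # Position-only setpoint: everything ignored except x, y, z
    POS_ONLY = (IGNORE_VX | IGNORE_VY | IGNORE_VZ |
                IGNORE_AX | IGNORE_AY | IGNORE_AZ |
                FORCE_SET | IGNORE_YAW | IGNORE_YAW_RATE)
    assert POS_ONLY == 0b0000111111111000
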
/scripts/mavros_control1.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | ##
4 | #
5 | # Control a MAV via mavros
6 | #
7 | ##
8 |
9 | import rospy
10 | import tf
11 | from geometry_msgs.msg import Pose, PoseStamped, Twist, Quaternion
12 | from mavros_msgs.msg import OverrideRCIn
13 | from mavros_msgs.msg import RCIn
14 | from mavros_msgs.srv import CommandBool
15 | from mavros_msgs.srv import SetMode
16 | from mavros_msgs.srv import CommandTOL
17 |
18 | pi_2 = 3.141592654 / 2.0
19 |
20 | class MavController:
21 | """
22 | A simple object to help interface with mavros
23 | """
24 | def __init__(self):
25 |
26 | rospy.init_node("mav_control_node")
27 | rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.pose_callback)
28 | rospy.Subscriber("/mavros/rc/in", RCIn, self.rc_callback)
29 |
30 | self.cmd_pos_pub = rospy.Publisher("/mavros/setpoint_position/local", PoseStamped, queue_size=1)
31 | self.cmd_vel_pub = rospy.Publisher("/mavros/setpoint_velocity/cmd_vel_unstamped", Twist, queue_size=1)
32 | self.rc_override = rospy.Publisher("/mavros/rc/override", OverrideRCIn, queue_size=1)
33 |
34 | # mode 0 = STABILIZE
35 | # mode 4 = GUIDED
36 | # mode 9 = LAND
37 | self.mode_service = rospy.ServiceProxy('/mavros/set_mode', SetMode)
38 | self.arm_service = rospy.ServiceProxy('/mavros/cmd/arming', CommandBool)
39 | self.takeoff_service = rospy.ServiceProxy('/mavros/cmd/takeoff', CommandTOL)
40 |
41 | self.rc = RCIn()
42 | self.pose = Pose()
43 | self.timestamp = rospy.Time()
44 |
45 | def rc_callback(self, data):
46 | """
47 | Keep track of the current manual RC values
48 | """
49 | self.rc = data
50 |
51 | def pose_callback(self, data):
52 | """
53 | Handle local position information
54 | """
55 | self.timestamp = data.header.stamp
56 | self.pose = data.pose
57 |
58 | def goto(self, pose):
59 | """
60 |         Set the given pose as the next setpoint by sending
61 | a SET_POSITION_TARGET_LOCAL_NED message. The copter must
62 | be in GUIDED mode for this to work.
63 | """
64 | pose_stamped = PoseStamped()
65 | pose_stamped.header.stamp = self.timestamp
66 | pose_stamped.pose = pose
67 |
68 | self.cmd_pos_pub.publish(pose_stamped)
69 |
70 | def goto_xyz_rpy(self, x, y, z, ro, pi, ya):
71 | pose = Pose()
72 | pose.position.x = x
73 | pose.position.y = y
74 | pose.position.z = z
75 |
76 | quat = tf.transformations.quaternion_from_euler(ro, pi, ya + pi_2)
77 |
78 | pose.orientation.x = quat[0]
79 | pose.orientation.y = quat[1]
80 | pose.orientation.z = quat[2]
81 | pose.orientation.w = quat[3]
82 | self.goto(pose)
83 | #print(quat)
84 |
85 | def set_vel(self, vx, vy, vz, avx=0, avy=0, avz=0):
86 | """
87 |         Send command velocities. Must be in GUIDED mode. Assumes angular
88 | velocities are zero by default.
89 | """
90 | cmd_vel = Twist()
91 |
92 | cmd_vel.linear.x = vx
93 | cmd_vel.linear.y = vy
94 | cmd_vel.linear.z = vz
95 |
96 | cmd_vel.angular.x = avx
97 | cmd_vel.angular.y = avy
98 | cmd_vel.angular.z = avz
99 |
100 | self.cmd_vel_pub.publish(cmd_vel)
101 |
102 | def arm(self):
103 | """
104 | Arm the throttle
105 | """
106 | return self.arm_service(True)
107 |
108 | def disarm(self):
109 | """
110 | Disarm the throttle
111 | """
112 | return self.arm_service(False)
113 |
114 | def takeoff(self, height=1.0):
115 | """
116 |         Switch to GUIDED mode, arm the throttle, and take off to the given height (in metres)
117 | """
118 | # Set to stabilize mode for arming
119 | #mode_resp = self.mode_service(custom_mode="0")
120 | mode_resp = self.mode_service(custom_mode="4")
121 | self.arm()
122 |
123 | # Set to guided mode
124 | #mode_resp = self.mode_service(custom_mode="4")
125 |
126 | # Takeoff
127 | takeoff_resp = self.takeoff_service(altitude=height)
128 |
129 | #return takeoff_resp
130 | return mode_resp
131 |
132 | def land(self):
133 | """
134 | Set in LAND mode, which should cause the UAV to descend directly,
135 | land, and disarm.
136 | """
137 | resp = self.mode_service(custom_mode="9")
138 | self.disarm()
139 |
140 | def simple_demo():
141 | """
142 | A simple demonstration of using mavros commands to control a UAV.
143 | """
144 | c = MavController()
145 | rospy.sleep(1)
146 |
147 | print("Takeoff")
148 | c.takeoff(0.5)
149 | rospy.sleep(3)
150 | c.goto_xyz_rpy(0,0,1.2,0,0,0)
151 | rospy.sleep(3)
152 |
153 | print("Waypoint 1: position control")
154 | c.goto_xyz_rpy(0.0,0.0,1.2,0,0,-1*pi_2)
155 | rospy.sleep(2)
156 | c.goto_xyz_rpy(0.4,0.0,1.2,0,0,-1*pi_2)
157 | rospy.sleep(3)
158 | print("Waypoint 2: position control")
159 | c.goto_xyz_rpy(0.4,0.0,1.2,0,0,0)
160 | rospy.sleep(2)
161 | c.goto_xyz_rpy(0.4,0.4,1.2,0,0,0)
162 | rospy.sleep(3)
163 | print("Waypoint 3: position control")
164 | c.goto_xyz_rpy(0.4,0.4,1.2,0,0,pi_2)
165 | rospy.sleep(2)
166 | c.goto_xyz_rpy(0.0,0.4,1.2,0,0,pi_2)
167 | rospy.sleep(3)
168 | print("Waypoint 4: position control")
169 | c.goto_xyz_rpy(0.0,0.4,1.2,0,0,2*pi_2)
170 | rospy.sleep(2)
171 | c.goto_xyz_rpy(0.0,0.0,1.2,0,0,2*pi_2)
172 | rospy.sleep(3)
173 |
174 | #print("Velocity Setpoint 1")
175 | #c.set_vel(0,0.1,0)
176 | #rospy.sleep(5)
177 | #print("Velocity Setpoint 2")
178 | #c.set_vel(0,-0.1,0)
179 | #rospy.sleep(5)
180 | #print("Velocity Setpoint 3")
181 | #c.set_vel(0,0,0)
182 | #rospy.sleep(5)
183 |
184 | print("Landing")
185 | c.land()
186 |
187 | if __name__=="__main__":
188 | simple_demo()
189 |
--------------------------------------------------------------------------------
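The demo above paces its waypoints with fixed rospy.sleep() calls. A minimal, hypothetical sketch of instead waiting until the vehicle is within a tolerance of the commanded position, using the pose that MavController already tracks (helper name, tolerance and timeout are assumptions; it must run after MavController() has initialised the node):

    import math
    import rospy

    def wait_until_reached(c, x, y, z, tol_m=0.15, timeout_s=10.0):
        """Block until c.pose is within tol_m of (x, y, z), or until the timeout expires."""
        start = rospy.Time.now()
        while (rospy.Time.now() - start).to_sec() < timeout_s:
            p = c.pose.position
            if math.sqrt((p.x - x)**2 + (p.y - y)**2 + (p.z - z)**2) < tol_m:
                return True
            rospy.sleep(0.1)
        return False

    # e.g. after c.goto_xyz_rpy(0.4, 0.0, 1.2, 0, 0, 0):
    # wait_until_reached(c, 0.4, 0.0, 1.2)
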
/scripts/mavros_control2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | ##
4 | #
5 | # Control a MAV via mavros
6 | #
7 | ##
8 |
9 | import rospy
10 | import tf
11 | from geometry_msgs.msg import Pose, PoseStamped, Twist, Quaternion
12 | from mavros_msgs.msg import OverrideRCIn
13 | from mavros_msgs.msg import RCIn
14 | from mavros_msgs.srv import CommandBool
15 | from mavros_msgs.srv import SetMode
16 | from mavros_msgs.srv import CommandTOL
17 | import math
18 |
19 | pi = math.pi
20 | pi_2 = pi / 2.0
21 |
22 | class MavController:
23 | """
24 | A simple object to help interface with mavros
25 | """
26 | def __init__(self):
27 |
28 | rospy.init_node("mav_control_node")
29 | rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.pose_callback)
30 | rospy.Subscriber("/mavros/rc/in", RCIn, self.rc_callback)
31 |
32 | self.cmd_pos_pub = rospy.Publisher("/mavros/setpoint_position/local", PoseStamped, queue_size=1)
33 | self.cmd_vel_pub = rospy.Publisher("/mavros/setpoint_velocity/cmd_vel_unstamped", Twist, queue_size=1)
34 | self.rc_override = rospy.Publisher("/mavros/rc/override", OverrideRCIn, queue_size=1)
35 |
36 | # mode 0 = STABILIZE
37 | # mode 4 = GUIDED
38 | # mode 9 = LAND
39 | self.mode_service = rospy.ServiceProxy('/mavros/set_mode', SetMode)
40 | self.arm_service = rospy.ServiceProxy('/mavros/cmd/arming', CommandBool)
41 | self.takeoff_service = rospy.ServiceProxy('/mavros/cmd/takeoff', CommandTOL)
42 |
43 | self.rc = RCIn()
44 | self.pose = Pose()
45 | self.timestamp = rospy.Time()
46 |
47 | def rc_callback(self, data):
48 | """
49 | Keep track of the current manual RC values
50 | """
51 | self.rc = data
52 |
53 | def pose_callback(self, data):
54 | """
55 | Handle local position information
56 | """
57 | self.timestamp = data.header.stamp
58 | self.pose = data.pose
59 |
60 | def goto(self, pose):
61 | """
62 |         Set the given pose as the next setpoint by sending
63 | a SET_POSITION_TARGET_LOCAL_NED message. The copter must
64 | be in GUIDED mode for this to work.
65 | """
66 | pose_stamped = PoseStamped()
67 | pose_stamped.header.stamp = self.timestamp
68 | pose_stamped.pose = pose
69 |
70 | self.cmd_pos_pub.publish(pose_stamped)
71 |
72 | def goto_xyz_rpy(self, x, y, z, ro, pi, ya):
73 | pose = Pose()
74 | pose.position.x = x
75 | pose.position.y = y
76 | pose.position.z = z
77 |
78 | quat = tf.transformations.quaternion_from_euler(ro, pi, ya + pi_2)
79 |
80 | pose.orientation.x = quat[0]
81 | pose.orientation.y = quat[1]
82 | pose.orientation.z = quat[2]
83 | pose.orientation.w = quat[3]
84 | self.goto(pose)
85 | #print(quat)
86 |
87 | def set_vel(self, vx, vy, vz, avx=0, avy=0, avz=0):
88 | """
89 |         Send command velocities. Must be in GUIDED mode. Assumes angular
90 | velocities are zero by default.
91 | """
92 | cmd_vel = Twist()
93 |
94 | cmd_vel.linear.x = vx
95 | cmd_vel.linear.y = vy
96 | cmd_vel.linear.z = vz
97 |
98 | cmd_vel.angular.x = avx
99 | cmd_vel.angular.y = avy
100 | cmd_vel.angular.z = avz
101 |
102 | self.cmd_vel_pub.publish(cmd_vel)
103 |
104 | def arm(self):
105 | """
106 | Arm the throttle
107 | """
108 | return self.arm_service(True)
109 |
110 | def disarm(self):
111 | """
112 | Disarm the throttle
113 | """
114 | return self.arm_service(False)
115 |
116 | def takeoff(self, height=1.0):
117 | """
118 |         Switch to GUIDED mode, arm the throttle, and take off to the given height (in metres)
119 | """
120 | # Set to stabilize mode for arming
121 | #mode_resp = self.mode_service(custom_mode="0")
122 | mode_resp = self.mode_service(custom_mode="4")
123 | self.arm()
124 |
125 | # Set to guided mode
126 | #mode_resp = self.mode_service(custom_mode="4")
127 |
128 | # Takeoff
129 | takeoff_resp = self.takeoff_service(altitude=height)
130 |
131 | #return takeoff_resp
132 | return mode_resp
133 |
134 | def land(self):
135 | """
136 | Set in LAND mode, which should cause the UAV to descend directly,
137 | land, and disarm.
138 | """
139 | resp = self.mode_service(custom_mode="9")
140 | self.disarm()
141 |
142 | def simple_demo():
143 | """
144 | A simple demonstration of using mavros commands to control a UAV.
145 | """
146 | c = MavController()
147 | rospy.sleep(1)
148 |
149 | print("Takeoff")
150 | c.takeoff(1.0)
151 | rospy.sleep(7)
152 |
153 | r = 1
154 | circle_center_x = 0.0
155 | circle_center_y = 0.0
156 | circle_height = 1.0
157 |
158 | c.goto_xyz_rpy(circle_center_x, circle_center_y, circle_height, 0, 0, 0)
159 | rospy.sleep(3)
160 | c.goto_xyz_rpy(circle_center_x + r, circle_center_y, circle_height, 0, 0, 0)
161 | rospy.sleep(3)
162 |
163 | # for i in range(10):
164 | # y = i * 0.2 / 10.0
165 | # c.goto_xyz_rpy(0.2 + r, y, 1.2, 0, 0, 0)
166 | # rospy.sleep(0.2)
167 |
168 | for i in range(360):
169 | theta = i * 2.0 * pi / 180.0
170 | x = circle_center_x + r * math.cos(theta);
171 | y = circle_center_y + r * math.sin(theta);
172 | z = circle_height;
173 | c.goto_xyz_rpy(x, y, z, 0.0, 0.0, theta)
174 | rospy.sleep(0.1)
175 |
176 | rospy.sleep(3)
177 |
178 | c.goto_xyz_rpy(0.0, 0.0, circle_height, 0, 0, 0)
179 | rospy.sleep(3)
180 |
181 | #print("Velocity Setpoint 1")
182 | #c.set_vel(0,0.1,0)
183 | #rospy.sleep(5)
184 | #print("Velocity Setpoint 2")
185 | #c.set_vel(0,-0.1,0)
186 | #rospy.sleep(5)
187 | #print("Velocity Setpoint 3")
188 | #c.set_vel(0,0,0)
189 | #rospy.sleep(5)
190 |
191 | print("Landing")
192 | c.land()
193 |
194 | if __name__=="__main__":
195 | simple_demo()
196 |
--------------------------------------------------------------------------------
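Both mavros control scripts switch flight modes by passing ArduCopter mode numbers as strings to /mavros/set_mode. A small, hypothetical wrapper that keeps the mapping in one place, using the numbers from the in-code comments (0 = STABILIZE, 4 = GUIDED, 9 = LAND):

    # ArduCopter custom_mode numbers used by the scripts above
    COPTER_MODES = {"STABILIZE": "0", "GUIDED": "4", "LAND": "9"}

    def set_mode_by_name(controller, name):
        """Switch flight mode via the controller's existing /mavros/set_mode service proxy."""
        return controller.mode_service(custom_mode=COPTER_MODES[name])

    # e.g. set_mode_by_name(c, "GUIDED") instead of c.mode_service(custom_mode="4")
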
/scripts/opencv_depth_filtering.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | ######################################################
4 | ## Python implementation of the following examples ##
5 | ## https://github.com/IntelRealSense/librealsense/blob/master/doc/post-processing-filters.md
6 | ## https://github.com/IntelRealSense/librealsense/tree/master/wrappers/opencv/depth-filter
7 | ## https://github.com/IntelRealSense/librealsense/tree/master/examples/C/depth
8 | ## https://github.com/IntelRealSense/librealsense/blob/master/wrappers/python/examples/python-rs400-advanced-mode-example.py
9 | ######################################################
10 |
11 | # Install required packages:
12 | # pip3 install pyrealsense2
13 | # pip3 install opencv-python
14 |
15 | # First import the libraries
16 | import sys
17 | import pyrealsense2 as rs # Intel RealSense cross-platform open-source API
18 | import time
19 | import numpy as np # fundamental package for scientific computing
20 | import json
21 |
22 | # in order to import cv2 under python3 when you also have ROS installed
23 | import os
24 | if os.path.exists("/opt/ros/kinetic/lib/python2.7/dist-packages"):
25 | sys.path.remove('/opt/ros/kinetic/lib/python2.7/dist-packages')
26 | if os.path.exists(os.path.expanduser("~/anaconda3/lib/python3.7/site-packages")):
27 |     sys.path.append(os.path.expanduser("~/anaconda3/lib/python3.7/site-packages"))
28 | import cv2
29 |
30 | ######################################################
31 | ## These parameters are reconfigurable ##
32 | ######################################################
33 | STREAM_TYPE = [rs.stream.depth, rs.stream.color]  # rs2_stream identifies the type of data provided by the RealSense device
34 | FORMAT = [rs.format.z16, rs.format.bgr8]          # rs2_format identifies how binary data is encoded within a frame
35 | WIDTH = 848 # Defines the number of columns for each frame or zero for auto resolve
36 | HEIGHT = 480 # Defines the number of lines for each frame or zero for auto resolve
37 | FPS = 30 # Defines the rate of frames per second
38 | DISPLAY_WINDOW_NAME = 'Input/output depth'
39 | OPTION_WINDOW_NAME = 'Filter options'
40 |
41 | USE_PRESET_FILE = True
42 | PRESET_FILE = "../cfg/d4xx-default.json"
43 |
44 | # List of filters to be applied, in this order.
45 | # Depth Frame (input)
46 | # >> Decimation Filter (reduces depth frame density)
47 | # >> Threshold Filter (removes values outside recommended range)
48 | # >> Depth2Disparity Transform** (transform the scene into disparity domain)
49 | # >> Spatial Filter (edge-preserving spatial smoothing)
50 | # >> Temporal Filter (reduces temporal noise)
51 | # >> Hole Filling Filter (rectify missing data in the resulting image)
52 | # >> Disparity2Depth Transform** (revert the results back to depth)
53 | # >> Filtered Depth (output)
54 | filters = [
55 | [True, "Decimation Filter", rs.decimation_filter()],
56 | [True, "Threshold Filter", rs.threshold_filter()],
57 | [True, "Depth to Disparity", rs.disparity_transform(True)],
58 | [True, "Spatial Filter", rs.spatial_filter()],
59 | [True, "Temporal Filter", rs.temporal_filter()],
60 | [True, "Hole Filling Filter", rs.hole_filling_filter()],
61 | [True, "Disparity to Depth", rs.disparity_transform(False)]
62 | ]
63 |
64 | ######################################################
65 | ## Functions to change filtering options online ##
66 | ## Description for each option can be found at: ##
67 | ## https://github.com/IntelRealSense/librealsense/blob/master/doc/post-processing-filters.md
68 | ######################################################
69 | decimation_magnitude_min = 2
70 | decimation_magnitude_max = 8
71 | def on_trackbar_decimation(val):
72 | # Sanity check
73 | if val < decimation_magnitude_min:
74 | print("\nFilter magnitude for Decimation Filter cannot be smaller than ", decimation_magnitude_min)
75 | val = decimation_magnitude_min
76 | filters[0][2].set_option(rs.option.filter_magnitude, val)
77 |
78 | threshold_min_m = 0.15
79 | threshold_max_m = 10.0
80 | def on_trackbar_max_threshold(val_m):
81 | # Sanity check
82 | if val_m < threshold_min_m:
83 | print("\nMaximum threshold cannot be smaller than ", threshold_min_m)
84 | val_m = threshold_min_m
85 | elif val_m > threshold_max_m:
86 | print("\nMaximum threshold cannot be larger than ", threshold_max_m)
87 | val_m = threshold_max_m
88 | # filters[1][2].set_option(rs.option.min_distance, val_m)
89 | filters[1][2].set_option(rs.option.max_distance, val_m)
90 |
91 |
92 | spatial_magnitude_min = 1
93 | spatial_magnitude_max = 5
94 | def on_trackbar_spatial_magnitude(val):
95 | # Sanity check
96 | if val < spatial_magnitude_min:
97 | print("\nFilter magnitude for Spatial Filter cannot be smaller than ", spatial_magnitude_min)
98 | val = spatial_magnitude_min
99 | filters[3][2].set_option(rs.option.filter_magnitude, val)
100 |
101 | spatial_smooth_alpha_min = 0.25
102 | spatial_smooth_alpha_max = 1
103 | spatial_smooth_alpha_scaled_max = 10
104 | def on_trackbar_spatial_smooth_alpha(val):
105 | # Step in cv2 trackbar only allows discrete value, so we remap it from [0-spatial_smooth_alpha_scaled_max] to [0-spatial_smooth_alpha_max]
106 | val = val / spatial_smooth_alpha_scaled_max * spatial_smooth_alpha_max
107 | # Sanity check
108 | if val < spatial_smooth_alpha_min:
109 | print("\nFilter magnitude for Spatial Filter cannot be smaller than ", spatial_smooth_alpha_min)
110 | val = spatial_smooth_alpha_min
111 | filters[3][2].set_option(rs.option.filter_smooth_alpha, val)
112 |
113 | spatial_smooth_delta_min = 1
114 | spatial_smooth_delta_max = 50
115 | def on_trackbar_spatial_smooth_delta(val):
116 | # Sanity check
117 | if val < spatial_smooth_delta_min:
118 |         print("\nSmooth delta for Spatial Filter cannot be smaller than ", spatial_smooth_delta_min)
119 | val = spatial_smooth_delta_min
120 | filters[3][2].set_option(rs.option.filter_smooth_delta, val)
121 |
122 | spatial_hole_filling_min = 0
123 | spatial_hole_filling_max = 5
124 | def on_trackbar_spatial_hole_filling(val):
125 | # Sanity check
126 | if val < spatial_hole_filling_min:
127 |         print("\nHole filling value for Spatial Filter cannot be smaller than ", spatial_hole_filling_min)
128 | val = spatial_hole_filling_min
129 | filters[3][2].set_option(rs.option.holes_fill, val)
130 |
131 | hole_filling_filter_min = 0
132 | hole_filling_filter_max = 2
133 | def on_trackbar_hole_filling(val):
134 |     # Fill direction: 0 - from the left, 1 - farthest from around, 2 - nearest from around
135 | filters[5][2].set_option(rs.option.holes_fill, val)
136 |
137 | ######################################################
138 | ## Functions to interface with D4xx cameras ##
139 | ######################################################
140 | DS5_product_ids = ["0AD1", "0AD2", "0AD3", "0AD4", "0AD5", "0AF6", "0AFE", "0AFF", "0B00", "0B01", "0B03", "0B07","0B3A"]
141 |
142 | def find_device_that_supports_advanced_mode() :
143 | ctx = rs.context()
144 | ds5_dev = rs.device()
145 |     devices = ctx.query_devices()
146 | for dev in devices:
147 | if dev.supports(rs.camera_info.product_id) and str(dev.get_info(rs.camera_info.product_id)) in DS5_product_ids:
148 | if dev.supports(rs.camera_info.name):
149 | print("Found device that supports advanced mode:", dev.get_info(rs.camera_info.name))
150 | return dev
151 | raise Exception("No device that supports advanced mode was found")
152 |
153 | # Loop until we successfully enable advanced mode
154 | def d4xx_enable_advanced_mode(advnc_mode):
155 | while not advnc_mode.is_enabled():
156 | print("Trying to enable advanced mode...")
157 | advnc_mode.toggle_advanced_mode(True)
158 | # At this point the device will disconnect and re-connect.
159 | print("Sleeping for 5 seconds...")
160 | time.sleep(5)
161 | # The 'dev' object will become invalid and we need to initialize it again
162 | dev = find_device_that_supports_advanced_mode()
163 | advnc_mode = rs.rs400_advanced_mode(dev)
164 | print("Advanced mode is", "enabled" if advnc_mode.is_enabled() else "disabled")
165 |
166 | # Load the settings stored in the JSON file
167 | def d4xx_load_settings_file(advnc_mode, setting_file):
168 | # Sanity checks
169 | if os.path.isfile(setting_file):
170 | print("Setting file found", setting_file)
171 | else:
172 | print("Cannot find setting file ", setting_file)
173 | exit()
174 |
175 | if advnc_mode.is_enabled():
176 | print("Advanced mode is enabled")
177 | else:
178 | print("Device does not support advanced mode")
179 | exit()
180 |
181 | # Input for load_json() is the content of the json file, not the file path
182 | with open(setting_file, 'r') as file:
183 | json_text = file.read().strip()
184 |
185 | advnc_mode.load_json(json_text)
186 |
187 | ######################################################
188 | ## Main program starts here ##
189 | ######################################################
190 | try:
191 | if USE_PRESET_FILE:
192 | device = find_device_that_supports_advanced_mode()
193 | advnc_mode = rs.rs400_advanced_mode(device)
194 | d4xx_enable_advanced_mode(advnc_mode)
195 | d4xx_load_settings_file(advnc_mode, PRESET_FILE)
196 |
197 | # Create a context object. This object owns the handles to all connected realsense devices
198 | pipeline = rs.pipeline()
199 |
200 | # Configure depth and color streams
201 | config = rs.config()
202 | config.enable_stream(STREAM_TYPE[0], WIDTH, HEIGHT, FORMAT[0], FPS)
203 | config.enable_stream(STREAM_TYPE[1], WIDTH, HEIGHT, FORMAT[1], FPS)
204 | colorizer = rs.colorizer()
205 |
206 | for i in range(len(filters)):
207 | if filters[i][0] is True:
208 | print("Applying: ", filters[i][1])
209 | else:
210 | print("NOT applying: ", filters[i][1])
211 |
212 | # Start streaming
213 | profile = pipeline.start(config)
214 |
215 | # Create the image windows to be used
216 | cv2.namedWindow(OPTION_WINDOW_NAME, cv2.WINDOW_NORMAL)
217 | cv2.namedWindow(DISPLAY_WINDOW_NAME, cv2.WINDOW_AUTOSIZE)
218 |
219 | # Create trackbars for options modifiers
220 | # NOTE: - The trackbar's minimum is always zero and cannot be changed
221 | # - The trackbar's steps are discrete (so 0-1-2 etc.)
222 | cv2.createTrackbar('Decimation magnitude [2-8]', OPTION_WINDOW_NAME, 0, decimation_magnitude_max, on_trackbar_decimation)
223 |     cv2.createTrackbar('Max threshold [0.15-10] (m)', OPTION_WINDOW_NAME, int(threshold_max_m), int(threshold_max_m), on_trackbar_max_threshold)
224 | cv2.createTrackbar('Spatial magnitude [1-5]', OPTION_WINDOW_NAME, 0, spatial_magnitude_max, on_trackbar_spatial_magnitude)
225 | cv2.createTrackbar('Spatial smooth alpha [0.25-1]', OPTION_WINDOW_NAME, 0, spatial_smooth_alpha_scaled_max, on_trackbar_spatial_smooth_alpha)
226 | cv2.createTrackbar('Spatial smooth delta [1-50]', OPTION_WINDOW_NAME, 0, spatial_smooth_delta_max, on_trackbar_spatial_smooth_delta)
227 | cv2.createTrackbar('Spatial hole filling [0-5]', OPTION_WINDOW_NAME, 0, spatial_hole_filling_max, on_trackbar_spatial_hole_filling)
228 | cv2.createTrackbar('Hole filling direction [0-2]', OPTION_WINDOW_NAME, 0, hole_filling_filter_max, on_trackbar_hole_filling)
229 | # Avoid unnecessary blank space in the displayed window
230 | cv2.resizeWindow(OPTION_WINDOW_NAME, 600, 100)
231 |
232 | last_time = time.time()
233 | while True:
234 | # This call waits until a new coherent set of frames is available on a device
235 | # Calls to get_frame_data(...) and get_frame_timestamp(...) on a device will return stable values until wait_for_frames(...) is called
236 | frames = pipeline.wait_for_frames()
237 | depth_frame = frames.get_depth_frame()
238 |
239 | if not depth_frame:
240 | continue
241 |
242 | # Apply the filters
243 | filtered_frame = depth_frame
244 | for i in range(len(filters)):
245 | if filters[i][0] is True:
246 | filtered_frame = filters[i][2].process(filtered_frame)
247 |
248 | # Show the processing speed
249 | processing_speed = 1/(time.time() - last_time)
250 | print("\r>> Processing speed %.2f fps" %(processing_speed), end='')
251 | last_time = time.time()
252 |
253 | # Prepare the images
254 | input_image = np.asanyarray(colorizer.colorize(depth_frame).get_data())
255 | output_image = np.asanyarray(colorizer.colorize(filtered_frame).get_data())
256 | display_image = np.hstack((input_image, cv2.resize(output_image, (WIDTH, HEIGHT))))
257 |
258 | # Put the fps in the corner of the image
259 | text = ("%0.2f" % (processing_speed,)) + ' fps'
260 | textsize = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, 1, 2)[0]
261 | cv2.putText(display_image,
262 | text,
263 | org = (int((display_image.shape[1] - textsize[0]/2)), int((textsize[1])/2)),
264 | fontFace = cv2.FONT_HERSHEY_SIMPLEX,
265 | fontScale = 0.5,
266 | thickness = 1,
267 | color = (255, 255, 255))
268 |
269 | # Show the images with the fps
270 | cv2.imshow(DISPLAY_WINDOW_NAME, display_image)
271 | cv2.waitKey(1)
272 |
273 | except KeyboardInterrupt:
274 | print('Keyboard interrupt. Closing the script...')
275 |
276 | except Exception as e:
277 | print(e)
278 | pass
--------------------------------------------------------------------------------
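Note on the hard-coded trackbar ranges in the script above: pyrealsense2 can report each option's valid range directly from the processing block itself, so the hand-maintained *_min/*_max constants could be queried instead. A minimal sketch (assuming only that pyrealsense2 is installed; `get_option_range` returns an object with min/max/step/default fields):

    import pyrealsense2 as rs

    # Query the valid range of the decimation filter's magnitude instead of
    # hard-coding decimation_magnitude_min/max as the script above does.
    dec_filter = rs.decimation_filter()
    r = dec_filter.get_option_range(rs.option.filter_magnitude)
    print("decimation magnitude range:", r.min, r.max, "step:", r.step, "default:", r.default)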
/scripts/rs_depth.py:
--------------------------------------------------------------------------------
1 | ######################################################
2 | ## Python implementation of rs-depth.c example ##
3 | ## https://github.com/IntelRealSense/librealsense/tree/master/examples/C/depth
4 | ######################################################
5 |
6 | # First import the library
7 | import sys
8 | import pyrealsense2 as rs # Intel RealSense cross-platform open-source API
9 | import time
10 | import numpy as np # fundamental package for scientific computing
11 | from numba import njit # pip install numba, or use anaconda to install
12 |
13 |
14 | ######################################################
15 | ## These parameters are reconfigurable ##
16 | ######################################################
17 | STREAM_TYPE = rs.stream.depth  # rs2_stream identifies the type of data provided by the RealSense device
18 | FORMAT      = rs.format.z16    # rs2_format identifies how binary data is encoded within a frame
19 | WIDTH = 640 # Defines the number of columns for each frame or zero for auto resolve
20 | HEIGHT = 480 # Defines the number of lines for each frame or zero for auto resolve
21 | FPS = 30 # Defines the rate of frames per second
22 | HEIGHT_RATIO = 20 # Defines the height ratio between the original frame to the new frame
23 | WIDTH_RATIO = 10 # Defines the width ratio between the original frame to the new frame
24 | MAX_DEPTH = 1 # Approximate the coverage of pixels within this range (meter)
25 | ROW_LENGTH = int(WIDTH / WIDTH_RATIO)
26 | pixels = " .:nhBXWW" # The text-based representation of depth
27 |
28 | depth_scale = 0
29 |
30 | ######################################################
31 | ## Functions ##
32 | ######################################################
33 |
34 | @njit
35 | def calculate_depth_txt_img(depth_mat):
36 | img_txt = ""
37 | coverage = [0] * ROW_LENGTH
38 | for y in range(HEIGHT):
39 | # Create a depth histogram for each row
40 | for x in range(WIDTH):
41 |             # dist = depth_frame.get_distance(x, y) is simpler to implement, but calling `get_distance` excessively can result in bad performance (much slower), so it can be beneficial to read the `DEPTH_UNITS` option directly from the `depth_sensor` and use it to convert raw depth pixels to meters, which is what we do here
42 | dist = depth_mat[y,x] * depth_scale
43 | if 0 < dist and dist < MAX_DEPTH:
44 | coverage[x // WIDTH_RATIO] += 1
45 |
46 |         if y % HEIGHT_RATIO == (HEIGHT_RATIO - 1):
47 | line = ""
48 | for c in coverage:
49 | pixel_index = c // int(HEIGHT_RATIO * WIDTH_RATIO / (len(pixels) - 1)) # Magic number: c // 25
50 | line += pixels[pixel_index]
51 | coverage = [0] * ROW_LENGTH
52 | img_txt += line + "\n"
53 |
54 | return img_txt
55 |
56 | ######################################################
57 | ## Main program starts here ##
58 | ######################################################
59 | try:
60 | # Create a context object. This object owns the handles to all connected realsense devices
61 | pipeline = rs.pipeline()
62 | config = rs.config()
63 | config.enable_stream(STREAM_TYPE, WIDTH, HEIGHT, FORMAT, FPS)
64 | profile = pipeline.start(config)
65 |
66 | # Getting the depth sensor's depth scale (see rs-align example for explanation)
67 | depth_sensor = profile.get_device().first_depth_sensor()
68 | depth_scale = depth_sensor.get_depth_scale()
69 | print("Depth scale is: ", depth_scale)
70 |
71 | last_time = time.time()
72 |
73 | while True:
74 | # This call waits until a new coherent set of frames is available on a device
75 | # Calls to get_frame_data(...) and get_frame_timestamp(...) on a device will return stable values until wait_for_frames(...) is called
76 | frames = pipeline.wait_for_frames()
77 | depth_frame = frames.get_depth_frame()
78 |
79 | if not depth_frame:
80 | continue
81 |
82 | depth_data = depth_frame.as_frame().get_data()
83 | depth_array = np.asanyarray(depth_data)
84 |
85 |         # Print a simple text-based representation of the image, by breaking it into WIDTH_RATIO x HEIGHT_RATIO pixel regions and approximating the coverage of pixels within MAX_DEPTH
86 | img_txt = calculate_depth_txt_img(depth_array)
87 | print(img_txt)
88 |
89 | # Print some debugging messages
90 | processing_time = time.time() - last_time
91 | print("Text-based depth img within %.3f meter" % MAX_DEPTH)
92 | print("Processing time per image: %.3f sec" % processing_time)
93 | if processing_time > 0:
94 | print("Processing freq per image: %.3f Hz" % (1/processing_time))
95 | last_time = time.time()
96 |
97 | except Exception as e:
98 | print(e)
99 | pass
--------------------------------------------------------------------------------
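A quick worked example of the "magic number" used in calculate_depth_txt_img above: each text cell covers HEIGHT_RATIO x WIDTH_RATIO = 20 x 10 = 200 depth pixels, spread over len(pixels) - 1 = 8 glyph steps, hence the divisor of 25:

    HEIGHT_RATIO, WIDTH_RATIO = 20, 10
    pixels = " .:nhBXWW"
    divisor = int(HEIGHT_RATIO * WIDTH_RATIO / (len(pixels) - 1))
    print(divisor)                  # 25
    print(pixels[200 // divisor])   # 'W' - a fully covered cell maps to the densest glyph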
/scripts/rs_list_info.py:
--------------------------------------------------------------------------------
1 | #####################################################
2 | ## librealsense streams test ##
3 | #####################################################
4 | # This assumes the pyrealsense2 .so file is found in the same directory as this script
5 | import pyrealsense2 as rs
6 |
7 | # Prettier prints for reverse-engineering
8 | from pprint import pprint
9 |
10 | # Get realsense pipeline handle
11 | pipe = rs.pipeline()
12 |
13 | # Print all connected devices and find the T265
14 | devices = rs.context().devices
15 | for i in range(len(devices)):
16 | print('---------------------------')
17 | # Other fields of camera_info: https://intelrealsense.github.io/librealsense/python_docs/_generated/pyrealsense2.camera_info.html
18 | print('Found connected device #', i + 1, ':', devices[i].get_info(rs.camera_info.name), ', serial no: ', devices[i].get_info(rs.camera_info.serial_number))
19 | print('Available streams for this device:')
20 | pprint(dir(rs.stream))
--------------------------------------------------------------------------------
/scripts/rs_to_mavlink.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | ######################################################
4 | ## Running these two scripts simultaneously: ##
5 | ## - t265_to_mavlink.py ##
6 | ## - d4xx_to_mavlink.py ##
7 | ######################################################
8 |
9 | # Install required packages:
10 | # pip3 install MAVProxy
11 |
12 | import os
13 | import threading
14 |
15 | connection_in_port = "/dev/ttyUSB0"
16 | connection_in_baud = "921600"
17 | connection_out_p01 = "127.0.0.1:14550" # T265
18 | connection_out_p02 = "127.0.0.1:14560" # D4xx
19 | connection_out_p03 = "127.0.0.1:14570" # Control (GUIDED)
20 |
21 | def mavproxy_create_connection():
22 | os.system("mavproxy.py" + \
23 | " --master=" + connection_in_port + \
24 | " --baudrate=" + connection_in_baud + \
25 | " --out udp:" + connection_out_p01 + \
26 | " --out udp:" + connection_out_p02 + \
27 | " --out udp:" + connection_out_p03)
28 |
29 | def run_t265():
30 | os.system("python3 t265_to_mavlink.py --connect=" + connection_out_p01)
31 |
32 | def run_d4xx():
33 | os.system("python3 d4xx_to_mavlink.py --connect=" + connection_out_p02)
34 |
35 | def run_control():
36 | os.system("python3 mavlink_control.py --connect=" + connection_out_p03)
37 |
38 | thread1 = threading.Thread(target=mavproxy_create_connection)
39 | thread1.start()
40 |
41 | thread2 = threading.Thread(target=run_t265)
42 | thread2.start()
43 |
44 | thread3 = threading.Thread(target=run_d4xx)
45 | thread3.start()
46 |
47 | # thread4 = threading.Thread(target=run_control)
48 | # thread4.start()
--------------------------------------------------------------------------------
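The script above fans one serial connection out to several UDP endpoints with MAVProxy and launches the two bridge scripts against them. A minimal alternative sketch, assuming the same ports and file names as above, that uses subprocess.Popen instead of os.system + threading so all child processes can be stopped together with Ctrl-C:

    import signal
    import subprocess

    procs = [
        subprocess.Popen(["mavproxy.py", "--master=/dev/ttyUSB0", "--baudrate=921600",
                          "--out", "udp:127.0.0.1:14550", "--out", "udp:127.0.0.1:14560"]),
        subprocess.Popen(["python3", "t265_to_mavlink.py", "--connect=127.0.0.1:14550"]),
        subprocess.Popen(["python3", "d4xx_to_mavlink.py", "--connect=127.0.0.1:14560"]),
    ]
    try:
        for p in procs:
            p.wait()
    except KeyboardInterrupt:
        for p in procs:
            p.send_signal(signal.SIGINT)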
/scripts/set_origin.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | ##
4 | #
5 | # Send SET_GPS_GLOBAL_ORIGIN and SET_HOME_POSITION messages
6 | #
7 | ##
8 |
9 | import rospy
10 | from pymavlink.dialects.v10 import ardupilotmega as MAV_APM
11 | from mavros.mavlink import convert_to_rosmsg
12 | from mavros_msgs.msg import Mavlink
13 |
14 | # Global position of the origin (MAVLink units: lat/lon in degrees * 1e7, alt in millimeters)
15 | lat = 425633500     # Terni, 42.56335 deg
16 | lon = 126432900     # Terni, 12.64329 deg
17 | alt = 163000        # 163 m
18 |
19 | class fifo(object):
20 | """ A simple buffer """
21 | def __init__(self):
22 | self.buf = []
23 | def write(self, data):
24 | self.buf += data
25 | return len(data)
26 | def read(self):
27 | return self.buf.pop(0)
28 |
29 | def send_message(msg, mav, pub):
30 | """
31 | Send a mavlink message
32 | """
33 | msg.pack(mav)
34 | rosmsg = convert_to_rosmsg(msg)
35 | pub.publish(rosmsg)
36 |
37 | print("sent message %s" % msg)
38 |
39 | def set_global_origin(mav, pub):
40 | """
41 | Send a mavlink SET_GPS_GLOBAL_ORIGIN message, which allows us
42 | to use local position information without a GPS.
43 | """
44 | target_system = mav.srcSystem
45 | #target_system = 0 # 0 --> broadcast to everyone
46 |     latitude = lat
47 | longitude = lon
48 | altitude = alt
49 |
50 | msg = MAV_APM.MAVLink_set_gps_global_origin_message(
51 | target_system,
52 |             latitude,
53 | longitude,
54 | altitude)
55 |
56 | send_message(msg, mav, pub)
57 |
58 | def set_home_position(mav, pub):
59 | """
60 | Send a mavlink SET_HOME_POSITION message, which should allow
61 | us to use local position information without a GPS
62 | """
63 | target_system = mav.srcSystem
64 | #target_system = 0 # broadcast to everyone
65 |
66 |     latitude = lat
67 | longitude = lon
68 | altitude = alt
69 |
70 | x = 0
71 | y = 0
72 | z = 0
73 | q = [1, 0, 0, 0] # w x y z
74 |
75 | approach_x = 0
76 | approach_y = 0
77 | approach_z = 1
78 |
79 | msg = MAV_APM.MAVLink_set_home_position_message(
80 | target_system,
81 |             latitude,
82 | longitude,
83 | altitude,
84 | x,
85 | y,
86 | z,
87 | q,
88 | approach_x,
89 | approach_y,
90 | approach_z)
91 |
92 | send_message(msg, mav, pub)
93 |
94 | if __name__=="__main__":
95 | try:
96 | rospy.init_node("origin_publisher")
97 | mavlink_pub = rospy.Publisher("/mavlink/to", Mavlink, queue_size=20)
98 |
99 | # Set up mavlink instance
100 | f = fifo()
101 | mav = MAV_APM.MAVLink(f, srcSystem=1, srcComponent=1)
102 |
103 | # wait to initialize
104 | while mavlink_pub.get_num_connections() <= 0:
105 | pass
106 |
107 | for _ in range(2):
108 | rospy.sleep(1)
109 | set_global_origin(mav, mavlink_pub)
110 | set_home_position(mav, mavlink_pub)
111 | except rospy.ROSInterruptException:
112 | pass
113 |
114 |
--------------------------------------------------------------------------------
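For reference, the integer fields above follow the MAVLink convention for SET_GPS_GLOBAL_ORIGIN / SET_HOME_POSITION: latitude and longitude are degrees scaled by 1e7, and altitude is in millimeters, so the hard-coded origin decodes as:

    lat_e7, lon_e7, alt_mm = 425633500, 126432900, 163000
    print(lat_e7 / 1e7, "deg")    # 42.56335  (Terni latitude)
    print(lon_e7 / 1e7, "deg")    # 12.64329  (Terni longitude)
    print(alt_mm / 1000.0, "m")   # 163.0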
/scripts/t265.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This is /usr/bin/your_config-start.sh
3 | # # do all your commands here... script terminates when all is done.
4 |
5 | # Change the path to t265_to_mavlink.py
6 | /home/ubuntu/catkin_ws/src/vision_to_mavros/scripts/t265_to_mavlink.py
--------------------------------------------------------------------------------
/scripts/t265_test_streams.py:
--------------------------------------------------------------------------------
1 | #####################################################
2 | ## librealsense T265 streams test ##
3 | #####################################################
4 | # This assumes the pyrealsense2 .so file is found in the same directory as this script
5 | import pyrealsense2 as rs
6 |
7 | # Prettier prints for reverse-engineering
8 | from pprint import pprint
9 | import numpy as np
10 |
11 | # Get realsense pipeline handle
12 | pipe = rs.pipeline()
13 |
14 | # Print all connected devices and find the T265
15 | devices = rs.context().devices
16 | for i in range(len(devices)):
17 | print('Found device:', devices[i].get_info(rs.camera_info.name), ', with serial number: ', devices[i].get_info(rs.camera_info.serial_number))
18 |
19 | # Configure the pipeline
20 | cfg = rs.config()
21 |
22 | # Prints a list of available streams, not all are supported by each device
23 | print('Available streams:')
24 | pprint(dir(rs.stream))
25 |
26 | # Enable streams you are interested in
27 | cfg.enable_stream(rs.stream.pose) # Positional data (translation, rotation, velocity etc)
28 | cfg.enable_stream(rs.stream.fisheye, 1) # Left camera
29 | cfg.enable_stream(rs.stream.fisheye, 2) # Right camera
30 |
31 | # Start the configured pipeline
32 | pipe.start(cfg)
33 |
34 | try:
35 |     while True:
36 | frames = pipe.wait_for_frames()
37 |
38 | # Left fisheye camera frame
39 | left = frames.get_fisheye_frame(1)
40 | left_data = np.asanyarray(left.get_data())
41 |
42 | # Right fisheye camera frame
43 | right = frames.get_fisheye_frame(2)
44 | right_data = np.asanyarray(right.get_data())
45 |
46 | print('Left frame', left_data.shape)
47 | print('Right frame', right_data.shape)
48 |
49 | # Positional data frame
50 | pose = frames.get_pose_frame()
51 | if pose:
52 | pose_data = pose.get_pose_data()
53 | print("\nFrame number: %5.0f" % (pose.frame_number))
54 | print("Position xyz: % 2.4f % 2.4f % 2.4f" % (pose_data.translation.x, pose_data.translation.y, pose_data.translation.z))
55 | print("Velocity xyz: % 2.4f % 2.4f % 2.4f" % (pose_data.velocity.x, pose_data.velocity.y, pose_data.velocity.z))
56 | print("Accelera xyz: % 2.4f % 2.4f % 2.4f" % (pose_data.acceleration.x, pose_data.acceleration.y, pose_data.acceleration.z))
57 | print("Quatern xyzw: % 2.4f % 2.4f % 2.4f % 2.4f" % (pose_data.rotation.x, pose_data.rotation.y, pose_data.rotation.z, pose_data.rotation.w))
58 | finally:
59 | pipe.stop()
60 |
--------------------------------------------------------------------------------
/scripts/t265_to_mavlink.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | #####################################################
4 | ## librealsense T265 to MAVLink ##
5 | #####################################################
6 | # This script assumes pyrealsense2.[].so file is found under the same directory as this script
7 | # Install required packages:
8 | # pip3 install pyrealsense2
9 | # pip3 install transformations
10 | # pip3 install pymavlink
11 | # pip3 install apscheduler
12 | # pip3 install pyserial
13 |
14 | # Set the path for IDLE
15 | import sys
16 | sys.path.append("/usr/local/lib/")
17 |
18 | # Set MAVLink protocol to 2.
19 | import os
20 | os.environ["MAVLINK20"] = "1"
21 |
22 | # Import the libraries
23 | import pyrealsense2 as rs
24 | import numpy as np
25 | import transformations as tf
26 | import math as m
27 | import time
28 | import argparse
29 | import threading
30 | import signal
31 |
32 | from time import sleep
33 | from apscheduler.schedulers.background import BackgroundScheduler
34 | from pymavlink import mavutil
35 |
36 |
37 | # Replacement of the standard print() function to flush the output
38 | def progress(string):
39 | print(string, file=sys.stdout)
40 | sys.stdout.flush()
41 |
42 |
43 | #######################################
44 | # Parameters
45 | #######################################
46 |
47 | # Default configurations for connection to the FCU
48 | connection_string_default = '/dev/ttyUSB0'
49 | connection_baudrate_default = 921600
50 | connection_timeout_sec_default = 5
51 |
52 | # Transformation to convert different camera orientations to NED convention. Replace camera_orientation_default with your configuration.
53 | # 0: Forward, USB port to the right
54 | # 1: Downfacing, USB port to the right
55 | # 2: Forward, 45 degree tilted down, USB port to the right
56 | # 3: Downfacing, USB port to the back
57 | # Important note for a downfacing camera: tilt the vehicle's nose up a little - not flat - before you run the script, otherwise the initial yaw will be randomized; read here for more details: https://github.com/IntelRealSense/librealsense/issues/4080. Tilting the vehicle to any other side can make the yaw less stable.
58 | camera_orientation_default = 0
59 |
60 | # https://mavlink.io/en/messages/common.html#VISION_POSITION_ESTIMATE
61 | enable_msg_vision_position_estimate = True
62 | vision_position_estimate_msg_hz_default = 30.0
63 |
64 | # https://mavlink.io/en/messages/ardupilotmega.html#VISION_POSITION_DELTA
65 | enable_msg_vision_position_delta = False
66 | vision_position_delta_msg_hz_default = 30.0
67 |
68 | # https://mavlink.io/en/messages/common.html#VISION_SPEED_ESTIMATE
69 | enable_msg_vision_speed_estimate = True
70 | vision_speed_estimate_msg_hz_default = 30.0
71 |
72 | # https://mavlink.io/en/messages/common.html#STATUSTEXT
73 | enable_update_tracking_confidence_to_gcs = True
74 | update_tracking_confidence_to_gcs_hz_default = 1.0
75 |
76 | # Monitor the user's keyboard input at runtime; can only be used when the script is run from a terminal
77 | enable_user_keyboard_input = False
78 |
79 | # Default global position for EKF home/ origin
80 | enable_auto_set_ekf_home = False
81 | home_lat = 151269321 # Somewhere random
82 | home_lon = 16624301 # Somewhere random
83 | home_alt = 163000 # Somewhere random
84 |
85 | # TODO: Taken care of by ArduPilot, so can be removed (once the handling on AP side is confirmed stable)
86 | # In NED frame, offset from the IMU or the center of gravity to the camera's origin point
87 | body_offset_enabled = 0
88 | body_offset_x = 0 # In meters (m)
89 | body_offset_y = 0 # In meters (m)
90 | body_offset_z = 0 # In meters (m)
91 |
92 | # Global scale factor, position x y z will be scaled up/down by this factor
93 | scale_factor = 1.0
94 |
95 | # Enable using yaw from compass to align north (zero degree is facing north)
96 | compass_enabled = 0
97 |
98 | # pose data confidence: 0x0 - Failed / 0x1 - Low / 0x2 - Medium / 0x3 - High
99 | pose_data_confidence_level = ('FAILED', 'Low', 'Medium', 'High')
100 |
101 | # lock for thread synchronization
102 | lock = threading.Lock()
103 | mavlink_thread_should_exit = False
104 |
105 | # default exit code is failure - a graceful termination with a
106 | # terminate signal is possible.
107 | exit_code = 1
108 |
109 |
110 | #######################################
111 | # Global variables
112 | #######################################
113 |
114 | # FCU connection variables
115 |
116 | # Camera-related variables
117 | pipe = None
118 | pose_sensor = None
119 | linear_accel_cov = 0.01
120 | angular_vel_cov = 0.01
121 |
122 | # Data variables
123 | data = None
124 | prev_data = None
125 | H_aeroRef_aeroBody = None
126 | V_aeroRef_aeroBody = None
127 | heading_north_yaw = None
128 | current_confidence_level = None
129 | current_time_us = 0
130 |
131 | # Incremented every time pose jumping or relocalization happens
132 | # See here: https://github.com/IntelRealSense/librealsense/blob/master/doc/t265.md#are-there-any-t265-specific-options
133 | # For ArduPilot, a non-zero "reset_counter" also confirms that the sender's setup is using MAVLink2
134 | reset_counter = 1
135 |
136 | #######################################
137 | # Parsing user inputs
138 | #######################################
139 |
140 | parser = argparse.ArgumentParser(description='Sends pose data from an Intel RealSense T265 camera to ArduPilot via MAVLink')
141 | parser.add_argument('--connect',
142 | help="Vehicle connection target string. If not specified, a default string will be used.")
143 | parser.add_argument('--baudrate', type=float,
144 | help="Vehicle connection baudrate. If not specified, a default value will be used.")
145 | parser.add_argument('--vision_position_estimate_msg_hz', type=float,
146 | help="Update frequency for VISION_POSITION_ESTIMATE message. If not specified, a default value will be used.")
147 | parser.add_argument('--vision_position_delta_msg_hz', type=float,
148 | help="Update frequency for VISION_POSITION_DELTA message. If not specified, a default value will be used.")
149 | parser.add_argument('--vision_speed_estimate_msg_hz', type=float,
150 |                     help="Update frequency for VISION_SPEED_ESTIMATE message. If not specified, a default value will be used.")
151 | parser.add_argument('--scale_calib_enable', default=False, action='store_true',
152 | help="Scale calibration. Only run while NOT in flight")
153 | parser.add_argument('--camera_orientation', type=int,
154 |                     help="Configuration for camera orientation. Currently supported: 0 - forward, USB port to the right; 1 - downfacing, USB port to the right; 2 - forward, tilted down 45 degrees; 3 - downfacing, USB port to the back")
155 | parser.add_argument('--debug_enable',type=int,
156 | help="Enable debug messages on terminal")
157 |
158 | args = parser.parse_args()
159 |
160 | connection_string = args.connect
161 | connection_baudrate = args.baudrate
162 | vision_position_estimate_msg_hz = args.vision_position_estimate_msg_hz
163 | vision_position_delta_msg_hz = args.vision_position_delta_msg_hz
164 | vision_speed_estimate_msg_hz = args.vision_speed_estimate_msg_hz
165 | scale_calib_enable = args.scale_calib_enable
166 | camera_orientation = args.camera_orientation
167 | debug_enable = args.debug_enable
168 |
169 | # Using default values if no specified inputs
170 | if not connection_string:
171 | connection_string = connection_string_default
172 | progress("INFO: Using default connection_string %s" % connection_string)
173 | else:
174 | progress("INFO: Using connection_string %s" % connection_string)
175 |
176 | if not connection_baudrate:
177 | connection_baudrate = connection_baudrate_default
178 | progress("INFO: Using default connection_baudrate %s" % connection_baudrate)
179 | else:
180 | progress("INFO: Using connection_baudrate %s" % connection_baudrate)
181 |
182 | if not vision_position_estimate_msg_hz:
183 | vision_position_estimate_msg_hz = vision_position_estimate_msg_hz_default
184 | progress("INFO: Using default vision_position_estimate_msg_hz %s" % vision_position_estimate_msg_hz)
185 | else:
186 | progress("INFO: Using vision_position_estimate_msg_hz %s" % vision_position_estimate_msg_hz)
187 |
188 | if not vision_position_delta_msg_hz:
189 | vision_position_delta_msg_hz = vision_position_delta_msg_hz_default
190 | progress("INFO: Using default vision_position_delta_msg_hz %s" % vision_position_delta_msg_hz)
191 | else:
192 | progress("INFO: Using vision_position_delta_msg_hz %s" % vision_position_delta_msg_hz)
193 |
194 | if not vision_speed_estimate_msg_hz:
195 | vision_speed_estimate_msg_hz = vision_speed_estimate_msg_hz_default
196 | progress("INFO: Using default vision_speed_estimate_msg_hz %s" % vision_speed_estimate_msg_hz)
197 | else:
198 | progress("INFO: Using vision_speed_estimate_msg_hz %s" % vision_speed_estimate_msg_hz)
199 |
200 | if body_offset_enabled == 1:
201 | progress("INFO: Using camera position offset: Enabled, x y z is %s %s %s" % (body_offset_x, body_offset_y, body_offset_z))
202 | else:
203 | progress("INFO: Using camera position offset: Disabled")
204 |
205 | if compass_enabled == 1:
206 | progress("INFO: Using compass: Enabled. Heading will be aligned to north.")
207 | else:
208 | progress("INFO: Using compass: Disabled")
209 |
210 | if scale_calib_enable == True:
211 | progress("\nINFO: SCALE CALIBRATION PROCESS. DO NOT RUN DURING FLIGHT.\nINFO: TYPE IN NEW SCALE IN FLOATING POINT FORMAT\n")
212 | else:
213 | if scale_factor == 1.0:
214 | progress("INFO: Using default scale factor %s" % scale_factor)
215 | else:
216 | progress("INFO: Using scale factor %s" % scale_factor)
217 |
218 | if camera_orientation is None:
219 | camera_orientation = camera_orientation_default
220 | progress("INFO: Using default camera orientation %s" % camera_orientation)
221 | else:
222 | progress("INFO: Using camera orientation %s" % camera_orientation)
223 |
224 | if camera_orientation == 0: # Forward, USB port to the right
225 | H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
226 | H_T265body_aeroBody = np.linalg.inv(H_aeroRef_T265Ref)
227 | elif camera_orientation == 1: # Downfacing, USB port to the right
228 | H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
229 | H_T265body_aeroBody = np.array([[0,1,0,0],[1,0,0,0],[0,0,-1,0],[0,0,0,1]])
230 | elif camera_orientation == 2: # 45degree forward, USB port to the right
231 | H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
232 | H_T265body_aeroBody = (tf.euler_matrix(m.pi/4, 0, 0)).dot(np.linalg.inv(H_aeroRef_T265Ref))
233 | elif camera_orientation == 3: # Downfacing, USB port to the back
234 | H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
235 | H_T265body_aeroBody = np.array([[-1,0,0,0],[0,1,0,0],[0,0,-1,0],[0,0,0,1]])
236 | else: # Default is facing forward, USB port to the right
237 | H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
238 | H_T265body_aeroBody = np.linalg.inv(H_aeroRef_T265Ref)
239 |
240 | if not debug_enable:
241 | debug_enable = 0
242 | else:
243 | debug_enable = 1
244 | np.set_printoptions(precision=4, suppress=True) # Format output on terminal
245 | progress("INFO: Debug messages enabled.")
246 |
247 |
248 | #######################################
249 | # Functions - MAVLink
250 | #######################################
251 |
252 | def mavlink_loop(conn, callbacks):
253 | '''a main routine for a thread; reads data from a mavlink connection,
254 | calling callbacks based on message type received.
255 | '''
256 | interesting_messages = list(callbacks.keys())
257 | while not mavlink_thread_should_exit:
258 | # send a heartbeat msg
259 | conn.mav.heartbeat_send(mavutil.mavlink.MAV_TYPE_ONBOARD_CONTROLLER,
260 | mavutil.mavlink.MAV_AUTOPILOT_GENERIC,
261 | 0,
262 | 0,
263 | 0)
264 | m = conn.recv_match(type=interesting_messages, timeout=1, blocking=True)
265 | if m is None:
266 | continue
267 | callbacks[m.get_type()](m)
268 |
269 | # https://mavlink.io/en/messages/common.html#VISION_POSITION_ESTIMATE
270 | def send_vision_position_estimate_message():
271 | global current_time_us, H_aeroRef_aeroBody, reset_counter
272 | with lock:
273 | if H_aeroRef_aeroBody is not None:
274 | # Setup angle data
275 | rpy_rad = np.array( tf.euler_from_matrix(H_aeroRef_aeroBody, 'sxyz'))
276 |
277 | # Setup covariance data, which is the upper right triangle of the covariance matrix, see here: https://files.gitter.im/ArduPilot/VisionProjects/1DpU/image.png
278 |             # Attempt #1: following this formula https://github.com/IntelRealSense/realsense-ros/blob/development/realsense2_camera/src/base_realsense_node.cpp#L1406-L1411
279 | cov_pose = linear_accel_cov * pow(10, 3 - int(data.tracker_confidence))
280 | cov_twist = angular_vel_cov * pow(10, 1 - int(data.tracker_confidence))
281 | covariance = np.array([cov_pose, 0, 0, 0, 0, 0,
282 | cov_pose, 0, 0, 0, 0,
283 | cov_pose, 0, 0, 0,
284 | cov_twist, 0, 0,
285 | cov_twist, 0,
286 | cov_twist])
287 |
288 | # Send the message
289 | conn.mav.vision_position_estimate_send(
290 | current_time_us, # us Timestamp (UNIX time or time since system boot)
291 | H_aeroRef_aeroBody[0][3], # Global X position
292 | H_aeroRef_aeroBody[1][3], # Global Y position
293 | H_aeroRef_aeroBody[2][3], # Global Z position
294 | rpy_rad[0], # Roll angle
295 | rpy_rad[1], # Pitch angle
296 | rpy_rad[2], # Yaw angle
297 | covariance, # Row-major representation of pose 6x6 cross-covariance matrix
298 | reset_counter # Estimate reset counter. Increment every time pose estimate jumps.
299 | )
300 |
301 | # https://mavlink.io/en/messages/ardupilotmega.html#VISION_POSITION_DELTA
302 | def send_vision_position_delta_message():
303 | global current_time_us, current_confidence_level, H_aeroRef_aeroBody
304 | with lock:
305 | if H_aeroRef_aeroBody is not None:
306 | # Calculate the deltas in position, attitude and time from the previous to current orientation
307 | H_aeroRef_PrevAeroBody = send_vision_position_delta_message.H_aeroRef_PrevAeroBody
308 | H_PrevAeroBody_CurrAeroBody = (np.linalg.inv(H_aeroRef_PrevAeroBody)).dot(H_aeroRef_aeroBody)
309 |
310 | delta_time_us = current_time_us - send_vision_position_delta_message.prev_time_us
311 | delta_position_m = [H_PrevAeroBody_CurrAeroBody[0][3], H_PrevAeroBody_CurrAeroBody[1][3], H_PrevAeroBody_CurrAeroBody[2][3]]
312 | delta_angle_rad = np.array( tf.euler_from_matrix(H_PrevAeroBody_CurrAeroBody, 'sxyz'))
313 |
314 | # Send the message
315 | conn.mav.vision_position_delta_send(
316 | current_time_us, # us: Timestamp (UNIX time or time since system boot)
317 | delta_time_us, # us: Time since last reported camera frame
318 | delta_angle_rad, # float[3] in radian: Defines a rotation vector in body frame that rotates the vehicle from the previous to the current orientation
319 | delta_position_m, # float[3] in m: Change in position from previous to current frame rotated into body frame (0=forward, 1=right, 2=down)
320 | current_confidence_level # Normalized confidence value from 0 to 100.
321 | )
322 |
323 | # Save static variables
324 | send_vision_position_delta_message.H_aeroRef_PrevAeroBody = H_aeroRef_aeroBody
325 | send_vision_position_delta_message.prev_time_us = current_time_us
326 |
327 | # https://mavlink.io/en/messages/common.html#VISION_SPEED_ESTIMATE
328 | def send_vision_speed_estimate_message():
329 | global current_time_us, V_aeroRef_aeroBody, reset_counter
330 | with lock:
331 | if V_aeroRef_aeroBody is not None:
332 |
333 |             # Attempt #1: following this formula https://github.com/IntelRealSense/realsense-ros/blob/development/realsense2_camera/src/base_realsense_node.cpp#L1406-L1411
334 | cov_pose = linear_accel_cov * pow(10, 3 - int(data.tracker_confidence))
335 | covariance = np.array([cov_pose, 0, 0,
336 | 0, cov_pose, 0,
337 | 0, 0, cov_pose])
338 |
339 | # Send the message
340 | conn.mav.vision_speed_estimate_send(
341 | current_time_us, # us Timestamp (UNIX time or time since system boot)
342 | V_aeroRef_aeroBody[0][3], # Global X speed
343 | V_aeroRef_aeroBody[1][3], # Global Y speed
344 | V_aeroRef_aeroBody[2][3], # Global Z speed
345 | covariance, # covariance
346 | reset_counter # Estimate reset counter. Increment every time pose estimate jumps.
347 | )
348 |
349 | # Update the changes of confidence level on GCS and terminal
350 | def update_tracking_confidence_to_gcs():
351 | if data is not None and update_tracking_confidence_to_gcs.prev_confidence_level != data.tracker_confidence:
352 | confidence_status_string = 'Tracking confidence: ' + pose_data_confidence_level[data.tracker_confidence]
353 | send_msg_to_gcs(confidence_status_string)
354 | update_tracking_confidence_to_gcs.prev_confidence_level = data.tracker_confidence
355 |
356 | # https://mavlink.io/en/messages/common.html#STATUSTEXT
357 | def send_msg_to_gcs(text_to_be_sent):
358 | # MAV_SEVERITY: 0=EMERGENCY 1=ALERT 2=CRITICAL 3=ERROR, 4=WARNING, 5=NOTICE, 6=INFO, 7=DEBUG, 8=ENUM_END
359 | text_msg = 'T265: ' + text_to_be_sent
360 | conn.mav.statustext_send(mavutil.mavlink.MAV_SEVERITY_INFO, text_msg.encode())
361 | progress("INFO: %s" % text_to_be_sent)
362 |
363 |
364 | # Send a mavlink SET_GPS_GLOBAL_ORIGIN message (http://mavlink.org/messages/common#SET_GPS_GLOBAL_ORIGIN), which allows us to use local position information without a GPS.
365 | def set_default_global_origin():
366 | conn.mav.set_gps_global_origin_send(
367 | 1,
368 | home_lat,
369 | home_lon,
370 | home_alt
371 | )
372 |
373 | # Send a mavlink SET_HOME_POSITION message (http://mavlink.org/messages/common#SET_HOME_POSITION), which allows us to use local position information without a GPS.
374 | def set_default_home_position():
375 | x = 0
376 | y = 0
377 | z = 0
378 | q = [1, 0, 0, 0] # w x y z
379 |
380 | approach_x = 0
381 | approach_y = 0
382 | approach_z = 1
383 |
384 | conn.mav.set_home_position_send(
385 | 1,
386 | home_lat,
387 | home_lon,
388 | home_alt,
389 | x,
390 | y,
391 | z,
392 | q,
393 | approach_x,
394 | approach_y,
395 | approach_z
396 | )
397 |
398 |
399 | # Request a timesync update from the flight controller, for future work.
400 | # TODO: Inspect the usage of timesync_update
401 | def update_timesync(ts=0, tc=0):
402 | if ts == 0:
403 | ts = int(round(time.time() * 1000))
404 | conn.mav.timesync_send(
405 | tc, # tc1
406 | ts # ts1
407 | )
408 |
409 | # Listen to attitude data to acquire heading when compass data is enabled
410 | def att_msg_callback(value):
411 | global heading_north_yaw
412 | if heading_north_yaw is None:
413 | heading_north_yaw = value.yaw
414 | progress("INFO: Received first ATTITUDE message with heading yaw %.2f degrees" % m.degrees(heading_north_yaw))
415 |
416 | #######################################
417 | # Functions - T265
418 | #######################################
419 |
420 | def increment_reset_counter():
421 | global reset_counter
422 | if reset_counter >= 255:
423 | reset_counter = 1
424 | reset_counter += 1
425 |
426 | # List of notification events: https://github.com/IntelRealSense/librealsense/blob/development/include/librealsense2/h/rs_types.h
427 | # List of notification API: https://github.com/IntelRealSense/librealsense/blob/development/common/notifications.cpp
428 | def realsense_notification_callback(notif):
429 |     progress("INFO: T265 event: " + str(notif))
430 | if notif.get_category() is rs.notification_category.pose_relocalization:
431 | increment_reset_counter()
432 | send_msg_to_gcs('Relocalization detected')
433 |
434 | def realsense_connect():
435 | global pipe, pose_sensor
436 |
437 | # Declare RealSense pipeline, encapsulating the actual device and sensors
438 | pipe = rs.pipeline()
439 |
440 | # Build config object before requesting data
441 | cfg = rs.config()
442 |
443 | # Enable the stream we are interested in
444 | cfg.enable_stream(rs.stream.pose) # Positional data
445 |
446 | # Configure callback for relocalization event
447 | device = cfg.resolve(pipe).get_device()
448 | pose_sensor = device.first_pose_sensor()
449 | pose_sensor.set_notifications_callback(realsense_notification_callback)
450 |
451 | # Start streaming with requested config
452 | pipe.start(cfg)
453 |
454 | #######################################
455 | # Functions - Miscellaneous
456 | #######################################
457 |
458 | # Monitor user input from the terminal and perform action accordingly
459 | def user_input_monitor():
460 | global scale_factor
461 | while True:
462 | # Special case: updating scale
463 | if scale_calib_enable == True:
464 | scale_factor = float(input("INFO: Type in new scale as float number\n"))
465 | progress("INFO: New scale is %s" % scale_factor)
466 |
467 | if enable_auto_set_ekf_home:
468 | send_msg_to_gcs('Set EKF home with default GPS location')
469 | set_default_global_origin()
470 | set_default_home_position()
471 | time.sleep(1) # Wait a short while for FCU to start working
472 |
473 | # Add new action here according to the key pressed.
474 | # Enter: Set EKF home when user press enter
475 | try:
476 | c = input()
477 | if c == "":
478 | send_msg_to_gcs('Set EKF home with default GPS location')
479 | set_default_global_origin()
480 | set_default_home_position()
481 | else:
482 | progress("Got keyboard input %s" % c)
483 | except IOError: pass
484 |
485 |
486 | #######################################
487 | # Main code starts here
488 | #######################################
489 |
490 | try:
491 | progress("INFO: pyrealsense2 version: %s" % str(rs.__version__))
492 | except Exception:
493 | # fail silently
494 | pass
495 |
496 | progress("INFO: Starting Vehicle communications")
497 | conn = mavutil.mavlink_connection(
498 | connection_string,
499 | autoreconnect = True,
500 | source_system = 1,
501 | source_component = 93,
502 | baud=connection_baudrate,
503 | force_connected=True,
504 | )
505 |
506 | mavlink_callbacks = {
507 | 'ATTITUDE': att_msg_callback,
508 | }
509 |
510 | mavlink_thread = threading.Thread(target=mavlink_loop, args=(conn, mavlink_callbacks))
511 | mavlink_thread.start()
512 |
513 | # connecting and configuring the camera is a little hit-and-miss.
514 | # Start a timer and rely on a restart of the script to get it working.
515 | # Configuring the camera appears to block all threads, so we can't do
516 | # this internally.
517 |
518 | # send_msg_to_gcs('Setting timer...')
519 | signal.setitimer(signal.ITIMER_REAL, 5) # seconds...
520 |
521 | send_msg_to_gcs('Connecting to camera...')
522 | realsense_connect()
523 | send_msg_to_gcs('Camera connected.')
524 |
525 | signal.setitimer(signal.ITIMER_REAL, 0) # cancel alarm
526 |
527 | # Send MAVlink messages in the background at pre-determined frequencies
528 | sched = BackgroundScheduler()
529 |
530 | if enable_msg_vision_position_estimate:
531 | sched.add_job(send_vision_position_estimate_message, 'interval', seconds = 1/vision_position_estimate_msg_hz)
532 |
533 | if enable_msg_vision_position_delta:
534 | sched.add_job(send_vision_position_delta_message, 'interval', seconds = 1/vision_position_delta_msg_hz)
535 | send_vision_position_delta_message.H_aeroRef_PrevAeroBody = tf.quaternion_matrix([1,0,0,0])
536 | send_vision_position_delta_message.prev_time_us = int(round(time.time() * 1000000))
537 |
538 | if enable_msg_vision_speed_estimate:
539 | sched.add_job(send_vision_speed_estimate_message, 'interval', seconds = 1/vision_speed_estimate_msg_hz)
540 |
541 | if enable_update_tracking_confidence_to_gcs:
542 | sched.add_job(update_tracking_confidence_to_gcs, 'interval', seconds = 1/update_tracking_confidence_to_gcs_hz_default)
543 | update_tracking_confidence_to_gcs.prev_confidence_level = -1
544 |
545 | # A separate thread to monitor user input
546 | if enable_user_keyboard_input:
547 | user_keyboard_input_thread = threading.Thread(target=user_input_monitor)
548 | user_keyboard_input_thread.daemon = True
549 | user_keyboard_input_thread.start()
550 | progress("INFO: Press Enter to set EKF home at default location")
551 |
552 | sched.start()
553 |
554 | # gracefully terminate the script if an interrupt signal (e.g. ctrl-c)
555 | # is received. This is considered to be abnormal termination.
556 | main_loop_should_quit = False
557 | def sigint_handler(sig, frame):
558 | global main_loop_should_quit
559 | main_loop_should_quit = True
560 | signal.signal(signal.SIGINT, sigint_handler)
561 |
562 | # gracefully terminate the script if a terminate signal is received
563 | # (e.g. kill -TERM).
564 | def sigterm_handler(sig, frame):
565 | global main_loop_should_quit
566 | main_loop_should_quit = True
567 | global exit_code
568 | exit_code = 0
569 |
570 | signal.signal(signal.SIGTERM, sigterm_handler)
571 |
572 | if compass_enabled == 1:
573 | time.sleep(1) # Wait a short while for yaw to be correctly initiated
574 |
575 | send_msg_to_gcs('Sending vision messages to FCU')
576 |
577 | try:
578 | while not main_loop_should_quit:
579 | # Wait for the next set of frames from the camera
580 | frames = pipe.wait_for_frames()
581 |
582 | # Fetch pose frame
583 | pose = frames.get_pose_frame()
584 |
585 | # Process data
586 | if pose:
587 | with lock:
588 | # Store the timestamp for MAVLink messages
589 | current_time_us = int(round(time.time() * 1000000))
590 |
591 | # Pose data consists of translation and rotation
592 | data = pose.get_pose_data()
593 |
594 | # Confidence level value from T265: 0-3, remapped to 0 - 100: 0% - Failed / 33.3% - Low / 66.6% - Medium / 100% - High
595 | current_confidence_level = float(data.tracker_confidence * 100 / 3)
596 |
597 | # In transformations, Quaternions w+ix+jy+kz are represented as [w, x, y, z]!
598 | H_T265Ref_T265body = tf.quaternion_matrix([data.rotation.w, data.rotation.x, data.rotation.y, data.rotation.z])
599 | H_T265Ref_T265body[0][3] = data.translation.x * scale_factor
600 | H_T265Ref_T265body[1][3] = data.translation.y * scale_factor
601 | H_T265Ref_T265body[2][3] = data.translation.z * scale_factor
602 |
603 | # Transform to aeronautic coordinates (body AND reference frame!)
604 | H_aeroRef_aeroBody = H_aeroRef_T265Ref.dot( H_T265Ref_T265body.dot( H_T265body_aeroBody))
605 |
606 | # Calculate GLOBAL XYZ speed (speed from T265 is already GLOBAL)
607 | V_aeroRef_aeroBody = tf.quaternion_matrix([1,0,0,0])
608 | V_aeroRef_aeroBody[0][3] = data.velocity.x
609 | V_aeroRef_aeroBody[1][3] = data.velocity.y
610 | V_aeroRef_aeroBody[2][3] = data.velocity.z
611 | V_aeroRef_aeroBody = H_aeroRef_T265Ref.dot(V_aeroRef_aeroBody)
612 |
613 | # Check for pose jump and increment reset_counter
614 | if prev_data != None:
615 | delta_translation = [data.translation.x - prev_data.translation.x, data.translation.y - prev_data.translation.y, data.translation.z - prev_data.translation.z]
616 | delta_velocity = [data.velocity.x - prev_data.velocity.x, data.velocity.y - prev_data.velocity.y, data.velocity.z - prev_data.velocity.z]
617 | position_displacement = np.linalg.norm(delta_translation)
618 | speed_delta = np.linalg.norm(delta_velocity)
619 |
620 |                     # A pose jump is indicated when the position changes abruptly. The behavior is not well documented yet (as of librealsense 2.34.0)
621 |                     jump_threshold = 0.1 # in meters, from trial and error; should scale with how frequently position data is obtained (200Hz for the T265)
622 |                     jump_speed_threshold = 20.0 # in m/s, from trial and error; should scale with how frequently velocity data is obtained (200Hz for the T265)
623 | if (position_displacement > jump_threshold) or (speed_delta > jump_speed_threshold):
624 | send_msg_to_gcs('VISO jump detected')
625 | if position_displacement > jump_threshold:
626 | progress("Position jumped by: %s" % position_displacement)
627 | elif speed_delta > jump_speed_threshold:
628 | progress("Speed jumped by: %s" % speed_delta)
629 | increment_reset_counter()
630 |
631 | prev_data = data
632 |
633 | # Take offsets from body's center of gravity (or IMU) to camera's origin into account
634 | if body_offset_enabled == 1:
635 | H_body_camera = tf.euler_matrix(0, 0, 0, 'sxyz')
636 | H_body_camera[0][3] = body_offset_x
637 | H_body_camera[1][3] = body_offset_y
638 | H_body_camera[2][3] = body_offset_z
639 | H_camera_body = np.linalg.inv(H_body_camera)
640 | H_aeroRef_aeroBody = H_body_camera.dot(H_aeroRef_aeroBody.dot(H_camera_body))
641 |
642 | # Realign heading to face north using initial compass data
643 | if compass_enabled == 1:
644 | H_aeroRef_aeroBody = H_aeroRef_aeroBody.dot( tf.euler_matrix(0, 0, heading_north_yaw, 'sxyz'))
645 |
646 | # Show debug messages here
647 | if debug_enable == 1:
648 | os.system('clear') # This helps in displaying the messages to be more readable
649 | progress("DEBUG: Raw RPY[deg]: {}".format( np.array( tf.euler_from_matrix( H_T265Ref_T265body, 'sxyz')) * 180 / m.pi))
650 | progress("DEBUG: NED RPY[deg]: {}".format( np.array( tf.euler_from_matrix( H_aeroRef_aeroBody, 'sxyz')) * 180 / m.pi))
651 | progress("DEBUG: Raw pos xyz : {}".format( np.array( [data.translation.x, data.translation.y, data.translation.z])))
652 | progress("DEBUG: NED pos xyz : {}".format( np.array( tf.translation_from_matrix( H_aeroRef_aeroBody))))
653 |
654 | except Exception as e:
655 | progress(e)
656 |
657 | except:
658 | send_msg_to_gcs('ERROR IN SCRIPT')
659 | progress("Unexpected error: %s" % sys.exc_info()[0])
660 |
661 | finally:
662 | progress('Closing the script...')
663 | # start a timer in case stopping everything nicely doesn't work.
664 | signal.setitimer(signal.ITIMER_REAL, 5) # seconds...
665 | pipe.stop()
666 | mavlink_thread_should_exit = True
667 | mavlink_thread.join()
668 | conn.close()
669 | progress("INFO: Realsense pipeline and vehicle object closed.")
670 | sys.exit(exit_code)
671 |
--------------------------------------------------------------------------------
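A small numeric sanity check (not part of the script) of the forward-facing transform defined above: the T265 pose frame has x to the right, y up and z backward, so a point 1 m straight ahead of the camera sits at -z, and H_aeroRef_T265Ref should map it to +x (forward) in the NED-style aero frame:

    import numpy as np

    H_aeroRef_T265Ref = np.array([[0,  0, -1, 0],
                                  [1,  0,  0, 0],
                                  [0, -1,  0, 0],
                                  [0,  0,  0, 1]])
    p_t265 = np.array([0, 0, -1, 1])       # homogeneous point, 1 m ahead of the camera
    print(H_aeroRef_T265Ref.dot(p_t265))   # [1. 0. 0. 1.] -> 1 m forward in NED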
/src/t265_fisheye_undistort.cpp:
--------------------------------------------------------------------------------
1 | #include <ros/ros.h>
2 | #include <image_transport/image_transport.h>
3 | #include <cv_bridge/cv_bridge.h>
4 | #include <sensor_msgs/CameraInfo.h>
5 | #include <message_filters/subscriber.h>
6 | #include <message_filters/synchronizer.h>
7 | #include <message_filters/sync_policies/approximate_time.h>
8 | #include <opencv2/opencv.hpp>
9 |
10 | using namespace cv;
11 | using namespace std;
12 |
13 | enum class CameraSide { LEFT, RIGHT };
14 |
15 | //////////////////////////////////////////////////
16 | // Declare all the calibration matrices as Mat variables.
17 | //////////////////////////////////////////////////
18 | Mat lmapx, lmapy, rmapx, rmapy;
19 |
20 | image_transport::Publisher pub_img_rect_left, pub_img_rect_right;
21 |
22 | sensor_msgs::CameraInfo output_camera_info_left, output_camera_info_right;
23 |
24 | ros::Publisher left_camera_info_output_pub, right_camera_info_output_pub;
25 |
26 | //////////////////////////////////////////////////
27 | // This function computes all the projection matrices and the rectification transformations
28 | // using the stereoRectify and initUndistortRectifyMap functions respectively.
29 | // See documentation for stereoRectify: https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#stereorectify
30 | //////////////////////////////////////////////////
31 | void init_rectification_map(string param_file_path)
32 | {
33 | Mat Q, P1, P2;
34 | Mat R1, R2, K1, K2, D1, D2, R;
35 | Vec3d T;
36 | Vec2d size_input, size_output;
37 |
38 | FileStorage param_file = FileStorage(param_file_path, FileStorage::READ);
39 |
40 | param_file["K1"] >> K1;
41 | param_file["D1"] >> D1;
42 | param_file["K2"] >> K2;
43 | param_file["D2"] >> D2;
44 | param_file["R"] >> R;
45 | param_file["T"] >> T;
46 | param_file["input"] >> size_input;
47 | param_file["output"] >> size_output;
48 |
49 | // The resolution of the input images used for stereo calibration.
50 | Size input_img_size(size_input[0], size_input[1]);
51 |
52 | // The resolution of the output rectified images. Lower resolution images require less computation time.
53 | Size output_img_size(size_output[0], size_output[1]);
54 | double alpha = 0.0;
55 |
56 | stereoRectify(K1, D1, K2, D2,
57 | input_img_size,
58 | R, T,
59 | R1, R2, P1, P2,
60 | Q,
61 | #if CV_VERSION_MAJOR == 3
62 | CALIB_ZERO_DISPARITY,
63 | #elif CV_VERSION_MAJOR == 4
64 | cv::CALIB_ZERO_DISPARITY,
65 | #endif
66 | alpha,
67 | output_img_size);
68 |
69 | fisheye::initUndistortRectifyMap(K1, D1, R1, P1, output_img_size, CV_32FC1, lmapx, lmapy);
70 | fisheye::initUndistortRectifyMap(K2, D2, R2, P2, output_img_size, CV_32FC1, rmapx, rmapy);
71 |
72 | // Copy the parameters for rectified images to the camera_info messages
73 | output_camera_info_left.width = size_output[0];
74 | output_camera_info_left.height = size_output[1];
75 |     output_camera_info_left.D = vector<double>(5, 0);
76 |
77 | output_camera_info_right.width = size_output[0];
78 | output_camera_info_right.height = size_output[1];
79 |     output_camera_info_right.D = vector<double>(5, 0);
80 |
81 | for (int i = 0; i < 9; i++)
82 | {
83 |         output_camera_info_left.K[i] = K1.at<double>(i);
84 |         output_camera_info_right.K[i] = K2.at<double>(i);
85 |         output_camera_info_left.R[i] = R1.at<double>(i);
86 |         output_camera_info_right.R[i] = R2.at<double>(i);
87 | }
88 | for (int i = 0; i < 12; i++)
89 | {
90 |         output_camera_info_left.P[i] = P1.at<double>(i);
91 |         output_camera_info_right.P[i] = P2.at<double>(i);
92 | }
93 |
94 | ROS_INFO("Initialization complete. Publishing rectified images and camera_info when raw images arrive...");
95 | }
96 |
97 | //////////////////////////////////////////////////
98 | // This function undistorts and rectifies the src image into dst.
99 | // The homographic mappings lmapx, lmapy, rmapx, and rmapy are found from OpenCV’s initUndistortRectifyMap function.
100 | //////////////////////////////////////////////////
101 | void undistort_rectify_image(Mat& src, Mat& dst, const CameraSide& side)
102 | {
103 | if (side == CameraSide::LEFT)
104 | {
105 | remap(src, dst, lmapx, lmapy, cv::INTER_LINEAR);
106 | }
107 | else
108 | {
109 | remap(src, dst, rmapx, rmapy, cv::INTER_LINEAR);
110 | }
111 | }
112 |
113 | //////////////////////////////////////////////////
114 | // This callback function takes a pair of raw stereo images as inputs,
115 | // then undistorts and rectifies the images using the undistort_rectify_image function
116 | // defined above and publishes on the rectified image topic using pub_img_left/right.
117 | //////////////////////////////////////////////////
118 | void synched_img_callback(const sensor_msgs::ImageConstPtr& msg_left, const sensor_msgs::ImageConstPtr& msg_right)
119 | {
120 | Mat tmp_left = cv_bridge::toCvShare(msg_left, "bgr8")->image;
121 | Mat tmp_right = cv_bridge::toCvShare(msg_right, "bgr8")->image;
122 |
123 | Mat dst_left, dst_right;
124 |
125 | undistort_rectify_image(tmp_left, dst_left, CameraSide::LEFT);
126 | undistort_rectify_image(tmp_right, dst_right, CameraSide::RIGHT);
127 |
128 | sensor_msgs::ImagePtr rect_img_left = cv_bridge::CvImage(msg_left->header, "bgr8", dst_left).toImageMsg();
129 | sensor_msgs::ImagePtr rect_img_right = cv_bridge::CvImage(msg_right->header, "bgr8", dst_right).toImageMsg();
130 |
131 | pub_img_rect_left.publish(rect_img_left);
132 | pub_img_rect_right.publish(rect_img_right);
133 |
134 | std_msgs::Header header = msg_left->header;
135 | output_camera_info_left.header = header;
136 | output_camera_info_right.header = header;
137 |
138 | left_camera_info_output_pub.publish(output_camera_info_left);
139 | right_camera_info_output_pub.publish(output_camera_info_right);
140 | }
141 |
142 | int main(int argc, char **argv)
143 | {
144 | ros::init(argc, argv, "camera_fisheye_undistort");
145 |
146 | ros::NodeHandle nh("~");
147 |
148 | string param_file_path;
149 | if(nh.getParam("param_file_path", param_file_path))
150 | {
151 | ROS_WARN("Using parameter file: %s", param_file_path.c_str());
152 | }
153 | else
154 | {
155 | ROS_ERROR("Failed to get param file path. Please check and try again.");
156 | ros::shutdown();
157 | return 0;
158 | }
159 |
160 | // Read the input parameters and perform initialization
161 | init_rectification_map(param_file_path);
162 |
163 | // The raw stereo images should be published as type sensor_msgs/Image
164 | image_transport::ImageTransport it(nh);
165 |     message_filters::Subscriber<sensor_msgs::Image> sub_img_left(nh, "/camera/fisheye1/image_raw", 1);
166 |     message_filters::Subscriber<sensor_msgs::Image> sub_img_right(nh, "/camera/fisheye2/image_raw", 1);
167 |
168 | // Having time synced stereo images might be important for other purposes, say generating accurate disparity maps.
169 | // To sync the left and right image messages by their header time stamps, ApproximateTime is used.
170 | // More info here: http://wiki.ros.org/message_filters#Time_Synchronizer
171 | typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> SyncPolicy;
172 | message_filters::Synchronizer<SyncPolicy> sync(SyncPolicy(10), sub_img_left, sub_img_right);
173 | sync.registerCallback(boost::bind(&synched_img_callback, _1, _2));
174 |
175 | // The output data include rectified images and their corresponding camera info
176 | pub_img_rect_left = it.advertise("/camera/fisheye1/rect/image", 1);
177 | pub_img_rect_right = it.advertise("/camera/fisheye2/rect/image", 1);
178 |
179 | left_camera_info_output_pub = nh.advertise<sensor_msgs::CameraInfo>("/camera/fisheye1/rect/camera_info", 1);
180 | right_camera_info_output_pub = nh.advertise<sensor_msgs::CameraInfo>("/camera/fisheye2/rect/camera_info", 1);
181 |
182 | // Processing start
183 | ros::spin();
184 | }
185 |
--------------------------------------------------------------------------------
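The rectification above follows a standard OpenCV pattern: build the per-pixel remap tables once from the calibration, then remap every incoming frame. A minimal standalone sketch of that pattern is below; the matrices, the image size, and the use of the cv::fisheye variant are assumptions for illustration, not values taken from this package (the real calibration comes from the file passed via param_file_path).

    // fisheye_rectify_sketch.cpp -- illustrative only, not part of the repository.
    // Builds undistortion/rectification maps once, then remaps a frame,
    // which is the same flow as init_rectification_map() + undistort_rectify_image().
    #include <opencv2/core.hpp>
    #include <opencv2/calib3d.hpp>
    #include <opencv2/imgproc.hpp>

    int main()
    {
        // Placeholder calibration: intrinsics K, fisheye distortion D,
        // rectification rotation R, new projection P (all assumed values).
        cv::Mat K = (cv::Mat_<double>(3, 3) << 285.0, 0, 424.0,
                                               0, 285.0, 400.0,
                                               0, 0, 1);
        cv::Mat D = cv::Mat::zeros(4, 1, CV_64F);
        cv::Mat R = cv::Mat::eye(3, 3, CV_64F);
        cv::Mat P = K.clone();
        cv::Size size(848, 800);  // T265 fisheye resolution

        // Done once at startup: compute the remap tables (lmapx/lmapy in the node).
        cv::Mat mapx, mapy;
        cv::fisheye::initUndistortRectifyMap(K, D, R, P, size, CV_32FC1, mapx, mapy);

        // Done per frame: remap the raw image into the rectified image.
        cv::Mat raw = cv::Mat::zeros(size, CV_8UC3), rectified;
        cv::remap(raw, rectified, mapx, mapy, cv::INTER_LINEAR);
        return 0;
    }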
/src/vision_to_mavros.cpp:
--------------------------------------------------------------------------------
1 | #include <ros/ros.h>
2 | #include <tf/transform_datatypes.h>
3 | #include <tf/transform_listener.h>
4 | #include <geometry_msgs/PoseStamped.h>
5 | #include <nav_msgs/Path.h>
6 | #include <mavros_msgs/LandingTarget.h>
7 | #include <string>
8 |
9 | #include <cmath>
10 |
11 | int main(int argc, char** argv)
12 | {
13 | ros::init(argc, argv, "vision_to_mavros");
14 |
15 | ros::NodeHandle node;
16 |
17 | //////////////////////////////////////////////////
18 | // Variables for precision navigation
19 | //////////////////////////////////////////////////
20 | ros::Publisher camera_pose_publisher = node.advertise<geometry_msgs::PoseStamped>("vision_pose", 10);
21 |
22 | ros::Publisher body_path_pubisher = node.advertise<nav_msgs::Path>("body_frame/path", 1);
23 |
24 | tf::TransformListener tf_listener;
25 |
26 | tf::StampedTransform transform;
27 |
28 | geometry_msgs::PoseStamped msg_body_pose;
29 |
30 | nav_msgs::Path body_path;
31 |
32 | std::string target_frame_id = "/camera_odom_frame";
33 |
34 | std::string source_frame_id = "/camera_link";
35 |
36 | double output_rate = 20, roll_cam = 0, pitch_cam = 0, yaw_cam = 1.5707963, gamma_world = -1.5707963;
37 |
38 | // Read parameters from launch file, including: target_frame_id, source_frame_id, output_rate
39 | {
40 | // The frame in which we find the transform into, the original "world" frame
41 | if(node.getParam("target_frame_id", target_frame_id))
42 | {
43 | ROS_INFO("Get target_frame_id parameter: %s", target_frame_id.c_str());
44 | }
45 | else
46 | {
47 | ROS_WARN("Using default target_frame_id: %s", target_frame_id.c_str());
48 | }
49 |
50 | // The frame for which we find the transform to target_frame_id, the original "camera" frame
51 | if(node.getParam("source_frame_id", source_frame_id))
52 | {
53 | ROS_INFO("Get source_frame_id parameter: %s", source_frame_id.c_str());
54 | }
55 | else
56 | {
57 | ROS_WARN("Using default source_frame_id: %s", source_frame_id.c_str());
58 | }
59 |
60 | // The rate at which we wish to publish final pose data
61 | if(node.getParam("output_rate", output_rate))
62 | {
63 | ROS_INFO("Get output_rate parameter: %f", output_rate);
64 | }
65 | else
66 | {
67 | ROS_WARN("Using default output_rate: %f", output_rate);
68 | }
69 |
70 | // The rotation about the z axis between the original world frame and the target world frame, assuming the z axis itself does not change
71 | // In this case, the target world frame has y forward, x to the right, and z upwards (ENU, as ROS dictates)
72 | if(node.getParam("gamma_world", gamma_world))
73 | {
74 | ROS_INFO("Get gamma_world parameter: %f", gamma_world);
75 | }
76 | else
77 | {
78 | ROS_WARN("Using default gamma_world: %f", gamma_world);
79 | }
80 |
81 | // The roll angle around camera's own axis to align with body frame
82 | if(node.getParam("roll_cam", roll_cam))
83 | {
84 | ROS_INFO("Get roll_cam parameter: %f", roll_cam);
85 | }
86 | else
87 | {
88 | ROS_WARN("Using default roll_cam: %f", roll_cam);
89 | }
90 |
91 | // The pitch angle around camera's own axis to align with body frame
92 | if(node.getParam("pitch_cam", pitch_cam))
93 | {
94 | ROS_INFO("Get pitch_cam parameter: %f", pitch_cam);
95 | }
96 | else
97 | {
98 | ROS_WARN("Using default pitch_cam: %f", pitch_cam);
99 | }
100 |
101 | // The yaw angle around camera's own axis to align with body frame
102 | if(node.getParam("yaw_cam", yaw_cam))
103 | {
104 | ROS_INFO("Get yaw_cam parameter: %f", yaw_cam);
105 | }
106 | else
107 | {
108 | ROS_WARN("Using default yaw_cam: %f", yaw_cam);
109 | }
110 | }
111 |
112 | //////////////////////////////////////////////////
113 | // Variables for precision landing (optional)
114 | //////////////////////////////////////////////////
115 | bool enable_precland = false;
116 |
117 | std::string precland_target_frame_id = "/landing_target";
118 |
119 | std::string precland_camera_frame_id = "/camera_fisheye2_optical_frame";
120 |
121 | ros::Publisher precland_msg_publisher;
122 |
123 | if(node.getParam("enable_precland", enable_precland))
124 | {
125 | ROS_INFO("Precision landing: %s", enable_precland ? "enabled" : "disabled");
126 | }
127 | else
128 | {
129 | ROS_INFO("Precision landing disabled by default");
130 | }
131 |
132 | if (enable_precland)
133 | {
134 | // The frame of the landing target in the camera frame
135 | if(node.getParam("precland_target_frame_id", precland_target_frame_id))
136 | {
137 | ROS_INFO("Get precland_target_frame_id parameter: %s", precland_target_frame_id.c_str());
138 | }
139 | else
140 | {
141 | ROS_WARN("Using default precland_target_frame_id: %s", precland_target_frame_id.c_str());
142 | }
143 |
144 | if(node.getParam("precland_camera_frame_id", precland_camera_frame_id))
145 | {
146 | ROS_INFO("Get precland_camera_frame_id parameter: %s", precland_camera_frame_id.c_str());
147 | }
148 | else
149 | {
150 | ROS_WARN("Using default precland_camera_frame_id: %s", precland_camera_frame_id.c_str());
151 | }
152 |
153 | precland_msg_publisher = node.advertise<mavros_msgs::LandingTarget>("landing_raw", 10);
154 | }
155 |
156 | //////////////////////////////////////////////////
157 | // Wait for the first transform to become available.
158 | //////////////////////////////////////////////////
159 | tf_listener.waitForTransform(target_frame_id, source_frame_id, ros::Time::now(), ros::Duration(3.0));
160 |
161 | ros::Time last_tf_time = ros::Time::now();
162 | ros::Time last_precland_tf_time = ros::Time::now();
163 |
164 | // Limit the rate of publishing data, otherwise the other telemetry port might be flooded
165 | ros::Rate rate(output_rate);
166 |
167 | while (node.ok())
168 | {
169 | // For tf, Time(0) means "the latest available" transform in the buffer.
170 | ros::Time now = ros::Time(0);
171 |
172 | //////////////////////////////////////////////////
173 | // Publish vision_position_estimate message if transform is available
174 | //////////////////////////////////////////////////
175 | try
176 | {
177 | // lookupTransform(frame_2, frame_1, at_this_time, this_transform)
178 | // will give the transform from frame_1 to frame_2
179 | tf_listener.lookupTransform(target_frame_id, source_frame_id, now, transform);
180 |
181 | // Only publish pose messages when we have new transform data.
182 | if (last_tf_time < transform.stamp_)
183 | {
184 | last_tf_time = transform.stamp_;
185 |
186 | static tf::Vector3 position_orig, position_body;
187 |
188 | static tf::Quaternion quat_cam, quat_cam_to_body_x, quat_cam_to_body_y, quat_cam_to_body_z, quat_rot_z, quat_body;
189 |
190 | // 1) Rotation from original world frame to world frame with y forward.
191 | // See the full rotation matrix at https://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations
192 | position_orig = transform.getOrigin();
193 |
194 | position_body.setX( cos(gamma_world) * position_orig.getX() + sin(gamma_world) * position_orig.getY());
195 | position_body.setY(-sin(gamma_world) * position_orig.getX() + cos(gamma_world) * position_orig.getY());
196 | position_body.setZ(position_orig.getZ());
197 |
198 | // 2) Rotation from camera to body frame.
199 | quat_cam = transform.getRotation();
200 |
201 | quat_cam_to_body_x = tf::createQuaternionFromRPY(roll_cam, 0, 0);
202 | quat_cam_to_body_y = tf::createQuaternionFromRPY(0, pitch_cam, 0);
203 | quat_cam_to_body_z = tf::createQuaternionFromRPY(0, 0, yaw_cam);
204 |
205 | // 3) Rotate body frame 90 degrees (align body x with world y at launch)
206 | quat_rot_z = tf::createQuaternionFromRPY(0, 0, -gamma_world);
207 |
208 | quat_body = quat_rot_z * quat_cam * quat_cam_to_body_x * quat_cam_to_body_y * quat_cam_to_body_z;
209 | quat_body.normalize();
210 |
211 | // Create PoseStamped message to be sent
212 | msg_body_pose.header.stamp = transform.stamp_;
213 | msg_body_pose.header.frame_id = transform.frame_id_;
214 | msg_body_pose.pose.position.x = position_body.getX();
215 | msg_body_pose.pose.position.y = position_body.getY();
216 | msg_body_pose.pose.position.z = position_body.getZ();
217 | msg_body_pose.pose.orientation.x = quat_body.getX();
218 | msg_body_pose.pose.orientation.y = quat_body.getY();
219 | msg_body_pose.pose.orientation.z = quat_body.getZ();
220 | msg_body_pose.pose.orientation.w = quat_body.getW();
221 |
222 | // Publish pose of body frame in world frame
223 | camera_pose_publisher.publish(msg_body_pose);
224 |
225 | // Publish trajectory path for visualization
226 | body_path.header.stamp = msg_body_pose.header.stamp;
227 | body_path.header.frame_id = msg_body_pose.header.frame_id;
228 | body_path.poses.push_back(msg_body_pose);
229 | body_path_pubisher.publish(body_path);
230 | }
231 | }
232 | catch (tf::TransformException ex)
233 | {
234 | ROS_WARN("%s",ex.what());
235 | ros::Duration(1.0).sleep();
236 | }
237 |
238 | //////////////////////////////////////////////////
239 | // Publish landing_target message if option is enabled and transform is available
240 | //////////////////////////////////////////////////
241 | if (enable_precland)
242 | {
243 | if (tf_listener.canTransform(precland_camera_frame_id, precland_target_frame_id, now))
244 | {
245 | // lookupTransform(frame_2, frame_1, at_this_time, this_transform)
246 | // will give the transform from frame_1 to frame_2
247 | tf_listener.lookupTransform(precland_camera_frame_id, precland_target_frame_id, now, transform);
248 |
249 | // Only publish when we have new data
250 | if (last_precland_tf_time < transform.stamp_)
251 | {
252 | last_precland_tf_time = transform.stamp_;
253 |
254 | mavros_msgs::LandingTarget msg_landing_target;
255 |
256 | // Setup the landing target message according to the relative protocol: https://mavlink.io/en/services/landing_target.html#camera_image_relative
257 | msg_landing_target.header.frame_id = transform.frame_id_;
258 | msg_landing_target.header.stamp = transform.stamp_;
259 | msg_landing_target.target_num = 0;
260 | msg_landing_target.frame = mavros_msgs::LandingTarget::LOCAL_NED;
261 | msg_landing_target.type = mavros_msgs::LandingTarget::VISION_FIDUCIAL;
262 |
263 | msg_landing_target.angle[0] = std::atan(transform.getOrigin().getX() / transform.getOrigin().getZ());
264 | msg_landing_target.angle[1] = std::atan(transform.getOrigin().getY() / transform.getOrigin().getZ());
265 | msg_landing_target.distance = transform.getOrigin().length();
266 |
267 | // Publish the message
268 | precland_msg_publisher.publish(msg_landing_target);
269 |
270 | ROS_INFO("Landing target detected");
271 | }
272 | }
273 | }
274 |
275 | //////////////////////////////////////////////////
276 | // Repeat
277 | //////////////////////////////////////////////////
278 | rate.sleep();
279 | }
280 | return 0;
281 | }
--------------------------------------------------------------------------------
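The heart of vision_to_mavros.cpp is the frame conversion in steps 1)-3) of the main loop: the camera position is rotated about z by gamma_world, and the camera orientation is composed with the camera-to-body roll/pitch/yaw offsets plus a -gamma_world yaw. The sketch below isolates that math using tf only; the input position and quaternion are made-up values for illustration, not outputs of the node.

    // frame_conversion_sketch.cpp -- illustrative only, not part of the repository.
    // Mirrors steps 1)-3) of the main loop above, using the node's default angles.
    #include <tf/transform_datatypes.h>
    #include <cmath>
    #include <cstdio>

    int main()
    {
        const double gamma_world = -1.5707963;                 // default: -90 deg about z
        const double roll_cam = 0, pitch_cam = 0, yaw_cam = 1.5707963;

        tf::Vector3 position_orig(1.0, 2.0, 0.5);              // hypothetical camera position
        tf::Quaternion quat_cam = tf::createQuaternionFromRPY(0, 0, 0.3);  // hypothetical orientation

        // 1) Original world frame -> world frame with y forward (rotation about z by gamma_world).
        tf::Vector3 position_body(
             std::cos(gamma_world) * position_orig.x() + std::sin(gamma_world) * position_orig.y(),
            -std::sin(gamma_world) * position_orig.x() + std::cos(gamma_world) * position_orig.y(),
             position_orig.z());

        // 2) + 3) Camera frame -> body frame, then undo the world yaw so body x aligns with world y.
        tf::Quaternion quat_body =
            tf::createQuaternionFromRPY(0, 0, -gamma_world) * quat_cam *
            tf::createQuaternionFromRPY(roll_cam, 0, 0) *
            tf::createQuaternionFromRPY(0, pitch_cam, 0) *
            tf::createQuaternionFromRPY(0, 0, yaw_cam);
        quat_body.normalize();

        std::printf("body position: %.3f %.3f %.3f\n",
                    position_body.x(), position_body.y(), position_body.z());
        std::printf("body yaw (rad): %.3f\n", tf::getYaw(quat_body));
        return 0;
    }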