├── .gitignore
├── CMakeLists.txt
├── README.md
├── config
│   ├── params_flightmare_sim.yaml
│   └── params_snapdragon_flight.yaml
├── doc
│   └── tracking.png
├── include
│   └── multi_robot_tracking
│       ├── JpdafFilter.h
│       ├── PhdFilter.h
│       ├── SimpleKalman.h
│       ├── detection.h
│       ├── hungarian_alg.h
│       ├── kalman.h
│       ├── track.h
│       └── tracker_param.h
├── launch
│   ├── demo.launch
│   ├── demo_left.launch
│   ├── demo_node.launch
│   ├── demo_old.launch
│   └── demo_right.launch
├── nodelet_plugin.xml
├── package.xml
├── rviz
│   └── darknet_vis.rviz
├── scripts
│   ├── flightmare_test.py
│   ├── rmse_calc.py
│   └── sample_data.py
└── src
    ├── SimpleKalman.cpp
    ├── hungarian_alg.cpp
    ├── jpdaf_filter.cpp
    ├── kalman.cpp
    ├── multi_robot_tracking_node.cpp
    ├── multi_robot_tracking_nodelet.cpp
    ├── phd_filter.cpp
    └── track.cpp
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode/
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.0.2)
2 | project(multi_robot_tracking)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | add_compile_options(-std=c++11)
6 |
7 |
8 | find_package(catkin REQUIRED COMPONENTS
9 | geometry_msgs
10 | nodelet
11 | roscpp
12 | std_msgs
13 | tf
14 | cv_bridge
15 | nav_msgs
16 | image_transport
17 | )
18 | find_package(Eigen3 REQUIRED NO_MODULE)
19 |
20 | catkin_package(
21 | INCLUDE_DIRS
22 | LIBRARIES
23 | CATKIN_DEPENDS
24 | geometry_msgs
25 | nodelet
26 | roscpp
27 | std_msgs
28 | tf
29 | DEPENDS
30 | EIGEN3
31 | )
32 |
33 | # include headers
34 | include_directories(
35 | ${catkin_INCLUDE_DIRS}
36 | ${OpenCV_INCLUDE_DIRS}
37 | ${Eigen_INCLUDE_DIRS}
38 | include
39 | include/multi_robot_tracking
40 | )
41 |
42 | add_library(multi_robot_tracking src/hungarian_alg.cpp src/SimpleKalman.cpp src/track.cpp src/jpdaf_filter.cpp src/phd_filter.cpp src/kalman.cpp src/multi_robot_tracking_nodelet.cpp)
43 | target_include_directories(multi_robot_tracking PUBLIC include ${catkin_INCLUDE_DIRS})
44 |
45 | target_link_libraries(multi_robot_tracking PUBLIC ${catkin_LIBRARIES} Eigen3::Eigen)
46 | add_dependencies(multi_robot_tracking ${catkin_EXPORTED_TARGETS} ${${PROJECT_NAME}_EXPORTED_TARGETS})
47 |
48 | add_executable(multi_robot_tracking_node src/multi_robot_tracking_node.cpp)
49 | add_dependencies(multi_robot_tracking_node ${catkin_EXPORTED_TARGETS} ${${PROJECT_NAME}_EXPORTED_TARGETS})
50 | target_link_libraries(multi_robot_tracking_node multi_robot_tracking ${catkin_LIBRARIES})
51 |
52 |
53 | install(
54 | TARGETS multi_robot_tracking
55 | ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
56 | LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
57 | RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
58 | )
59 |
60 | # install(DIRECTORY include/${PROJECT_NAME}/ DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION})
61 | install(FILES nodelet_plugin.xml
62 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
63 | )
64 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Multi Agent Tracking
2 | Multi-agent tracking for data association, with a comparison of several filters
3 |
4 | ## License
5 | Please be aware that this code was originally implemented for research purposes; it may be subject to change, and any fitness for a particular purpose is disclaimed. To inquire about commercial licenses, please contact Prof. Giuseppe Loianno (loiannog@nyu.edu).
6 | ```
7 | This program is free software: you can redistribute it and/or modify
8 | it under the terms of the GNU General Public License as published by
9 | the Free Software Foundation, either version 3 of the License, or
10 | (at your option) any later version.
11 |
12 | This program is distributed in the hope that it will be useful,
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | GNU General Public License for more details.
16 |
17 | You should have received a copy of the GNU General Public License
18 | along with this program. If not, see <https://www.gnu.org/licenses/>.
19 |
20 | ```
21 | ## Citation
22 | If you publish a paper with this work, please cite our paper:
23 | ```
24 | @inproceedings{VisualTraIROS2022,
25 |   url = {https://arxiv.org/abs/2207.08301},
26 |   year = {2022},
27 |   booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
28 |   author = {Ge, Rundong and Lee, Moonyoung and Radhakrishnan, Vivek and Zhou, Yang and Li, Guanrui and Loianno, Giuseppe},
29 |   title = {Vision-based Relative Detection and Tracking for Teams of Micro Aerial Vehicles}}
30 | ```
31 |
32 | ## Overview
33 | This is a ROS package for tracking multiple agents with a camera. It associates 2D bounding-box measurements (of drones) with unique IDs and 2D positions in image space. The required inputs are the 2D bounding-box measurements and the RGB image; the outputs are 2D positions in pixel space together with their target associations. The motion model is a linear Gaussian Kalman filter, and data association is solved with a probability hypothesis density (PHD) filter; a Joint Probabilistic Data Association Filter (JPDAF) can be selected instead by switching a rosparam at launch (a simplified sketch of the PHD recursion is shown below the figure). This repository also includes a Matlab implementation and evaluation.
34 |
35 | 
36 |
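At a high level, each incoming frame of detections is processed with a Gaussian-mixture PHD recursion: predict, update against every detection, then prune, merge, and extract states. The sketch below only illustrates that recursion; the function and helper names and the simplifications (merging by Mahalanobis distance is omitted) are assumptions for illustration, not the code in `src/phd_filter.cpp`.

```python
# Simplified GM-PHD iteration for one frame of 2D detections (illustration only).
import numpy as np

def gaussian_likelihood(z, mean, S):
    """Evaluate a 2D Gaussian density at measurement z."""
    d = z - mean
    return np.exp(-0.5 * d @ np.linalg.solve(S, d)) / np.sqrt((2 * np.pi) ** 2 * np.linalg.det(S))

def phd_step(components, detections, F, Q, H, R,
             p_detect=0.95, kappa=1e-7, prune_w=0.1, extract_w=0.5):
    # components: list of (weight, mean, covariance) Gaussians over the state [u, v, du, dv].
    # 1. Predict every component with the constant-velocity model.
    predicted = [(w, F @ m, F @ P @ F.T + Q) for (w, m, P) in components]

    # 2. Missed-detection terms keep the predicted components with reduced weight.
    updated = [((1.0 - p_detect) * w, m, P) for (w, m, P) in predicted]

    # 3. For each detection, update every predicted component and normalise the
    #    new weights against the clutter density kappa.
    for z in detections:
        batch = []
        for (w, m, P) in predicted:
            S = H @ P @ H.T + R                 # innovation covariance
            K = P @ H.T @ np.linalg.inv(S)      # Kalman gain
            q = gaussian_likelihood(z, H @ m, S)
            batch.append((p_detect * w * q,
                          m + K @ (z - H @ m),
                          (np.eye(len(m)) - K @ H) @ P))
        norm = kappa + sum(w for (w, _, _) in batch)
        updated += [(w / norm, m, P) for (w, m, P) in batch]

    # 4. Prune low-weight components (merging is omitted in this sketch) and
    #    extract the states whose weight exceeds the extraction threshold.
    survivors = [(w, m, P) for (w, m, P) in updated if w > prune_w]
    tracks = [m for (w, m, P) in survivors if w > extract_w]
    return survivors, tracks
```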
37 |
38 | **Developer: Mark Lee, Vivek Radhakrishnan
39 | Affiliation: [NYU ARPL](https://wp.nyu.edu/arpl/)
40 | Maintainer: Vivek Radhakrishnan, vr2171@nyu.edu**
41 |
42 | #### Subscribed Topics
43 | |Name|Type|Description|
44 | |---|---|---|
45 | |`/hummingbird0/track/bounding_box`|geometry_msgs/PoseArray|2D bounding-box detections from the Flightmare rosbag (see the publisher sketch after this table)|
46 | |`/hummingbird0/camera/rgb`|sensor_msgs/Image|RGB image|
47 | |`/hummingbird0/imu`|sensor_msgs/Imu|IMU data|
48 |
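The tracker only needs the detection centres packed into a `geometry_msgs/PoseArray` on the bounding-box topic. As an illustration (not a script shipped with this package), a minimal publisher might look like the sketch below; it assumes each detection's pixel-space centre is carried in `pose.position.x`/`pose.position.y`, with the topic name taken from the table above.

```python
#!/usr/bin/env python
# Hypothetical detection publisher, for illustration only.
import rospy
from geometry_msgs.msg import PoseArray, Pose

def publish_fake_detections():
    rospy.init_node("fake_bbox_publisher")
    pub = rospy.Publisher("/hummingbird0/track/bounding_box", PoseArray, queue_size=1)
    rate = rospy.Rate(10)  # 10 Hz
    while not rospy.is_shutdown():
        msg = PoseArray()
        msg.header.stamp = rospy.Time.now()
        for (u, v) in [(160.0, 120.0), (200.0, 130.0)]:  # two example centres, in pixels
            p = Pose()
            p.position.x = u
            p.position.y = v
            msg.poses.append(p)
        pub.publish(msg)
        rate.sleep()

if __name__ == "__main__":
    publish_fake_detections()
```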
49 | #### Published Topics
50 | |Name|Type|Description|
51 | |---|---|---|
52 | |`/tracked_image`|sensor_msgs/Image|RGB with position, ID association labeled|
53 | |`/tracked_pose_output`|geometry_msgs/PoseArray|position (x,y) in pixel coordinates with ID association; the track ID of each pose is its index in the array (see the subscriber sketch after this table)|
54 | |`/tracked_velocity_output`|geometry_msgs/PoseArray|linear velocity (x,y) in pixel space, indexed by the same track IDs|
55 |
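For reference, a minimal consumer of the tracker output could look like the following sketch (the node and callback names are hypothetical); it relies only on the topics listed above and on the ID-equals-array-index convention.

```python
#!/usr/bin/env python
# Hypothetical listener for the tracked poses, for illustration only.
import rospy
from geometry_msgs.msg import PoseArray

def pose_callback(msg):
    # The track ID is the index of the pose within the array.
    for track_id, pose in enumerate(msg.poses):
        rospy.loginfo("track %d: u=%.1f px, v=%.1f px", track_id, pose.position.x, pose.position.y)

if __name__ == "__main__":
    rospy.init_node("tracked_pose_listener")
    rospy.Subscriber("/tracked_pose_output", PoseArray, pose_callback)
    rospy.spin()
```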
56 | #### ROS Parameters
57 | |Name|Description|
58 | |---|---|
59 | |`filter`|`phd` or `jpdaf`; set in the demo.launch file|
60 | |`input_bbox_topic`|detection input topic (output of py_imag_proc or /hummingbird0/track/bounding_box); set in the demo.launch file|
61 | |`input_rgb_topic`|RGB input topic (/hires/image_raw or hummingbird0/camera/rgb); set in the demo.launch file|
62 | |`input_imu_topic`|IMU input topic (dragonfly imu output or hummingbird0/imu); set in the demo.launch file|
63 | |`num_drones`|number of drones in the FOV; set in the demo.launch file|
64 | |`camera_cx`|CX parameter of the camera projection matrix|
65 | |`camera_cy`|CY parameter of the camera projection matrix|
66 | |`camera_f`|F (focal length) parameter of the camera projection matrix|
67 | |`dt`|Hard-coded time difference between IMU frames (no longer used)|
68 | |`viz_detection_height`|Image height used by the detection algorithm|
69 | |`viz_detection_width`|Image width used by the detection algorithm|
70 | |`viz_detection_offset_x`|Output drawing offset in X axis in case of black bars|
71 | |`viz_detection_offset_y`|Output drawing offset in Y axis in case of black bars|
72 | |`enable_async_pdf`|Not used anymore|
73 | |`use_generated_id`|Assigns sequential IDs to subsequent detections|
74 | |`phd/q_pos`|Process noise for the evolution of position (see the Kalman-model sketch after this table)|
75 | |`phd/q_vel`|Process noise for the evolution of velocity|
76 | |`phd/r_meas`|Measurement noise|
77 | |`phd/p_pos_init`|Initial process covariance for position|
78 | |`phd/p_vel_init`|Initial process covariance for velocity|
79 | |`phd/prune_weight_threshold`|Minimum weight for pruning; components whose weight falls below this value are removed before the next step|
80 | |`phd/prune_mahalanobis_threshold`|Maximum Mahalanobis distance for merging different updates|
81 | |`phd/extract_weight_threshold`|Minimum weight for state extraction; if the weight is lower than this value, the state is updated using the prediction instead of the update|
82 | |`jpdaf/q_pos`|Process noise for the evolution of position|
83 | |`jpdaf/q_vel`|Process noise for the evolution of velocity|
84 | |`jpdaf/p_pos_init`|Initial process covariance for position|
85 | |`jpdaf/p_vel_init`|Initial process covariance for velocity|
86 | |`jpdaf/r_meas`|Measurement noise|
87 | |`jpdaf/alpha_0_threshold`|JPDAF Alpha 0 threshold|
88 | |`jpdaf/alpha_cam`|pixel size ratio, use 1 for normal images|
89 | |`jpdaf/associaiton_cost`|Association cost for JPDAF|
90 | |`jpdaf/beta_0_threshold`|JPDAF beta 0 threshold|
91 | |`jpdaf/false_measurements_density`|JPDAF false-measurement density|
92 | |`jpdaf/gamma`|JPDAF gamma|
93 | |`jpdaf/max_missed_rate`|JPDAF max missed rate|
94 | |`jpdaf/min_acceptance_rate`|JPDAF minimum acceptance rate|
95 | |`jpdaf/probability_detection`|JPDAF probability of detection|
96 |
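To make the `phd/*` noise parameters concrete, the sketch below shows one common way such values are assembled into a constant-velocity Kalman model in pixel space. It is an illustration of the usual convention (using the values from params_flightmare_sim.yaml), not necessarily the exact matrices built inside `src/phd_filter.cpp`.

```python
# Illustrative mapping from the phd/* parameters to constant-velocity Kalman
# matrices for the state [u, v, du, dv] in pixel space.
import numpy as np

q_pos, q_vel = 6.25, 12.5            # phd/q_pos, phd/q_vel
r_meas = 10.0                        # phd/r_meas
p_pos_init, p_vel_init = 5.0, 10.0   # phd/p_pos_init, phd/p_vel_init
dt = 0.225                           # dt

# State transition: pixel positions integrate pixel velocities over dt.
F = np.array([[1, 0, dt, 0],
              [0, 1, 0, dt],
              [0, 0, 1,  0],
              [0, 0, 0,  1]], dtype=float)

# Measurements are the pixel positions only.
H = np.array([[1, 0, 0, 0],
              [0, 1, 0, 0]], dtype=float)

Q  = np.diag([q_pos, q_pos, q_vel, q_vel])                       # process noise
R  = np.diag([r_meas, r_meas])                                   # measurement noise
P0 = np.diag([p_pos_init, p_pos_init, p_vel_init, p_vel_init])   # initial covariance
```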
97 | ## Install
98 | The tracking filter package depends on Eigen, Boost, and ROS. The repository can be installed as follows:
99 |
100 |
101 | Clone the filter repository into the src directory of your catkin workspace and build with catkin:
102 | ```
103 | $ git clone https://github.com/arplaboratory/multi_robot_tracking.git
104 | $ cd ..
105 | $ catkin build
106 | $ source devel/setup.bash
107 | ```
108 |
109 | Retrieve the exp2 rosbag data from the [ARPL data folder](https://drive.google.com/drive/folders/1dSBd08ocj_x8MGDS-cl1F2HeHKFADlqP?usp=sharing) after gaining access.
110 |
111 | ## Running
112 | This package runs the tracking filter only -- it does not provide image detection. If an image detection package is not available, you can run it either with recorded rosbag data or with ground-truth detections from simulation. Both options are shown below. Set the `filter` rosparam in the demo.launch file to select the phd or jpdaf filter.
113 |
114 | ```
115 | $ roslaunch multi_robot_tracking demo.launch
116 | ```
117 |
118 | A. Testing with a recorded rosbag.
119 | Move the rosbag data into the corresponding directory. Modify the subscribed topics in demo.launch if you want to use a different measurement input.
120 | ```
121 | $ roslaunch multi_robot_tracking demo.launch
122 | -- open a new tab and navigate to rosbag directory
123 | $ rosbag play "bag_file_name".bag --clock
124 | ```
125 |
126 | B. Testing with simulation.
127 | We can also use the Flightmare simulator for photorealistic rendering to create new data. Refer to the [Flightmare github page](https://github.com/arplaboratory/flightmare) for installation and setting up dependencies. This modified version of Flightmare also depends on a modified version of the RotorS simulator, which can be found at the [Rotors Simulator github page](https://github.com/arplaboratory/rotors_simulator).
128 | Running the commands below launches the Flightmare simulation with Gazebo for multiple drones; multimotion creates trajectories for all drones, and phd_tracker associates each target in image space.
129 | ```
130 | $ roslaunch flightros multi_robot.launch
131 | -- open a new tab
132 | $ roslaunch multi_robot_tracking demo.launch
133 | -- open a new tab
134 | $ rosrun multi_robot_tracking flightmare_test.py
135 | ```
136 |
137 | C. Testing with Matlab for evaluation.
138 | Move the corresponding rosbag data to the Matlab directory. Make sure the GM_PHD_Initialisation_drones.m file points to the correct directory for rosbag data.
139 |
140 |
--------------------------------------------------------------------------------
/config/params_flightmare_sim.yaml:
--------------------------------------------------------------------------------
1 | input_bbox_topic: "/hummingbird0/track/bounding_box"
2 | input_img_topic: "/hummingbird0/camera/rgb"
3 | input_imu_topic: "/hummingbird0/imu"
4 |
5 | init_pos_self_x: 0.0
6 | init_pos_self_y: 0.0
7 | init_pos_x_left: 3.4641
8 | init_pos_y_left: 2.0
9 | init_pos_x_right: 3.4641
10 | init_pos_y_right: -2.0
11 | id_self: 0
12 | id_left: 1
13 | id_right: 2
14 |
15 | camera_cx: 180
16 | camera_cy: 120
17 | camera_f: 120
18 |
19 | dt: 0.225
20 |
21 | viz_detection_height: 240
22 | viz_detection_width: 320
23 | viz_detection_offset_x: 0
24 | viz_detection_offset_y: 0
25 |
26 | enable_async_pdf: true
27 |
28 | use_generated_id: true
29 |
30 | phd:
31 | q_pos: 6.25
32 | q_vel: 12.5
33 | r_meas: 10
34 | p_pos_init: 5.0
35 | p_vel_init: 10.0
36 | prune_weight_threshold: 0.1
37 | prune_mahalanobis_threshold: 4.0
38 | extract_weight_threshold: 0.5
39 |
40 | jpdaf:
41 | q_pos: 100.0
42 | q_vel: 100.0
43 | p_pos_init: 10.0
44 | p_vel_init: 20.0
45 | r_meas: 2.0
46 | alpha_0_threshold: 0.95
47 | alpha_cam: 1.0
48 | associaiton_cost: 50.0
49 | beta_0_threshold: 0.5
50 | false_measurements_density: 0.00000003034
51 | gamma: 10.0
52 | max_missed_rate: 15
53 | min_acceptance_rate: 15
54 | probability_detection: 0.952662721893
55 |
--------------------------------------------------------------------------------
/config/params_snapdragon_flight.yaml:
--------------------------------------------------------------------------------
1 | input_bbox_topic: "/DragonPro1/snpe_ros/detections"
2 | input_img_topic: "/DragonPro1/image_publisher/image_raw"
3 | input_imu_topic: "/DragonPro1/imu"
4 |
5 | init_pos_self_x: 0.0
6 | init_pos_self_y: 0.0
7 | init_pos_x_left: 3.4641
8 | init_pos_y_left: 2.0
9 | init_pos_x_right: 3.4641
10 | init_pos_y_right: -2.0
11 | id_self: 0
12 | id_left: 1
13 | id_right: 2
14 |
15 | camera_cx: 329
16 | camera_cy: 243
17 | camera_f: 431
18 |
19 | dt: 0.225
20 |
21 | viz_detection_height: 168
22 | viz_detection_width: 224
23 | viz_detection_offset_x: 0
24 | viz_detection_offset_y: -28
25 |
26 | enable_async_pdf: true
27 |
28 | use_generated_id: true
29 |
30 | phd:
31 | q_pos: 6.25
32 | q_vel: 12.5
33 | r_meas: 100
34 | p_pos_init: 5.0
35 | p_vel_init: 2.0
36 | prune_weight_threshold: 0.1
37 | prune_mahalanobis_threshold: 4.0
38 | extract_weight_threshold: 0.5
39 |
40 | jpdaf:
41 | q_pos: 100.0
42 | q_vel: 100.0
43 | p_pos_init: 10.0
44 | p_vel_init: 20.0
45 | r_meas: 2.0
46 | alpha_0_threshold: 0.95
47 | alpha_cam: 1.0
48 | associaiton_cost: 50.0
49 | beta_0_threshold: 0.5
50 | false_measurements_density: 0.00000003034
51 | gamma: 10.0
52 | max_missed_rate: 15
53 | min_acceptance_rate: 15
54 | probability_detection: 0.952662721893
--------------------------------------------------------------------------------
/doc/tracking.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arplaboratory/multi_robot_tracking/08aa293d0a9d7aa74a3c4824770734af52e15578/doc/tracking.png
--------------------------------------------------------------------------------
/include/multi_robot_tracking/JpdafFilter.h:
--------------------------------------------------------------------------------
1 | #include <stdlib.h>
2 | #include <iostream>
3 | #include <vector>
4 |
5 | //ros
6 | #include <ros/ros.h>
7 | #include <geometry_msgs/PoseArray.h>
8 | #include <sensor_msgs/Imu.h>
9 | #include <sensor_msgs/Image.h>
10 |
11 | //detection bbox
12 | //#include
13 | //#include
14 |
15 | //CV
16 | #include <opencv2/opencv.hpp>
17 | #include <cv_bridge/cv_bridge.h>
18 |
19 | #include <image_transport/image_transport.h>
20 | #include <sensor_msgs/image_encodings.h>
21 | #include <nav_msgs/Odometry.h>
22 |
23 | //jpdaf
24 | #include <multi_robot_tracking/detection.h>
25 |
26 | #include <Eigen/Dense>
27 |
28 |
29 | #define PI 3.14159
30 |
31 |
32 |
33 | class JpdafFilter
34 | {
35 | public:
36 | JpdafFilter();
37 |
38 | //functions
39 | void initialize_matrix();
40 | void track(bool called_from_detection);
41 | void timer_callback(const ros::TimerEvent& event);
42 |
43 | ros::Time startTime,endTime,processTime;
44 |
45 |
46 | std::vector<Detection> get_detections_flightmare(const geometry_msgs::PoseArray latest_detection);
47 | Eigen::Vector3f compute_angular_velocity(double detection_timestamp);
48 | bool imu_buffer_ok(double detection_timestamp);
49 | bool image_buffer_ok(double detection_timestamp);
50 |
51 | void publishTracks(double detection_timestamp);
52 |
53 | Eigen::MatrixXf association_matrix(const std::vector<Detection> detections);
54 |
55 | void manage_new_old_tracks(std::vector<Detection> detections, std::vector<double> alphas_0, std::vector<double> betas_0, Eigen::Vector3f omega, double time_step);
56 | std::vector