├── yolox_people_detection
├── README.md
├── CMakeLists.txt
├── package.xml
├── launch
│ └── yolox_people_detection.launch
└── scripts
│ └── yolox_person_detection_node
├── detector_msg_to_pose_array
├── launch
│ └── to_pose_array.launch
├── config
│ └── detectors.yaml
├── CMakeLists.txt
├── package.xml
├── LICENSE
├── README.md
├── scripts
│ └── to_pose_array.py
└── CHANGELOG.rst
├── bayes_people_tracker
├── include
│ └── bayes_people_tracker
│ │ ├── asso_exception.h
│ │ ├── people_tracker.h
│ │ ├── people_marker.h
│ │ └── simple_tracking.h
├── config
│ ├── detectors_no_leg_detector.yaml
│ ├── detectors.yaml
│ ├── detectors_test.yaml
│ └── default.rviz
├── launch
│ ├── people_tracker.launch
│ └── people_tracker_test.launch
├── package.xml
├── CMakeLists.txt
├── share
│ └── people_tracker_rule.bmr
├── scripts
│ └── test_publisher.py
├── CHANGELOG.rst
└── src
│ └── bayes_people_tracker
│ └── people_tracker.cpp
├── bayes_people_tracker_msgs
├── CMakeLists.txt
├── msg
│ └── PeopleTracker.msg
└── package.xml
├── LICENSE
└── README.md
/yolox_people_detection/README.md:
--------------------------------------------------------------------------------
1 | # Yolox Person Detection
2 |
3 | Use yolox to detect people and publish their poses calculated from the bounding box center
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/launch/to_pose_array.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/yolox_people_detection/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 2.8.12)
project(yolox_people_detection)

# Catkin plus the ROS packages this node imports at runtime.
find_package(catkin REQUIRED COMPONENTS rospy clf_object_recognition_yolox)

# Nothing is exported: the package only ships a node script and launch files.
catkin_package()

# Node entry point, installed with execute permission.
install(PROGRAMS scripts/yolox_person_detection_node
        DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})

# Launch files.
install(DIRECTORY launch
        DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/config/detectors.yaml:
--------------------------------------------------------------------------------
to_pose_array:
  detectors: # Add detectors under this namespace
    leg_detector: # Name of detector (used internally to identify them). Has to be unique.
      topic: "/people_tracker_measurements" # The topic on which the geometry_msgs/PoseArray is published
      point_name: "pos" # The name of the point containing the coordinates in question
6 |
--------------------------------------------------------------------------------
/yolox_people_detection/package.xml:
--------------------------------------------------------------------------------
1 |
2 | >
3 | yolox_people_detection
4 | 0.0.0
5 | People detection with yolox
6 |
7 | Leroy Rügemer
8 |
9 | Apache 2.0
10 |
11 | catkin
12 |
13 | rospy
14 | clf_object_recognition_yolox
15 |
16 |
--------------------------------------------------------------------------------
/bayes_people_tracker/include/bayes_people_tracker/asso_exception.h:
--------------------------------------------------------------------------------
#ifndef ASSO_EXCEPTION_H
#define ASSO_EXCEPTION_H

#include <exception> // std::exception base class; the header name was missing here

// NOTE(review): kept for backward compatibility -- other headers in this
// package (e.g. people_tracker.h uses unqualified `string`) rely on std
// names leaking from this header. Do not remove without fixing those users.
using namespace std;

// Thrown when an unknown association algorithm name is configured
// (see matching_algorithm in the detectors.yaml files).
struct asso_exception: public exception
{
  const char* what() const throw()
  {
    return "Unknown association algorithm!";
  }
};

// Thrown when an unknown Kalman filter type is configured (filter_type).
struct filter_exception: public exception
{
  const char* what() const throw()
  {
    return "Unknown filter type!";
  }
};

// Thrown when an unknown observation model is configured (observation_model).
struct observ_exception: public exception
{
  const char* what() const throw()
  {
    return "Unknown observation model!";
  }
};

#endif // ASSO_EXCEPTION_H
32 |
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 2.8.3)
project(detector_msg_to_pose_array)

# Catkin plus the ROS client libraries used by scripts/to_pose_array.py.
find_package(catkin REQUIRED COMPONENTS genpy geometry_msgs roslib rospy rostopic)

###################################
## catkin specific configuration ##
###################################
catkin_package(CATKIN_DEPENDS genpy geometry_msgs roslib rospy rostopic)

#############
## Install ##
#############

# Node script, installed with execute permission.
install(PROGRAMS scripts/to_pose_array.py
        DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})

# Launch and configuration files.
install(DIRECTORY launch config
        DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})
35 |
--------------------------------------------------------------------------------
/bayes_people_tracker_msgs/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 2.8.3)
project(bayes_people_tracker_msgs)

## Find catkin macros and libraries
## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
## is used, also find other catkin packages
find_package(catkin REQUIRED COMPONENTS
  geometry_msgs
  message_generation
  people_msgs
  std_msgs
  visualization_msgs
)

#######################################
## Declare ROS messages and services ##
#######################################

add_message_files(
  FILES
  PeopleTracker.msg
)

generate_messages(
  DEPENDENCIES
  std_msgs
  geometry_msgs
)

###################################
## catkin specific configuration ##
###################################
# message_runtime added: message packages must export it so that downstream
# packages depending on this one get the generated message headers/modules.
catkin_package(
  CATKIN_DEPENDS geometry_msgs message_runtime std_msgs visualization_msgs
)
37 |
--------------------------------------------------------------------------------
/bayes_people_tracker_msgs/msg/PeopleTracker.msg:
--------------------------------------------------------------------------------
std_msgs/Header header
string[] uuids # Unique uuid5 (NAMESPACE_DNS) person id as string. Id is based on system time on start-up and tracker id. Array index matches the poses/velocities/distances/angles arrays
geometry_msgs/Pose[] poses # The real world poses of the detected people in the given target frame. Default: /map. Array index matches uuids array index
geometry_msgs/Vector3[] velocities # The velocities of the detected people in the given target frame. Default: /map. Array index matches uuids array index
float64[] distances # The distances of the detected persons to the robot (polar coordinates). Array index matches uuids array index.
float64[] angles # Angles of the detected persons to the coordinate frame's z axis (polar coordinates). Array index matches uuids array index.
float64 min_distance # The minimal distance in the distances array.
float64 min_distance_angle # The angle according to the minimal distance.
9 |
--------------------------------------------------------------------------------
/bayes_people_tracker_msgs/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | bayes_people_tracker_msgs
4 | 1.8.0
5 | The bayes_people_tracker_msgs package
6 |
7 | Marc Hanheide
8 |
9 | MIT
10 |
11 | Christian Dondrup
12 |
13 | catkin
14 |
15 | geometry_msgs
16 | message_generation
17 | people_msgs
18 | std_msgs
19 | visualization_msgs
20 |
21 | geometry_msgs
22 | message_runtime
23 | people_msgs
24 | std_msgs
25 | visualization_msgs
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | detector_msg_to_pose_array
4 | 1.9.0
5 | The detector_msg_to_pose_array package
6 |
7 | Marc Hanheide
8 |
9 | MIT
10 |
11 | https://github.com/strands-project/strands_perception_people
12 |
13 | Christian Dondrup
14 |
15 | catkin
16 | genpy
17 | geometry_msgs
18 | roslib
19 | rospy
20 | rostopic
21 | genpy
22 | geometry_msgs
23 | roslib
24 | rospy
25 | rostopic
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Christian Dondrup
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Christian Dondrup
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/bayes_people_tracker/config/detectors_no_leg_detector.yaml:
--------------------------------------------------------------------------------
bayes_people_tracker:
  prune_named: true
  filter_type: "UKF" # The Kalman filter type: EKF = Extended Kalman Filter, UKF = Unscented Kalman Filter
  cv_noise_params: # The noise for the constant velocity prediction model
    x: 1.2
    y: 1.2
  detectors: # Add detectors under this namespace
    openpose_tracker: # Name of detector (used internally to identify them). Has to be unique.
      topic: "/person_rec_lw/poses"
      cartesian_noise_params: # The noise for the cartesian observation model
        x: 0.2
        y: 0.2
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA" # The algorithm to match different detections. NN = Nearest Neighbour, NNJPDA = NN Joint Probability Data Association
    tracking_actuator:
      people_topic: "/tracking_act/people" # people_msgs/People input (identity detector), hence people_topic instead of topic
      cartesian_noise_params:
        x: 0.2
        y: 0.2
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA" # fixed: was misspelled "matching_alogrithm", which the tracker would not read
22 |
--------------------------------------------------------------------------------
/yolox_people_detection/launch/yolox_people_detection.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/bayes_people_tracker/launch/people_tracker.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/bayes_people_tracker/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | bayes_people_tracker
4 | 1.8.0
5 | The bayes_people_tracker package
6 |
7 | Marc Hanheide
8 |
9 | MIT
10 |
11 | https://github.com/strands-project/strands_perception_people
12 |
13 | Christian Dondrup
14 |
15 | catkin
16 |
17 | boost
18 | bayes_tracking
19 | bayes_people_tracker_msgs
20 | geometry_msgs
21 | message_generation
22 | people_msgs
23 | roscpp
24 | std_msgs
25 | tf
26 | visualization_msgs
27 |
28 | boost
29 | bayes_tracking
30 | bayes_people_tracker_msgs
31 | geometry_msgs
32 | message_runtime
33 | people_msgs
34 | roscpp
35 | std_msgs
36 | tf
37 | visualization_msgs
38 | rosbag_migration_rule
39 |
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/bayes_people_tracker/launch/people_tracker_test.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/README.md:
--------------------------------------------------------------------------------
1 | ## Message conversion package
2 | This package contains a node to convert messages of people detectors to a pose_array which is naturally understood by the bayes_people_tracker. Some detectors use custom message types which makes it hard to use them otherwise.
3 |
4 | All the information given on how to run the nodes should only be used if you need to run them separately. In normal cases please refer to the `perception_people_launch` package to start the whole perception pipeline.
5 |
6 | ### to_pose_array
7 | Small node that takes in an arbitrary message from a topic and extracts a pose according to a given identifier. The found poses are published as a geometry_msgs/PoseArray. The node is used to transform the output of any people detector to a pose array for the people_tracker. The node is configured using the detectors.yaml in the config directory:
8 |
9 | ```
10 | to_pose_array:
11 | detectors: # Add detectors under this namespace
12 | leg_detector: # Name of detector (used internally to identify them). Has to be unique.
13 | topic: "/people_tracker_measurements" # The topic on which the geometry_msgs/PoseArray is published
14 | point_name: "pos" # The name of the point containing the coordinates in question
15 | ```
16 |
17 | The parameter namespace `to_pose_array/detectors` can contain as many sub namespaces as needed. The above example shows the namespace `to_pose_array/detectors/leg_detector` which contains the information to parse the messages generated by the ros hydro package `leg_detector`.
18 |
19 | 1. `topic`: this string is the topic name under which the messages containing the positions are published.
20 | 1. `point_name`: this string specifies the identifier for the detected positions. In this case the leg_detector publishes a message which contains data like:
21 | ```
22 | pos.x
23 | pos.y
24 | pos.z
25 | ```
26 | The message is parsed for all occurrences of the `pos` identifier and the result is published as a `PoseArray`.
27 |
28 | Run with
29 | ```
30 | roslaunch detector_msg_to_pose_array to_pose_array.launch
31 | ```
32 |
--------------------------------------------------------------------------------
/bayes_people_tracker/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 2.8.3)
project(bayes_people_tracker)

## Find catkin macros and libraries
## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
## is used, also find other catkin packages
find_package(catkin REQUIRED COMPONENTS
  bayes_tracking
  geometry_msgs
  message_generation
  people_msgs
  roscpp
  std_msgs
  tf2_ros
  tf2_geometry_msgs
  visualization_msgs
  bayes_people_tracker_msgs
)
find_package(Boost REQUIRED COMPONENTS thread)

###################################
## catkin specific configuration ##
###################################
# Fixed: this package builds against tf2_ros/tf2_geometry_msgs (see
# find_package above), so export those instead of the legacy "tf" that was
# listed here before.
catkin_package(
  INCLUDE_DIRS include
  CATKIN_DEPENDS bayes_people_tracker_msgs bayes_tracking geometry_msgs roscpp std_msgs tf2_ros tf2_geometry_msgs visualization_msgs
)

###########
## Build ##
###########

include_directories(
  include
  ${catkin_INCLUDE_DIRS}
  ${Boost_INCLUDE_DIRS}
)

add_executable(bayes_people_tracker
  src/bayes_people_tracker/people_tracker.cpp
)

# Ensure generated message headers of all catkin dependencies exist before
# compiling this target.
add_dependencies(bayes_people_tracker ${catkin_EXPORTED_TARGETS})

target_link_libraries(bayes_people_tracker
  ${catkin_LIBRARIES}
  ${Boost_LIBRARIES}
)

#############
## Install ##
#############

install(TARGETS bayes_people_tracker
  ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
  LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
  RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)

# Fixed: install(PROGRAMS ...) instead of install(FILES ...) so the script
# keeps its execute permission when installed.
install(PROGRAMS scripts/test_publisher.py
  DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)

install(DIRECTORY include/${PROJECT_NAME}/
  DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
  FILES_MATCHING PATTERN "*.h"
  PATTERN ".git" EXCLUDE
)

install(DIRECTORY launch
  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
)

install(DIRECTORY config
  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
)

# Contains the rosbag migration rule for the old in-package PeopleTracker msg.
install(DIRECTORY share
  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
)
81 |
--------------------------------------------------------------------------------
/bayes_people_tracker/config/detectors.yaml:
--------------------------------------------------------------------------------
bayes_people_tracker:
  prune_named: true # Prune a named person if a tracker found the named person somewhere else
  filter_type: "UKF" # The Kalman filter type: EKF = Extended Kalman Filter, UKF = Unscented Kalman Filter
  cv_noise_params: # The noise for the constant velocity prediction model
    x: 1.2
    y: 1.2
  detectors: # Add detectors under this namespace
    leg_detector: # Name of detector (used internally to identify them). Has to be unique.
      topic: "/to_pose_array/leg_detector" # The topic on which the geometry_msgs/PoseArray is published
      cartesian_noise_params: # The noise for the cartesian observation model
        x: 0.2
        y: 0.2
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA" # The algorithm to match different detections. NN = Nearest Neighbour, NNJPDA = NN Joint Probability Data Association
    openpose_tracker: # Name of detector (used internally to identify them). Has to be unique.
      topic: "/person_rec_lw/poses"
      cartesian_noise_params: # The noise for the cartesian observation model
        x: 0.2
        y: 0.2
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA"
    tracking_actuator:
      people_topic: "/tracking_act/people" # people_msgs/People input (identity detector), hence people_topic instead of topic
      cartesian_noise_params:
        x: 0.2
        y: 0.2
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA" # fixed: was misspelled "matching_alogrithm"
    robocup_tracker:
      people_topic: "/robocup_tracker/people"
      cartesian_noise_params:
        x: 0.2
        y: 0.2
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA" # fixed: was misspelled "matching_alogrithm"
    mujoco_tracker:
      topic: "/mujoco_persons/poses"
      cartesian_noise_params:
        x: 0.1
        y: 0.1
      observation_model: "CARTESIAN"
      matching_algorithm: "NNJPDA" # fixed: was misspelled "matching_alogrithm"
43 |
--------------------------------------------------------------------------------
/bayes_people_tracker/config/detectors_test.yaml:
--------------------------------------------------------------------------------
bayes_people_tracker:
  filter_type: "EKF" # The Kalman filter type: EKF = Extended Kalman Filter, UKF = Unscented Kalman Filter
  prune_named: true # make sure identity tracks are unique (prunes the one with high stdev); fixed: this key was listed twice (duplicate YAML mapping key)
  cv_noise_params: # The noise for the constant velocity prediction model
    x: 0.4
    y: 0.4
  std_limit: 5.0 # pruning tracks that have std(X) > std_limit
  detectors: # Add detectors under this namespace
    gps: # Name of detector (used internally to identify them). Has to be unique.
      people_topic: "/tracker_tester/sensor/people/gps" # The topic on which the people_msgs/People is published (identity detector)
      noise_params: # The noise for the cartesian observation model
        x: 2.0
        y: 2.0
      observation_model: "CARTESIAN" # CARTESIAN observation model (POLAR is alternative)
      seq_size: 2 # minimum `seq_size` number of detections received in `seq_time` seconds
      seq_time: 5.0 # to create new track
      matching_algorithm: "NN_LABELED" # The algorithm to match different detections. NN = Nearest Neighbour, NN_LABELED for identifying detections
    marvelmind:
      people_topic: "/tracker_tester/sensor/people/marvelmind"
      noise_params:
        x: .2
        y: .2
      observation_model: "CARTESIAN"
      seq_size: 1
      seq_time: 6.0
      matching_algorithm: "NN_LABELED"
    vision:
      topic: "/tracker_tester/sensor/pose_array/serhan" # The topic on which the geometry_msgs/PoseArray is published (anonymous detections here)
      noise_params:
        x: 0.04
        y: 0.04
      seq_size: 5
      seq_time: 1.0
      observation_model: "CARTESIAN"
      matching_algorithm: "NN" # The algorithm to match different detections. NN = Nearest Neighbour, NNJPDA = NN Joint Probability Data Association
37 |
--------------------------------------------------------------------------------
/bayes_people_tracker/include/bayes_people_tracker/people_tracker.h:
--------------------------------------------------------------------------------
1 | #ifndef PEDESTRIANLOCALISATION_H
2 | #define PEDESTRIANLOCALISATION_H
3 |
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 |
19 | #include
20 | #include
21 | #include
22 |
23 | #include
24 |
25 | #include
26 |
27 | #include
28 | #include
29 | #include
30 |
31 | #include "bayes_people_tracker_msgs/PeopleTracker.h"
32 | #include "bayes_people_tracker/simple_tracking.h"
33 | #include "bayes_people_tracker/asso_exception.h"
34 | #include "bayes_people_tracker/people_marker.h"
35 |
36 | #define INVALID_ID -1 // For publishing trajectories
37 |
// NOTE(review): this header appears mangled by text extraction -- every
// template argument list (std::vector<...>, std::map<...>, SimpleTracking<...>)
// and the template parameter list on num_to_str are missing, so it cannot
// compile as shown. Restore the angle-bracket contents from version control.
// The comments below only describe what is visible from the remaining code.
class PeopleTracker
{
public:
    PeopleTracker();

private:
    // Main tracking loop (runs at tracker_frequency -- TODO confirm against people_tracker.cpp).
    void trackingThread();
    // Publishing overloads for the different output message types.
    void publishDetections(bayes_people_tracker_msgs::PeopleTracker msg);
    void publishDetections(geometry_msgs::PoseStamped msg);
    void publishDetections(geometry_msgs::PoseArray msg);
    void publishDetections(people_msgs::People msg);
    // Aggregate publish; parameter names mirror the PeopleTracker.msg fields
    // (uuids/distances/angles). Element types were stripped -- see NOTE above.
    void publishDetections(double time_sec,
                           geometry_msgs::Pose closest,
                           std::vector ppl,
                           std::vector vels,
                           std::vector vars,
                           std::vector uuids,
                           std::vector distances,
                           std::vector angles,
                           double min_dist,
                           double angle);
    void publishTrajectory(std::vector poses,
                           std::vector vels,
                           std::vector vars,
                           std::vector pids,
                           ros::Publisher& pub);
    void createVisualisation(std::vector points, std::vector vars, std::vector pids, ros::Publisher& pub, std::vector uuids);
    // Cartesian point -> polar components (cf. distances/angles in PeopleTracker.msg).
    std::vector cartesianToPolar(geometry_msgs::Point point);
    // Per-detector callbacks; `detector` is presumably the config key from
    // detectors.yaml (topic vs. people_topic entries) -- TODO confirm.
    void detectorCallback(const geometry_msgs::PoseArray::ConstPtr &pta, string detector);
    void detectorCallback_people(const people_msgs::People::ConstPtr &pta, string detector);
    void connectCallback(ros::NodeHandle &n);
    void parseParams(ros::NodeHandle);

    // Builds a uuid5 name from `time` plus the tracker id and returns its
    // string form, so ids are stable within one run (cf. uuids in the msg).
    std::string generateUUID(std::string time, long id) {
        boost::uuids::name_generator gen(dns_namespace_uuid);
        time += num_to_str(id);

        return num_to_str(gen(time.c_str()));
    }

    // Stringify any streamable value via std::stringstream.
    // NOTE(review): the template parameter list was stripped here as well.
    template
    std::string num_to_str(T num) {
        std::stringstream ss;
        ss << num;
        return ss.str();
    }

    ros::Publisher pub_detect;
    ros::Publisher pub_pose;
    ros::Publisher pub_pose_array;
    ros::Publisher pub_people;
    ros::Publisher pub_trajectory;
    ros::Publisher pub_marker;
    tf2_ros::Buffer m_tf_buffer;
    tf2_ros::TransformListener m_tf;   // feeds m_tf_buffer
    std::string target_frame;
    std::string base_frame;

    double tracker_frequency;
    bool publish_detections;
    unsigned long detect_seq;          // message sequence counters
    unsigned long marker_seq;
    double startup_time;
    std::string startup_time_str;      // presumably the `time` fed to generateUUID -- TODO confirm

    boost::uuids::uuid dns_namespace_uuid;  // namespace for the uuid5 name_generator

    PeopleMarker pm;

    // Filter instances; presumably only the one matching filter_type
    // (EKF/UKF in the shipped configs) is allocated -- TODO confirm.
    SimpleTracking *ekf = NULL;
    SimpleTracking *ukf = NULL;
    SimpleTracking *pf = NULL;
    std::map, ros::Subscriber> subscribers;        // PoseArray detectors (detectorCallback)
    std::map, ros::Subscriber> subscribers_people; // people_msgs detectors (detectorCallback_people)
    std::vector > previous_poses;                  // history used for trajectory publishing
};
114 |
115 | #endif // PEDESTRIANLOCALISATION_H
116 |
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/scripts/to_pose_array.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | import roslib
5 | import rostopic
6 | import geometry_msgs.msg
7 | import genpy
8 |
9 |
class ToPoseArray():
    """Republish arbitrary people-detector messages as geometry_msgs/PoseArray.

    Detector descriptions are read from the private ``~detectors`` parameter
    namespace. Every entry must provide:

    * ``topic``: the topic the detector publishes on (any message type), and
    * ``point_name``: the field name holding the x/y coordinates.

    For each detector a ``~<name>`` PoseArray publisher is created; incoming
    messages are scanned for ``point_name`` occurrences and the found points
    are republished as poses projected onto the ground plane.
    """

    def __init__(self):
        detectors = rospy.get_param("~detectors")
        # Keep the subscriptions referenced for the lifetime of this object
        # (the original stored them in a discarded local list).
        self.subscribers = []
        for elem in detectors:
            topic = detectors[elem]["topic"]
            point = detectors[elem]["point_name"]
            rospy.loginfo(
                "Found detector '%s' with topic '%s'. "
                "Waiting for topic type...",
                elem,
                topic
            )
            # Blocks until the topic is advertised so the message class can
            # be resolved dynamically. (Renamed from `type`, which shadowed
            # the builtin.)
            topic_type = rostopic.get_topic_type(topic, True)[0]
            rospy.loginfo("Got topic type: %s.", topic_type)
            pub = rospy.Publisher("~" + elem, geometry_msgs.msg.PoseArray, queue_size=0)
            self.subscribers.append(
                rospy.Subscriber(
                    topic,
                    roslib.message.get_message_class(topic_type),
                    callback=self.callback,
                    callback_args={
                        "detector": elem,
                        "point_name": point,
                        "publisher": pub
                    }
                )
            )

    def callback(self, msg, args):
        """Convert one detector message into a PoseArray and publish it.

        :param msg: incoming detector message (any ROS message type).
        :param args: dict with ``detector``, ``point_name`` and ``publisher``
            as registered in ``__init__``.
        """
        pose_array = geometry_msgs.msg.PoseArray()
        msg = self.msg_to_document(msg)
        headers = []
        # Find a header that contains a frame_id; if several match, the last
        # non-empty one wins.
        self.find_key(msg, "header", headers)
        for elem in headers:
            if not elem["frame_id"] == '':
                pose_array.header.seq = elem["seq"]
                pose_array.header.frame_id = elem["frame_id"]
                pose_array.header.stamp.secs = elem["stamp"]["secs"]
                pose_array.header.stamp.nsecs = elem["stamp"]["nsecs"]
        positions = []
        # Find x and y values for the given position key
        self.find_key(msg, args["point_name"], positions)
        for elem in positions:
            pose = geometry_msgs.msg.Pose()
            pose.position.x = elem['x']
            pose.position.y = elem['y']
            pose.position.z = 0.0  # Project to ground plane
            pose.orientation.w = 1.0  # Currently no orientation
            pose_array.poses.append(pose)
        args["publisher"].publish(pose_array)

    def find_key(self, dictionary, key, result_list):
        """Recursively collect every value stored under ``key``.

        Walks a nested dict/list structure (as produced by msg_to_document)
        and appends matching values to ``result_list``. Matching values are
        appended as-is and not searched further; scalars are never descended
        into.

        This replaces the original exception-driven traversal, which indexed
        ``dictionary[elem]`` outside its try-block and therefore raised an
        uncaught TypeError on Python 3 when a plain string leaf was reached
        (str gained ``__iter__`` in Python 3).
        """
        if isinstance(dictionary, dict):
            for k in dictionary:
                if str(k) == key:
                    result_list.append(dictionary[k])
                else:
                    self.find_key(dictionary[k], key, result_list)
        elif isinstance(dictionary, (list, tuple)):
            for item in dictionary:
                self.find_key(item, key, result_list)

    def msg_to_document(self, msg):
        """
        Given a ROS message, turn it into a (nested) dictionary.

        >>> from geometry_msgs.msg import Pose
        >>> msg_to_document(Pose())
        {'orientation': {'w': 0.0, 'x': 0.0, 'y': 0.0, 'z': 0.0},
         'position': {'x': 0.0, 'y': 0.0, 'z': 0.0}}

        :Args:
            | msg (ROS Message): An instance of a ROS message to convert
        :Returns:
            | dict : A dictionary representation of the supplied message.
        """

        d = {}

        # genpy Time/Duration objects have __slots__ but no _slot_types;
        # pad with None so zip() below still pairs every slot.
        slot_types = []
        if hasattr(msg, '_slot_types'):
            slot_types = msg._slot_types
        else:
            slot_types = [None] * len(msg.__slots__)

        for (attr, type) in zip(msg.__slots__, slot_types):
            d[attr] = self.sanitize_value(attr, getattr(msg, attr), type)

        return d

    def sanitize_value(self, attr, v, type):
        """
        De-rosify a msg.

        Internal function used to convert ROS messages into dictionaries.

        :Args:
            | attr(str): the ROS message slot name the value came from
            | v: the value from the message's slot to convert to a plain type
            | type (str): The ROS type of the value passed, as given by the message slot_types member.
        :Returns:
            | A sanitized version of v.
        """

        # uint8[] arrays are deserialised as str in Python 2; keep raw bytes.
        if isinstance(v, str) and type == 'uint8[]':
            v = bytes(v)

        if isinstance(v, rospy.Message):
            return self.msg_to_document(v)
        elif isinstance(v, genpy.rostime.Time):
            return self.msg_to_document(v)
        elif isinstance(v, genpy.rostime.Duration):
            return self.msg_to_document(v)
        elif isinstance(v, list):
            result = []
            for t in v:
                if hasattr(t, '_type'):
                    result.append(self.sanitize_value(None, t, t._type))
                else:
                    result.append(self.sanitize_value(None, t, None))
            return result
        else:
            return v
139 |
if __name__ == '__main__':
    # Initialise the ROS node, build all subscribers/publishers from the
    # ~detectors parameter namespace, then hand control to the ROS event loop.
    rospy.init_node('to_pose_array')
    tpa = ToPoseArray()
    rospy.spin()
144 |
--------------------------------------------------------------------------------
/bayes_people_tracker/share/people_tracker_rule.bmr:
--------------------------------------------------------------------------------
# Auto-generated rosbag migration rule (consumed by `rosbag check` / `rosbag fix`).
# Upgrades recorded bayes_people_tracker/PeopleTracker messages from the old
# schema to the current one, whose only addition is the `velocities` field
# (geometry_msgs/Vector3[]). The full-text strings below are matched verbatim
# by the migration machinery, so their exact content must not be edited.
class update_bayes_people_tracker_PeopleTracker_748ea76bfa4fbc576a15a2b7a15777db(MessageUpdateRule):
    # Complete text of the OLD message definition, including every embedded
    # sub-message definition, used to identify messages needing migration.
    old_type = "bayes_people_tracker/PeopleTracker"
    old_full_text = """
std_msgs/Header header
string[] uuids # Unique uuid5 (NAMESPACE_DNS) person id as string. Id is based on system time on start-up and tracker id. Array index matches ids array index
geometry_msgs/Pose[] poses # The real world poses of the detected people in the given target frame. Default: /map. Array index matches ids/uuids array index
float64[] distances # The distances of the detected persons to the robot (polar coordinates). Array index matches ids array index.
float64[] angles # Angles of the detected persons to the coordinate frames z axis (polar coordinates). Array index matches ids array index.
float64 min_distance # The minimal distance in the distances array.
float64 min_distance_angle # The angle according to the minimal distance.

================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id

================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of postion and orientation.
Point position
Quaternion orientation

================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z

================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.

float64 x
float64 y
float64 z
float64 w
"""

    # Complete text of the NEW message definition; differs from the old one
    # only by the added `velocities` field and the Vector3 sub-definition.
    new_type = "bayes_people_tracker/PeopleTracker"
    new_full_text = """
std_msgs/Header header
string[] uuids # Unique uuid5 (NAMESPACE_DNS) person id as string. Id is based on system time on start-up and tracker id. Array index matches ids array index
geometry_msgs/Pose[] poses # The real world poses of the detected people in the given target frame. Default: /map. Array index matches uuids array index
geometry_msgs/Vector3[] velocities # The velocities of the detected people in the given target frame. Default: /map. Array index matches uuids array index
float64[] distances # The distances of the detected persons to the robot (polar coordinates). Array index matches ids array index.
float64[] angles # Angles of the detected persons to the coordinate frames z axis (polar coordinates). Array index matches ids array index.
float64 min_distance # The minimal distance in the distances array.
float64 min_distance_angle # The angle according to the minimal distance.

================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id

================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of postion and orientation.
Point position
Quaternion orientation

================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z

================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.

float64 x
float64 y
float64 z
float64 w

================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.

float64 x
float64 y
float64 z
"""

    order = 0
    # Sub-message types whose migration is delegated to their own rules.
    migrated_types = [
        ("Header","Header"),
        ("geometry_msgs/Pose","geometry_msgs/Pose"),]

    valid = True

    def update(self, old_msg, new_msg):
        # Copy every old field across unchanged; the only field without a
        # counterpart in the old message is `velocities`.
        self.migrate(old_msg.header, new_msg.header)
        new_msg.uuids = old_msg.uuids
        self.migrate_array(old_msg.poses, new_msg.poses, "geometry_msgs/Pose")
        #No matching field name in old message
        # NOTE(review): list-repeat aliases a SINGLE Vector3 instance across
        # all entries. Harmless here since the zero vectors are only
        # serialized, never mutated individually — confirm before reusing
        # this pattern elsewhere.
        new_msg.velocities = [self.get_new_class("geometry_msgs/Vector3")()] * len(old_msg.distances)
        new_msg.distances = old_msg.distances
        new_msg.angles = old_msg.angles
        new_msg.min_distance = old_msg.min_distance
        new_msg.min_distance_angle = old_msg.min_distance_angle
--------------------------------------------------------------------------------
/detector_msg_to_pose_array/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
2 | Changelog for package detector_msg_to_pose_array
3 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
4 |
5 | 1.9.0 (2019-12-19)
6 | ------------------
7 |
8 | 1.8.1 (2019-05-21)
9 | ------------------
10 |
11 | 1.8.0 (2018-10-01)
12 | ------------------
13 |
14 | 1.7.0 (2018-09-04)
15 | ------------------
16 |
17 | 1.6.0 (2017-09-01)
18 | ------------------
19 | * changelogs
20 | * changed from cdondrup to marc
21 | * Contributors: Marc Hanheide
22 |
23 | 1.5.5 (2017-07-02)
24 | ------------------
25 | * updated changelogs
26 | * Contributors: Jenkins
27 |
28 | 1.5.4 (2016-11-03)
29 | ------------------
30 | * updated changelogs
31 | * Contributors: Jenkins
32 |
33 | 1.5.3 (2016-07-04)
34 | ------------------
35 | * updated changelogs
36 | * Contributors: Jenkins
37 |
38 | 1.5.2 (2016-07-02 20:52)
39 | ------------------------
40 | * updated changelogs
41 | * Contributors: Jenkins
42 |
43 | 1.5.1 (2016-07-02 17:54)
44 | ------------------------
45 | * updated changelogs
46 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into people_sitting_feature
47 | * Contributors: Ferdian Jovan, Jenkins
48 |
49 | 1.5.0 (2016-03-15)
50 | ------------------
51 | * updated changelogs
52 | * Contributors: Jenkins
53 |
54 | 1.4.0 (2016-02-17)
55 | ------------------
56 | * updated changelogs
57 | * Contributors: Jenkins
58 |
59 | 1.3.1 (2016-02-11)
60 | ------------------
61 | * updated changelogs
62 | * Merge remote-tracking branch 'upstream/indigo-devel' into indigo-devel
63 | * Contributors: Alexander Hermans, Jenkins
64 |
65 | 1.3.0 (2016-02-01)
66 | ------------------
67 | * updated changelogs
68 | * Contributors: Jenkins
69 |
70 | 1.2.1 (2016-01-28)
71 | ------------------
72 | * updated changelogs
73 | * Contributors: Jenkins
74 |
75 | 1.2.0 (2015-11-11)
76 | ------------------
77 | * updated changelogs
78 | * Contributors: Jenkins
79 |
80 | 1.1.8 (2015-09-03)
81 | ------------------
82 | * updated changelogs
83 | * Contributors: Jenkins
84 |
85 | 1.1.7 (2015-08-25)
86 | ------------------
87 |
88 | 1.1.6 (2015-06-24)
89 | ------------------
90 | * updated changelogs
91 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
92 | * 1.1.5
93 | * updated changelogs
94 | * 1.1.4
95 | * updated changelogs
96 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
97 | * Contributors: Ferdian Jovan, Jenkins
98 |
99 | 1.1.5 (2015-05-22)
100 | ------------------
101 | * updated changelogs
102 | * Contributors: Jenkins
103 |
104 | 1.1.4 (2015-05-10)
105 | ------------------
106 | * updated changelogs
107 | * Contributors: Jenkins
108 |
109 | 1.1.3 (2015-04-10)
110 | ------------------
111 | * updated changelogs
112 | * Merge branch 'indigo-devel' of http://github.com/strands-project/strands_perception_people into topolog
113 | Conflicts:
114 | bayes_people_tracker_logging/launch/logging.launch
115 | * Contributors: Christian Dondrup, Jenkins
116 |
117 | 1.1.2 (2015-04-07)
118 | ------------------
119 | * updated changelogs
120 | * Contributors: Jenkins
121 |
122 | 1.1.1 (2015-04-03)
123 | ------------------
124 | * updated changelogs
125 | * Contributors: Jenkins
126 |
127 | 1.1.0 (2015-04-02)
128 | ------------------
129 | * Merge pull request `#157 `_ from cdondrup/respawn
130 | Adding respawn flags
131 | * Adding respawn flags
132 | Closes `#152 `_
133 | Bad workaround for `#156 `_ and `#76 `_
134 | * Contributors: Christian Dondrup
135 |
136 | 1.0.0 (2015-03-10)
137 | ------------------
138 | * Updating changelogs.
139 | * Contributors: Christian Dondrup
140 |
141 | 0.1.4 (2015-03-06)
142 | ------------------
143 | * updated changelogs
144 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
145 | * Contributors: Ferdian Jovan, Jenkins
146 |
147 | 0.1.3 (2015-02-25)
148 | ------------------
149 | * updated changelogs
150 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
151 | * Contributors: Ferdian Jovan, Jenkins
152 |
153 | 0.1.2 (2015-02-20)
154 | ------------------
155 |
156 | 0.1.1 (2015-02-18 18:37)
157 | ------------------------
158 | * updated changelogs
159 | * Contributors: Jenkins
160 |
161 | 0.1.0 (2015-02-18 16:59)
162 | ------------------------
163 | * Updating changelogs
164 | * Merge pull request `#130 `_ from cdondrup/fixomatic
165 | Preparing indigo-devel to be released
166 | * Setting correct version number. The changelogs will be regenerated because the ones from the release branch would not be consistent with the changes made in the devel branch.
167 | * Adding explicit queue_size argument to get rid of indigo warning
168 | * Changed launch files to new format.
169 | * Merge pull request `#114 `_ from cdondrup/hydro-devel
170 | Changed launch files to new format.
171 | * Changed launch files to new format.
172 | * Merge pull request `#105 `_ from lucasb-eyer/hydro-devel
173 | Fixing `#101 `_ (Licenses)
174 | * Added LICENSE files. Fixes `#101 `_
175 | * Merge pull request `#97 `_ from strands-project/dependencies
176 | Release preparations
177 | * Fixed missing things
178 | * Prepared detector_msg_to_pose_array for release.
179 | * Merge pull request `#96 `_ from cdondrup/rename
180 | Renaming most of the packages to comply with ROS naming conventions
181 | * Splitting utils package into seperate packages.
182 | * Contributors: Christian Dondrup, Lucas Beyer
183 |
--------------------------------------------------------------------------------
/bayes_people_tracker/include/bayes_people_tracker/people_marker.h:
--------------------------------------------------------------------------------
1 | #ifndef PEOPLE_MARKER_H
2 | #define PEOPLE_MARKER_H
3 |
// NOTE(review): the original include targets were lost to markup stripping
// (bare `#include` lines); the set below is reconstructed from the types used
// in this header — verify against the original file.
#include <ros/ros.h>
#include <geometry_msgs/Point.h>
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/Vector3.h>
#include <std_msgs/ColorRGBA.h>
#include <visualization_msgs/Marker.h>
#include <visualization_msgs/MarkerArray.h>
#include <tf/transform_datatypes.h>
#include <math.h>


#include <vector>
#include <string>
17 |
18 | class PeopleMarker {
19 |
20 | public:
21 | PeopleMarker() : marker_seq(0) {}
22 |
23 | geometry_msgs::Point generate_position(geometry_msgs::Point centre, double angle, double dx, double dy)
24 | {
25 | float s = sin(angle);
26 | float c = cos(angle);
27 |
28 | // rotate point
29 | geometry_msgs::Point res;
30 | res.x = dx * c - dy * s;
31 | res.y = dx * s + dy * c;
32 |
33 | // translate point back:
34 | res.x += centre.x;
35 | res.y += centre.y;
36 | res.z = centre.z;
37 | return res;
38 | }
39 |
40 | geometry_msgs::Pose generate_extremity_position(geometry_msgs::Pose centre, double dx, double dy, double z) {
41 | double angle = tf::getYaw(centre.orientation) + M_PI/2;
42 | geometry_msgs::Point p = centre.position;
43 | p.z = z;
44 | centre.position = generate_position(p, angle, dx, dy);
45 | return centre;
46 | }
47 |
48 | visualization_msgs::Marker createMarker(
49 | int id,
50 | int type,
51 | int action,
52 | geometry_msgs::Pose pose,
53 | geometry_msgs::Vector3 scale,
54 | std_msgs::ColorRGBA color,
55 | std::string target_frame) {
56 | visualization_msgs::Marker marker;
57 | marker.header.frame_id = target_frame;
58 | marker.header.stamp = ros::Time::now();
59 | marker.header.seq = ++marker_seq;
60 | marker.ns = "people_tracker";
61 | marker.id = id;
62 | marker.type = type;
63 | marker.action = action;
64 | marker.pose = pose;
65 | marker.scale = scale;
66 | marker.color = color;
67 | marker.lifetime = ros::Duration(1);
68 | return marker;
69 | }
70 |
71 | visualization_msgs::Marker createHead(
72 | int id,
73 | int action,
74 | geometry_msgs::Pose pose,
75 | std::string target_frame) {
76 | geometry_msgs::Vector3 scale;
77 | scale.x = 0.3;
78 | scale.y = 0.3;
79 | scale.z = 0.3;
80 | std_msgs::ColorRGBA color;
81 | color.a = 1.0;
82 | color.r = 233.0F/255.0F;
83 | color.g = 150.0F/255.0F;
84 | color.b = 122.0F/255.0F;
85 | pose.position.z = 1.6;
86 | return createMarker(id, visualization_msgs::Marker::SPHERE, action, pose, scale, color, target_frame);
87 | }
88 |
89 | visualization_msgs::Marker createBody(
90 | int id,
91 | int action,
92 | geometry_msgs::Pose pose,
93 | std::string target_frame) {
94 | geometry_msgs::Vector3 scale;
95 | scale.x = 0.35;
96 | scale.y = 0.35;
97 | scale.z = 0.7;
98 | std_msgs::ColorRGBA color;
99 | color.a = 1.0;
100 | color.r = 139.0F/255.0F;
101 | color.g = 0.0F/255.0F;
102 | color.b = 0.0F/255.0F;
103 | pose.position.z = 1.1;
104 | return createMarker(id, visualization_msgs::Marker::CYLINDER, action, pose, scale, color, target_frame);
105 | }
106 |
107 | std::vector createLegs(
108 | int idl, int idr,
109 | int action,
110 | geometry_msgs::Pose pose,
111 | std::string target_frame) {
112 | std::vector legs;
113 | geometry_msgs::Vector3 scale;
114 | scale.x = 0.15;
115 | scale.y = 0.2;
116 | scale.z = 0.8;
117 | std_msgs::ColorRGBA color;
118 | color.a = 1.0;
119 | color.r = 0.0F/255.0F;
120 | color.g = 0.0F/255.0F;
121 | color.b = 139.0F/255.0F;
122 | legs.push_back(createMarker(idl, visualization_msgs::Marker::CYLINDER, action, generate_extremity_position(pose, 0.1, 0.0, 0.4), scale, color, target_frame));
123 | legs.push_back(createMarker(idr, visualization_msgs::Marker::CYLINDER, action, generate_extremity_position(pose, -0.1, 0.0, 0.4), scale, color, target_frame));
124 | return legs;
125 | }
126 |
127 | std::vector createArms(
128 | int idl, int idr,
129 | int action,
130 | geometry_msgs::Pose pose,
131 | std::string target_frame) {
132 | std::vector arms;
133 | geometry_msgs::Vector3 scale;
134 | scale.x = 0.1;
135 | scale.y = 0.1;
136 | scale.z = 0.7;
137 | std_msgs::ColorRGBA color;
138 | color.a = 1.0;
139 | color.r = 139.0F/255.0F;
140 | color.g = 0.0F/255.0F;
141 | color.b = 0.0F/255.0F;
142 | arms.push_back(createMarker(idl, visualization_msgs::Marker::CYLINDER, action, generate_extremity_position(pose, 0.2, 0.0, 1.1), scale, color, target_frame));
143 | arms.push_back(createMarker(idr, visualization_msgs::Marker::CYLINDER, action, generate_extremity_position(pose, -0.2, 0.0, 1.1), scale, color, target_frame));
144 | return arms;
145 | }
146 |
147 | std::vector createHuman(
148 | int id,
149 | geometry_msgs::Pose pose,
150 | std::string target_frame) {
151 | std::vector human;
152 | human.push_back(createHead(id++, visualization_msgs::Marker::ADD, pose, target_frame));
153 | human.push_back(createBody(id++, visualization_msgs::Marker::ADD, pose, target_frame));
154 | std::vector legs = createLegs(id++, id++, visualization_msgs::Marker::ADD, pose, target_frame);
155 | human.insert(human.end(), legs.begin(), legs.end());
156 | std::vector arms = createArms(id++, id++, visualization_msgs::Marker::ADD, pose, target_frame);
157 | human.insert(human.end(), arms.begin(), arms.end());
158 | return human;
159 | }
160 |
161 | private:
162 |
163 | unsigned long marker_seq;
164 |
165 | };
166 |
167 |
168 | #endif // PEOPLE_MARKER_H
169 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## People Tracker
2 | This package uses the bayes_tracking library developed by Nicola Bellotto (University of Lincoln), please cite with: [10.5281/zenodo.15825](https://zenodo.org/record/15825) and [1]
3 |
4 | The people_tracker uses a single config file to add an arbitrary amount of detectors. The file `config/detectors.yaml` contains the necessary information for the upper_body_detector and the ROS leg_detector (see `to_pose_array` in detector_msg_to_pose_array/README.md):
5 |
6 | ```
7 | bayes_people_tracker:
8 | filter_type: "UKF" # The Kalman filter type: EKF = Extended Kalman Filter, UKF = Unscented Kalman Filter
9 | cv_noise_params: # The noise for the constant velocity prediction model
10 | x: 1.4
11 | y: 1.4
12 | detectors: # Add detectors under this namespace
13 | upper_body_detector: # Name of detector (used internally to identify them). Has to be unique.
14 | topic: "/upper_body_detector/bounding_box_centres" # The topic on which the geometry_msgs/PoseArray is published
15 | cartesian_noise_params: # The noise for the cartesian observation model
16 | x: 0.5
17 | y: 0.5
18 | matching_algorithm: "NNJPDA" # The algorithm to match different detections. NN = Nearest Neighbour, NNJPDA = NN Joint Probability Data Association
19 | leg_detector: # Name of detector (used internally to identify them). Has to be unique.
20 | topic: "/to_pose_array/leg_detector" # The topic on which the geometry_msgs/PoseArray is published
21 | cartesian_noise_params: # The noise for the cartesian observation model
22 | x: 0.2
23 | y: 0.2
24 | matching_algorithm: "NNJPDA" # The algorithm to match different detections. NN = Nearest Neighbour, NNJPDA = NN Joint Probability Data Association
25 | ```
26 |
27 | New detectors are added under the parameter namespace `bayes_people_tracker/detectors`. Let's have a look at the upper body detector as an example:
28 |
29 | ### Tracker Parameters
30 |
31 | The tracker offers two configuration parameters:
32 | * `filter_type`: This specifies which variant of the Kalman filter to use. Currently, it implements an Extended and an Unscented Kalman filter which can be chosen via `EKF` and `UKF`, respectively.
33 | * `cv_noise_params`: parameter is used for the constant velocity prediction model.
34 | * specifies the standard deviation of the x and y velocity.
35 |
36 | ### Detector Parameters
37 |
38 | * For every detector you have to create a new namespace where the name is used as an internal identifier for this detector. Therefore it has to be unique. In this case it is `upper_body_detector`
39 | * The `topic` parameter specifies the topic under which the detections are published. The type has to be `geometry_msgs/PoseArray`. See `to_pose_array` in detector_msg_to_pose_array/README.md if your detector does not publish a PoseArray.
40 | * The `cartesian_noise_params` parameter is used for the Cartesian observation model.
41 | * specifies the standard deviation of x and y.
42 | * `matching_algorithm` specifies the algorithm used to match detections from different sensors/detectors. Currently there are two different algorithms which are based on the Mahalanobis distance of the detections (default being NNJPDA if parameter is misspelled):
43 | * NN: Nearest Neighbour
44 | * NNJPDA: Nearest Neighbour Joint Probability Data Association
45 |
46 | All of these are just normal ROS parameters and can be either specified by the parameter server or using the yaml file in the provided launch file.
47 |
48 | ### Message Type:
49 |
50 | The tracker publishes the following:
51 | * `pose`: A `geometry_msgs/PoseStamped` for the closest person.
52 | * `pose_array`: A `geometry_msgs/PoseArray` for all detections.
53 | * `people`: A `people_msgs/People` for all detections. Can be used for layered costmaps.
54 | * `marker_array`: A `visualization_msgs/MarkerArray` showing little stick figures for every detection. Figures are oriented according to the direction of velocity.
55 | * `positions`: A `bayes_people_tracker/PeopleTracker` message. See below.
56 |
57 | ```
58 | std_msgs/Header header
59 | string[] uuids # Unique uuid5 (NAMESPACE_DNS) person id as string. Id is based on system time on start-up and tracker id. Array index matches ids array index
60 | geometry_msgs/Pose[] poses # The real world poses of the detected people in the given target frame. Default: /map. Array index matches ids/uuids array index
61 | float64[] distances # The distances of the detected persons to the robot (polar coordinates). Array index matches ids array index.
62 | float64[] angles # Angles of the detected persons to the coordinate frames z axis (polar coordinates). Array index matches ids array index.
63 | float64 min_distance # The minimal distance in the distances array.
64 | float64 min_distance_angle # The angle according to the minimal distance.
65 | ```
66 |
67 | The poses will be published in a given `target_frame` (see below) but the distances and angles will always be relative to the robot in the `/base_link` tf frame.
68 |
69 | ### Running
70 | Parameters:
71 |
72 | * `target_frame`: _Default: /base_link_: the tf frame in which the tracked poses will be published.
73 | * `position`: _Default: /people_tracker/positions_: The topic under which the results are published as `bayes_people_tracker/PeopleTracker`
74 | * `pose`: _Default: /people_tracker/pose_: The topic under which the closest detected person is published as a `geometry_msgs/PoseStamped`
75 | * `pose_array`: _Default: /people_tracker/pose_array_: The topic under which the detections are published as a `geometry_msgs/PoseArray`
76 | * `people`: _Default: /people_tracker/people_: The topic under which the results are published as `people_msgs/People`
77 | * `marker`: _Default /people_tracker/marker_array_: A visualisation marker array.
78 |
79 | You can run the node with:
80 |
81 | ```
82 | roslaunch bayes_people_tracker people_tracker.launch
83 | ```
84 |
85 | This is the recommended way of launching it since this will also read the config file and set the right parameters for the detectors.
86 |
87 | ## Updating old bag files
88 |
89 | With version >1.1.8 the message type of the people tracker has been changed to include the velocities of humans as a Vector3. To update old rosbag files, first run `rosbag check`; this should tell you that there is a rule file to update this bag. Then run `rosbag fix` to update your old bag file and change the message type to the new version. The velocities will all be `0` but apart from that everything should work as intended.
90 |
91 | [1] N. Bellotto and H. Hu, “Computationally efficient solutions for tracking people with a mobile robot: an experimental evaluation of bayesian filters,” Autonomous Robots, vol. 28, no. 4, pp. 425–438, 2010.
92 |
--------------------------------------------------------------------------------
/bayes_people_tracker/config/default.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 81
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | - /People1
10 | - /People1/Namespaces1
11 | - /PoseArray1
12 | - /Marvelmind1
13 | - /Velodyne1
14 | - /GPS (CAR)1
15 | - /TRACKS1
16 | Splitter Ratio: 0.5
17 | Tree Height: 1327
18 | - Class: rviz/Selection
19 | Name: Selection
20 | - Class: rviz/Tool Properties
21 | Expanded:
22 | - /2D Pose Estimate1
23 | - /2D Nav Goal1
24 | - /Publish Point1
25 | Name: Tool Properties
26 | Splitter Ratio: 0.588679016
27 | - Class: rviz/Views
28 | Expanded:
29 | - /Current View1
30 | Name: Views
31 | Splitter Ratio: 0.5
32 | - Class: rviz/Time
33 | Experimental: false
34 | Name: Time
35 | SyncMode: 0
36 | SyncSource: ""
37 | Visualization Manager:
38 | Class: ""
39 | Displays:
40 | - Alpha: 0.5
41 | Cell Size: 1
42 | Class: rviz/Grid
43 | Color: 160; 160; 164
44 | Enabled: true
45 | Line Style:
46 | Line Width: 0.0299999993
47 | Value: Lines
48 | Name: Grid
49 | Normal Cell Count: 0
50 | Offset:
51 | X: 0
52 | Y: 0
53 | Z: 0
54 | Plane: XY
55 | Plane Cell Count: 10
56 | Reference Frame:
57 | Value: true
58 | - Class: rviz/MarkerArray
59 | Enabled: true
60 | Marker Topic: /people_tracker/marker_array
61 | Name: People
62 | Namespaces:
63 | people_id: true
64 | people_tracker: true
65 | people_trajectory: true
66 | vars_ellipse: true
67 | Queue Size: 100
68 | Value: true
69 | - Alpha: 1
70 | Arrow Length: 0.300000012
71 | Axes Length: 0.300000012
72 | Axes Radius: 0.00999999978
73 | Class: rviz/PoseArray
74 | Color: 0; 170; 0
75 | Enabled: true
76 | Head Length: 1.5
77 | Head Radius: 0.5
78 | Name: PoseArray
79 | Shaft Length: 0
80 | Shaft Radius: 0
81 | Shape: Arrow (3D)
82 | Topic: /tracker_tester/ground_truth/marc
83 | Unreliable: false
84 | Value: true
85 | - Alpha: 1
86 | Arrow Length: 0.600000024
87 | Axes Length: 0.300000012
88 | Axes Radius: 0.00999999978
89 | Class: rviz/PoseArray
90 | Color: 255; 25; 0
91 | Enabled: true
92 | Head Length: 0.5
93 | Head Radius: 0.5
94 | Name: Marvelmind
95 | Shaft Length: 0
96 | Shaft Radius: 0
97 | Shape: Arrow (3D)
98 | Topic: /tracker_tester/sensor/pose_array/marvelmind
99 | Unreliable: false
100 | Value: true
101 | - Alpha: 1
102 | Arrow Length: 0.5
103 | Axes Length: 0.300000012
104 | Axes Radius: 0.00999999978
105 | Class: rviz/PoseArray
106 | Color: 255; 255; 0
107 | Enabled: true
108 | Head Length: 0.5
109 | Head Radius: 0.5
110 | Name: Velodyne
111 | Shaft Length: 0
112 | Shaft Radius: 0
113 | Shape: Arrow (3D)
114 | Topic: /tracker_tester/sensor/pose_array/serhan
115 | Unreliable: false
116 | Value: true
117 | - Alpha: 1
118 | Arrow Length: 1
119 | Axes Length: 0.300000012
120 | Axes Radius: 0.00999999978
121 | Class: rviz/PoseArray
122 | Color: 255; 170; 255
123 | Enabled: true
124 | Head Length: 0.5
125 | Head Radius: 0.5
126 | Name: GPS (CAR)
127 | Shaft Length: 0
128 | Shaft Radius: 0
129 | Shape: Arrow (3D)
130 | Topic: /tracker_tester/sensor/pose_array/gps
131 | Unreliable: false
132 | Value: true
133 | - Alpha: 1
134 | Arrow Length: 1.20000005
135 | Axes Length: 0.300000012
136 | Axes Radius: 0.00999999978
137 | Class: rviz/PoseArray
138 | Color: 0; 255; 0
139 | Enabled: true
140 | Head Length: 0.200000003
141 | Head Radius: 0.200000003
142 | Name: TRACKS
143 | Shaft Length: 0.699999988
144 | Shaft Radius: 0.0500000007
145 | Shape: Arrow (3D)
146 | Topic: /people_tracker/pose_array
147 | Unreliable: false
148 | Value: true
149 | Enabled: true
150 | Global Options:
151 | Background Color: 48; 48; 48
152 | Default Light: true
153 | Fixed Frame: base_link
154 | Frame Rate: 10
155 | Name: root
156 | Tools:
157 | - Class: rviz/Interact
158 | Hide Inactive Objects: true
159 | - Class: rviz/MoveCamera
160 | - Class: rviz/Select
161 | - Class: rviz/FocusCamera
162 | - Class: rviz/Measure
163 | - Class: rviz/SetInitialPose
164 | Topic: /initialpose
165 | - Class: rviz/SetGoal
166 | Topic: /move_base_simple/goal
167 | - Class: rviz/PublishPoint
168 | Single click: true
169 | Topic: /clicked_point
170 | Value: true
171 | Views:
172 | Current:
173 | Class: rviz/Orbit
174 | Distance: 28.9135303
175 | Enable Stereo Rendering:
176 | Stereo Eye Separation: 0.0599999987
177 | Stereo Focal Distance: 1
178 | Swap Stereo Eyes: false
179 | Value: false
180 | Focal Point:
181 | X: -1.4987365
182 | Y: 0.682361782
183 | Z: 0.577264845
184 | Focal Shape Fixed Size: false
185 | Focal Shape Size: 0.0500000007
186 | Invert Z Axis: false
187 | Name: Current View
188 | Near Clip Distance: 0.00999999978
189 | Pitch: 0.785398185
190 | Target Frame:
191 | Value: Orbit (rviz)
192 | Yaw: 0.785398185
193 | Saved: ~
194 | Window Geometry:
195 | Displays:
196 | collapsed: false
197 | Height: 1691
198 | Hide Left Dock: false
199 | Hide Right Dock: false
200 | QMainWindow State: 000000ff00000000fd0000000400000000000003ee000005cffc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006d00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000056000005cf000000f100fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006500000003ce0000016d0000000000000000fb0000000a0049006d00610067006501000004f80000012d00000000000000000000000100000131000005cffc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a005600690065007700730100000056000005cf000000cb00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e1000001970000000300000b4000000048fc0100000002fb0000000800540069006d0065010000000000000b40000003a400fffffffb0000000800540069006d006501000000000000045000000000000000000000060f000005cf00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
201 | Selection:
202 | collapsed: false
203 | Time:
204 | collapsed: false
205 | Tool Properties:
206 | collapsed: false
207 | Views:
208 | collapsed: false
209 | Width: 2880
210 | X: 0
211 | Y: 31
212 |
--------------------------------------------------------------------------------
/yolox_people_detection/scripts/yolox_person_detection_node:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import sys
5 |
6 | # ROS
7 | import rospy
8 | import numpy as np
9 | import math
10 | #import time
11 | import statistics
12 | from torch import tensor
13 | import cv2
14 | from cv_bridge import CvBridge, CvBridgeError
15 | from yolox.exp import get_exp
16 | from clf_object_recognition_yolox import recognizer, util
17 | from clf_object_recognition_msgs.srv import Detect2D, Detect2DImage
18 |
19 | from sensor_msgs.msg import Image as ImgMsg
20 |
21 | from dynamic_reconfigure.server import Server
22 | from clf_object_recognition_cfg.cfg import YoloxConfig ##?
23 | ########
24 | import message_filters
25 | from sensor_msgs.msg import Image, CameraInfo, PointCloud
26 | ########
27 | import std_msgs.msg
28 | from geometry_msgs.msg import PoseArray, Pose
29 | from image_geometry import cameramodels
30 |
class YoloxPeopleNode():
    """Detect people in RGB images with YOLOX and publish their 3D positions.

    Subscribes to approximately-synchronized RGB, depth and camera-info
    topics. For every detection of the configured class id, the depth around
    the bounding-box centre is sampled in a small cross pattern, and the
    sample whose Euclidean distance is closest to the median distance is
    published as the person's position in a geometry_msgs/PoseArray on the
    private topic ``~poses`` (header copied from the RGB image).
    """

    def __init__(self, checkpoint, exp_path, config, camera_info_path='/xtion/depth_registered/camera_info' , image_path= '/xtion/rgb/image_raw', depth_path='/xtion/depth_registered/image_raw', classid = 0, depth_scaling = 1,variance_scaling=10):
        """
        Args:
            checkpoint: path to the YOLOX checkpoint file.
            exp_path: path to the YOLOX experiment description.
            config: parameter dict; key "conf" overrides the test confidence.
            camera_info_path: camera-info topic of the registered depth camera.
            image_path: RGB image topic.
            depth_path: registered depth image topic.
            classid: class id corresponding to "person".
            depth_scaling: initial depth-to-metres factor (re-detected per
                image from the depth dtype in the callback).
            variance_scaling: divisor of the bounding-box size that bounds the
                random sampling radius around the box centre (larger value ->
                samples closer to the centre).
        """
        rospy.loginfo(logger_name="YoloxPeopleNode", msg="initializing:")
        rospy.loginfo(logger_name="YoloxPeopleNode", msg=f" - checkpoint={checkpoint}")
        rospy.loginfo(logger_name="YoloxPeopleNode", msg=f" - exp_path={exp_path}")
        self.pub = rospy.Publisher('~poses', PoseArray, queue_size=1)
        self.classid = classid
        self.depth_scaling = depth_scaling
        # Fix: this parameter was previously accepted but never stored; the
        # callback used a hard-coded copy instead.
        self.variance_scaling = variance_scaling

        self.exp = get_exp(exp_path, "")
        if "conf" in config:
            self.exp.test_conf = config["conf"]

        self.recognizer = recognizer.Recognizer(checkpoint, self.exp)

        self._bridge = CvBridge()
        image_sub = message_filters.Subscriber(image_path, Image)
        info_sub = message_filters.Subscriber(camera_info_path, CameraInfo)
        depth_sub = message_filters.Subscriber(depth_path, Image)
        self.ts = message_filters.ApproximateTimeSynchronizer([image_sub, info_sub, depth_sub], 1, 1)
        self.ts.registerCallback(self.callback)

        self.camera = cameramodels.PinholeCameraModel()

    def callback(self, image, camera_info, depth_raw):
        """Synchronized RGB/depth/info callback: run inference, publish poses.

        Args:
            image: sensor_msgs/Image (RGB).
            camera_info: sensor_msgs/CameraInfo of the depth camera.
            depth_raw: sensor_msgs/Image (registered depth).
        """
        try:
            img = self._bridge.imgmsg_to_cv2(image, "bgr8")
            depth = self._bridge.imgmsg_to_cv2(depth_raw, "passthrough")
            # Auto-detect depth encoding: uint16 depth images are in
            # millimetres, float images are assumed to be in metres.
            if depth.dtype == np.uint16:
                self.depth_scaling = 0.001
            else:
                self.depth_scaling = 1
        except CvBridgeError as e:
            error_msg = "Could not convert to opencv image: %s" % e
            rospy.logerr(logger_name="YoloxPeopleNode", msg=error_msg)
            raise Exception(error_msg)

        (cls, scores, bboxes) = self.recognizer.inference(img)

        self.camera.fromCameraInfo(camera_info)
        # Precompute metres-per-(pixel*depth-unit) factors for back-projection.
        constant_y = self.depth_scaling / self.camera.fy()
        constant_x = self.depth_scaling / self.camera.fx()

        msg = PoseArray()
        msg.header = image.header
        for c, score, box in zip(cls, scores, bboxes):
            if c == self.classid and score > 0.5:
                pose = self._estimate_pose(box, depth, constant_x, constant_y)
                if pose is not None:
                    msg.poses.append(pose)
        self.pub.publish(msg)

    def _sample_point(self, depth, xd, yd, constant_x, constant_y):
        """Back-project pixel (xd, yd) into 3D using the depth image.

        Coordinates are clamped to the depth image bounds (fixes a bug where
        a negated coordinate silently indexed from the image end).

        Returns:
            [x, y, z, distance] in metres, or None if the depth at the pixel
            is zero or NaN (invalid measurement).
        """
        yi = min(max(int(yd), 0), depth.shape[0] - 1)
        xi = min(max(int(xd), 0), depth.shape[1] - 1)
        d = depth[yi][xi]
        z = d * self.depth_scaling
        if z == 0 or np.isnan(z):
            return None
        x = (xi - self.camera.cx()) * d * constant_x
        y = (yi - self.camera.cy()) * d * constant_y
        return [x, y, z, math.sqrt((x ** 2) + (y ** 2) + (z ** 2))]

    def _estimate_pose(self, box, depth, constant_x, constant_y):
        """Estimate a person's 3D position from one bounding box.

        Samples the box centre plus 4 random points in a cross pattern
        (alternating up/down and left/right of the centre), then picks the
        sample whose distance is closest to the median distance — a cheap
        outlier rejection against depth holes and background bleed.

        Returns:
            geometry_msgs/Pose or None if no valid depth sample was found.
        """
        h = (box[3] - box[1])
        w = (box[2] - box[0])
        yc = ((box[1] + box[3]) / 2).item()
        xc = ((box[0] + box[2]) / 2).item()
        # Sampling radius: a fraction of the box size, at least 2 pixels so
        # np.random.randint(1, area) has a non-empty range.
        h_area = max(int(h // self.variance_scaling), 2)
        w_area = max(int(w // self.variance_scaling), 2)

        center = self._sample_point(depth, xc, yc, constant_x, constant_y)
        if center is None:
            # No valid depth at the box centre: skip this detection
            # (matches the previous behaviour of publishing nothing).
            return None
        samples = [center]

        height_direction = 1
        wide_direction = 1
        for i in range(4):
            if i % 2 == 0:  # even: offset vertically
                xd = xc
                # Fix: offset (not the absolute coordinate) is multiplied by
                # the direction sign; previously a negated coordinate indexed
                # from the wrong end of the image.
                yd = yc + np.random.randint(1, h_area) * height_direction
                height_direction = -height_direction
            else:  # odd: offset horizontally
                xd = xc + np.random.randint(1, w_area) * wide_direction
                yd = yc
                wide_direction = -wide_direction
            p = self._sample_point(depth, xd, yd, constant_x, constant_y)
            if p is not None:
                samples.append(p)

        arr = np.array(samples)
        # Pick the sample whose distance is closest to the median distance.
        idx = (np.abs(arr.T[3] - statistics.median(arr.T[3]))).argmin()
        best = arr[idx]
        rospy.logdebug(logger_name="YoloxPeopleNode", msg=f"human: {best[1]}, {best[0]}, {best[2]}")
        if best[2] == 0:
            return None
        pose = Pose()
        pose.position.x = best[0]
        pose.position.y = best[1]
        pose.position.z = best[2]
        pose.orientation.w = 1
        return pose
167 |
if __name__ == '__main__':

    # Start the ROS node, read all required private parameters, then spin.
    rospy.init_node('yolox_people_tracker')

    try:
        checkpoint = os.path.expanduser(rospy.get_param("~checkpoint"))
        exp_path = os.path.expanduser(rospy.get_param("~exp"))
        config = rospy.get_param("~")
        camera_info_topic = rospy.get_param("~camera_info")
        image_topic = rospy.get_param("~img")
        depth_topic = rospy.get_param("~depth")
        person_class_id = rospy.get_param("~classid")
        depth_scale = rospy.get_param("~depth_scaling")
    except KeyError as e:
        rospy.logerr(logger_name="YoloxPeopleNode", msg="Parameter %s not found" % e)
        sys.exit(1)

    node = YoloxPeopleNode(checkpoint, exp_path, config, camera_info_topic,
                           image_topic, depth_topic, person_class_id,
                           depth_scale, 10)

    rospy.loginfo(logger_name="YoloxPeopleNode", msg="\nyolox person detection running")

    rospy.spin()
192 |
--------------------------------------------------------------------------------
/bayes_people_tracker/scripts/test_publisher.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from geometry_msgs.msg import PoseArray, Pose
6 | from people_msgs.msg import People, Person
7 | from math import pi, sin, cos, sqrt
8 | from threading import Thread, RLock
9 | from random import randint, random
10 | from numpy import random as nprnd
11 | from collections import defaultdict
12 |
class Walker(Thread):
    """Base class for a simulated person wandering inside a rectangular arena.

    Runs as a thread that repeatedly applies the subclass motion model
    (``_update``), clamps the position to the arena, and publishes the
    ground-truth pose on ``tracker_tester/ground_truth/<name>``.
    """

    def __init__(
        self, name,
        start_coords=[0, 0],
        arena=[-10, -10, 10, 10]
    ):
        Thread.__init__(self, name=name)
        self.start_coords = start_coords
        self.rate = 50.0                          # update/publish frequency [Hz]
        self.position = list(self.start_coords)   # current [x, y]
        self.arena = arena                        # [x_min, y_min, x_max, y_max]
        self.lock = RLock()
        self.pub = rospy.Publisher(
            'tracker_tester/ground_truth/%s' % name,
            PoseArray,
            queue_size=10
        )

    def _crop(self):
        # Clamp the current position into the arena rectangle.
        x_min, y_min, x_max, y_max = self.arena
        self.position[0] = max(x_min, min(self.position[0], x_max))
        self.position[1] = max(y_min, min(self.position[1], y_max))

    def _update(self):
        # Motion model hook; subclasses override this.
        pass

    def run(self):
        loop_rate = rospy.Rate(self.rate)
        while not rospy.is_shutdown():

            with self.lock:
                self._update()
                self._crop()

            ground_truth = PoseArray()
            ground_truth.header.frame_id = '/base_link'
            ground_truth.header.stamp = rospy.Time.now()
            ground_truth.poses.append(self.get_pose())
            self.pub.publish(ground_truth)
            loop_rate.sleep()

    def get_pose(self):
        """Current position as a geometry_msgs/Pose with identity orientation."""
        pose = Pose()
        with self.lock:
            pose.position.x, pose.position.y = self.position
            pose.orientation.w = 1.0
        return pose

    def get_person(self):
        """Current position as a people_msgs/Person carrying this walker's name."""
        person = Person()
        person.name = self.name
        with self.lock:
            person.position.x, person.position.y = self.position
        return person
87 |
88 |
class CircularWalker(Walker):
    """Walker that moves on an ellipse centred on its start coordinates."""

    def __init__(
        self, name,
        start_coords=[0, 0],
        arena=[-5, -5, 5, 5],
        omega_deg=5, scale=[2, 1]
    ):
        Walker.__init__(
            self, name, start_coords, arena)
        self.omega = omega_deg / self.rate   # angular step per update [deg]
        self.angle = randint(0, 359)         # random starting phase [deg]
        self.scale = scale                   # ellipse radii [x, y]

    def _update(self):
        # Advance the phase and wrap it back to zero after a full turn.
        self.angle += self.omega
        if self.angle >= 360:
            self.angle = 0
        radians = self.angle * pi / 180.0
        self.position[0] = self.start_coords[0] + cos(radians) * self.scale[0]
        self.position[1] = self.start_coords[1] + sin(radians) * self.scale[1]
112 |
113 |
class LinearWalker(Walker):
    """Walker that moves in straight lines and occasionally changes direction."""

    def __init__(
        self, name,
        start_coords=[0, 0],
        arena=[-10, -10, 10, 10],
        speed=.4, dirchange_prob=.1
    ):
        Walker.__init__(
            self, name, start_coords, arena)
        self.speed = speed / self.rate                     # distance per update
        self.dirchange_prob = dirchange_prob / self.rate   # per-update probability
        self._newdir()

    def _newdir(self):
        """Pick a new uniformly random unit-length direction vector."""
        while True:
            vec = [random() - .5, random() - .5]  # renamed: 'dir' shadowed a builtin
            norm = sqrt((vec[0] ** 2) + (vec[1] ** 2))
            # Guard against a zero-length vector (both draws exactly 0.5),
            # which previously caused a ZeroDivisionError.
            if norm > 0:
                break

        self.direction = [
            vec[0] / norm,
            vec[1] / norm
        ]

    def _update(self):
        if (random() <= self.dirchange_prob):
            rospy.loginfo("%s changes direction" % self.name)
            self._newdir()

        self.position[0] += self.direction[0] * self.speed
        self.position[1] += self.direction[1] * self.speed
143 |
144 |
class Sensor(Thread):
    """Noisy virtual sensor observing a set of walkers.

    Publishes a geometry_msgs/PoseArray and a people_msgs/People message per
    cycle on ``tracker_tester/sensor/{pose_array,people}/<name>``. Each walker
    may randomly become (in)visible to this sensor, observations carry
    Gaussian position noise, and the publish rate itself is randomized.
    """

    def __init__(
        self, name, walkers,
        anonymous=True,
        noise=[.4, .4],
        avg_rate=10, std_rate=2,
        prob_switch_visible=0.0,
        prob_visible=1.0,
        delayed_start=0.0
    ):
        Thread.__init__(self, name=name)
        self.noise = noise                     # std dev of x/y observation noise
        self.avg_rate = avg_rate               # mean publish rate [Hz]
        self.std_rate = std_rate               # std dev of publish rate
        self.anonymous = anonymous             # if True, strip walker names
        self.prob_switch_visible = prob_switch_visible / self.avg_rate
        self.prob_visible = prob_visible
        # First access per walker name draws whether it starts out visible.
        self.visible = defaultdict(lambda: random() < self.prob_visible)
        self.walkers = walkers
        self.delayed_start = delayed_start     # seconds before publishing starts
        self.pub_poses = rospy.Publisher(
            'tracker_tester/sensor/pose_array/%s' % name,
            PoseArray,
            queue_size=10
        )
        self.pub_people = rospy.Publisher(
            'tracker_tester/sensor/people/%s' % name,
            People,
            queue_size=10
        )

    def _maybe_switch(self, walker):
        # Randomly toggle whether this sensor currently sees the walker.
        if random() < self.prob_switch_visible:
            self.visible[walker.name] = (random() < self.prob_visible)
            if self.visible[walker.name]:
                rospy.loginfo(
                    "switch sensor %s to see %s" % (self.name, walker.name))
            else:
                rospy.loginfo(
                    "switch sensor %s to NOT see %s" % (self.name, walker.name))

    def _observe(self, walker, pose_list, person_list):
        # One noisy observation of the walker, appended to both output lists.
        ps = walker.get_pose()
        ps.position.x += nprnd.randn() * self.noise[0]
        ps.position.y += nprnd.randn() * self.noise[1]
        pose_list.append(ps)
        person = walker.get_person()
        person.position.x = ps.position.x
        person.position.y = ps.position.y
        if self.anonymous:
            person.name = ''
        person_list.append(person)

    def run(self):
        self.start_time = rospy.Time.now()
        while not rospy.is_shutdown():
            # Hold off until the configured start delay has elapsed.
            if (
                rospy.Time.now() - self.start_time <
                rospy.Duration(self.delayed_start)
            ):
                rospy.sleep(0.1)
                continue

            pose_list = []
            person_list = []
            for walker in self.walkers:
                self._maybe_switch(walker)
                if self.visible[walker.name]:
                    self._observe(walker, pose_list, person_list)

            pa = PoseArray()
            pa.header.frame_id = '/base_link'
            pa.header.stamp = rospy.Time.now()
            pa.poses = pose_list
            self.pub_poses.publish(pa)

            ppl = People()
            ppl.header.frame_id = '/base_link'
            ppl.header.stamp = rospy.Time.now()
            ppl.people = person_list
            self.pub_people.publish(ppl)

            # Randomize the cycle rate, clamped away from zero.
            cycle_rate = self.avg_rate + nprnd.randn() * self.std_rate
            rospy.sleep(1.0 / max(cycle_rate, .1))
225 |
226 |
def talker():
    """Spawn simulated walkers and noisy sensors observing them, then spin."""
    rospy.init_node('tracker_tester', anonymous=True)

    walkers = [
        CircularWalker('marc', start_coords=[5, 5], omega_deg=10),
        LinearWalker('vicky', start_coords=[0, 0]),
        LinearWalker('greg', start_coords=[2, 2])
    ]

    for walker in walkers:
        walker.start()

    # Three sensors with different coverage, noise, rates and reliability.
    Sensor(
        'marvelmind', walkers[:2],  # not all walkers of this sensor
        anonymous=False,            # this sensor provides IDs
        noise=[.2, .2],             # cartesian noise
        delayed_start=0,            # start only after N seconds
        prob_visible=.9,            # probability of sensor being
                                    # on when switched
        prob_switch_visible=.2,     # prob of switching (per sec)
        avg_rate=1, std_rate=.1).start()  # avg/std of publish rate
    Sensor(
        'serhan', walkers,          # anonymous, fast, but unreliable
        anonymous=True,
        noise=[.04, .04],
        delayed_start=0,
        avg_rate=20, std_rate=2,
        prob_visible=.2,
        prob_switch_visible=.2).start()
    Sensor(
        'gps', walkers,             # named, slow and very noisy
        anonymous=False,
        noise=[1, 1],
        delayed_start=0,
        avg_rate=1, std_rate=.2,
        prob_visible=.5,
        prob_switch_visible=.1).start()

    rospy.spin()
299 |
if __name__ == '__main__':
    try:
        talker()
    except rospy.ROSInterruptException:
        # Raised on node shutdown (e.g. Ctrl-C) while sleeping/spinning;
        # exiting quietly is the intended behaviour.
        pass
305 |
--------------------------------------------------------------------------------
/bayes_people_tracker/include/bayes_people_tracker/simple_tracking.h:
--------------------------------------------------------------------------------
1 | /***************************************************************************
2 | * Copyright (C) 2011 by Nicola Bellotto *
3 | * nbellotto@lincoln.ac.uk *
4 | * *
5 | * This program is free software; you can redistribute it and/or modify *
6 | * it under the terms of the GNU General Public License as published by *
7 | * the Free Software Foundation; either version 2 of the License, or *
8 | * (at your option) any later version. *
9 | * *
10 | * This program is distributed in the hope that it will be useful, *
11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13 | * GNU General Public License for more details. *
14 | * *
15 | * You should have received a copy of the GNU General Public License *
16 | * along with this program; if not, write to the *
17 | * Free Software Foundation, Inc., *
18 | * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
19 | ***************************************************************************/
20 |
21 | #ifndef SIMPLE_TRACKING_H
22 | #define SIMPLE_TRACKING_H
23 |
24 | #include
25 | #include
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 | #include
34 | #include
35 | #include
36 | #include
37 |
38 | using namespace std;
39 | using namespace MTRK;
40 | using namespace Models;
41 |
// Rule used by the multi-target tracker (MTRK) to decide that a track is lost.
// NOTE(review): the template parameter list (the filter type) appears to have
// been stripped from this copy of the file; restore it before compiling.
template
bool MTRK::isLost(const FilterType* filter, double stdLimit) {
    //ROS_INFO("var_x: %f, var_y: %f",filter->X(0,0), filter->X(2,2));
    // track lost if var(x)+var(y) > stdLimit^2
    // (X(0,0) and X(2,2) are the position variances of the [x, vx, y, vy] state)
    if(filter->X(0,0) + filter->X(2,2) > sqr(stdLimit))
        return true;
    return false;
}
51 |
// Rule used by the multi-target tracker to create a new track from a sequence
// of observations: initializes the filter state [x, vx, y, vy] from the first
// and last observation; returns false if they coincide in time (dt == 0).
// NOTE(review): the template parameter list appears to have been stripped from
// this copy of the file; restore it before compiling.
template
bool MTRK::initialize(FilterType* &filter, sequence_t& obsvSeq, observ_model_t om_flag) {
    assert(obsvSeq.size());

    if(om_flag == CARTESIAN) {
        double dt = obsvSeq.back().time - obsvSeq.front().time;
        if(dt == 0) return false;
        //assert(dt); // dt must not be null

        // finite-difference velocity estimate from first to last observation
        FM::Vec v((obsvSeq.back().vec - obsvSeq.front().vec) / dt);

        FM::Vec x(4);
        FM::SymMatrix X(4,4);

        x[0] = obsvSeq.back().vec[0];
        x[1] = v[0];
        x[2] = obsvSeq.back().vec[1];
        x[3] = v[1];
        X.clear();
        // initial covariance: modest position uncertainty, larger for velocity
        X(0,0) = sqr(0.2);
        X(1,1) = sqr(1.0);
        X(2,2) = sqr(0.2);
        X(3,3) = sqr(1.0);

        filter = new FilterType(4);
        filter->init(x, X);
    }

    if(om_flag == POLAR) {
        double dt = obsvSeq.back().time - obsvSeq.front().time;
        if(dt == 0) return false;
        //assert(dt); // dt must not be null
        // convert (bearing, range) observations to Cartesian endpoints
        double x2 = obsvSeq.back().vec[1]*cos(obsvSeq.back().vec[0]);
        double y2 = obsvSeq.back().vec[1]*sin(obsvSeq.back().vec[0]);
        double x1 = obsvSeq.front().vec[1]*cos(obsvSeq.front().vec[0]);
        double y1 = obsvSeq.front().vec[1]*sin(obsvSeq.front().vec[0]);

        FM::Vec x(4);
        FM::SymMatrix X(4,4);

        x[0] = x2;
        x[1] = (x2-x1)/dt;
        x[2] = y2;
        x[3] = (y2-y1)/dt;
        X.clear();
        // polar observations are noisier: larger initial covariance
        X(0,0) = sqr(0.5);
        X(1,1) = sqr(1.5);
        X(2,2) = sqr(0.5);
        X(3,3) = sqr(1.5);

        filter = new FilterType(4);
        filter->init(x, X);
    }

    return true;
}
109 |
// Thread-safe wrapper around the bayestracking multi-target tracker: owns the
// constant-velocity motion model, the per-detector observation models, and
// exposes a predict/readout (track) and an update (addObservation) API.
// NOTE(review): angle-bracket contents (template parameter lists and the type
// arguments of std::map/std::vector) appear to have been stripped from this
// copy of the file; restore them before compiling.
template
class SimpleTracking
{
public:
    // sLimit: upper bound on the position std deviation before a track is
    // pruned; prune_named_tracks: also prune tracks carrying a tag.
    SimpleTracking(double sLimit = 1.0, bool prune_named_tracks = false) {
        time = getTime();
        observation = new FM::Vec(2);
        stdLimit = sLimit;
        prune_named = prune_named_tracks;
    }

    // Create the constant-velocity motion model shared by all tracks.
    void createConstantVelocityModel(double vel_noise_x, double vel_noise_y) {
        cvm = new CVModel(vel_noise_x, vel_noise_y);
    }

    // Register a named detector with its observation model (Cartesian or
    // polar), association algorithm and track-creation thresholds.
    void addDetectorModel(std::string name, association_t alg, observ_model_t om_flag, double pos_noise_x, double pos_noise_y, unsigned int seqSize = 5, double seqTime = 0.2) {
        ROS_INFO("Adding detector model for: %s.", name.c_str());
        detector_model det;
        det.om_flag = om_flag;
        det.alg = alg;
        if(om_flag == CARTESIAN)
            det.ctm = new CartesianModel(pos_noise_x, pos_noise_y);
        if(om_flag == POLAR)
            det.plm = new PolarModel(pos_noise_x, pos_noise_y);
        det.seqSize = seqSize;
        det.seqTime = seqTime;
        detectors[name] = det;
    }

    // Prediction step and state readout. For every live track, returns
    // three poses per track id: position(+orientation), velocity, variance.
    // tags is filled with the per-track tag. track_time (if non-null)
    // receives the internal time of this prediction.
    std::map > track(double* track_time, std::map& tags) {
        boost::mutex::scoped_lock lock(mutex);
        std::map > result;
        dt = getTime() - time;
        time += dt;
        if(track_time) *track_time = time;

        // This function is only for the prediction step of the tracker. Thus, the update step is not performed here
        // Since the update step is related to observations (from detectors), it is performed in addObservation function, when a new observation comes
        // for(typename std::map::const_iterator it = detectors.begin();
        //     it != detectors.end();
        //     ++it) {
        //     // prediction
        //     cvm->update(dt);
        //     mtrk.template predict(*cvm);

        //     // process observations (if available) and update tracks
        //     mtrk.process(*(it->second.ctm), it->second.alg);
        //}
        // prediction
        cvm->update(dt);
        mtrk.template predict(*cvm);
        //detector_model dummy_det;
        //mtrk.process(*(dummy_det.ctm));
        mtrk.pruneTracks(stdLimit);
        if (prune_named) {
            mtrk.pruneNamedTracks();
        }

        // NOTE(review): this loop computes theta and discards it — dead code
        // duplicated by the loop below; candidate for removal.
        for (int i = 0; i < mtrk.size(); i++) {
            double theta = atan2(mtrk[i].filter->x[3], mtrk[i].filter->x[1]);
        }

        for (int i = 0; i < mtrk.size(); i++) {
            // heading derived from the velocity components of [x, vx, y, vy]
            double theta = atan2(mtrk[i].filter->x[3], mtrk[i].filter->x[1]);
            ROS_DEBUG("trk_%ld: Position: (%f, %f), Orientation: %f, Std Deviation: %f, %f",
                      mtrk[i].id,
                      mtrk[i].filter->x[0], mtrk[i].filter->x[2], //x, y
                      theta, //orientation
                      sqrt(mtrk[i].filter->X(0,0)), sqrt(mtrk[i].filter->X(2,2))//std dev
                      );
            geometry_msgs::Pose pose, vel, var; // position, velocity, variance
            pose.position.x = mtrk[i].filter->x[0];
            pose.position.y = mtrk[i].filter->x[2];
            pose.orientation.z = sin(theta/2);
            pose.orientation.w = cos(theta/2);
            result[mtrk[i].id].push_back(pose);

            vel.position.x = mtrk[i].filter->x[1];
            vel.position.y = mtrk[i].filter->x[3];
            result[mtrk[i].id].push_back(vel);

            var.position.x = mtrk[i].filter->X(0,0);
            var.position.y = mtrk[i].filter->X(2,2);
            result[mtrk[i].id].push_back(var);
            tags[mtrk[i].id] = mtrk[i].tag;
        }
        return result;
    }

    // Update step: feed one batch of (x, y) observations from a registered
    // detector into the tracker, with optional per-observation tags.
    void addObservation(std::string detector_name, std::vector obsv, double obsv_time, std::vector tags) {
        boost::mutex::scoped_lock lock(mutex);
        ROS_DEBUG("Adding new observations for detector: %s", detector_name.c_str());
        // add last observation/s to tracker
        detector_model det;
        try {
            det = detectors.at(detector_name);
        } catch (std::out_of_range &exc) {
            ROS_ERROR("Detector %s was not registered!", detector_name.c_str());
            return;
        }

        dt = getTime() - time;
        time += dt;

        // prediction
        cvm->update(dt);
        mtrk.template predict(*cvm);

        // mtrk.process(*(det.ctm), det.alg);

        int count = 0;
        for(std::vector::iterator li = obsv.begin(); li != obsv.end(); ++li) {
            if(det.om_flag == CARTESIAN) {
                (*observation)[0] = li->x;
                (*observation)[1] = li->y;
            }
            if(det.om_flag == POLAR) {
                (*observation)[0] = atan2(li->y, li->x); // bearing
                (*observation)[1] = sqrt(pow(li->x,2) + pow(li->y,2)); // range
            }
            // pair each observation with its tag while tags remain
            if (count < tags.size())
                mtrk.addObservation(*observation, obsv_time, tags[count]);
            else
                mtrk.addObservation(*observation, obsv_time);
            count++;
        }

        if(det.om_flag == CARTESIAN) {
            mtrk.process(*(det.ctm), det.alg, det.seqSize, det.seqTime, stdLimit, det.om_flag);
        }
        if(det.om_flag == POLAR) {
            //det.plm->update(robot_pose.position.x, robot_pose.position.y, robot_pose.orientation.w);
            det.plm->update(0, 0, 0);
            mtrk.process(*(det.plm), det.alg, det.seqSize, det.seqTime, stdLimit, det.om_flag);
        }
        // NOTE(review): stray increment — count is not used after the loop.
        count++;
    }

private:

    FM::Vec *observation; // observation [x, y]
    double dt, time;      // last time step and internal tracker time
    boost::mutex mutex;   // guards all tracker state across callbacks
    CVModel *cvm; // CV model
    MultiTracker mtrk; // state [x, v_x, y, v_y]
    double stdLimit; // upper limit for the std deviation of the estimated position
    bool prune_named; // if true, tagged (named) tracks are pruned as well

    typedef struct {
        CartesianModel *ctm; // Cartesian observation model
        PolarModel *plm; // Polar observation model
        observ_model_t om_flag; // Observation model flag
        association_t alg; // Data association algorithm
        unsigned int seqSize; // Minimum number of observations for new track creation
        double seqTime; // Minimum interval between observations for new track creation
    } detector_model;
    std::map detectors; // registered detectors by name

    // Current wall-clock time in seconds (ROS time).
    double getTime() {
        return ros::Time::now().toSec();
    }
};
272 | #endif //SIMPLE_TRACKING_H
273 |
--------------------------------------------------------------------------------
/bayes_people_tracker/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
2 | Changelog for package bayes_people_tracker
3 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
4 |
5 | 1.8.0 (2018-10-01)
6 | ------------------
7 | * WIP: working towards tag-aware tracking (`#221 `_)
8 | * WIP
9 | * WIP
10 | * wip
11 | * good testing node
12 | * WIP
13 | * Fixed count
14 | * WIP
15 | * more documentation and testing
16 | * colors for variances
17 | * added rviz config
18 | * prune_named added
19 | * legacy param support
20 | * Contributors: Marc Hanheide
21 |
22 | 1.7.0 (2018-09-04)
23 | ------------------
24 | * Adapting to changes in bayestracking (`#220 `_)
25 | * Adapted to scosar/bayestracking with 2D Polar Model and Added option to select Particle Filter
26 | * random fixes
27 | * Adapted to backward-compatibility changes of bayestracking
28 | * Namespaces and topics specified as parameters. (`#218 `_)
29 | * Merged with ENRICHME branch. Parametrized topics/frame_ids
30 | * Undone the timestamp change
31 | Hi,
32 | It was a careless change. I originally thought that the timestamp should reflect time creation of the data, but it also makes sense the way you made it plus it's better not to change the meaning of a field now.
33 | * Reverted changes on original config.
34 | * Update detector.h
35 | * Default initialization on constructor
36 | * Update KConnectedComponentLabeler.cpp
37 | * Update KConnectedComponentLabeler.cpp
38 | * Update detector.cpp
39 | * Suggested change in data interpretation.
40 | Still pending to check that 0 corresponds to NaN in ushort
41 | * Bug corrected.
42 | * commented duplicate code
43 | * Contributors: Manuel Fernandez-Carmona, scosar
44 |
45 | 1.6.0 (2017-09-01)
46 | ------------------
47 | * changelogs
48 | * changed from cdondrup to marc
49 | * Contributors: Marc Hanheide
50 |
51 | 1.5.5 (2017-07-02)
52 | ------------------
53 | * updated changelogs
54 | * Contributors: Jenkins
55 |
56 | 1.5.4 (2016-11-03)
57 | ------------------
58 | * updated changelogs
59 | * Contributors: Jenkins
60 |
61 | 1.5.3 (2016-07-04)
62 | ------------------
63 | * updated changelogs
64 | * Contributors: Jenkins
65 |
66 | 1.5.2 (2016-07-02 20:52)
67 | ------------------------
68 | * updated changelogs
69 | * Contributors: Jenkins
70 |
71 | 1.5.1 (2016-07-02 17:54)
72 | ------------------------
73 | * updated changelogs
74 | * added mobility aid detection config (`#201 `_)
75 | to replace `#200 `_
76 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into people_sitting_feature
77 | * Merge pull request `#192 `_ from hawesie/indigo-devel
78 | Compilation fix for OS X.
79 | * Compilation fix for OS X.
80 | Removed unnecessary d suffix for double.
81 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into people_sitting_feature
82 | * Contributors: Ferdian Jovan, Jenkins, Lucas Beyer, Marc Hanheide, Nick Hawes, ferdianjovan
83 |
84 | 1.5.0 (2016-03-15)
85 | ------------------
86 | * updated changelogs
87 | * Contributors: Jenkins
88 |
89 | 1.4.0 (2016-02-17)
90 | ------------------
91 | * updated changelogs
92 | * Contributors: Jenkins
93 |
94 | 1.3.1 (2016-02-11)
95 | ------------------
96 | * updated changelogs
97 | * Merge remote-tracking branch 'upstream/indigo-devel' into indigo-devel
98 | * Contributors: Alexander Hermans, Jenkins
99 |
100 | 1.3.0 (2016-02-01)
101 | ------------------
102 | * updated changelogs
103 | * Contributors: Jenkins
104 |
105 | 1.2.1 (2016-01-28)
106 | ------------------
107 | * updated changelogs
108 | * Contributors: Jenkins
109 |
110 | 1.2.0 (2015-11-11)
111 | ------------------
112 | * updated changelogs
113 | * Merge pull request `#182 `_ from cdondrup/ppl_tracker_filter
114 | Adding a filter for the people tracker based on a map
115 | * Adding missing tf include
116 | * Renaming directories in bayes_people_tracker. Before this the header files were not installed correctly.
Also creating a new class dedicated to creating the human marker for rviz. Will be used in people_tracker_filter.
118 | * Merge pull request `#169 `_ from cdondrup/velo_message
119 | Adding the velocity of detect people to PeopleTracker message
120 | * Adding mygrate.py and missing install targets.
121 | * Adding rule to migrate rosbags to new message format
122 | * Adding velocities of detected people as a geometry_msgs/Vector3 to PeopleTracker message
123 | * Contributors: Christian Dondrup, Jenkins, Marc Hanheide
124 |
125 | 1.1.8 (2015-09-03)
126 | ------------------
127 | * updated changelogs
128 | * Contributors: Jenkins
129 |
130 | 1.1.7 (2015-08-25)
131 | ------------------
132 |
133 | 1.1.6 (2015-06-24)
134 | ------------------
135 | * updated changelogs
136 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
137 | * 1.1.5
138 | * updated changelogs
139 | * 1.1.4
140 | * updated changelogs
141 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
142 | * Contributors: Ferdian Jovan, Jenkins
143 |
144 | 1.1.5 (2015-05-22)
145 | ------------------
146 | * updated changelogs
147 | * Contributors: Jenkins
148 |
149 | 1.1.4 (2015-05-10)
150 | ------------------
151 | * updated changelogs
152 | * Contributors: Jenkins
153 |
154 | 1.1.3 (2015-04-10)
155 | ------------------
156 | * updated changelogs
157 | * Merge branch 'indigo-devel' of http://github.com/strands-project/strands_perception_people into topolog
158 | Conflicts:
159 | bayes_people_tracker_logging/launch/logging.launch
160 | * Contributors: Christian Dondrup, Jenkins
161 |
162 | 1.1.2 (2015-04-07)
163 | ------------------
164 | * updated changelogs
165 | * Contributors: Jenkins
166 |
167 | 1.1.1 (2015-04-03)
168 | ------------------
169 | * updated changelogs
170 | * Contributors: Jenkins
171 |
172 | 1.1.0 (2015-04-02)
173 | ------------------
174 | * Merge pull request `#157 `_ from cdondrup/respawn
175 | Adding respawn flags
176 | * Adding respawn flags
177 | Closes `#152 `_
178 | Bad workaround for `#156 `_ and `#76 `_
179 | * Merge pull request `#153 `_ from cdondrup/distance_fix
180 | [bayes_people_tracker] Fixing a bug in calculation of distances and angles
181 | * Stupid mistake in if statement
182 | * Actually using the transformed values helps when calculating the distance.
183 | Cleaning up unused code fragments.
184 | * Fixed a bug where the min_distance was calculated for the target frame instead of base_link.
185 | * restore detectors.yaml in bayes_people_tracker
186 | * replacing time with number of poses as suggested by Nick
187 | * Contributors: Christian Dondrup, Ferdian Jovan, Jaime Pulido Fentanes
188 |
189 | 1.0.0 (2015-03-10)
190 | ------------------
191 | * Updating changelogs.
192 | * Merge pull request `#147 `_ from cdondrup/pose_array
193 | Restructuring tracker parameters, adding Unscented Kalman filter
194 | * Nicer print
195 | * Adding ability to switch between Extended and Unscented Kalman Filter
196 | * Making simple_tracking template based.
197 | * Changed config file structure and made necessary changes to the code.
198 | * Merge pull request `#146 `_ from cdondrup/pose_array
199 | Bayes tracker visualisation improvements and making the mdl tracker optional.
200 | * Adding pose, pose_array and people publishers to connection callback.
201 | * * Publishing a pose array for all detected people to have more generic output
202 | * Added missing bayes tracker parameters to launch files and READMEs
203 | * Starting the mdl tracker is now optional when using the robot launch file. `with_mdl_tracker=true` starts the mdl tracker in addition to the bayes tracker. Default is `false`
204 | * forgot (again) to change default detector.yaml in bayes_people_tracker
205 | * adding visualization to rviz via nav_msgs/Path
206 | * Contributors: Christian Dondrup, Ferdian Jovan
207 |
208 | 0.1.4 (2015-03-06)
209 | ------------------
210 | * updated changelogs
211 | * Merge pull request `#144 `_ from cdondrup/people_msgs
212 | Publishing people_msgs/People and adding orientation.
* Publishing people_msgs/People and adding orientation.
214 | * forgot to undo my config for detectors.yaml in bayes_people_tracker
215 | * provide online stitching poses into trajectories
216 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
217 | * add online trajectory construction from /people_tracker/positions
218 | * Contributors: Christian Dondrup, Ferdian Jovan, Jenkins
219 |
220 | 0.1.3 (2015-02-25)
221 | ------------------
222 | * updated changelogs
223 | * Merge branch 'indigo-devel' of https://github.com/strands-project/strands_perception_people into indigo-devel
224 | * Contributors: Ferdian Jovan, Jenkins
225 |
226 | 0.1.2 (2015-02-20)
227 | ------------------
228 |
229 | 0.1.1 (2015-02-18 18:37)
230 | ------------------------
231 | * updated changelogs
232 | * Contributors: Jenkins
233 |
234 | 0.1.0 (2015-02-18 16:59)
235 | ------------------------
236 | * Updating changelogs
237 | * Merge pull request `#130 `_ from cdondrup/fixomatic
238 | Preparing indigo-devel to be released
239 | * Setting correct version number. The changelogs will be regenerated because the ones from the release branch would not be consistent with the changes made in the devel branch.
* Small bug in ros_debug statement
241 | * Changed launch files to new format.
242 | * Merge pull request `#114 `_ from cdondrup/hydro-devel
243 | Changed launch files to new format.
244 | * Changed launch files to new format.
245 | * Added proper link to paper describing bayes_tracker
246 | * Merge pull request `#105 `_ from lucasb-eyer/hydro-devel
247 | Fixing `#101 `_ (Licenses)
248 | * Added LICENSE files. Fixes `#101 `_
249 | * Merge pull request `#98 `_ from strands-project/rename
250 | Renamed strands_pedestrian_tracking to mdl_people_tracker
251 | * Renamed strands_pedestrian_tracking to mdl_people_tracker
252 | This also includes renaming the messages and most of the parameters.
253 | * Merge pull request `#97 `_ from strands-project/dependencies
254 | Release preparations
255 | * Forgot to install the config dir.
256 | * Fixed missing things
257 | * Prepared bayes_people_tracker for release.
258 | * Merge pull request `#96 `_ from cdondrup/rename
259 | Renaming most of the packages to comply with ROS naming conventions
* Splitting utils package into separate packages.
261 | * Renamed strands_people_tracker to bayes_people_tracker
262 | * Contributors: Christian Dondrup, Lucas Beyer
263 |
--------------------------------------------------------------------------------
/bayes_people_tracker/src/bayes_people_tracker/people_tracker.cpp:
--------------------------------------------------------------------------------
1 | #include "bayes_people_tracker/people_tracker.h"
2 | #include
3 | #include
4 |
5 |
// Node entry object: reads parameters, creates all publishers, wires up the
// detector subscriptions (via parseParams), starts the tracking thread and
// then blocks in ros::spin() until shutdown — the constructor does not
// return while the node is running.
// NOTE(review): template argument lists appear stripped from this text
// (e.g. n.advertise<...> and num_to_str<...>); restore before compiling.
PeopleTracker::PeopleTracker() : detect_seq(0), marker_seq(0), m_tf_buffer(ros::Duration(30.0)), m_tf(m_tf_buffer)
{
    ros::NodeHandle n;

    // Remember when the node came up; used to build per-track UUIDs.
    startup_time = ros::Time::now().toSec();
    startup_time_str = num_to_str(startup_time);

    // Declare variables that can be modified by launch file or command line.
    std::string pta_topic;
    std::string pub_topic;
    std::string pub_topic_pose;
    std::string pub_topic_pose_array;
    std::string pub_topic_people;
    std::string pub_topic_trajectory;
    std::string pub_marker_topic;

    // Initialize node parameters from launch file or command line.
    // Use a private node handle so that multiple instances of the node can be run simultaneously
    // while using different parameters.
    ros::NodeHandle private_node_handle("~");
    private_node_handle.param("target_frame", target_frame, std::string("/base_link"));
    private_node_handle.param("base_frame", base_frame, std::string("/base_link"));
    // NOTE(review): pta_topic is read here but appears unused in this
    // constructor; detector topics come from the "detectors" config parsed
    // in parseParams — confirm whether this parameter is still needed.
    private_node_handle.param("people_array", pta_topic, std::string("/upper_body_detector/bounding_box_centres"));
    private_node_handle.param("tracker_frequency", tracker_frequency, double(30.0));
    parseParams(private_node_handle);

    // Create a status callback.
    // The same callback handles both new subscribers and disconnects, so
    // detector subscriptions are started/stopped lazily (see connectCallback).
    ros::SubscriberStatusCallback con_cb = boost::bind(&PeopleTracker::connectCallback, this, boost::ref(n));

    private_node_handle.param("positions", pub_topic, std::string("/people_tracker/positions"));
    pub_detect = n.advertise(pub_topic.c_str(), 100, con_cb, con_cb);
    private_node_handle.param("pose", pub_topic_pose, std::string("/people_tracker/pose"));
    pub_pose = n.advertise(pub_topic_pose.c_str(), 100, con_cb, con_cb);
    private_node_handle.param("pose_array", pub_topic_pose_array, std::string("/people_tracker/pose_array"));
    pub_pose_array = n.advertise(pub_topic_pose_array.c_str(), 100, con_cb, con_cb);
    private_node_handle.param("people", pub_topic_people, std::string("/people_tracker/people"));
    pub_people = n.advertise(pub_topic_people.c_str(), 100, con_cb, con_cb);
    private_node_handle.param("trajectory", pub_topic_trajectory, std::string("/people_tracker/trajectory"));
    pub_trajectory = n.advertise(pub_topic_trajectory.c_str(), 100, con_cb, con_cb);
    private_node_handle.param("marker", pub_marker_topic, std::string("/people_tracker/marker_array"));
    pub_marker = n.advertise(pub_marker_topic.c_str(), 100, con_cb, con_cb);

    // Run the periodic tracking loop in its own thread while this thread spins.
    boost::thread tracking_thread(boost::bind(&PeopleTracker::trackingThread, this));

    ros::spin();
}
52 |
// Reads the tracker configuration from the parameter server:
//  - "filter_type": EKF, UKF or PF; instantiates exactly one of ekf/ukf/pf
//  - "std_limit"/"prune_named": optional track-pruning options
//  - "cv_noise_params": process noise of the constant velocity model
//  - "detectors": one struct entry per detector (noise, matching algorithm,
//    sequence size/time, and either "topic" or "people_topic")
// For every detector an empty ros::Subscriber slot is stored; the actual
// subscription happens lazily in connectCallback().
// NOTE(review): several template argument lists appear stripped from this
// text (e.g. `new SimpleTracking(...)` is presumably
// `new SimpleTracking<EKFilter>(...)` etc., and `std::pair(...)` is
// presumably `std::pair<std::string, std::string>(...)`); restore against
// the upstream source before compiling.
void PeopleTracker::parseParams(ros::NodeHandle n) {
  std::string filter;
  n.getParam("filter_type", filter);
  ROS_INFO_STREAM("Found filter type: " << filter);

  // Optional variance limit beyond which tracks are pruned.
  float stdLimit = 1.0;
  if (n.hasParam("std_limit")) {
    n.getParam("std_limit", stdLimit);
    ROS_INFO_STREAM("std_limit pruneTracks with " << stdLimit);
  }

  bool prune_named = false;
  if (n.hasParam("prune_named")) {
    n.getParam("prune_named", prune_named);
    ROS_INFO_STREAM("prune_named with " << prune_named);
  }


  // Exactly one filter pointer is set; the others stay NULL and every other
  // method dispatches on which one is non-NULL.
  if (filter == "EKF") {
    ekf = new SimpleTracking(stdLimit, prune_named);
  } else if (filter == "UKF") {
    ukf = new SimpleTracking(stdLimit, prune_named);
  } else if (filter == "PF") {
    pf = new SimpleTracking(stdLimit, prune_named);
  } else {
    ROS_FATAL_STREAM("Filter type " << filter << " is not specified. Unable to create the tracker. Please use either EKF, UKF or PF.");
    return;
  }

  XmlRpc::XmlRpcValue cv_noise;
  n.getParam("cv_noise_params", cv_noise);
  ROS_ASSERT(cv_noise.getType() == XmlRpc::XmlRpcValue::TypeStruct);
  ROS_INFO_STREAM("Constant Velocity Model noise: " << cv_noise);
  if (ekf != NULL) {
    ekf->createConstantVelocityModel(cv_noise["x"], cv_noise["y"]);
  } else if (ukf != NULL) {
    ukf->createConstantVelocityModel(cv_noise["x"], cv_noise["y"]);
  } else if (pf != NULL) {
    pf->createConstantVelocityModel(cv_noise["x"], cv_noise["y"]);
  } else {
    ROS_FATAL_STREAM("no filter configured.");
  }
  ROS_INFO_STREAM("Created " << filter << " based tracker using constant velocity prediction model.");

  // One detector model per entry in the "detectors" struct.
  XmlRpc::XmlRpcValue detectors;
  n.getParam("detectors", detectors);
  ROS_ASSERT(detectors.getType() == XmlRpc::XmlRpcValue::TypeStruct);
  for(XmlRpc::XmlRpcValue::ValueStruct::const_iterator it = detectors.begin(); it != detectors.end(); ++it) {
    ROS_INFO_STREAM("Found detector: " << (std::string)(it->first) << " ==> " << detectors[it->first]);
    observ_model_t om_flag;
    // Defaults used when the config omits the corresponding key.
    double pos_noise_x = .2;
    double pos_noise_y = .2;
    int seq_size = 5;
    double seq_time = 0.2;
    association_t association = NN;
    om_flag = CARTESIAN;

    try {
      if (detectors[it->first].hasMember("seq_size"))
        seq_size = (int) detectors[it->first]["seq_size"];
      if (detectors[it->first].hasMember("seq_time"))
        seq_time = (double) detectors[it->first]["seq_time"];
      // Unknown algorithm names raise asso_exception via the throw in the
      // final ternary branch.
      if (detectors[it->first].hasMember("matching_algorithm"))
        association = detectors[it->first]["matching_algorithm"] == "NN" ? NN
                    : detectors[it->first]["matching_algorithm"] == "NNJPDA" ? NNJPDA
                    : detectors[it->first]["matching_algorithm"] == "NN_LABELED" ? NN_LABELED
                    : throw(asso_exception());
      if (detectors[it->first].hasMember("cartesian_noise_params")) { // legacy support
        pos_noise_x = detectors[it->first]["cartesian_noise_params"]["x"];
        pos_noise_y = detectors[it->first]["cartesian_noise_params"]["y"];
      }
      // "noise_params" (the current key) wins over the legacy key above.
      if (detectors[it->first].hasMember("noise_params")) {
        pos_noise_x = detectors[it->first]["noise_params"]["x"];
        pos_noise_y = detectors[it->first]["noise_params"]["y"];
      }
    } catch (XmlRpc::XmlRpcException& e) {
      ROS_FATAL_STREAM("XmlRpc::XmlRpcException: '"
          << e.getMessage()
          << "'\n"
          << "Failed to parse definition for '"
          << (std::string)(it->first)
          << "'. Check your parameters."
      );
      throw(e);

    }

    try {
      if (ekf != NULL) {
        ekf->addDetectorModel(
            it->first,
            association,
            om_flag,
            pos_noise_x, pos_noise_y,
            seq_size, seq_time
        );
      } else if (ukf != NULL) {
        ukf->addDetectorModel(
            it->first,
            association,
            om_flag,
            pos_noise_x, pos_noise_y,
            seq_size, seq_time
        );
      } else if (pf != NULL) {
        pf->addDetectorModel(
            it->first,
            association,
            om_flag,
            pos_noise_x, pos_noise_y,
            seq_size, seq_time
        );
      }
    } catch (asso_exception& e) {
      ROS_FATAL_STREAM(""
          << e.what()
          << " "
          << detectors[it->first]["matching_algorithm"]
          << " is not specified. Unable to add "
          << (std::string)(it->first)
          << " to the tracker. Please use either NN or NNJPDA as association algorithms."
      );
      return;
    } catch (observ_exception& e) {
      ROS_FATAL_STREAM(""
          << e.what()
          << " "
          << detectors[it->first]["observation_model"]
          << " is not specified. Unable to add "
          << (std::string)(it->first)
          << " to the tracker. Please use either CARTESIAN or POLAR as observation models."
      );
      return;
    }
    // Store an empty subscriber slot keyed by (detector name, topic);
    // connectCallback fills it in when someone subscribes to our output.
    ros::Subscriber sub;
    if (detectors[it->first].hasMember("topic")) {
      subscribers[std::pair(it->first, detectors[it->first]["topic"])] = sub;
    }
    else if (detectors[it->first].hasMember("people_topic")) {
      subscribers_people[std::pair(it->first, detectors[it->first]["people_topic"])] = sub;
    }
  }
}
196 |
197 |
// Periodic tracking loop, run in its own thread at "tracker_frequency" Hz.
// Each cycle: look up the target_frame -> base_frame transform, ask the
// active filter (ekf/ukf/pf) for the current tracks, derive per-person
// distance/angle relative to the robot, and publish detections, RViz
// markers and finished trajectories.
// NOTE(review): template argument lists (std::map/std::vector element
// types) appear stripped from this text; restore from the header/upstream.
void PeopleTracker::trackingThread() {
  ros::Rate fps(tracker_frequency);
  double time_sec = 0.0;

  while(ros::ok()) {
    std::map tags;
    geometry_msgs::TransformStamped tf;
    if (target_frame != base_frame) {
      try {
        ROS_DEBUG_STREAM_NAMED("BayesPeopleTracker", "transforming poses from " << target_frame << " to " << base_frame);
        // ros::Time(0) = latest available transform.
        tf = m_tf_buffer.lookupTransform(base_frame, target_frame, ros::Time(0));
      } catch (tf2::TransformException& ex) {
        // On lookup failure tf stays default-initialised; the cycle proceeds
        // with an identity-less transform rather than skipping.
        ROS_ERROR_STREAM_NAMED("BayesPeopleTracker", ex.what());
      }
    } else {
      // Same frame: use an explicit identity transform.
      tf.header.frame_id = target_frame;
      tf.transform.rotation.w = 1;
    }

    try {
      std::map > ppl;
      if (ekf != NULL) {
        ppl = ekf->track(&time_sec, tags);
      } else if (ukf != NULL) {
        ppl = ukf->track(&time_sec, tags);
      } else if (pf != NULL) {
        ppl = pf->track(&time_sec, tags);
      }

      if(ppl.size()) {
        geometry_msgs::Pose closest_person_point;
        std::vector poses;
        std::vector vels;
        std::vector vars;
        std::vector uuids;
        std::vector pids;
        std::vector distances;
        std::vector angles;
        double min_dist = DBL_MAX;
        double angle;

        // For each track: it->second[0] = pose, [1] = velocity, [2] = variance.
        for(std::map >::const_iterator it = ppl.begin();
            it != ppl.end(); ++it) {
          poses.push_back(it->second[0]);
          vels.push_back(it->second[1]);
          vars.push_back(it->second[2]);
          // Tracks tagged by a named detection keep their tag as uuid;
          // untagged tracks get a generated uuid from startup time + id.
          if (tags[it->first] == "")
            uuids.push_back(generateUUID(startup_time_str, it->first));
          else
            uuids.push_back(tags[it->first]);
          pids.push_back(it->first);

          // Transform pose into the robot frame to compute distance/angle.
          geometry_msgs::PoseStamped poseInRobotCoords;
          geometry_msgs::PoseStamped poseInTargetCoords;
          poseInTargetCoords.header.frame_id = target_frame;
          poseInTargetCoords.header.stamp.fromSec(time_sec);
          poseInTargetCoords.pose = it->second[0];

          tf2::doTransform(poseInTargetCoords,poseInRobotCoords, tf);

          // Only compute polar coordinates when someone listens.
          if(pub_detect.getNumSubscribers() || pub_pose.getNumSubscribers() || pub_pose_array.getNumSubscribers() || pub_people.getNumSubscribers()) {
            std::vector polar = cartesianToPolar(poseInRobotCoords.pose.position);
            distances.push_back(polar[0]);
            angles.push_back(polar[1]);
            // Track the closest person (min_dist must be updated last so the
            // two lines above still compare against the previous minimum).
            angle = polar[0] < min_dist ? polar[1] : angle;
            closest_person_point = polar[0] < min_dist ? it->second[0] : closest_person_point;
            min_dist = polar[0] < min_dist ? polar[0] : min_dist;
          }
        }

        if(pub_detect.getNumSubscribers() || pub_pose.getNumSubscribers() || pub_pose_array.getNumSubscribers() || pub_people.getNumSubscribers())
          publishDetections(time_sec, closest_person_point, poses, vels, vars, uuids, distances, angles, min_dist, angle);

        if(pub_marker.getNumSubscribers())
          createVisualisation(poses, vars, pids, pub_marker, uuids);

        //if(pub_trajectory.getNumSubscribers())
        publishTrajectory(poses, vels, vars, pids, pub_trajectory);
      }
      fps.sleep();
    }
    catch(std::exception& e) {
      ROS_INFO_STREAM("Exception: " << e.what());
      fps.sleep();
    }
    // NOTE(review): if Bayesian_filter::Numeric_exception derives from
    // std::exception, this handler is unreachable because the handler above
    // matches first — confirm its base class and reorder if so.
    catch(Bayesian_filter::Numeric_exception& e) {
      ROS_INFO_STREAM("Exception: " << e.what());
      fps.sleep();
    }
  }
}
289 |
// Assembles and publishes all per-cycle outputs:
//  - a PeopleTracker message (poses, velocities, uuids, distances, angles,
//    closest-person summary),
//  - a people_msgs::People message,
//  - the closest person as a PoseStamped,
//  - all poses as a PoseArray.
// NOTE(review): the std::vector parameter element types appear stripped
// from this text; restore from the header.
void PeopleTracker::publishDetections(
    double time_sec,
    geometry_msgs::Pose closest,
    std::vector ppl,
    std::vector vels,
    std::vector vars,
    std::vector uuids,
    std::vector distances,
    std::vector angles,
    double min_dist,
    double angle) {
  bayes_people_tracker_msgs::PeopleTracker result;
  result.header.stamp.fromSec(time_sec);
  result.header.frame_id = target_frame;
  result.header.seq = ++detect_seq;
  result.poses = ppl;
  result.uuids = uuids;
  result.distances = distances;
  result.angles = angles;
  result.min_distance = min_dist;
  result.min_distance_angle = angle;

  people_msgs::People people;
  people.header = result.header;
  for(int i = 0; i < ppl.size(); i++) {
    // Just running one loop for people_msgs and adding velocities to people_tracker message
    // Adding velocities as a vector to PeopleTracker message
    geometry_msgs::Vector3 v;
    v.x = vels[i].position.x;
    v.y = vels[i].position.y;
    result.velocities.push_back(v);

    // Creating and adding Person message
    people_msgs::Person person;
    person.position = ppl[i].position;
    person.velocity = vels[i].position;
    person.name = uuids[i];
    person.tags.push_back(uuids[i]);
    person.tagnames.push_back("uuid");
    // NOTE(review): 1 - sqrt(var_x + var_y) goes negative once the summed
    // variance exceeds 1 — confirm consumers tolerate negative reliability.
    person.reliability = 1 - sqrt(vars[i].position.x + vars[i].position.y);
    people.people.push_back(person);
  }

  // Publishing both messages
  publishDetections(result);
  publishDetections(people);

  geometry_msgs::PoseStamped pose;
  pose.header = result.header;
  pose.pose = closest;
  publishDetections(pose);

  geometry_msgs::PoseArray poses;
  poses.header = result.header;
  poses.poses = ppl;
  publishDetections(poses);
}
347 |
// Publishes the PeopleTracker message on the positions topic.
// NOTE(review): this also publishes a default-constructed (empty)
// people_msgs::People every time; the populated People message is published
// separately via the People overload, so people-topic subscribers receive
// an empty message first — confirm this double publish is intentional.
void PeopleTracker::publishDetections(bayes_people_tracker_msgs::PeopleTracker msg) {
  pub_detect.publish(msg);
  pub_people.publish(people_msgs::People());
}
352 |
// Publishes the closest-person pose on the pose topic.
void PeopleTracker::publishDetections(geometry_msgs::PoseStamped msg) {
  pub_pose.publish(msg);
}
356 |
// Publishes all tracked poses on the pose-array topic.
void PeopleTracker::publishDetections(geometry_msgs::PoseArray msg) {
  pub_pose_array.publish(msg);
}
360 |
// Publishes the populated people_msgs::People message on the people topic.
void PeopleTracker::publishDetections(people_msgs::People msg) {
  pub_people.publish(msg);
}
364 |
365 | void PeopleTracker::publishTrajectory(std::vector poses,
366 | std::vector vels,
367 | std::vector vars,
368 | std::vector pids,
369 | ros::Publisher& pub) {
370 | /*** find trajectories ***/
371 | for(int i = 0; i < previous_poses.size(); i++) {
372 | if(boost::get<0>(previous_poses[i]) != INVALID_ID) {
373 | bool last_pose = true;
374 | for(int j = 0; j < pids.size(); j++) {
375 | if(pids[j] == boost::get<0>(previous_poses[i])) {
376 | last_pose = false;
377 | break;
378 | }
379 | }
380 | if(last_pose) {
381 | geometry_msgs::PoseArray trajectory;
382 | geometry_msgs::PoseArray velocity;
383 | geometry_msgs::PoseArray variance;
384 | trajectory.header.seq = boost::get<0>(previous_poses[i]); // tracking ID
385 | trajectory.header.stamp = ros::Time::now();
386 | trajectory.header.frame_id = target_frame; // will be reused by P-N experts
387 | for(int j = 0; j < previous_poses.size(); j++) {
388 | if(boost::get<0>(previous_poses[j]) == trajectory.header.seq) {
389 | trajectory.poses.push_back(boost::get<3>(previous_poses[j]));
390 | velocity.poses.push_back(boost::get<2>(previous_poses[j]));
391 | variance.poses.push_back(boost::get<1>(previous_poses[j]));
392 | boost::get<0>(previous_poses[j]) = INVALID_ID;
393 | }
394 | }
395 | pub.publish(trajectory);
396 | //std::cerr << "[people_tracker] trajectory ID = " << trajectory.header.seq << ", timestamp = " << trajectory.header.stamp << ", poses size = " << trajectory.poses.size() << std::endl;
397 | }
398 | }
399 | }
400 |
401 | /*** clean up ***/
402 | for(int i = 0; i < previous_poses.size(); i++) {
403 | if(boost::get<0>(previous_poses[i]) == INVALID_ID)
404 | previous_poses.erase(previous_poses.begin()+i);
405 | }
406 | }
407 |
// Builds and publishes an RViz MarkerArray for the current tracks: a human
// model per person, a TEXT_VIEW_FACING marker with "uuid (id)" above the
// head, a CYLINDER whose x/y scale is the position standard deviation, and
// (for FLOBOT) a LINE_STRIP with the remembered track history. Lifetimes
// are short (0.1 s / 1.0 s) so markers of lost tracks fade quickly.
// NOTE(review): std::vector element types appear stripped from this text;
// restore from the header.
void PeopleTracker::createVisualisation(std::vector poses, std::vector vars, std::vector pids, ros::Publisher& pub, std::vector uuids) {
    ROS_DEBUG("Creating markers");
    visualization_msgs::MarkerArray marker_array;
    for(int i = 0; i < poses.size(); i++) {
      // Create Human Model (i*10 reserves an ID range per person for the
      // multiple markers making up one human model)
      std::vector human = pm.createHuman(i*10, poses[i], target_frame);
      marker_array.markers.insert(marker_array.markers.begin(), human.begin(), human.end());
      // Create ID marker and trajectory
      double human_height = 1.9; //meter
      visualization_msgs::Marker tracking_id;
      tracking_id.header.stamp = ros::Time::now();
      tracking_id.header.frame_id = target_frame;
      tracking_id.ns = "people_id";
      tracking_id.id = pids[i];
      tracking_id.type = visualization_msgs::Marker::TEXT_VIEW_FACING;
      tracking_id.pose.position.x = poses[i].position.x;
      tracking_id.pose.position.y = poses[i].position.y;
      tracking_id.pose.position.z = human_height;
      tracking_id.scale.z = 0.7;
      tracking_id.color.a = 1.0;
      tracking_id.color.r = 1.0;
      tracking_id.color.g = 0.2;
      tracking_id.color.b = 0.0;
      tracking_id.text = uuids[i] + " (" + boost::to_string(pids[i]) + ")";
      tracking_id.lifetime = ros::Duration(0.1);
      marker_array.markers.push_back(tracking_id);

      // Uncertainty ellipse: green when confident, red as variance grows.
      visualization_msgs::Marker vars_ellipse;
      vars_ellipse.header.stamp = ros::Time::now();
      vars_ellipse.header.frame_id = target_frame;
      vars_ellipse.ns = "vars_ellipse";
      vars_ellipse.id = pids[i];
      vars_ellipse.type = visualization_msgs::Marker::CYLINDER;
      vars_ellipse.pose.position.x = poses[i].position.x;
      vars_ellipse.pose.position.y = poses[i].position.y;
      vars_ellipse.pose.position.z = 0.0;
      vars_ellipse.pose.orientation.x = 0.0; // Identity quaternion
      vars_ellipse.pose.orientation.y = 0.0;
      vars_ellipse.pose.orientation.z = 0.0;
      vars_ellipse.pose.orientation.w = 1.0;
      vars_ellipse.scale.z = 0.2;
      // Cylinder diameters = standard deviation per axis.
      vars_ellipse.scale.x = sqrt(vars[i].position.x);
      vars_ellipse.scale.y = sqrt(vars[i].position.y);
      vars_ellipse.color.a = 0.8;
      vars_ellipse.color.r = 1.0 - (1.0/(sqrt(vars[i].position.x+vars[i].position.y)+1.0));
      vars_ellipse.color.g = (1.0/(sqrt(vars[i].position.x+vars[i].position.y)+1.0));
      vars_ellipse.color.b = 0.0;
      vars_ellipse.text = boost::to_string(vars[i].position.x) + ", " + boost::to_string(vars[i].position.y);
      //ROS_INFO_STREAM(vars_ellipse.text);
      vars_ellipse.lifetime = ros::Duration(0.1);
      marker_array.markers.push_back(vars_ellipse);


      /* for FLOBOT - tracking trajectory */
      visualization_msgs::Marker tracking_tr;
      tracking_tr.header.stamp = ros::Time::now();
      tracking_tr.header.frame_id = target_frame;
      tracking_tr.ns = "people_trajectory";
      tracking_tr.id = pids[i];
      tracking_tr.type = visualization_msgs::Marker::LINE_STRIP;
      geometry_msgs::Point p;
      // Collect this track's remembered history as line-strip points.
      for(int j = 0; j < previous_poses.size(); j++) {
        if(boost::get<0>(previous_poses[j]) == pids[i]) {
          p.x = boost::get<3>(previous_poses[j]).position.x;
          p.y = boost::get<3>(previous_poses[j]).position.y;
          tracking_tr.points.push_back(p);
        }
      }
      tracking_tr.pose.orientation.x = 0.0; // Identity quaternion
      tracking_tr.pose.orientation.y = 0.0;
      tracking_tr.pose.orientation.z = 0.0;
      tracking_tr.pose.orientation.w = 1.0;
      tracking_tr.scale.x = 0.1;
      tracking_tr.color.a = 1.0;
      // Pseudo-random but stable per-ID colour, floored at 0.3 brightness.
      tracking_tr.color.r = std::max(0.3,(double)(pids[i]%3)/3.0);
      tracking_tr.color.g = std::max(0.3,(double)(pids[i]%6)/6.0);
      tracking_tr.color.b = std::max(0.3,(double)(pids[i]%9)/9.0);
      tracking_tr.lifetime = ros::Duration(1.0);
      if(tracking_tr.points.size() >= 2) { // The LINE_STRIP only makes sense if there are at least two points
        marker_array.markers.push_back(tracking_tr);
      }
    }
    pub.publish(marker_array);
}
492 |
493 | std::vector PeopleTracker::cartesianToPolar(geometry_msgs::Point point) {
494 | ROS_DEBUG("cartesianToPolar: Cartesian point: x: %f, y: %f, z %f", point.x, point.y, point.z);
495 | std::vector output;
496 | double dist = sqrt(pow(point.x,2) + pow(point.y,2));
497 | double angle = atan2(point.y, point.x);
498 | output.push_back(dist);
499 | output.push_back(angle);
500 | ROS_DEBUG("cartesianToPolar: Polar point: distance: %f, angle: %f", dist, angle);
501 | return output;
502 | }
503 |
// Callback for pose-array detectors (configured via "topic" in the
// detectors parameter). Transforms every detected pose into target_frame,
// flattens z to 0 and feeds the positions to the active filter with an
// empty tag list.
void PeopleTracker::detectorCallback(const geometry_msgs::PoseArray::ConstPtr &pta, std::string detector)
{
  // Publish an empty message to trigger callbacks even when there are no detections.
  // This can be used by nodes which might also want to know when there is no human detected.
  if(pta->poses.size() == 0) {
    bayes_people_tracker_msgs::PeopleTracker empty;
    empty.header.stamp = ros::Time::now();
    empty.header.frame_id = target_frame;
    empty.header.seq = ++detect_seq;
    publishDetections(empty);
    return;
  }


  geometry_msgs::TransformStamped tf;
  if (target_frame != pta->header.frame_id) {
    try {
      ROS_DEBUG_STREAM_NAMED("BayesPeopleTracker", "transforming input from " << pta->header.frame_id << " to " << target_frame);
      // Wait up to 10 ms for the transform at the detection's timestamp.
      tf = m_tf_buffer.lookupTransform(target_frame, pta->header.frame_id, pta->header.stamp, ros::Duration(0.01));
    } catch (tf2::TransformException& ex) {
      // Drop this batch of detections if the transform is unavailable.
      ROS_ERROR_STREAM_NAMED("BayesPeopleTracker", ex.what());
      return;
    }
  } else {
    // Same frame: identity transform.
    tf.header = pta->header;
    tf.transform.rotation.w = 1;
  }

  std::vector ppl;
  for(int i = 0; i < pta->poses.size(); i++) {
    geometry_msgs::Pose pt = pta->poses[i];

    //Create stamped pose for tf
    geometry_msgs::PoseStamped poseInCamCoords;
    geometry_msgs::PoseStamped poseInTargetCoords;
    poseInCamCoords.header = pta->header;
    poseInCamCoords.pose = pt;

    tf2::doTransform(poseInCamCoords,poseInTargetCoords, tf);

    // The tracker works in the ground plane: drop the height component.
    poseInTargetCoords.pose.position.z = 0.0;
    ppl.push_back(poseInTargetCoords.pose.position);

  }
  // Forward the observations to whichever filter is configured.
  // NOTE(review): template arguments of std::vector appear stripped in this
  // text (addObservation's tag argument is presumably std::vector<std::string>()).
  if(ppl.size()) {
    if(ekf == NULL) {
      if(ukf == NULL) {
        pf->addObservation(detector, ppl, pta->header.stamp.toSec(), std::vector());
      } else {
        ukf->addObservation(detector, ppl, pta->header.stamp.toSec(), std::vector());
      }
    } else {
      ekf->addObservation(detector, ppl, pta->header.stamp.toSec(), std::vector());
    }
  }
}
560 |
// Callback for people_msgs::People detectors (configured via "people_topic"
// in the detectors parameter). Like detectorCallback, but each detection
// also carries a name that is passed to the filter as a tag, allowing
// labelled association (NN_LABELED) and named-track pruning.
void PeopleTracker::detectorCallback_people(const people_msgs::People::ConstPtr &people, std::string detector)
{
  // Publish an empty message to trigger callbacks even when there are no detections.
  // This can be used by nodes which might also want to know when there is no human detected.
  if(people->people.size() == 0) {
    bayes_people_tracker_msgs::PeopleTracker empty;
    empty.header.stamp = ros::Time::now();
    empty.header.frame_id = target_frame;
    empty.header.seq = ++detect_seq;
    publishDetections(empty);
    return;
  }

  geometry_msgs::TransformStamped tf;
  if (target_frame != people->header.frame_id) {
    try {
      ROS_DEBUG_STREAM_NAMED("BayesPeopleTracker", "transforming input from " << people->header.frame_id << " to " << target_frame);
      // Wait up to 10 ms for the transform at the detection's timestamp.
      tf = m_tf_buffer.lookupTransform(target_frame, people->header.frame_id, people->header.stamp, ros::Duration(0.01));
    } catch (tf2::TransformException& ex) {
      // Drop this batch of detections if the transform is unavailable.
      ROS_ERROR_STREAM_NAMED("BayesPeopleTracker", ex.what());
      return;
    }
  } else {
    // Same frame: identity transform.
    tf.header = people->header;
    tf.transform.rotation.w = 1;
  }

  std::vector ppl;
  std::vector tags;
  for(int i = 0; i < people->people.size(); i++) {
    people_msgs::Person pt = people->people[i];

    //Create stamped pose for tf
    geometry_msgs::PoseStamped poseInCamCoords;
    geometry_msgs::PoseStamped poseInTargetCoords;
    poseInCamCoords.header = people->header;

    poseInCamCoords.pose.position = pt.position;
    // detections don't have an orientation
    poseInCamCoords.pose.orientation.w = 1;

    tf2::doTransform(poseInCamCoords,poseInTargetCoords, tf);

    // The tracker works in the ground plane: drop the height component.
    poseInTargetCoords.pose.position.z = 0.0;
    ppl.push_back(poseInTargetCoords.pose.position);
    // The person's name becomes the track tag (used as uuid when set).
    tags.push_back(pt.name);

  }
  // Forward the tagged observations to whichever filter is configured.
  if(ppl.size()) {
    if(ekf == NULL) {
      if(ukf == NULL) {
        pf->addObservation(detector, ppl, people->header.stamp.toSec(), tags);
      } else {
        ukf->addObservation(detector, ppl, people->header.stamp.toSec(), tags);
      }
    } else {
      ekf->addObservation(detector, ppl, people->header.stamp.toSec(), tags);
    }
  }
}
621 |
622 |
// Connection callback: lazily subscribes to all configured detector topics
// while at least one of this node's publishers has a subscriber, and shuts
// the detector subscriptions down again when nobody is listening.
void PeopleTracker::connectCallback(ros::NodeHandle &n) {
    // Implicit int -> bool conversion: true iff at least one subscriber.
    bool loc = pub_detect.getNumSubscribers();
    bool markers = pub_marker.getNumSubscribers();
    bool people = pub_people.getNumSubscribers();
    bool pose = pub_pose.getNumSubscribers();
    bool pose_array = pub_pose_array.getNumSubscribers();
    bool trajectory = pub_trajectory.getNumSubscribers();
    std::map, ros::Subscriber>::const_iterator it;
    if(!loc && !markers && !people && !trajectory && !pose && !pose_array) {
        ROS_DEBUG("Pedestrian Localisation: No subscribers. Unsubscribing.");
        // NOTE(review): only `subscribers` is shut down here; the
        // `subscribers_people` map is not — confirm whether people-topic
        // subscriptions should also be stopped.
        for(it = subscribers.begin(); it != subscribers.end(); ++it)
            const_cast(it->second).shutdown();
    } else {
        ROS_DEBUG("Pedestrian Localisation: New subscribers. Subscribing.");
        // Key is (detector name, topic); the detector name is bound into the
        // callback so observations are attributed to the right model.
        for(it = subscribers.begin(); it != subscribers.end(); ++it)
            subscribers[it->first] = n.subscribe(it->first.second.c_str(), 1000, boost::bind(&PeopleTracker::detectorCallback, this, _1, it->first.first));
        for(it = subscribers_people.begin(); it != subscribers_people.end(); ++it)
            subscribers_people[it->first] = n.subscribe(it->first.second.c_str(), 1000, boost::bind(&PeopleTracker::detectorCallback_people, this, _1, it->first.first));
    }
}
644 |
645 | int main(int argc, char **argv)
646 | {
647 | // Set up ROS.
648 | ros::init(argc, argv, "bayes_people_tracker");
649 | PeopleTracker* pl = new PeopleTracker();
650 | return 0;
651 | }
652 |
--------------------------------------------------------------------------------