├── preview
├── preview1.png
├── preview2.png
└── rosgraph_d435_fusion.png
├── src
├── my_laser_config.yaml
├── .idea
│ ├── misc.xml
│ ├── inspectionProfiles
│ │ └── profiles_settings.xml
│ ├── modules.xml
│ ├── src.iml
│ └── workspace.xml
├── pub_pf.py
├── lidar_tf2_broadcaster.py
├── obstacle_msg_analysis.py
├── obstacle_sub.py
├── overlay_sample.py
├── multi_particle_filter_a.py
├── pf_analysis.py
├── pf_analysis_fusion.py
├── multipf.py
└── obstacle_pf.py
├── launch
├── lidar.launch
├── camera.launch
└── fusion.launch
├── package.xml
├── README.md
├── rviz
├── jsk_viz.rviz
└── urdf3.rviz
└── CMakeLists.txt
/preview/preview1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rezanatha/object-detector-fusion/HEAD/preview/preview1.png
--------------------------------------------------------------------------------
/preview/preview2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rezanatha/object-detector-fusion/HEAD/preview/preview2.png
--------------------------------------------------------------------------------
/preview/rosgraph_d435_fusion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rezanatha/object-detector-fusion/HEAD/preview/rosgraph_d435_fusion.png
--------------------------------------------------------------------------------
/src/my_laser_config.yaml:
--------------------------------------------------------------------------------
1 | scan_filter_chain:
2 | - name: angle
3 | type: laser_filters/LaserScanAngularBoundsFilter
4 | params:
5 | lower_angle: -0.69
6 | upper_angle: 0.69
--------------------------------------------------------------------------------
/src/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/src/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/src/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/src/pub_pf.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Publish the particle-filter update rate (Hz) on /pub_pf at that same rate."""
import rospy
from std_msgs.msg import Float32

# Register the node with the ROS master before creating publishers
# (rospy's recommended initialization order).
rospy.init_node('pub_pf')
pub = rospy.Publisher('/pub_pf', Float32, queue_size=10)

# Rate on which PF estimate is running.
rate = 60
r = rospy.Rate(rate)
while not rospy.is_shutdown():
    pub.publish(rate)
    r.sleep()
--------------------------------------------------------------------------------
/src/.idea/src.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/src/lidar_tf2_broadcaster.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Broadcast the lidar frame as a fixed child of the camera's base_link frame."""
import rospy
import tf2_ros
import tf2_msgs.msg
import geometry_msgs.msg


class FixedTFBroadcaster:
    """Continuously republishes a constant base_link -> laser_scanner_frame
    transform on /tf until the node shuts down."""

    def __init__(self):
        self.pub_tf = rospy.Publisher("/tf", tf2_msgs.msg.TFMessage, queue_size=1)

        while not rospy.is_shutdown():
            # 0.033 s period, i.e. roughly 30 Hz.
            rospy.sleep(0.033)

            transform = geometry_msgs.msg.TransformStamped()
            transform.header.frame_id = "base_link"
            transform.header.stamp = rospy.Time.now()
            transform.child_frame_id = "laser_scanner_frame"

            # Lidar offset: directly below base_link along z.
            transform.transform.translation.x = 0.0
            transform.transform.translation.y = 0.0
            transform.transform.translation.z = -(0.0215 + 0.015)

            # Identity quaternion: both frames share the same orientation.
            transform.transform.rotation.x = 0.0
            transform.transform.rotation.y = 0.0
            transform.transform.rotation.z = 0.0
            transform.transform.rotation.w = 1.0

            self.pub_tf.publish(tf2_msgs.msg.TFMessage([transform]))


if __name__ == '__main__':
    rospy.init_node('fixed_tf2_broadcaster')
    tfb = FixedTFBroadcaster()

    rospy.spin()
42 |
--------------------------------------------------------------------------------
/src/obstacle_msg_analysis.py:
--------------------------------------------------------------------------------
#! /usr/bin/env python
"""Subscribe to camera obstacle detections and print the number of circles per message."""
import rospy
from nav_msgs.msg import Odometry
from obstacle_detector.msg import Obstacles
from obstacle_detector.msg import CircleObstacle
from geometry_msgs.msg import Point
import time


class ObstacleListener:
    """Listens on /raw_obstacles/camera_obstacles and reports the circle count."""

    def __init__(self):
        #self.obstacle_sub = rospy.Subscriber('/raw_obstacles', Obstacles, self.sub_callback)
        self.obstacle_sub = rospy.Subscriber(
            '/raw_obstacles/camera_obstacles', Obstacles, self.sub_callback)
        self.new_obstacles = Obstacles()
        self.new_circles = CircleObstacle()
        self.new_points = Point()
        self.new_obstacles.circles = [0]
        self.count = 0

    def sub_callback(self, msg):
        """Print how many circle obstacles arrived in this message."""
        detected = msg.circles
        print("length", len(detected))


if __name__ == "__main__":
    rospy.init_node('obstacle_analysis', log_level=rospy.INFO)
    run = ObstacleListener()
    rospy.spin()  # keep the node alive until shutdown
41 |
--------------------------------------------------------------------------------
/src/obstacle_sub.py:
--------------------------------------------------------------------------------
#! /usr/bin/env python
"""Log lidar obstacle detections and dump them to obstacle_data.csv on shutdown."""
import rospy
import csv
from obstacle_detector.msg import Obstacles
import time


class ObstacleListener:
    """Records the first circle of each /raw_obstacles/lidar_obstacles message."""

    def __init__(self):
        self.obstacle_sub = rospy.Subscriber(
            '/raw_obstacles/lidar_obstacles', Obstacles, self.sub_callback)
        self.print_read = []  # accumulated rows for the CSV dump
        self.start_time = rospy.get_time()
        self.count = 0        # number of messages recorded

    def sub_callback(self, msg):
        """Append [x, y, true_radius, elapsed_time] for the first detected circle."""
        # A message may carry no circles; skip it instead of raising IndexError.
        if not msg.circles:
            return
        circle = msg.circles[0]
        time_elapsed = rospy.get_time() - self.start_time
        row = [circle.center.x, circle.center.y,
               circle.true_radius, time_elapsed]
        self.count += 1
        print(row, self.count)
        self.print_read.append(row)


if __name__ == "__main__":
    rospy.init_node('obstacle_listener', log_level=rospy.INFO)
    run = ObstacleListener()
    rospy.spin()  # block until shutdown, collecting rows
    # 'with' closes the file automatically; the explicit close() was redundant.
    with open('obstacle_data.csv', 'w') as f:
        csvwriter = csv.writer(f)
        csvwriter.writerow(["pos_x",
                            "pos_y",
                            "true_radius",
                            "time"])
        csvwriter.writerows(run.print_read)
        print("Printed to obstacle_data.csv")
43 |
--------------------------------------------------------------------------------
/src/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | 1601176309114
24 |
25 |
26 | 1601176309114
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/src/overlay_sample.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Demo publisher for jsk_rviz_plugins: an OverlayText panel plus a Float32 value."""
try:
    from jsk_rviz_plugins.msg import *
except ImportError:
    # Older setups need the manifest loaded before the message import succeeds.
    import roslib
    roslib.load_manifest("jsk_rviz_plugins")
    from jsk_rviz_plugins.msg import *

from std_msgs.msg import ColorRGBA, Float32
import rospy
import math

rospy.init_node("overlay_sample")

text_pub = rospy.Publisher("text_sample", OverlayText, queue_size=1)
value_pub = rospy.Publisher("value_sample", Float32, queue_size=1)
counter = 0
rate = 100
r = rospy.Rate(rate)
while not rospy.is_shutdown():
    counter = counter + 1
    text = OverlayText()
    text.width = 400
    text.height = 600
    text.left = 10
    text.top = 10
    text.text_size = 12
    text.line_width = 2
    text.font = "DejaVu Sans Mono"
    text.text = """This is OverlayText plugin.
The update rate is %d Hz.
You can write several text to show to the operators.
New line is supported and automatical wrapping text is also supported.
And you can choose font, this text is now rendered by '%s'

You can specify background color and foreground color separatelly.

Of course, the text is not needed to be fixed, see the counter: %d.

You can change text color like this
by using css.
""" % (rate, text.font, counter)
    text.fg_color = ColorRGBA(25 / 255.0, 1.0, 240.0 / 255.0, 1.0)
    text.bg_color = ColorRGBA(0.0, 0.0, 0.0, 0.2)
    text_pub.publish(text)
    value_pub.publish(math.sin(counter * math.pi * 2 / 100))
    # Demonstrate each rospy log level once per 500 iterations.
    if int(counter % 500) == 0:
        rospy.logdebug('This is ROS_DEBUG.')
    elif int(counter % 500) == 100:
        rospy.loginfo('This is ROS_INFO.')
    elif int(counter % 500) == 200:
        rospy.logwarn('This is ROS_WARN.')
    elif int(counter % 500) == 300:
        rospy.logerr('This is ROS_ERROR.')
    elif int(counter % 500) == 400:
        rospy.logfatal('This is ROS_FATAL.')
    r.sleep()
59 |
60 |
--------------------------------------------------------------------------------
/launch/lidar.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | object-detector-fusion
4 | 0.0.0
5 | The object-detector-fusion package
6 |
7 |
8 |
9 |
10 | wnrezaldi
11 |
12 |
13 |
14 |
15 |
16 | TODO
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 | catkin
52 | rospy
53 | rospy_tutorials
54 | python-numpy
55 | rospy
56 | rospy_tutorials
57 | rospy
58 | rospy_tutorials
59 | python-numpy
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # object-detector-fusion ROS package
2 | The `object-detector-fusion` is used for detecting and tracking objects from data that is provided by a 2D LiDAR/Laser Scanner and a depth camera.
3 | The detection working principle is largely based on `obstacle_detector` created by Mateusz Przybyla, which used a density-based clustering method to group point clouds and create a geometric representation of objects within the sensor vicinity. The objects are detected by camera first, and then the measurement is corrected by LiDAR detection result, hence the "fusion" name. The dynamic state of the detected objects (position, speed, acceleration) is estimated (tracked) using particle filter.
4 |
5 | This package is designed to work with following sensors:
6 | 1. RPLiDAR A1 Laser Scanner
7 | 2. Intel RealSense D435 Depth Camera
8 |
9 | that are equipped on a land-based autonomous system.
10 |
11 | ## Requirements
12 | 1. [ROS Melodic Morenia](https://wiki.ros.org/melodic)
13 | 2. [`rplidar`](https://github.com/robopeak/rplidar_ros)
14 | 3. [`realsense-ros`](https://github.com/IntelRealSense/realsense-ros)
15 | 4. [`depthimage-to-laserscan`](https://github.com/ros-perception/depthimage_to_laserscan)
16 | 5. [`obstacle_detector`](https://github.com/tysik/obstacle_detector)
17 | 6. [`jsk_visualization`](https://github.com/jsk-ros-pkg/jsk_visualization)
18 |
19 | ## Installation
20 | 1. Clone and add to your catkin package and run `catkin_make`
21 |
22 | ## Usage
23 | Run the following commands on the terminal.
24 | To start the fusion nodes:
25 | ```
26 | roslaunch object-detector-fusion fusion.launch
27 | ```
28 | Start the nodes with only camera:
29 | ```
30 | roslaunch object-detector-fusion camera.launch
31 | ```
32 | Start the nodes with only lidar (no tracking available):
33 | ```
34 | roslaunch object-detector-fusion lidar.launch
35 | ```
36 | You should see the output on the rviz main screen.
37 | ## Preview
38 | To obtain 2D depth data from 3D depth data, this package uses `depthimage-to-laserscan`, which subscribes to the `sensor_msgs/Image` message on `camera/depth/image_rect_raw` and publishes a `sensor_msgs/LaserScan` message — the 2D depth data converted to a laserscan-type message. The `camera_obstacle_extractor` node converts this message into a set of objects which are published as the custom type `obstacle_detector/Obstacles`. `lidar_obstacle_extractor` does the same to the
39 | `sensor_msgs/LaserScan` published by `rplidarNode`. Green circles with red concentric circles represent detected objects from camera data. Beige and blue circles represent detected objects from lidar data.
40 |
41 | `particle_filter` node will use the measurement data from these detected objects results to estimate dynamic state of an object as well as reduce measurement noise. A magenta box represents mean of the particles used to approximate the state and black dots surrounding the box represents the aforementioned particles.
42 |
43 | 
44 |
45 | )
46 |
--------------------------------------------------------------------------------
/src/multi_particle_filter_a.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | '''
3 | Script yg isinya fungsi-fungsi untuk keperluan particle filtering
4 | '''
5 | import numpy as np
6 | from numpy.linalg import norm
7 | from numpy.random import randn
8 | from numpy.random import uniform
9 | from filterpy.monte_carlo import systematic_resample
10 | from filterpy.stats import multivariate_multiply
11 | import matplotlib.pyplot as plt
12 | import scipy.stats
13 |
14 |
def create_uniform_particles(x_range, y_range, xdot_range, ydot_range, N):
    """Draw N particles uniformly within the given per-dimension ranges.

    Columns follow the state layout used everywhere else in this module
    (positions first, then velocities): [x, y, x_dot, y_dot].  The original
    filled column 1 with x_dot and column 2 with y, which contradicted
    update()/estimate() reading columns 0:2 as position.
    """
    particles = np.empty((N, 4))
    particles[:, 0] = uniform(x_range[0], x_range[1], size=N)
    particles[:, 1] = uniform(y_range[0], y_range[1], size=N)
    particles[:, 2] = uniform(xdot_range[0], xdot_range[1], size=N)
    particles[:, 3] = uniform(ydot_range[0], ydot_range[1], size=N)
    return particles
22 |
def create_gaussian_particles(mean, std, N):
    """Draw N particles from independent Gaussians, one per state dimension.

    mean and std are length-7 sequences: [x, y, vx, vy, ax, ay, radius].
    """
    particles = np.empty((N, 7))
    # Fill column by column so each dimension draws its own noise vector.
    for dim, (center, spread) in enumerate(zip(mean, std)):
        particles[:, dim] = center + randn(N) * spread
    return particles
28 |
def create_initial_weights(N):
    """Return N uniform weights summing to 1."""
    return np.full(N, 1.0 / N)
31 |
def predict(particles, std, dt):
    """Advance every particle one time step under a constant-acceleration
    model, adding per-dimension Gaussian noise scaled by ``std``.

    Mutates ``particles`` in place and also returns it.  State columns:
    [x, y, vx, vy, ax, ay, radius].
    """
    n = len(particles)

    # True radius: pure random walk.
    particles[:, 6] += randn(n) * std[6]

    # Acceleration: random walk (x axis first, then y).
    for acc in (4, 5):
        particles[:, acc] += randn(n) * std[acc]

    # Velocity: integrate acceleration over dt, plus noise.
    for vel, acc in ((2, 4), (3, 5)):
        particles[:, vel] += particles[:, acc] * dt + randn(n) * std[vel]

    # Position: kinematics using the *updated* velocity and acceleration.
    half_dt2 = 0.5 * dt ** 2
    for pos, vel, acc in ((0, 2, 4), (1, 3, 5)):
        particles[:, pos] += randn(n) * std[pos] \
            + particles[:, vel] * dt + particles[:, acc] * half_dt2

    return particles
50 |
def update(particles, weights, z, R):
    """Reweight particles by the likelihood of measurement ``z``.

    The measurement vector is [x, y, true_radius]; each particle's predicted
    measurement is its position (cols 0:2) stacked with its radius (col 6).
    Weights are scaled by a Gaussian likelihood of the Euclidean residual,
    then renormalized in place and returned.
    """
    predicted = np.hstack((particles[:, 0:2], particles[:, 6:7]))
    distance = np.linalg.norm(predicted - z, axis=1)
    weights *= scipy.stats.norm(distance, R).pdf(0)
    weights += 1.e-300  # guard against all weights rounding to zero
    weights /= sum(weights)  # normalize
    return weights
61 |
def update_multivariate(particles, weights, z, R):
    """Reweight particles using a full multivariate-normal likelihood.

    Like update(), but R is a covariance matrix over [x, y, true_radius]
    rather than a scalar spread.  Weights are renormalized in place and
    returned.
    """
    predicted = np.hstack((particles[:, 0:2], particles[:, 6:7]))
    weights *= scipy.stats.multivariate_normal.pdf(x=predicted, mean=z, cov=R)
    weights += 1.e-300  # guard against all weights rounding to zero
    weights /= sum(weights)  # normalize
    return weights
70 |
def estimate(particles, weights):
    """Return the weighted mean and weighted variance of the particle set,
    computed per state dimension (axis 0)."""
    mean = np.average(particles, weights=weights, axis=0)
    var = np.average((particles - mean) ** 2, weights=weights, axis=0)
    return mean, var
78 |
def neff(weights):
    """Effective sample size: 1 / sum(w_i^2); equals N for uniform weights."""
    return 1. / np.sum(weights ** 2)
81 |
def resample_from_index(particles, weights, indexes):
    """Resample the particle set in place and return fresh uniform weights.

    ``indexes`` selects which particles survive (e.g. from
    filterpy's systematic_resample).  The returned weights are sized to the
    particle count and uniform so they sum to exactly 1.  The original
    filled with 1/len(weights) (the pre-resize length), which does not sum
    to 1 whenever the weight and particle counts differ.
    """
    particles[:] = particles[indexes]
    new_weights = np.resize(weights, len(particles))
    new_weights.fill(1.0 / len(particles))
    return new_weights
89 |
90 |
--------------------------------------------------------------------------------
/rviz/jsk_viz.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 78
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | - /pie chart1
10 | Splitter Ratio: 0.4845130145549774
11 | Tree Height: 427
12 | - Class: rviz/Selection
13 | Name: Selection
14 | - Class: rviz/Tool Properties
15 | Expanded:
16 | - /2D Pose Estimate1
17 | - /2D Nav Goal1
18 | - /Publish Point1
19 | Name: Tool Properties
20 | Splitter Ratio: 0.5886790156364441
21 | - Class: rviz/Views
22 | Expanded:
23 | - /Current View1
24 | - /Current View1/Focal Point1
25 | Name: Views
26 | Splitter Ratio: 0.5
27 | - Class: rviz/Time
28 | Experimental: false
29 | Name: Time
30 | SyncMode: 0
31 | SyncSource: ""
32 | Preferences:
33 | PromptSaveOnExit: true
34 | Toolbars:
35 | toolButtonStyle: 2
36 | Visualization Manager:
37 | Class: ""
38 | Displays:
39 | - Alpha: 0.5
40 | Cell Size: 1
41 | Class: rviz/Grid
42 | Color: 160; 160; 164
43 | Enabled: true
44 | Line Style:
45 | Line Width: 0.029999999329447746
46 | Value: Lines
47 | Name: Grid
48 | Normal Cell Count: 0
49 | Offset:
50 | X: 0
51 | Y: 0
52 | Z: 0
53 | Plane: XY
54 | Plane Cell Count: 10
55 | Reference Frame:
56 | Value: true
57 | - Align Bottom: false
58 | Background Alpha: 0.800000011920929
59 | Background Color: 0; 0; 0
60 | Class: jsk_rviz_plugin/OverlayText
61 | Enabled: true
62 | Foreground Alpha: 0.800000011920929
63 | Foreground Color: 25; 255; 240
64 | Name: sample text
65 | Overtake Color Properties: false
66 | Overtake Position Properties: true
67 | Topic: /text_sample
68 | Value: true
69 | font: DejaVu Sans Mono
70 | height: 400
71 | left: 0
72 | line width: 2
73 | text size: 12
74 | top: 400
75 | width: 350
76 | - Buffer length: 100
77 | Class: jsk_rviz_plugin/Plotter2D
78 | Enabled: true
79 | Name: sin plot
80 | Show Value: true
81 | Topic: /value_sample
82 | Value: true
83 | auto color change: false
84 | auto scale: true
85 | background color: 0; 0; 0
86 | backround alpha: 0
87 | border: true
88 | foreground alpha: 0.699999988079071
89 | foreground color: 25; 255; 240
90 | height: 128
91 | left: 495
92 | linewidth: 2
93 | max color: 255; 0; 0
94 | max value: 1
95 | min value: -1
96 | show caption: true
97 | text size: 12
98 | top: 111
99 | update interval: 0.03999999910593033
100 | width: 128
101 | - Class: jsk_rviz_plugin/PieChart
102 | Enabled: true
103 | Name: pie chart
104 | Topic: /value_sample
105 | Value: true
106 | auto color change: false
107 | background color: 0; 0; 0
108 | backround alpha: 0
109 | foreground alpha: 0.699999988079071
110 | foreground alpha 2: 0.4000000059604645
111 | foreground color: 25; 255; 240
112 | left: 700
113 | max color: 255; 0; 0
114 | max value: 1
115 | min value: -1
116 | show caption: true
117 | size: 128
118 | text size: 14
119 | top: 80
120 | - Class: jsk_rviz_plugin/OverlayImage
121 | Enabled: true
122 | Name: rviz startup image
123 | Topic: /image_publisher/image_raw
124 | Value: true
125 | alpha: 0.800000011920929
126 | height: 128
127 | keep aspect ratio: true
128 | left: 620
129 | overwrite alpha value: false
130 | top: 550
131 | transport hint: raw
132 | width: 128
133 | - Class: jsk_rviz_plugin/OverlayMenu
134 | Enabled: true
135 | Name: menu
136 | Topic: /test_menu
137 | Value: true
138 | keep centered: true
139 | left: 128
140 | top: 128
141 | Enabled: true
142 | Global Options:
143 | Background Color: 48; 48; 48
144 | Default Light: true
145 | Fixed Frame: camera_depth_frame
146 | Frame Rate: 30
147 | Name: root
148 | Tools:
149 | - Class: rviz/Interact
150 | Hide Inactive Objects: true
151 | - Class: rviz/MoveCamera
152 | - Class: rviz/Select
153 | - Class: rviz/FocusCamera
154 | - Class: rviz/Measure
155 | - Class: rviz/SetInitialPose
156 | Theta std deviation: 0.2617993950843811
157 | Topic: /initialpose
158 | X std deviation: 0.5
159 | Y std deviation: 0.5
160 | - Class: rviz/SetGoal
161 | Topic: /move_base_simple/goal
162 | - Class: rviz/PublishPoint
163 | Single click: true
164 | Topic: /clicked_point
165 | - Class: jsk_rviz_plugin/OverlayPicker
166 | Value: true
167 | Views:
168 | Current:
169 | Class: rviz/Orbit
170 | Distance: 5.350776195526123
171 | Enable Stereo Rendering:
172 | Stereo Eye Separation: 0.05999999865889549
173 | Stereo Focal Distance: 1
174 | Swap Stereo Eyes: false
175 | Value: false
176 | Focal Point:
177 | X: 0.6575250029563904
178 | Y: -0.5965409874916077
179 | Z: 0.3936150074005127
180 | Focal Shape Fixed Size: true
181 | Focal Shape Size: 0.05000000074505806
182 | Invert Z Axis: false
183 | Name: Current View
184 | Near Clip Distance: 0.009999999776482582
185 | Pitch: 0.5997970104217529
186 | Target Frame:
187 | Value: Orbit (rviz)
188 | Yaw: 0.9258480072021484
189 | Saved: ~
190 | Window Geometry:
191 | Displays:
192 | collapsed: false
193 | Height: 718
194 | Hide Left Dock: false
195 | Hide Right Dock: false
196 | QMainWindow State: 000000ff00000000fd00000004000000000000016a00000234fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b00000234000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f000002befc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a005600690065007700730000000034000002be000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000005560000003efc0100000002fb0000000800540069006d00650100000000000005560000024400fffffffb0000000800540069006d00650100000000000004500000000000000000000003e60000023400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
197 | Selection:
198 | collapsed: false
199 | Time:
200 | collapsed: false
201 | Tool Properties:
202 | collapsed: false
203 | Views:
204 | collapsed: false
205 | Width: 1366
206 | X: 0
207 | Y: 24
208 |
--------------------------------------------------------------------------------
/src/pf_analysis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import roslib
3 | from std_msgs.msg import Float32, Float32MultiArray, ColorRGBA
4 | from rospy.numpy_msg import numpy_msg
5 | from jsk_rviz_plugins.msg import OverlayText
6 | from obstacle_detector.msg import Obstacles
7 | import numpy as np
8 | import rospy
9 | import csv
10 | import sys
11 | import math
12 | PKG = 'object-detector-fusion'
13 | roslib.load_manifest(PKG)
14 |
15 |
16 | np.set_printoptions(precision=3)
17 | SAVE_TO_FILE = False
18 | PUBLISH = False
19 |
20 |
class ParticleFilterViz:
    """Publishes rviz OverlayText summaries of the particle-filter estimates.

    Subscribes to the PF mean state (3 objects x 7 state variables on
    /estimate/values/mean), the PF runtime, and both detectors' obstacle
    lists; publishes one overlay per object and accumulates rows for the
    optional CSV dump driven by SAVE_TO_FILE in __main__.
    """

    def __init__(self):
        self.start_time = rospy.get_time()
        self.time_elapsed = 0
        self.print_mu = [[], [], []]  # one row buffer per tracked object
        # Safe defaults so callback() can fire before /runtime or the
        # detector topics have published (previously raised AttributeError).
        self.runtime = 0.0
        self.obstacles = []
        self.lidars = []
        self.main_sub = rospy.Subscriber(
            '/estimate/values/mean', numpy_msg(Float32MultiArray), self.callback)
        # Was assigned to self.main_sub as well, silently clobbering the
        # handle of the first subscriber; keep a dedicated attribute.
        self.runtime_sub = rospy.Subscriber(
            '/runtime', Float32, self.runtime_callback)
        self.obstacle_sub = rospy.Subscriber(
            '/raw_obstacles/camera_obstacles', Obstacles, self.obstacle_callback)
        self.lidar_sub = rospy.Subscriber(
            '/raw_obstacles/lidar_obstacles', Obstacles, self.lidar_callback)
        self.text_pub_1 = rospy.Publisher(
            "/estimate/values/text_overlay/1", OverlayText, queue_size=1)
        self.text_pub_2 = rospy.Publisher(
            "/estimate/values/text_overlay/2", OverlayText, queue_size=1)
        self.text_pub_3 = rospy.Publisher(
            "/estimate/values/text_overlay/3", OverlayText, queue_size=1)
        if PUBLISH:
            self.dist = rospy.Publisher(
                '/estimate/values/1/dist', Float32, queue_size=1)
            self.x_1 = rospy.Publisher(
                '/estimate/values/1/pos_x', Float32, queue_size=1)
            self.y_1 = rospy.Publisher(
                '/estimate/values/1/pos_y', Float32, queue_size=1)
            # Was assigned to self.dist too, overwriting the publisher
            # created above; use a distinct attribute.
            self.vel = rospy.Publisher(
                '/estimate/values/1/vel', Float32, queue_size=1)
            self.x_vel_1 = rospy.Publisher(
                '/estimate/values/1/vel_x', Float32, queue_size=1)
            self.y_vel_1 = rospy.Publisher(
                '/estimate/values/1/vel_y', Float32, queue_size=1)
            self.x_acc_1 = rospy.Publisher(
                '/estimate/values/1/acc_x', Float32, queue_size=1)
            self.y_acc_1 = rospy.Publisher(
                '/estimate/values/1/acc_y', Float32, queue_size=1)
            self.rad_1 = rospy.Publisher(
                '/estimate/values/1/rad', Float32, queue_size=1)

    def runtime_callback(self, msg):
        """Cache the most recent PF runtime value."""
        self.runtime = msg.data

    def obstacle_callback(self, msg):
        """Cache the camera detector's circle list."""
        self.obstacles = msg.circles

    def lidar_callback(self, msg):
        """Cache the lidar detector's circle list."""
        self.lidars = msg.circles

    def callback(self, msg):
        """Handle a PF mean-state message: publish overlays and log rows."""
        main_msg = np.reshape(msg.data, [3, 7])

        pos_x = main_msg[0, 0]
        pos_y = main_msg[0, 1]
        vel_x = main_msg[0, 2]
        vel_y = main_msg[0, 3]
        acc_x = main_msg[0, 4]
        acc_y = main_msg[0, 5]
        rad = main_msg[0, 6]
        if PUBLISH:
            self.x_1.publish(pos_x)
            self.y_1.publish(pos_y)
            self.x_vel_1.publish(vel_x)
            self.y_vel_1.publish(vel_y)
            self.x_acc_1.publish(acc_x)
            self.y_acc_1.publish(acc_y)
            self.rad_1.publish(rad)

        text = OverlayText()
        text.width = 400
        text.height = 600
        text.left = 10
        text.top = 10
        text.text_size = 12
        text.line_width = 2
        text.fg_color = ColorRGBA(25 / 255.0, 1.0, 240.0 / 255.0, 1.0)
        text.bg_color = ColorRGBA(0.0, 0.0, 0.0, 0.2)

        text.font = "DejaVu Sans Mono"
        arg = [x for x in main_msg[0, :]]
        dist = math.sqrt((arg[0]**2 + arg[1]**2))
        speed = math.sqrt((arg[2]**2 + arg[3]**2))
        text.text = """
Runtime: %.2f ms
Object 1
Position = [%.2f, %.2f]
Distance = %.2f
Velocity = [%.2f, %.2f]
Speed = %.2f
Acceleration = [%.2f, %.2f]
True Radius = %.2f
""" % (self.runtime, arg[0], arg[1], dist, arg[2], arg[3], speed, arg[4], arg[5], arg[6])
        self.text_pub_1.publish(text)

        arg = [x for x in main_msg[1, :]]
        dist = math.sqrt((arg[0]**2 + arg[1]**2))
        speed = math.sqrt((arg[2]**2 + arg[3]**2))
        text.text = """
Object 2
Position = [%.2f, %.2f]
Distance = %.2f
Velocity = [%.2f, %.2f]
Speed = %.2f
Acceleration = [%.2f, %.2f]
True Radius = %.2f
""" % (arg[0], arg[1], dist, arg[2], arg[3], speed, arg[4], arg[5], arg[6])
        self.text_pub_2.publish(text)

        arg = [x for x in main_msg[2, :]]
        dist = math.sqrt((arg[0]**2 + arg[1]**2))
        speed = math.sqrt((arg[2]**2 + arg[3]**2))
        text.text = """
Object 3
Position = [%.2f, %.2f]
Distance = %.2f
Velocity = [%.2f, %.2f]
Speed = %.2f
Acceleration = [%.2f, %.2f]
True Radius = %.2f
""" % (arg[0], arg[1], dist, arg[2], arg[3], speed, arg[4], arg[5], arg[6])
        self.text_pub_3.publish(text)

        for i, mu in enumerate(main_msg[:3]):
            self.time_elapsed = rospy.get_time() - self.start_time
            # Skip logging until both detectors have reported object i
            # (previously an IndexError when a detector lagged behind).
            if i >= len(self.obstacles) or i >= len(self.lidars):
                continue
            circles = self.obstacles[i]
            circles_lidar = self.lidars[i]

            # NOTE(review): lidar circles are read as .x/.y while camera
            # circles use .center.x/.center.y — mirrors the original code;
            # confirm against the Obstacles message definition.
            self.print_mu[i].append(np.append(mu,
                                              (self.runtime,
                                               self.time_elapsed,
                                               circles.center.x,
                                               circles.center.y,
                                               circles.true_radius,
                                               circles_lidar.x,
                                               circles_lidar.y,
                                               circles_lidar.true_radius)))
156 |
157 |
158 |
if __name__ == '__main__':
    rospy.init_node('pf_estimate_analysis')
    run = ParticleFilterViz()
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down....")

    # After shutdown, optionally dump one CSV per tracked object.
    if SAVE_TO_FILE:
        for i in range(len(run.print_mu)):
            # 'with' closes the file; the explicit close() was redundant.
            with open("estimate_data_{}.csv".format(i), "w") as f:
                csvwriter = csv.writer(f)
                csvwriter.writerow(["x_estimate",
                                    "y_estimate",
                                    "x_vel_estimate",
                                    "y_vel_estimate",
                                    "x_accel_estimate",
                                    "y_accel_estimate",
                                    "true_rad_estimate",
                                    "runtime",
                                    "time",
                                    "x_cam",
                                    "y_cam",
                                    "rad_cam",
                                    "x_lid",
                                    "y_lid",
                                    "rad_lid"])
                csvwriter.writerows(run.print_mu[i])
                print("Printed to estimate_data_{}.csv".format(i))
189 |
--------------------------------------------------------------------------------
/src/pf_analysis_fusion.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import roslib
3 | from std_msgs.msg import Float32, Float32MultiArray, ColorRGBA
4 | from rospy.numpy_msg import numpy_msg
5 | from jsk_rviz_plugins.msg import OverlayText
6 | from obstacle_detector.msg import Obstacles
7 | import numpy as np
8 | import rospy
9 | import csv
10 | import sys
11 | import math
12 | PKG = 'object-detector-fusion'
13 | roslib.load_manifest(PKG)
14 |
15 |
16 | np.set_printoptions(precision=3)
17 | SAVE_TO_FILE = True
18 | PUBLISH = False
19 |
20 |
class ParticleFilterViz:
    """Visualization/logging node for fused particle-filter estimates.

    Subscribes to the fused state estimate ('/estimate/values/mean', a flat
    3x7 array, one row per tracked object), the filter runtime, and the raw
    camera/lidar obstacle circles.  Publishes one OverlayText panel per
    object for rviz and buffers one CSV row per object per estimate in
    ``print_mu`` (written to disk by the ``__main__`` block).
    """

    def __init__(self):
        self.start_time = rospy.get_time()
        self.time_elapsed = 0
        # Safe defaults so callback() cannot hit an AttributeError when the
        # estimate topic delivers its first message before /runtime or the
        # obstacle topics do (previously these attributes did not exist
        # until the corresponding callback had fired at least once).
        self.runtime = 0.0
        self.obstacles = []
        self.lidars = []
        # One row buffer per tracked object (3 objects).
        self.print_mu = [[], [], []]
        self.main_sub = rospy.Subscriber(
            '/estimate/values/mean', numpy_msg(Float32MultiArray), self.callback)
        # BUG FIX: this subscriber was previously also assigned to
        # self.main_sub, silently overwriting the handle above.
        self.runtime_sub = rospy.Subscriber(
            '/runtime', Float32, self.runtime_callback)
        self.obstacle_sub = rospy.Subscriber(
            '/raw_obstacles/camera_obstacles', Obstacles, self.obstacle_callback)
        self.lidar_sub = rospy.Subscriber(
            '/raw_obstacles/lidar_obstacles', Obstacles, self.lidar_callback)
        self.text_pub_1 = rospy.Publisher(
            "/estimate/values/text_overlay/1", OverlayText, queue_size=1)
        self.text_pub_2 = rospy.Publisher(
            "/estimate/values/text_overlay/2", OverlayText, queue_size=1)
        self.text_pub_3 = rospy.Publisher(
            "/estimate/values/text_overlay/3", OverlayText, queue_size=1)
        if PUBLISH:
            self.dist = rospy.Publisher(
                '/estimate/values/1/dist', Float32, queue_size=1)
            self.x_1 = rospy.Publisher(
                '/estimate/values/1/pos_x', Float32, queue_size=1)
            self.y_1 = rospy.Publisher(
                '/estimate/values/1/pos_y', Float32, queue_size=1)
            # BUG FIX: the velocity publisher was previously assigned to
            # self.dist, clobbering the distance publisher created above.
            self.vel = rospy.Publisher(
                '/estimate/values/1/vel', Float32, queue_size=1)
            self.x_vel_1 = rospy.Publisher(
                '/estimate/values/1/vel_x', Float32, queue_size=1)
            self.y_vel_1 = rospy.Publisher(
                '/estimate/values/1/vel_y', Float32, queue_size=1)
            self.x_acc_1 = rospy.Publisher(
                '/estimate/values/1/acc_x', Float32, queue_size=1)
            self.y_acc_1 = rospy.Publisher(
                '/estimate/values/1/acc_y', Float32, queue_size=1)
            self.rad_1 = rospy.Publisher(
                '/estimate/values/1/rad', Float32, queue_size=1)

    def runtime_callback(self, msg):
        """Cache the latest filter runtime reported on /runtime."""
        self.runtime = msg.data

    def obstacle_callback(self, msg):
        """Cache the latest camera-detected obstacle circles."""
        self.obstacles = msg.circles

    def lidar_callback(self, msg):
        """Cache the latest lidar-detected obstacle circles."""
        self.lidars = msg.circles

    def callback(self, msg):
        """Handle one fused estimate: publish per-object overlay text and
        append a CSV row per object to ``print_mu``.

        The message carries a flat array reshaped to 3 objects x 7 state
        values (x, y, vx, vy, ax, ay, radius).
        """
        main_msg = np.reshape(msg.data, [3, 7])

        if PUBLISH:
            pos_x, pos_y, vel_x, vel_y, acc_x, acc_y, rad = main_msg[0, :]
            self.x_1.publish(pos_x)
            self.y_1.publish(pos_y)
            self.x_vel_1.publish(vel_x)
            self.y_vel_1.publish(vel_y)
            self.x_acc_1.publish(acc_x)
            self.y_acc_1.publish(acc_y)
            self.rad_1.publish(rad)

        text = OverlayText()
        text.width = 400
        text.height = 600
        text.left = 10
        text.top = 10
        text.text_size = 12
        text.line_width = 2
        text.fg_color = ColorRGBA(25 / 255.0, 1.0, 240.0 / 255.0, 1.0)
        text.bg_color = ColorRGBA(0.0, 0.0, 0.0, 0.2)
        text.font = "DejaVu Sans Mono"

        # One overlay panel per tracked object; only the first panel carries
        # the runtime line.  The three panels share the same format, so the
        # three previously duplicated text blocks are folded into one loop;
        # the emitted strings are unchanged.
        publishers = (self.text_pub_1, self.text_pub_2, self.text_pub_3)
        for i, pub in enumerate(publishers):
            arg = [x for x in main_msg[i, :]]
            dist = math.sqrt((arg[0]**2 + arg[1]**2))
            speed = math.sqrt((arg[2]**2 + arg[3]**2))
            header = "\nRuntime: %.2f ms" % self.runtime if i == 0 else ""
            text.text = header + """
Object %d
Position = [%.2f, %.2f]
Distance = %.2f
Velocity = [%.2f, %.2f]
Speed = %.2f
Acceleration = [%.2f, %.2f]
True Radius = %.2f
""" % (i + 1, arg[0], arg[1], dist, arg[2], arg[3], speed, arg[4], arg[5], arg[6])
            pub.publish(text)

        for i, mu in enumerate(main_msg[:3]):
            self.time_elapsed = rospy.get_time() - self.start_time
            # Guard: the detectors may report fewer circles than tracked
            # objects; previously this raised IndexError (or AttributeError
            # before the first obstacle message arrived).
            if i >= len(self.obstacles) or i >= len(self.lidars):
                continue
            circles = self.obstacles[i]
            circles_lidar = self.lidars[i]
            self.print_mu[i].append(np.append(mu,
                                              (self.runtime,
                                               self.time_elapsed,
                                               circles.center.x,
                                               circles.center.y,
                                               circles.true_radius,
                                               circles_lidar.center.x,
                                               circles_lidar.center.y,
                                               circles_lidar.true_radius)))
156 |
157 |
158 |
if __name__ == '__main__':
    rospy.init_node('pf_estimate_analysis')
    run = ParticleFilterViz()
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down....")

    if SAVE_TO_FILE:
        # Column order must match the 7 state values plus the 8-tuple
        # appended per row in ParticleFilterViz.callback.
        header = ["x_estimate",
                  "y_estimate",
                  "x_vel_estimate",
                  "y_vel_estimate",
                  "x_accel_estimate",
                  "y_accel_estimate",
                  "true_rad_estimate",
                  "runtime",
                  "time",
                  "x_cam",
                  "y_cam",
                  "rad_cam",
                  "x_lid",
                  "y_lid",
                  "rad_lid"]
        # One CSV per tracked object.  The with-block closes the file, so
        # the previous redundant f.close() call is gone.
        for i, rows in enumerate(run.print_mu):
            with open("estimate_data_{}.csv".format(i), "w") as f:
                csvwriter = csv.writer(f)
                csvwriter.writerow(header)
                csvwriter.writerows(rows)
            print("Printed to estimate_data_{}.csv".format(i))
189 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
# Catkin build configuration for the object-detector-fusion package.
# The package is pure Python (see src/), so no targets are built here;
# catkin only needs the package registration and its run dependencies.
cmake_minimum_required(VERSION 3.0.2)
project(object-detector-fusion)

## Compile as C++11, supported in ROS Kinetic and newer
# add_compile_options(-std=c++11)

## Find catkin macros and libraries
## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
## is used, also find other catkin packages
find_package(catkin REQUIRED COMPONENTS
  rospy
  rospy_tutorials
)
14 |
15 | ## System dependencies are found with CMake's conventions
16 | # find_package(Boost REQUIRED COMPONENTS system)
17 |
18 |
19 | ## Uncomment this if the package has a setup.py. This macro ensures
20 | ## modules and global scripts declared therein get installed
21 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
22 | # catkin_python_setup()
23 |
24 | ################################################
25 | ## Declare ROS messages, services and actions ##
26 | ################################################
27 |
28 | ## To declare and build messages, services or actions from within this
29 | ## package, follow these steps:
30 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
31 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
32 | ## * In the file package.xml:
33 | ## * add a build_depend tag for "message_generation"
34 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
35 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
36 | ## but can be declared for certainty nonetheless:
37 | ## * add a exec_depend tag for "message_runtime"
38 | ## * In this file (CMakeLists.txt):
39 | ## * add "message_generation" and every package in MSG_DEP_SET to
40 | ## find_package(catkin REQUIRED COMPONENTS ...)
41 | ## * add "message_runtime" and every package in MSG_DEP_SET to
42 | ## catkin_package(CATKIN_DEPENDS ...)
43 | ## * uncomment the add_*_files sections below as needed
44 | ## and list every .msg/.srv/.action file to be processed
45 | ## * uncomment the generate_messages entry below
46 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
47 |
48 | ## Generate messages in the 'msg' folder
49 | # add_message_files(
50 | # FILES
51 | # Message1.msg
52 | # Message2.msg
53 | # )
54 |
55 | ## Generate services in the 'srv' folder
56 | # add_service_files(
57 | # FILES
58 | # Service1.srv
59 | # Service2.srv
60 | # )
61 |
62 | ## Generate actions in the 'action' folder
63 | # add_action_files(
64 | # FILES
65 | # Action1.action
66 | # Action2.action
67 | # )
68 |
69 | ## Generate added messages and services with any dependencies listed here
70 | # generate_messages(
71 | # DEPENDENCIES
72 | # std_msgs # Or other packages containing msgs
73 | # )
74 |
75 | ################################################
76 | ## Declare ROS dynamic reconfigure parameters ##
77 | ################################################
78 |
79 | ## To declare and build dynamic reconfigure parameters within this
80 | ## package, follow these steps:
81 | ## * In the file package.xml:
82 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
83 | ## * In this file (CMakeLists.txt):
84 | ## * add "dynamic_reconfigure" to
85 | ## find_package(catkin REQUIRED COMPONENTS ...)
86 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
87 | ## and list every .cfg file to be processed
88 |
89 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
90 | # generate_dynamic_reconfigure_options(
91 | # cfg/DynReconf1.cfg
92 | # cfg/DynReconf2.cfg
93 | # )
94 |
95 | ###################################
96 | ## catkin specific configuration ##
97 | ###################################
98 | ## The catkin_package macro generates cmake config files for your package
99 | ## Declare things to be passed to dependent projects
100 | ## INCLUDE_DIRS: uncomment this if your package contains header files
101 | ## LIBRARIES: libraries you create in this project that dependent projects also need
102 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
103 | ## DEPENDS: system dependencies of this project that dependent projects also need
104 | catkin_package(
105 | # INCLUDE_DIRS include
106 | # LIBRARIES sensor_reader
107 | # CATKIN_DEPENDS rospy rospy_tutorials
108 | # DEPENDS system_lib
109 | )
110 |
111 | ###########
112 | ## Build ##
113 | ###########
114 |
115 | ## Specify additional locations of header files
116 | ## Your package locations should be listed before other locations
117 | include_directories(
118 | # include
119 | ${catkin_INCLUDE_DIRS}
120 | )
121 |
122 | ## Declare a C++ library
123 | # add_library(${PROJECT_NAME}
124 | # src/${PROJECT_NAME}/sensor_reader.cpp
125 | # )
126 |
127 | ## Add cmake target dependencies of the library
128 | ## as an example, code may need to be generated before libraries
129 | ## either from message generation or dynamic reconfigure
130 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
131 |
132 | ## Declare a C++ executable
133 | ## With catkin_make all packages are built within a single CMake context
134 | ## The recommended prefix ensures that target names across packages don't collide
135 | # add_executable(${PROJECT_NAME}_node src/sensor_reader_node.cpp)
136 |
137 | ## Rename C++ executable without prefix
138 | ## The above recommended prefix causes long target names, the following renames the
139 | ## target back to the shorter version for ease of user use
140 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
141 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
142 |
143 | ## Add cmake target dependencies of the executable
144 | ## same as for the library above
145 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
146 |
147 | ## Specify libraries to link a library or executable target against
148 | # target_link_libraries(${PROJECT_NAME}_node
149 | # ${catkin_LIBRARIES}
150 | # )
151 |
152 | #############
153 | ## Install ##
154 | #############
155 |
156 | # all install targets should use catkin DESTINATION variables
157 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
158 |
159 | ## Mark executable scripts (Python etc.) for installation
160 | ## in contrast to setup.py, you can choose the destination
161 | # catkin_install_python(PROGRAMS
162 | # scripts/my_python_script
163 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
164 | # )
165 |
166 | ## Mark executables for installation
167 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
168 | # install(TARGETS ${PROJECT_NAME}_node
169 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
170 | # )
171 |
172 | ## Mark libraries for installation
173 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
174 | # install(TARGETS ${PROJECT_NAME}
175 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
176 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
177 | # RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
178 | # )
179 |
180 | ## Mark cpp header files for installation
181 | # install(DIRECTORY include/${PROJECT_NAME}/
182 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
183 | # FILES_MATCHING PATTERN "*.h"
184 | # PATTERN ".svn" EXCLUDE
185 | # )
186 |
187 | ## Mark other files for installation (e.g. launch and bag files, etc.)
188 | # install(FILES
189 | # # myfile1
190 | # # myfile2
191 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
192 | # )
193 |
194 | #############
195 | ## Testing ##
196 | #############
197 |
198 | ## Add gtest based cpp test target and link libraries
199 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_sensor_reader.cpp)
200 | # if(TARGET ${PROJECT_NAME}-test)
201 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
202 | # endif()
203 |
204 | ## Add folders to be run by python nosetests
205 | # catkin_add_nosetests(test)
206 |
--------------------------------------------------------------------------------
/launch/camera.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 | filter_field_name: y
117 | filter_limit_min: 0
118 | filter_limit_max: 0.01
119 | filter_limit_negative: False
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 | scan_height: 10
130 | range_min: 0.1
131 | range_max: 12
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
--------------------------------------------------------------------------------
/launch/fusion.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 | scan_height: 10
128 | range_min: 0.1
129 | range_max: 12
130 |
131 |
132 |
133 |
134 |
135 |
136 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
--------------------------------------------------------------------------------
/rviz/urdf3.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 0
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | - /RobotModel1
10 | - /TF1
11 | - /PointCloud21
12 | - /PointCloud22
13 | - /LaserScan1
14 | - /LaserScan2
15 | - /Obstacles1
16 | - /Obstacles2
17 | - /Mean1
18 | - /Particles1
19 | - /OverlayText1
20 | - /OverlayText2
21 | - /OverlayText3
22 | - /Camera1
23 | - /Camera1/Visibility1
24 | Splitter Ratio: 0.6360294222831726
25 | Tree Height: 215
26 | - Class: rviz/Selection
27 | Name: Selection
28 | - Class: rviz/Tool Properties
29 | Expanded:
30 | - /2D Pose Estimate1
31 | - /2D Nav Goal1
32 | - /Publish Point1
33 | Name: Tool Properties
34 | Splitter Ratio: 0.5886790156364441
35 | - Class: rviz/Views
36 | Expanded:
37 | - /Current View1
38 | Name: Views
39 | Splitter Ratio: 0.5
40 | - Class: rviz/Time
41 | Experimental: false
42 | Name: Time
43 | SyncMode: 0
44 | SyncSource: LaserScan
45 | Preferences:
46 | PromptSaveOnExit: true
47 | Toolbars:
48 | toolButtonStyle: 2
49 | Visualization Manager:
50 | Class: ""
51 | Displays:
52 | - Alpha: 0.5
53 | Cell Size: 1
54 | Class: rviz/Grid
55 | Color: 160; 160; 164
56 | Enabled: true
57 | Line Style:
58 | Line Width: 0.029999999329447746
59 | Value: Lines
60 | Name: Grid
61 | Normal Cell Count: 0
62 | Offset:
63 | X: 0
64 | Y: 0
65 | Z: 0
66 | Plane: XY
67 | Plane Cell Count: 10
68 | Reference Frame:
69 | Value: true
70 | - Alpha: 1
71 | Class: rviz/RobotModel
72 | Collision Enabled: false
73 | Enabled: true
74 | Links:
75 | All Links Enabled: true
76 | Expand Joint Details: false
77 | Expand Link Details: false
78 | Expand Tree: false
79 | Link Tree Style: Links in Alphabetic Order
80 | base_link:
81 | Alpha: 1
82 | Show Axes: false
83 | Show Trail: false
84 | camera_bottom_screw_frame:
85 | Alpha: 1
86 | Show Axes: false
87 | Show Trail: false
88 | camera_link:
89 | Alpha: 1
90 | Show Axes: false
91 | Show Trail: false
92 | Value: true
93 | Name: RobotModel
94 | Robot Description: robot_description
95 | TF Prefix: ""
96 | Update Interval: 0
97 | Value: true
98 | Visual Enabled: true
99 | - Class: rviz/TF
100 | Enabled: true
101 | Frame Timeout: 15
102 | Frames:
103 | All Enabled: false
104 | base_link:
105 | Value: true
106 | camera_bottom_screw_frame:
107 | Value: false
108 | camera_color_frame:
109 | Value: true
110 | camera_color_optical_frame:
111 | Value: true
112 | camera_depth_frame:
113 | Value: false
114 | camera_depth_optical_frame:
115 | Value: false
116 | camera_link:
117 | Value: true
118 | laser_scanner_frame:
119 | Value: true
120 | Marker Scale: 0.10000000149011612
121 | Name: TF
122 | Show Arrows: true
123 | Show Axes: true
124 | Show Names: true
125 | Tree:
126 | base_link:
127 | camera_bottom_screw_frame:
128 | camera_link:
129 | camera_color_frame:
130 | camera_color_optical_frame:
131 | {}
132 | camera_depth_frame:
133 | camera_depth_optical_frame:
134 | {}
135 | laser_scanner_frame:
136 | {}
137 | Update Interval: 0
138 | Value: true
139 | - Alpha: 1
140 | Autocompute Intensity Bounds: true
141 | Autocompute Value Bounds:
142 | Max Value: 4.263000011444092
143 | Min Value: 0.3970000147819519
144 | Value: true
145 | Axis: X
146 | Channel Name: intensity
147 | Class: rviz/PointCloud2
148 | Color: 255; 255; 255
149 | Color Transformer: AxisColor
150 | Decay Time: 0
151 | Enabled: false
152 | Invert Rainbow: false
153 | Max Color: 255; 255; 255
154 | Max Intensity: 4096
155 | Min Color: 0; 0; 0
156 | Min Intensity: 0
157 | Name: PointCloud2
158 | Position Transformer: XYZ
159 | Queue Size: 10
160 | Selectable: true
161 | Size (Pixels): 3
162 | Size (m): 0.009999999776482582
163 | Style: Points
164 | Topic: /camera/depth/color/points
165 | Unreliable: false
166 | Use Fixed Frame: true
167 | Use rainbow: true
168 | Value: false
169 | - Alpha: 1
170 | Autocompute Intensity Bounds: true
171 | Autocompute Value Bounds:
172 | Max Value: 12.121000289916992
173 | Min Value: 1.1200000047683716
174 | Value: true
175 | Axis: X
176 | Channel Name: intensity
177 | Class: rviz/PointCloud2
178 | Color: 115; 210; 22
179 | Color Transformer: FlatColor
180 | Decay Time: 0
181 | Enabled: false
182 | Invert Rainbow: false
183 | Max Color: 204; 0; 0
184 | Max Intensity: 4096
185 | Min Color: 0; 0; 0
186 | Min Intensity: 0
187 | Name: PointCloud2
188 | Position Transformer: XYZ
189 | Queue Size: 10
190 | Selectable: true
191 | Size (Pixels): 3
192 | Size (m): 0.05000000074505806
193 | Style: Squares
194 | Topic: /passthrough/output
195 | Unreliable: false
196 | Use Fixed Frame: true
197 | Use rainbow: true
198 | Value: false
199 | - Alpha: 1
200 | Autocompute Intensity Bounds: true
201 | Autocompute Value Bounds:
202 | Max Value: 7.358502388000488
203 | Min Value: 1.758765459060669
204 | Value: true
205 | Axis: X
206 | Channel Name: intensity
207 | Class: rviz/LaserScan
208 | Color: 255; 255; 255
209 | Color Transformer: AxisColor
210 | Decay Time: 0
211 | Enabled: true
212 | Invert Rainbow: false
213 | Max Color: 255; 255; 255
214 | Max Intensity: 4096
215 | Min Color: 0; 0; 0
216 | Min Intensity: 0
217 | Name: LaserScan
218 | Position Transformer: XYZ
219 | Queue Size: 10
220 | Selectable: true
221 | Size (Pixels): 3
222 | Size (m): 0.05000000074505806
223 | Style: Flat Squares
224 | Topic: /camera/depth/laserscan
225 | Unreliable: false
226 | Use Fixed Frame: true
227 | Use rainbow: true
228 | Value: true
229 | - Alpha: 1
230 | Autocompute Intensity Bounds: true
231 | Autocompute Value Bounds:
232 | Max Value: 10
233 | Min Value: -10
234 | Value: true
235 | Axis: Z
236 | Channel Name: intensity
237 | Class: rviz/LaserScan
238 | Color: 255; 255; 255
239 | Color Transformer: Intensity
240 | Decay Time: 0
241 | Enabled: true
242 | Invert Rainbow: false
243 | Max Color: 255; 255; 255
244 | Max Intensity: 47
245 | Min Color: 0; 0; 0
246 | Min Intensity: 47
247 | Name: LaserScan
248 | Position Transformer: XYZ
249 | Queue Size: 10
250 | Selectable: true
251 | Size (Pixels): 3
252 | Size (m): 0.05000000074505806
253 | Style: Flat Squares
254 | Topic: /scan_filtered
255 | Unreliable: false
256 | Use Fixed Frame: true
257 | Use rainbow: true
258 | Value: true
259 | - Circles color: 170; 0; 0
260 | Class: Obstacles
261 | Enabled: true
262 | Margin color: 0; 170; 0
263 | Name: Obstacles
264 | Opacity: 0.5
265 | Segments color: 170; 170; 0
266 | Segments thickness: 0.009999999776482582
267 | Topic: /raw_obstacles/camera_obstacles
268 | Unreliable: false
269 | Value: true
270 | - Circles color: 52; 101; 164
271 | Class: Obstacles
272 | Enabled: true
273 | Margin color: 233; 185; 110
274 | Name: Obstacles
275 | Opacity: 0.5
276 | Segments color: 170; 170; 0
277 | Segments thickness: 0.009999999776482582
278 | Topic: /raw_obstacles/lidar_obstacles
279 | Unreliable: false
280 | Value: true
281 | - Class: rviz/Marker
282 | Enabled: true
283 | Marker Topic: /estimate/viz/mean
284 | Name: Mean
285 | Namespaces:
286 | "": true
287 | Queue Size: 100
288 | Value: true
289 | - Class: rviz/Marker
290 | Enabled: true
291 | Marker Topic: /estimate/viz/particles
292 | Name: Particles
293 | Namespaces:
294 | particles: true
295 | Queue Size: 100
296 | Value: true
297 | - Align Bottom: false
298 | Background Alpha: 0.800000011920929
299 | Background Color: 0; 0; 0
300 | Class: jsk_rviz_plugin/OverlayText
301 | Enabled: true
302 | Foreground Alpha: 0.800000011920929
303 | Foreground Color: 25; 255; 240
304 | Name: OverlayText
305 | Overtake Color Properties: true
306 | Overtake Position Properties: true
307 | Topic: /estimate/values/text_overlay/1
308 | Value: true
309 | font: DejaVu Sans Mono
310 | height: 160
311 | left: 0
312 | line width: 2
313 | text size: 10
314 | top: 0
315 | width: 230
316 | - Align Bottom: false
317 | Background Alpha: 0.800000011920929
318 | Background Color: 0; 0; 0
319 | Class: jsk_rviz_plugin/OverlayText
320 | Enabled: true
321 | Foreground Alpha: 0.800000011920929
322 | Foreground Color: 25; 255; 240
323 | Name: OverlayText
324 | Overtake Color Properties: true
325 | Overtake Position Properties: true
326 | Topic: /estimate/values/text_overlay/2
327 | Value: true
328 | font: DejaVu Sans Mono
329 | height: 160
330 | left: 0
331 | line width: 2
332 | text size: 10
333 | top: 160
334 | width: 230
335 | - Align Bottom: false
336 | Background Alpha: 0.800000011920929
337 | Background Color: 0; 0; 0
338 | Class: jsk_rviz_plugin/OverlayText
339 | Enabled: true
340 | Foreground Alpha: 0.800000011920929
341 | Foreground Color: 25; 255; 240
342 | Name: OverlayText
343 | Overtake Color Properties: true
344 | Overtake Position Properties: true
345 | Topic: /estimate/values/text_overlay/3
346 | Value: true
347 | font: DejaVu Sans Mono
348 | height: 160
349 | left: 0
350 | line width: 2
351 | text size: 10
352 | top: 320
353 | width: 230
354 | - Class: rviz/Camera
355 | Enabled: true
356 | Image Rendering: background and overlay
357 | Image Topic: /camera/color/image_raw
358 | Name: Camera
359 | Overlay Alpha: 0.5
360 | Queue Size: 2
361 | Transport Hint: raw
362 | Unreliable: false
363 | Value: true
364 | Visibility:
365 | Axes: true
366 | Grid: true
367 | LaserScan: true
368 | Mean: true
369 | Obstacles: true
370 | OverlayText: true
371 | Particles: true
372 | PointCloud2: true
373 | RobotModel: false
374 | TF: true
375 | Value: true
376 | Zoom Factor: 1
377 | - Class: rviz/Axes
378 | Enabled: true
379 | Length: 1
380 | Name: Axes
381 | Radius: 0.10000000149011612
382 | Reference Frame:
383 | Value: true
384 | Enabled: true
385 | Global Options:
386 | Background Color: 48; 48; 48
387 | Default Light: true
388 | Fixed Frame: base_link
389 | Frame Rate: 30
390 | Name: root
391 | Tools:
392 | - Class: rviz/Interact
393 | Hide Inactive Objects: true
394 | - Class: rviz/MoveCamera
395 | - Class: rviz/Select
396 | - Class: rviz/FocusCamera
397 | - Class: rviz/Measure
398 | - Class: rviz/SetInitialPose
399 | Theta std deviation: 0.2617993950843811
400 | Topic: /initialpose
401 | X std deviation: 0.5
402 | Y std deviation: 0.5
403 | - Class: rviz/SetGoal
404 | Topic: /move_base_simple/goal
405 | - Class: rviz/PublishPoint
406 | Single click: true
407 | Topic: /clicked_point
408 | Value: true
409 | Views:
410 | Current:
411 | Angle: -1.5750004053115845
412 | Class: rviz/TopDownOrtho
413 | Enable Stereo Rendering:
414 | Stereo Eye Separation: 0.05999999865889549
415 | Stereo Focal Distance: 1
416 | Swap Stereo Eyes: false
417 | Value: false
418 | Invert Z Axis: false
419 | Name: Current View
420 | Near Clip Distance: 0.009999999776482582
421 | Scale: 64.80142211914062
422 | Target Frame:
423 | Value: TopDownOrtho (rviz)
424 | X: 3.3395180702209473
425 | Y: 1.2777992486953735
426 | Saved:
427 | - Angle: -1.5650004148483276
428 | Class: rviz/TopDownOrtho
429 | Enable Stereo Rendering:
430 | Stereo Eye Separation: 0.05999999865889549
431 | Stereo Focal Distance: 1
432 | Swap Stereo Eyes: false
433 | Value: false
434 | Invert Z Axis: false
435 | Name: TopDownOrtho
436 | Near Clip Distance: 0.009999999776482582
437 | Scale: 190.40074157714844
438 | Target Frame:
439 | Value: TopDownOrtho (rviz)
440 | X: 0
441 | Y: 0
442 | Window Geometry:
443 | Camera:
444 | collapsed: false
445 | Displays:
446 | collapsed: false
447 | Height: 718
448 | Hide Left Dock: false
449 | Hide Right Dock: false
450 | QMainWindow State: 000000ff00000000fd0000000400000000000002280000021efc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000000430000011f000000d400fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d0065007200610100000168000000f90000001b00ffffff000000010000010f0000021efc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a0056006900650077007300000000430000021e000000b500fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000055600000044fc0100000002fb0000000800540069006d00650100000000000005560000023100fffffffb0000000800540069006d00650100000000000004500000000000000000000003280000021e00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
451 | Selection:
452 | collapsed: false
453 | Time:
454 | collapsed: false
455 | Tool Properties:
456 | collapsed: false
457 | Views:
458 | collapsed: false
459 | Width: 1366
460 | X: 0
461 | Y: 24
462 |
--------------------------------------------------------------------------------
/src/multipf.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | '''
3 | Script track obstacle yg terdeteksi oleh kamera & lidar dengan particle filter
4 | '''
5 | import roslib
6 | import rospy
7 | import csv
8 |
9 | import numpy as np
10 | import multi_particle_filter_a as pf
11 | from scipy.optimize import linear_sum_assignment
12 | from filterpy.stats import multivariate_multiply
13 | from filterpy.monte_carlo import systematic_resample
14 |
15 | from rospy.numpy_msg import numpy_msg
16 | from std_msgs.msg import ColorRGBA, Float32MultiArray, Float32
17 | from geometry_msgs.msg import Point, Pose
18 | from visualization_msgs.msg import Marker
19 | from obstacle_detector.msg import Obstacles
20 |
21 | PKG = 'object-detector-fusion'
22 | roslib.load_manifest(PKG)
23 |
24 |
25 | np.set_printoptions(precision=3)
26 | PRINT = False
27 | SAVE_TO_FILE = False
28 |
29 |
class ParticleFilter:
    """Node that tracks obstacles detected by camera and lidar with a bank
    of particle filters, fusing the two sensor measurements.
    """

    # NOTE(review): class attributes, shared by all instances of this class
    # (in practice the node creates a single instance).
    mu_array = []
    time_elapsed = 0
33 |
    def __init__(self):
        """Wire up ROS topics and allocate the particle banks."""
        # Sensor inputs: obstacle circles from lidar and camera, plus a
        # trigger/command topic for the filter.
        self.lidar_obstacle_sub = rospy.Subscriber(
            '/raw_obstacles/lidar_obstacles', Obstacles, self.lidar_callback)
        self.obstacle_sub = rospy.Subscriber(
            '/raw_obstacles/camera_obstacles', Obstacles, self.camera_callback)
        self.pf_sub = rospy.Subscriber(
            '/pub_pf', Float32, self.pf_callback)
        # Outputs: rviz markers for the mean and the particle cloud, the
        # numeric estimate array, and the per-cycle runtime.
        self.obstacle_pub_mean = rospy.Publisher(
            '/estimate/viz/mean', Marker, queue_size=10)
        self.obstacle_pub_particles = rospy.Publisher(
            '/estimate/viz/particles', Marker, queue_size=100)
        self.obstacle_pub_values = rospy.Publisher(
            '/estimate/values/mean', numpy_msg(Float32MultiArray), queue_size=10)
        self.pub_runtime = rospy.Publisher(
            '/runtime', Float32, queue_size=10)
        self.marker_particle = Marker()
        self.marker_mean = Marker()
        self.marker_pose = Pose()
        self.pub_particles = []
        self.list_of_pub_mean = []
        # 3 tracked objects x 7 state values, published as float32.
        self.est_values = np.zeros([3, 7], dtype=np.float32)

        self.N = 1000          # particles per filter
        self.pf_size = 3       # number of independent filters (tracked objects)
        self.start_time = rospy.get_time()
        self.pf_is_running = False
        self.first_time = True

        # Last accepted camera measurement per object, filled by initialize().
        self.old_camera_measurements = []

        # Particle banks: (N particles, 7 state values, pf_size filters).
        # State layout appears to be (x, y, vx, vy, ax, ay, radius), judging
        # by the tuple built in initialize() -- confirm against the pf module.
        self.new_particles = np.empty((self.N, 7, self.pf_size))
        self.new_weights = np.empty((self.N, self.pf_size))

        self.lidar_is_on = False
        self.lidar_time = self.start_time
69 |
70 | def initialize(self, initial_pos, iteration):
71 | initial_x = (initial_pos.center.x, initial_pos.center.y,
72 | 0.05, 0.05, 0, 0, initial_pos.true_radius)
73 | std = (0.2, 0.2, 0.5, 0.5, 0.5, 0.5, 0.1)
74 | self.new_particles[:, :, iteration] = pf.create_gaussian_particles(
75 | mean=initial_x, std=std, N=self.N)
76 | self.new_weights[:, iteration] = pf.create_initial_weights(self.N)
77 |
78 | self.old_camera_measurements.append(
79 | [initial_pos.center.x, initial_pos.center.y, initial_pos.true_radius])
80 |
81 | for particle in self.new_particles[:, :, iteration]:
82 | pub_particle = Point()
83 | pub_particle.x = particle[0]
84 | pub_particle.y = particle[1]
85 | pub_particle.z = 0.35
86 | self.pub_particles.append(pub_particle)
87 |
88 | def run_pf(self, camera, lidar, dt, i):
89 | #sigma (uncertainty) for the camera
90 | cam_err_x = 0.35
91 | cam_err_y = 0.25
92 | cam_err_rad = 0.1
93 |
94 | if lidar is not None:
95 | #sigma (uncertainty) for lidar
96 | lid_err_x = 0.05
97 | lid_err_y = 0.05
98 | lid_err_rad = 0.05
99 | noise_factor = 0
100 |
101 | lid_err = [[lid_err_x**2, 0, 0],
102 | [0, lid_err_y**2, 0],
103 | [0, 0, lid_err_rad**2]]
104 | cam_err = [[cam_err_x**2, 0, 0],
105 | [0, cam_err_y**2, 0],
106 | [0, 0, lid_err_rad**2]]
107 |
108 |
109 | zs, sigma = multivariate_multiply(camera, cam_err, lidar, lid_err)
110 | R = sigma * (1 + noise_factor)
111 |
112 | else:
113 | zs = camera
114 | R = [[cam_err_x**2, 0, 0],
115 | [0, cam_err_y**2, 0],
116 | [0, 0, cam_err_rad**2]]
117 |
118 | # do predict
119 | std_predict = (0.02, 0.02, 0.04, 0.04, 0.04, 0.04, 0.04)
120 | pf.predict(self.new_particles[:, :, i],
121 | std=std_predict,
122 | dt=dt)
123 |
124 | # incorporate measurements
125 | pf.update_multivariate(
126 | self.new_particles[:, :, i], self.new_weights[:, i], z=zs, R=R)
127 |
128 | # resample if too few effective particles
129 | if pf.neff(self.new_weights[:, i]) < self.N/2:
130 | indexes = systematic_resample(self.new_weights[:, i])
131 | self.new_weights[:, i] = pf.resample_from_index(
132 | self.new_particles[:, :, i], self.new_weights[:, i], indexes)
133 |
134 | self.mu, self.var = pf.estimate(
135 | self.new_particles[:, :, i], self.new_weights[:, i])
136 |
137 | # reinitialize
138 | t = 10
139 | time = round(self.time_elapsed, 1)
140 | mu_pos = np.array(self.mu[0:2])
141 | z_pos = np.array(zs[0:2])
142 | euclid_dist = np.linalg.norm(mu_pos-z_pos)
143 | if time % t == 10 or euclid_dist > 2:
144 | print("Reinitializing")
145 | initial = (zs[0], zs[1], 0.1, 0.1, 0, 0, zs[2])
146 | init_std = (0.01, 0.01, 0.1, 0.1, 0.1, 0.1, 0.01)
147 | self.new_particles[:, :, i] = pf.create_gaussian_particles(
148 | mean=initial, std=init_std, N=self.N)
149 | self.new_weights[:, i] = pf.create_initial_weights(self.N)
150 |
151 | def cost_function(self, x_i, x_j, y_i, y_j, r_i, r_j):
152 | #i = row; new measurements
153 | #j = column; old measurements
154 | cost = np.sqrt((x_i-x_j)**2 + (y_i-y_j)**2 + (r_i-r_j)**2)
155 | return cost
156 |
157 | def calibrate(self, old, a = 1, b = 0):
158 | return a*old + b
159 |
160 | def objects_assigner(self, old_measurements, new_measurements):
161 | old = np.array(old_measurements)
162 | new = np.array(new_measurements)
163 |
164 | cost_matrix = np.empty((len(new), len(old)))
165 |
166 | for i in range(len(cost_matrix)):
167 | for j in range(len(cost_matrix[i])):
168 | cost_matrix[i, j] = self.cost_function(
169 | new[i, 0], old[j, 0], new[i, 1], old[j, 1], new[i, 2], old[j, 2])
170 | row_idx, col_idx = linear_sum_assignment(cost_matrix)
171 | return row_idx, col_idx, cost_matrix
172 |
173 | def lidar_callback(self, ros_data):
174 | self.new_lidar_measurements = []
175 | self.lidar_all = ros_data.circles[:self.pf_size]
176 | for i in range(len(self.lidar_all)):
177 | lidar_circle = self.lidar_all[i]
178 | new_lid_x = self.calibrate(lidar_circle.center.x, 1.0124, -0.0401)
179 | new_lid_y = self.calibrate(lidar_circle.center.y, 0.9982, -0.0215)
180 | self.new_lidar_measurements.append(
181 | [new_lid_x , new_lid_y, lidar_circle.true_radius])
182 | self.lidar_row_idx, self.lidar_col_idx, self.lidar_cost_matrix = self.objects_assigner(self.new_camera_measurements, self.new_lidar_measurements)
183 | #print("Lidar measurement in lidar callback", self.new_lidar_measurements)
184 | self.lidar_time = rospy.get_time()
185 | self.lidar_is_on = True
186 |
187 | def camera_callback(self, ros_data):
188 | self.obstacles_full = ros_data
189 | self.camera_all = ros_data.circles[:self.pf_size]
190 |
    def pf_callback(self, ros_data):
        """Run all particle filters once, triggered by a /pub_pf rate message.

        ros_data.data is the trigger rate in Hz; its reciprocal is used as the
        motion-model dt. Fuses lidar into matched camera tracks, publishes
        particle/mean markers and the flat estimate array, and updates the
        bookkeeping used by the assigner on the next cycle.
        """
        init_runtime = rospy.get_time()
        # dt for the motion model; assumes ros_data.data (Hz) is non-zero.
        time = 1 / ros_data.data
        print(time)

        # fusion_idx[track] = lidar row matched to that track (None = no fusion).
        fusion_idx = [None, None, None]
        self.camera_done = False

        # Lazily turn pf_is_running into a per-track bool list on first call.
        if self.first_time:
            self.pf_is_running = [False for _ in range(self.pf_size)]
            self.first_time = False
            #print("Running", self.pf_is_running)

        # Calibrate the latest camera circles into [x, y, radius] rows.
        self.new_camera_measurements = []
        self.run_fusion = [False for _ in range(self.pf_size)]
        #print("run fusion before", self.run_fusion)
        for i in range(len(self.camera_all)):
            camera_circle = self.camera_all[i]
            new_cam_x = self.calibrate(camera_circle.center.x, 0.7485, 0.2889)
            new_cam_y = self.calibrate(camera_circle.center.y, 0.9955, -0.0108)
            self.new_camera_measurements.append(
                [new_cam_x, new_cam_y, camera_circle.true_radius])
        self.row_idx, self.col_idx, self.cost_matrix = self.objects_assigner(
            self.old_camera_measurements, self.new_camera_measurements)

        #Data Gating for camera measurements
        # NOTE(review): np.delete shrinks row_idx while k still iterates over
        # the original length — a gated entry can cause an IndexError on the
        # next k; confirm whether gating ever fires with cost > 5 in practice.
        for k in range(len(self.row_idx)):
            camera_cost = self.cost_matrix[self.row_idx[k], self.col_idx[k]]
            print("camera cost", camera_cost)
            if camera_cost > 5:
                self.row_idx = np.delete(self.row_idx, k)
                #self.row_idx[k] = -1
                print("Blocked")

        # Gate lidar matches: only close matches (< 0.5) enable fusion for
        # the corresponding camera track. `lidars` is bound only here, which
        # is safe because run_fusion can only be True when lidar_is_on.
        if self.lidar_is_on:
            #print("is ON?", self.lidar_is_on)
            lidars = self.new_lidar_measurements
            for k in range(len(self.lidar_row_idx)):
                lidar_cost = self.lidar_cost_matrix[self.lidar_row_idx[k],
                                                    self.lidar_col_idx[k]]
                #print("ROW AND COST", self.lidar_row_idx[k], lidar_cost)
                if lidar_cost < 0.5:
                    #print("{} with cost {} are fusioned with {}".format(self.lidar_row_idx[k], lidar_cost, self.lidar_col_idx[k]))
                    self.run_fusion[self.lidar_col_idx[k]] = True
                    #print("the measurement", self.new_lidar_measurements)
                    fusion_idx[self.lidar_col_idx[k]] = self.lidar_row_idx[k]

        #print("run fusion after", self.run_fusion)
        # Per-track dispatch: initialize new tracks, fuse where gated, else
        # run a camera-only update.
        for i in range(len(self.new_camera_measurements)):
            if not self.pf_is_running[i]:
                initial_pos = self.camera_all[i]
                self.initialize(initial_pos, i)
                self.run_pf(
                    self.new_camera_measurements[i], None, dt=time, i=i)
                self.pf_is_running[i] = True

            elif self.run_fusion[self.col_idx[i]]:
                print("Fused")
                #print("lidar row", self.lidar_row_idx)
                #print("lidar col", self.lidar_col_idx)
                #print("camera row", self.row_idx)
                #print("camera col", self.col_idx)
                #print("but which one", self.lidar_row_idx[i_lidar], self.new_lidar_measurements[self.lidar_row_idx[i_lidar]])
                #print("fusion index is {}. Now we use {} indexed with {}".format(fusion_idx, fusion_idx[i], i))
                #print("fusion index (col) is {}. Now we use {} indexed with {}".format(fusion_idx, fusion_idx[self.col_idx[i]], self.col_idx[i]))
                self.run_pf(self.new_camera_measurements[self.row_idx[i]],
                            lidars[fusion_idx[self.col_idx[i]]],
                            dt=time, i=self.col_idx[i])
            else:
                self.run_fusion[self.col_idx[i]] = False
                self.run_pf(
                    self.new_camera_measurements[self.row_idx[i]], None, dt=time, i=self.col_idx[i])

            #Fill the marker_particle of Marker class
            # Last iteration falls back to i itself rather than col_idx[i].
            if i < len(self.col_idx) - 1:
                i_particle = self.col_idx[i]
            else:
                i_particle = i

            # Refresh this track's slice of the shared RViz particle list.
            for j, particle in enumerate(self.new_particles[:, :, self.col_idx[i]]):
                pub_new_particle = Point()
                pub_new_particle.x = particle[0]
                pub_new_particle.y = particle[1]
                pub_new_particle.z = 0.35
                self.pub_particles[j+self.N*self.col_idx[i]] = pub_new_particle

            self.time_elapsed = rospy.get_time() - self.start_time

            self.est_values[i_particle] = self.mu
            if PRINT:
                print('object: ', self.col_idx[i], self.time_elapsed)
                print('Fused?', self.run_fusion[self.col_idx[i]])
                print('pos: ', np.array(self.new_camera_measurements[i][0:2]))
                print('distance:', np.linalg.norm(
                    [self.new_camera_measurements[i][0:2]], axis=1))
                print('velocity estimate: ', self.mu[2:4])
                print('acceleration estimate: ', self.mu[4:6])
                print('true radius estimate: ', self.mu[6])
                #print('camera: ', self.camera)
                #print('position error: ', self.obstacle_pos-self.mu[0:2])
                #test = True
            pub_mean = Point()
            pub_mean.x = self.mu[0]
            pub_mean.y = self.mu[1]
            pub_mean.z = 0.32
            self.list_of_pub_mean.append(pub_mean)

        print("--------")
        #Fill the marker_particle of Marker class
        self.marker_particle.header = self.obstacles_full.header
        self.marker_particle.type = 8  # POINTS
        self.marker_particle.points = self.pub_particles
        # Reset the shared particle list with fresh Point objects for next cycle.
        self.pub_particles = [Point() for _ in range(len(self.pub_particles))]
        #print(len(self.pub_particles))
        self.marker_particle.scale.x = 0.03
        self.marker_particle.scale.y = 0.03
        # self.marker_particle.scale.z = 0.02
        self.marker_particle.pose.orientation.x = 0
        self.marker_particle.pose.orientation.y = 0
        self.marker_particle.pose.orientation.z = 0
        self.marker_particle.pose.orientation.w = 1

        self.marker_particle.color.r = 0
        self.marker_particle.color.g = 0
        self.marker_particle.color.b = 0
        self.marker_particle.color.a = 1

        #self.marker_particle.lifetime = rospy.Duration(1)
        self.marker_particle.ns = "particles"

        self.obstacle_pub_particles.publish(self.marker_particle)

        #Fill the marker_mean of Marker class
        self.marker_mean.header = self.obstacles_full.header
        self.marker_mean.type = 6  # CUBE_LIST

        self.marker_pose.orientation.x = 0
        self.marker_pose.orientation.y = 0
        self.marker_pose.orientation.z = 0
        self.marker_pose.orientation.w = 1

        self.marker_mean.pose = self.marker_pose

        self.marker_mean.points = self.list_of_pub_mean
        self.marker_mean.scale.x = 0.15
        self.marker_mean.scale.y = 0.15
        self.marker_mean.scale.z = 0.15

        mean_color = ColorRGBA()
        mean_color.r = 230.0 / 255.0
        mean_color.g = 26.0 / 255.0
        mean_color.b = 102.0 / 255.0
        mean_color.a = 1

        self.marker_mean.colors = [
            mean_color for _ in range(len(self.list_of_pub_mean))]

        self.obstacle_pub_mean.publish(self.marker_mean)
        self.list_of_pub_mean = []

        # Publish the flattened 3x7 estimate matrix as a Float32MultiArray.
        val = self.est_values.ravel()
        #print("values", val)
        pub_values = Float32MultiArray()
        pub_values.data = np.array(val, dtype=np.float32)
        self.obstacle_pub_values.publish(pub_values)

        self.running_time = rospy.get_time()
        # If the lidar has not sent data within ~0.1 s, stop using sensor fusion.
        self.time_difference = self.running_time - self.lidar_time
        if self.time_difference >= 0.08:
            self.lidar_is_on = False

        # Replace old camera measurements with their assigned new ones.
        for i in range(len(self.row_idx)):
            self.old_camera_measurements[self.col_idx[i]
                                         ] = self.new_camera_measurements[self.row_idx[i]]
        self.pub_runtime.publish((self.running_time - init_runtime) * 1000)
        #print("Runtime", self.running_time-init_runtime)
        self.camera_done = True
371 |
372 |
if __name__ == "__main__":
    # Start the tracker node and hand control to the ROS event loop; all of
    # the actual work happens inside the ParticleFilter subscriber callbacks.
    rospy.init_node('particle_filters')
    run = ParticleFilter()
    #write estimate data to file
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down....")
381 |
382 |
--------------------------------------------------------------------------------
/src/obstacle_pf.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
'''
Tracks obstacles detected by the camera with a particle filter.
'''
5 | import roslib
6 | import rospy
7 | import csv
8 |
9 | import numpy as np
10 | import multi_particle_filter_a as pf
11 | from scipy.optimize import linear_sum_assignment
12 | from filterpy.stats import multivariate_multiply
13 | from filterpy.monte_carlo import systematic_resample
14 | from numpy.random import randn
15 |
16 | from rospy.numpy_msg import numpy_msg
17 | from std_msgs.msg import ColorRGBA, Float32MultiArray, Float32
18 | from geometry_msgs.msg import Point, Pose
19 | from visualization_msgs.msg import Marker
20 | from obstacle_detector.msg import Obstacles
21 |
22 | PKG = 'object-detector-fusion'
23 | roslib.load_manifest(PKG)
24 |
25 |
26 | np.set_printoptions(precision=3)
27 | PRINT = False
28 | SAVE_TO_FILE = False
29 |
30 |
class ParticleFilter:
    """Tracks up to pf_size obstacles, one particle filter per object.

    Camera circles (with artificial measurement noise added) drive the
    filters; a lidar circle that matches a camera track closely enough is
    fused in as a second measurement. Estimates and particle clouds are
    published as RViz markers plus a flat Float32MultiArray of state values.
    """

    # NOTE(review): class-level attributes are shared by all instances;
    # time_elapsed is rebound per-instance inside pf_callback.
    mu_array = []
    time_elapsed = 0

    def __init__(self):
        """Set up ROS I/O and allocate particle/weight storage."""
        self.lidar_obstacle_sub = rospy.Subscriber(
            '/lidar_obstacles', Obstacles, self.lidar_callback)
        self.obstacle_sub = rospy.Subscriber(
            '/obstacles', Obstacles, self.camera_callback)
        # The /pub_pf rate message is the trigger that runs the filters.
        self.pf_sub = rospy.Subscriber(
            '/pub_pf', Float32, self.pf_callback)
        self.obstacle_pub_mean = rospy.Publisher(
            '/estimate/viz/mean', Marker, queue_size=10)
        self.obstacle_pub_particles = rospy.Publisher(
            '/estimate/viz/particles', Marker, queue_size=100)
        self.obstacle_pub_values = rospy.Publisher(
            '/estimate/values/mean', numpy_msg(Float32MultiArray), queue_size=10)
        self.pub_runtime = rospy.Publisher(
            '/runtime', Float32, queue_size=10)
        self.marker_particle = Marker()
        self.marker_mean = Marker()
        self.marker_pose = Pose()
        self.pub_particles = []
        self.list_of_pub_mean = []
        self.est_values = np.zeros([3, 7], dtype=np.float32)
        self.count = 0  # number of pf_callback invocations

        # NOTE(review): only 10 particles per filter here (the fusion variant
        # of this node uses 1000) — presumably lowered for runtime
        # benchmarking; confirm before relying on estimate quality.
        self.N = 10
        self.pf_size = 3  # maximum number of simultaneous tracks
        self.start_time = rospy.get_time()
        self.pf_is_running = False  # becomes a per-track bool list on first run
        self.first_time = True

        self.old_camera_measurements = []
        # Rows of [callback runtime, time since start], exported to CSV on exit.
        self.runtime = []

        self.lidar_is_on = False
        #self.lidar_all = [CircleObstacle() for _ in range(self.pf_size)]
        self.lidar_time = self.start_time

        # Particle state layout: (particle, state-dim, track); the state
        # vector is [x, y, vx, vy, ax, ay, radius].
        self.new_particles = np.empty((self.N, 7, self.pf_size))
        self.new_weights = np.empty((self.N, self.pf_size))

    def initialize(self, initial_pos, iteration):
        """Seed track *iteration* with a Gaussian cloud around a camera circle."""
        initial_x = (initial_pos.center.x, initial_pos.center.y,
                     0.05, 0.05, 0, 0, initial_pos.true_radius)
        std = (0.1, 0.1, 0.5, 0.5, 0.5, 0.5, 0.1)
        self.new_particles[:, :, iteration] = pf.create_gaussian_particles(
            mean=initial_x, std=std, N=self.N)
        self.new_weights[:, iteration] = pf.create_initial_weights(self.N)

        # Artificial camera noise added to the stored reference measurement.
        cam_circle_err_x = randn()*0.1
        cam_circle_err_y = randn()*0.1
        cam_circle_err_rad = randn()*0.1

        self.old_camera_measurements.append([initial_pos.center.x + cam_circle_err_x,
                                             initial_pos.center.y + cam_circle_err_y,
                                             initial_pos.true_radius + cam_circle_err_rad])

        # Pre-fill the RViz particle point list for this track.
        for particle in self.new_particles[:, :, iteration]:
            pub_particle = Point()
            pub_particle.x = particle[0]
            pub_particle.y = particle[1]
            pub_particle.z = 0.35
            self.pub_particles.append(pub_particle)

    def run_pf(self, camera=None, lidar=None, dt=0, i=0):
        """Run one predict/update/resample cycle of particle filter *i*.

        camera/lidar are [x, y, radius] measurement rows; lidar is only read
        when self.run_fusion[i] is True.
        """
        #self.camera_obstacle_pos = np.array([camera.center.x, camera.center.y])
        #self.camera_radius = np.array([camera.radius])
        #sigma (uncertainty) sensor with added noise
        cam_err_x = 0.3
        cam_err_y = 0.2
        cam_err_rad = 0.08

        if self.run_fusion[i]:
            lid_err_x = 0.1
            lid_err_y = 0.08
            lid_err_rad = 0.08
            lid_var = [[lid_err_x**2, 0, 0],
                       [0, lid_err_y**2, 0],
                       [0, 0, lid_err_rad**2]]
            # NOTE(review): the radius term below uses lid_err_rad**2 in the
            # *camera* covariance — looks like a copy-paste slip; confirm it
            # should not be cam_err_rad**2.
            cam_var = [[cam_err_x**2, 0, 0],
                       [0, cam_err_y**2, 0],
                       [0, 0, lid_err_rad**2]]
            # Fuse the two Gaussian measurements (product of Gaussians).
            zs, sigma = multivariate_multiply(camera, cam_var, lidar, lid_var)
            #fused_radius = (camera[2] * cam_err_rad**2 + lidar[2] * lid_err_rad**2)/(cam_err_rad**2+lid_err_rad**2)
            #camera_err_norm = camera_std_err/(camera_std_err+lidar_std_err)
            #lidar_err_norm = lidar_std_err/(camera_std_err+lidar_std_err)
            #R = np.average([camera_std_err, lidar_std_err],
            #               weights = [(1-camera_err_norm), (1-lidar_err_norm)])
            # Inflate the fused covariance by a fixed 50% noise factor.
            var = sigma * (1 + 0.5)

        else:
            #self.camera = self.camera_obstacle_pos + randn()* camera_std_err
            #self.camera = self.camera_obstacle_pos + np.array([randn()* camera_std_err_x, randn()* camera_std_err_y])
            #print(type(self.camera))
            #zs = np.concatenate((self.camera, self.camera_radius))
            zs = camera
            var = [[cam_err_x**2, 0, 0],
                   [0, cam_err_y**2, 0],
                   [0, 0, cam_err_rad**2]]
        # do predict
        std = (0.02, 0.02, 0.04, 0.04, 0.04, 0.04, 0.02)
        pf.predict(self.new_particles[:, :, i],
                   std=std,
                   dt=dt)

        # incorporate measurements
        #pf.update(self.new_particles[:, :,i], self.new_weights[:, i], z = zs, R = R)

        pf.update_multivariate(
            self.new_particles[:, :, i], self.new_weights[:, i], z=zs, R=var)

        # resample if too few effective particles
        if pf.neff(self.new_weights[:, i]) < self.N/2:
            indexes = systematic_resample(self.new_weights[:, i])
            self.new_weights[:, i] = pf.resample_from_index(
                self.new_particles[:, :, i], self.new_weights[:, i], indexes)
            #assert np.allclose(self.weights, 1/self.N)

        self.mu, self.var = pf.estimate(
            self.new_particles[:, :, i], self.new_weights[:, i])
        #reinitialize every t seconds
        #t = 0
        #time = round(self.time_elapsed, 1)
        # Periodic reinitialization is deliberately disabled in this variant.
        if False:
            print("Reinitializing")
            initial = (zs[0], zs[1], 0.05, 0.05, 0, 0, zs[2])
            init_std = (0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1)
            self.new_particles[:, :, i] = pf.create_gaussian_particles(
                mean=initial, std=init_std, N=self.N)
            self.new_weights[:, i] = pf.create_initial_weights(self.N)

    def cost_function(self, x_i, x_j, y_i, y_j, r_i, r_j):
        """Euclidean distance in (x, y, radius) space between two measurements."""
        #i = row; new measurements
        #j = column; old measurements
        cost = np.sqrt((x_i-x_j)**2 + (y_i-y_j)**2 + (r_i-r_j)**2)
        return cost

    def objects_assigner(self, old_measurements, new_measurements):
        """Hungarian-assign new measurements (rows) to old ones (columns).

        Returns (row_idx, col_idx, cost_matrix) from scipy's
        linear_sum_assignment over pairwise (x, y, r) distances.
        """
        old = np.array(old_measurements)
        new = np.array(new_measurements)

        cost_matrix = np.empty((len(new), len(old)))

        for i in range(len(cost_matrix)):
            for j in range(len(cost_matrix[i])):
                cost_matrix[i, j] = self.cost_function(
                    new[i, 0], old[j, 0], new[i, 1], old[j, 1], new[i, 2], old[j, 2])
        row_idx, col_idx = linear_sum_assignment(cost_matrix)
        return row_idx, col_idx, cost_matrix

    def lidar_callback(self, ros_data):
        """Cache raw lidar circles and match them against the camera set."""
        self.new_lidar_measurements = []
        # NOTE(review): caps at 6 circles here, not pf_size — confirm intent.
        self.lidar_all = ros_data.circles[:6]
        for i in range(len(self.lidar_all)):
            lidar_circle = self.lidar_all[i]
            self.new_lidar_measurements.append(
                [lidar_circle.center.x, lidar_circle.center.y, lidar_circle.true_radius])
        row_idx, col_idx, cost_matrix = self.objects_assigner(
            self.new_camera_measurements, self.new_lidar_measurements)
        #print("lidar new indices", row_idx)
        self.lidar_row_idx = row_idx
        self.lidar_col_idx = col_idx
        self.lidar_cost_matrix = cost_matrix
        self.lidar_time = rospy.get_time()  # liveness stamp read in pf_callback
        self.lidar_is_on = True

    def camera_callback(self, ros_data):
        """Keep the full Obstacles message (for its header) and the first pf_size circles."""
        self.obstacles_full = ros_data
        self.camera_all = ros_data.circles[:self.pf_size]

    def pf_callback(self, ros_data):
        """Run all particle filters once, triggered by a /pub_pf rate message.

        ros_data.data is the trigger rate in Hz; its reciprocal serves as the
        motion-model dt. Publishes particle/mean markers, the flat estimate
        array, and per-callback runtime.
        """
        # dt for the motion model; assumes ros_data.data (Hz) is non-zero.
        time = 1 / ros_data.data
        print(time)
        init_runtime = rospy.get_time()
        #obstacles_full = ros_data
        #camera_all = ros_data.circles[:self.pf_size]

        # Lazily turn pf_is_running into a per-track bool list on first call.
        if self.first_time:
            self.pf_is_running = [False for _ in range(self.pf_size)]
            self.first_time = False
        #self.pf_size = len(camera_all)
        #print("old measurement", self.old_camera_measurements)
        self.new_camera_measurements = []

        # Artificial measurement noise (sigma of the simulated camera sensor),
        # shared by every circle in this frame.
        cam_circle_err_x = randn()*0.1
        cam_circle_err_y = randn()*0.1
        cam_circle_err_rad = randn()*0.1

        self.run_fusion = [False for _ in range(self.pf_size)]
        for i in range(len(self.camera_all)):
            camera_circle = self.camera_all[i]
            self.new_camera_measurements.append([camera_circle.center.x + cam_circle_err_x,
                                                 camera_circle.center.y + cam_circle_err_y,
                                                 camera_circle.true_radius + cam_circle_err_rad])
        self.row_idx, col_idx, cost_matrix = self.objects_assigner(
            self.old_camera_measurements, self.new_camera_measurements)
        #print("row", row_idx)
        #print("col", col_idx)
        #print("camera cost matrix", cost)
        #print("new measurement", self.new_camera_measurements)

        #Data Gating for camera measurements
        # NOTE(review): np.delete shrinks row_idx while k iterates over the
        # original length — may IndexError if gating fires; confirm.
        for k in range(len(self.row_idx)):
            camera_cost = cost_matrix[self.row_idx[k], col_idx[k]]
            if camera_cost > 2:
                self.row_idx = np.delete(self.row_idx, k)
                #self.row_idx[k] = -1

        if self.lidar_is_on:
            #Data Gating for lidar measurements
            #print("lidar cost matrix", self.lidar_cost_matrix)
            for k in range(len(self.lidar_row_idx)):
                #print("row", self.lidar_row_idx)
                #print("col", self.lidar_col_idx)
                #print("cost ", self.lidar_cost_matrix)
                lidar_cost = self.lidar_cost_matrix[self.lidar_row_idx[k],
                                                    self.lidar_col_idx[k]]
                #print(self.lidar_row_idx[k], lidar_cost)
                if lidar_cost < 0.4:
                    #print("cost", k)
                    self.run_fusion[self.lidar_col_idx[k]] = True
                    #print("lidar row", self.lidar_row_idx)

        #print("cost matrix: ", cost_matrix)
        #print("new measurement indices (row)", self.row_idx)
        #print("old measurement indices (col)", col_idx)
        #print(len(col_idx))
        #self.pf_size = len(row_idx)

        # Per-track dispatch: initialize new tracks, fuse where gated, else
        # camera-only update. NOTE(review): i_lidar assumes fused tracks
        # appear in the same order as lidar_row_idx entries — verify.
        i_lidar = 0
        for i in range(len(self.camera_all)):
            if not self.pf_is_running[i]:
                initial_pos = self.camera_all[i]
                self.initialize(initial_pos, i)
                print("Initialize for object {}".format(i))
                #print(len(self.pub_particles))
                self.run_pf(self.new_camera_measurements[i], dt=time, i=i)
                self.pf_is_running[i] = True

                #self.old_measurements.append(initial_pos)

            elif self.run_fusion[col_idx[i]]:
                #print("lidar measurements",len(self.new_lidar_measurements))
                #print("row_idx",len(self.lidar_row_idx))
                #print("Fused")
                self.run_pf(self.new_camera_measurements[self.row_idx[i]],
                            self.new_lidar_measurements[self.lidar_row_idx[i_lidar]],
                            dt=time,
                            i=col_idx[i])
                i_lidar += 1
                #print("len", len(camera_all), len(self.lidar_all))
            else:
                self.run_pf(
                    self.new_camera_measurements[self.row_idx[i]], dt=time, i=col_idx[i])
                #print(self.mu)

            #print(len(self.new_particles[:,:,i]))
            #Fill the marker_particle of Marker class
            # Last iteration falls back to i itself rather than col_idx[i].
            if i < len(col_idx) - 1:
                i_particle = col_idx[i]
            else:
                i_particle = i

            # Refresh this track's slice of the shared RViz particle list.
            for j, particle in enumerate(self.new_particles[:, :, i_particle]):
                pub_new_particle = Point()
                pub_new_particle.x = particle[0]
                pub_new_particle.y = particle[1]
                pub_new_particle.z = 0.35
                self.pub_particles[j+self.N*i_particle] = pub_new_particle

            self.est_values[i_particle] = self.mu
            #print("mu", self.mu)
            self.time_elapsed = rospy.get_time() - self.start_time

            if PRINT:
                print('object: ', i, self.time_elapsed)
                print('pos: ', np.array(self.new_camera_measurements[i][0:2]))
                print('distance:', np.linalg.norm(
                    [self.new_camera_measurements[i][0:2]], axis=1))
                print('velocity estimate: ', self.mu[2:4])
                print('acceleration estimate: ', self.mu[4:6])
                print('true radius estimate: ', self.mu[6])
                #print('camera: ', self.camera)
                #print('position error: ', self.obstacle_pos-self.mu[0:2])
                #test = True
                #print(self.mu[0], self.mu[1])

            pub_mean = Point()
            pub_mean.x = self.mu[0]
            pub_mean.y = self.mu[1]
            pub_mean.z = 0.32
            self.list_of_pub_mean.append(pub_mean)

            #print("New: ",np.shape(self.new_measurements))

        print("--------")
        pf_running_time = rospy.get_time()
        #Fill the marker_particle of Marker class
        self.marker_particle.header = self.obstacles_full.header
        self.marker_particle.type = 8  # POINTS
        self.marker_particle.points = self.pub_particles
        #print(len(self.pub_particles))
        self.marker_particle.scale.x = 0.02
        self.marker_particle.scale.y = 0.02
        # self.marker_particle.scale.z = 0.02
        self.marker_particle.pose.orientation.x = 0
        self.marker_particle.pose.orientation.y = 0
        self.marker_particle.pose.orientation.z = 0
        self.marker_particle.pose.orientation.w = 1

        self.marker_particle.color.r = 0
        self.marker_particle.color.g = 0
        self.marker_particle.color.b = 0
        # NOTE(review): RViz alpha is expected in [0, 1]; 2 is out of range.
        self.marker_particle.color.a = 2

        self.obstacle_pub_particles.publish(self.marker_particle)

        #Fill the marker_mean of Marker class

        self.marker_mean.header = self.obstacles_full.header
        self.marker_mean.type = 6  # CUBE_LIST

        self.marker_pose.orientation.x = 0
        self.marker_pose.orientation.y = 0
        self.marker_pose.orientation.z = 0
        self.marker_pose.orientation.w = 1

        self.marker_mean.pose = self.marker_pose

        #print(self.list_of_pub_mean)

        self.marker_mean.points = self.list_of_pub_mean
        self.marker_mean.scale.x = 0.1
        self.marker_mean.scale.y = 0.1
        self.marker_mean.scale.z = 0.1

        mean_color = ColorRGBA()
        mean_color.r = 2.0 / 255.0
        mean_color.g = 255.0 / 255.0
        mean_color.b = 20.0 / 255.0
        mean_color.a = 1

        self.marker_mean.colors = [
            mean_color for _ in range(len(self.list_of_pub_mean))]
        #self.marker_mean.color.g = 0
        #self.marker_mean.color.b = 0
        #self.marker_mean.color.a = 1

        self.obstacle_pub_mean.publish(self.marker_mean)
        self.list_of_pub_mean = []

        #print(est_values)
        # Publish the flattened 3x7 estimate matrix as a Float32MultiArray.
        val = self.est_values.ravel()
        #print("values", val)
        pub_values = Float32MultiArray()
        pub_values.data = np.array(val, dtype=np.float32)
        self.obstacle_pub_values.publish(pub_values)

        self.running_time = rospy.get_time()
        # If the lidar has not sent data within > 0.1 s, stop using sensor fusion.
        self.time_difference = self.running_time - self.lidar_time
        #print(self.time_difference)
        if self.time_difference >= 1/5.5:
            self.lidar_is_on = False

        # Replace old camera measurements with their assigned new ones.
        if not len(col_idx) == 0:
            print("CHECK IT'S RUNNING ")
            for i in range(len(self.row_idx)):
                self.old_camera_measurements[col_idx[i]
                                             ] = self.new_camera_measurements[self.row_idx[i]]
        #print("PF Runtime", 1000 * (pf_running_time - init_runtime))
        #print("Runtime", 1000 * (self.running_time - init_runtime))
        #print("Count", self.count)
        self.count += 1
        self.pub_runtime.publish((self.running_time - init_runtime) * 1000)

        self.runtime.append([self.running_time - init_runtime, rospy.get_time() - self.start_time])
412 |
if __name__ == "__main__":
    # Start the tracker node; all work happens in the subscriber callbacks.
    rospy.init_node("particle_filters")
    run = ParticleFilter()
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down....")
    # After the node stops, optionally dump the per-callback runtime log.
    if SAVE_TO_FILE:
        # FIX: open with newline="" as the csv module requires (avoids blank
        # rows on Windows), and drop the redundant f.close() — the `with`
        # block already closes the file.
        with open("runtime_data.csv", "w", newline="") as f:
            csvwriter = csv.writer(f)
            csvwriter.writerow(["runtime", "time"])
            csvwriter.writerows(run.runtime)
        print("Printed to runtime_data.csv")
427 |
--------------------------------------------------------------------------------