├── doc
│   ├── plier_check.png
│   └── deteced_object.png
├── copy_paste_files
│   ├── hj_p.yaml
│   ├── p.cfg
│   ├── YoloObjectDetector.hpp
│   └── YoloObjectDetector.cpp
├── urdf
│   └── d435_world.urdf.xacro
├── launch
│   ├── bringup_d435.launch
│   ├── hj_object_detect_rviz.launch
│   ├── hj_object_detect.launch
│   ├── hj_darknet.launch
│   └── hj_jsk_test.launch
├── config
│   ├── hj_darknet_config.yaml
│   └── hj_yolo_jsk.rviz
├── .gitignore
├── package.xml
├── src
│   ├── hj_object_collector.py
│   └── detection_clustering.py
├── CMakeLists.txt
├── readme.md
└── LICENSE
/doc/plier_check.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hj26-park/hj-object-detect/HEAD/doc/plier_check.png
--------------------------------------------------------------------------------
/doc/deteced_object.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hj26-park/hj-object-detect/HEAD/doc/deteced_object.png
--------------------------------------------------------------------------------
/copy_paste_files/hj_p.yaml:
--------------------------------------------------------------------------------
1 | yolo_model:
2 |   config_file:
3 |     name: p.cfg
4 |   weight_file:
5 |     name: p_final.weights
6 |   threshold:
7 |     value: 0.3
8 |   detection_classes:
9 |     names:
10 |       - hammer
11 |       - plier
12 |       - screw_driver
--------------------------------------------------------------------------------
/urdf/d435_world.urdf.xacro:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
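The xacro body did not survive this export (its XML tags were stripped). As a rough sketch of what a D435-in-world xacro typically contains, assuming the `sensor_d435` macro from `realsense2_description` (the link name and mount pose are illustrative, not the original values):

```
<?xml version="1.0"?>
<robot name="d435_world" xmlns:xacro="http://ros.org/wiki/xacro">

  <!-- Fixed frame to attach the camera to (illustrative name). -->
  <link name="world"/>

  <!-- Standard D435 model; assumes the realsense2_description package. -->
  <xacro:include filename="$(find realsense2_description)/urdf/_d435.urdf.xacro"/>

  <!-- Mount the camera looking down at the workspace; pose is a placeholder. -->
  <xacro:sensor_d435 parent="world">
    <origin xyz="0 0 0.5" rpy="0 0.5 0"/>
  </xacro:sensor_d435>
</robot>
```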
/launch/bringup_d435.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
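This launch body was stripped as well. Per the README's launch tree it wraps `rs_rgbd.launch`, and the README passes `sim:=true` for Gazebo; a minimal sketch under those assumptions (the simulation branch is illustrative):

```
<launch>
  <arg name="sim" default="false"/>

  <!-- Real camera: RGB-D pipeline from realsense2_camera (per the README's launch tree). -->
  <include unless="$(arg sim)" file="$(find realsense2_camera)/launch/rs_rgbd.launch"/>

  <!-- Simulation: load and spawn the D435 world model from this package instead. -->
  <group if="$(arg sim)">
    <param name="robot_description"
           command="$(find xacro)/xacro '$(find hj_object_detect)/urdf/d435_world.urdf.xacro'"/>
    <node name="spawn_d435" pkg="gazebo_ros" type="spawn_model"
          args="-param robot_description -urdf -model d435"/>
  </group>
</launch>
```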
/launch/hj_object_detect_rviz.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
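Another stripped body. Judging from the file name and the README usage (`roslaunch hj_object_detect hj_object_detect_rviz.launch [sim:=true]`), it most likely starts the detection pipeline plus RViz with the bundled config; a sketch under that assumption:

```
<launch>
  <arg name="sim" default="false"/>

  <!-- Detection pipeline: camera bringup + darknet (see hj_object_detect.launch). -->
  <include file="$(find hj_object_detect)/launch/hj_object_detect.launch">
    <arg name="sim" value="$(arg sim)"/>
  </include>

  <!-- RViz with the configuration shipped in this package. -->
  <node name="rviz" pkg="rviz" type="rviz"
        args="-d $(find hj_object_detect)/config/hj_yolo_jsk.rviz"/>
</launch>
```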
/config/hj_darknet_config.yaml:
--------------------------------------------------------------------------------
1 | #subscribers:
2 | #
3 | #  camera_reading:
4 | #    topic: /camera/color/image_raw
5 | #    queue_size: 1
6 |
7 | actions:
8 |
9 |   camera_reading:
10 |     name: /darknet_ros/check_for_objects
11 |
12 | publishers:
13 |
14 |   object_detector:
15 |     topic: /darknet_ros/found_object
16 |     queue_size: 1
17 |     latch: false
18 |
19 |   bounding_boxes:
20 |     topic: /darknet_ros/bounding_boxes
21 |     queue_size: 1
22 |     latch: false
23 |
24 |   detection_image:
25 |     topic: /darknet_ros/detection_image
26 |     queue_size: 1
27 |     latch: true
28 |
29 | image_view:
30 |
31 |   enable_opencv: true
32 |   wait_key_delay: 1
33 |   enable_console_output: true
34 |
--------------------------------------------------------------------------------
/launch/hj_object_detect.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
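Per the README's launch tree, this file includes `bringup_d435.launch` and `hj_darknet.launch`; a minimal sketch under that assumption:

```
<launch>
  <arg name="sim" default="false"/>

  <!-- Camera bringup: real D435 or Gazebo model. -->
  <include file="$(find hj_object_detect)/launch/bringup_d435.launch">
    <arg name="sim" value="$(arg sim)"/>
  </include>

  <!-- YOLO detector. -->
  <include file="$(find hj_object_detect)/launch/hj_darknet.launch"/>
</launch>
```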
/launch/hj_darknet.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
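The README's launch tree names the weights/cfg folders and the two parameter files loaded here. A sketch modeled on darknet_ros's own `darknet_ros.launch` (the argument names follow that file; treat the whole block as an assumption):

```
<launch>
  <!-- Config and weights folders, as listed in the README's launch tree. -->
  <arg name="yolo_weights_path" default="$(find darknet_ros)/yolo_network_config/weights"/>
  <arg name="yolo_config_path"  default="$(find darknet_ros)/yolo_network_config/cfg"/>

  <!-- ROS wiring and the custom three-class network definition. -->
  <arg name="ros_param_file"     default="$(find hj_object_detect)/config/hj_darknet_config.yaml"/>
  <arg name="network_param_file" default="$(find darknet_ros)/config/hj_p.yaml"/>

  <rosparam command="load" ns="darknet_ros" file="$(arg ros_param_file)"/>
  <rosparam command="load" ns="darknet_ros" file="$(arg network_param_file)"/>

  <!-- Start darknet and the ROS wrapper. -->
  <node pkg="darknet_ros" type="darknet_ros" name="darknet_ros" output="screen">
    <param name="weights_path" value="$(arg yolo_weights_path)"/>
    <param name="config_path"  value="$(arg yolo_config_path)"/>
  </node>
</launch>
```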
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/ros,linux,windows,visualstudiocode
3 | # Edit at https://www.gitignore.io/?templates=ros,linux,windows,visualstudiocode
4 |
5 | ### Linux ###
6 | *~
7 |
8 | # temporary files which can be created if a process still has a handle open of a deleted file
9 | .fuse_hidden*
10 |
11 | # KDE directory preferences
12 | .directory
13 |
14 | # Linux trash folder which might appear on any partition or disk
15 | .Trash-*
16 |
17 | # .nfs files are created when an open file is removed but is still being accessed
18 | .nfs*
19 |
20 | ### ROS ###
21 | devel/
22 | logs/
23 | build/
24 | bin/
25 | lib/
26 | msg_gen/
27 | srv_gen/
28 | msg/*Action.msg
29 | msg/*ActionFeedback.msg
30 | msg/*ActionGoal.msg
31 | msg/*ActionResult.msg
32 | msg/*Feedback.msg
33 | msg/*Goal.msg
34 | msg/*Result.msg
35 | msg/_*.py
36 | build_isolated/
37 | devel_isolated/
38 |
39 | # Generated by dynamic reconfigure
40 | *.cfgc
41 | /cfg/cpp/
42 | /cfg/*.py
43 |
44 | # Ignore generated docs
45 | *.dox
46 | *.wikidoc
47 |
48 | # eclipse stuff
49 | .project
50 | .cproject
51 |
52 | # qcreator stuff
53 | CMakeLists.txt.user
54 |
55 | srv/_*.py
56 | *.pcd
57 | *.pyc
58 | qtcreator-*
59 | *.user
60 |
61 | /planning/cfg
62 | /planning/docs
63 | /planning/src
64 |
65 |
66 | # Emacs
67 | .#*
68 |
69 | # Catkin custom files
70 | CATKIN_IGNORE
71 |
72 | ### VisualStudioCode ###
73 | .vscode/*
74 | !.vscode/settings.json
75 | !.vscode/tasks.json
76 | !.vscode/launch.json
77 | !.vscode/extensions.json
78 |
79 | ### VisualStudioCode Patch ###
80 | # Ignore all local history of files
81 | .history
82 |
83 | ### Windows ###
84 | # Windows thumbnail cache files
85 | Thumbs.db
86 | ehthumbs.db
87 | ehthumbs_vista.db
88 |
89 | # Dump file
90 | *.stackdump
91 |
92 | # Folder config file
93 | [Dd]esktop.ini
94 |
95 | # Recycle Bin used on file shares
96 | $RECYCLE.BIN/
97 |
98 | # Windows Installer files
99 | *.cab
100 | *.msi
101 | *.msix
102 | *.msm
103 | *.msp
104 |
105 | # Windows shortcuts
106 | *.lnk
107 |
108 | # End of https://www.gitignore.io/api/ros,linux,windows,visualstudiocode
--------------------------------------------------------------------------------
/launch/hj_jsk_test.launch:
--------------------------------------------------------------------------------
42 | approximate_sync: true
45 | publish_tf: true
46 | tf_prefix: yolo_
--------------------------------------------------------------------------------
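Only the three parameter values above survive the export. The README's launch tree says this file runs a point cloud nodelet manager, a `jsk_pcl_utils/LabelToClusterPointIndices` nodelet, and a `jsk_pcl/ClusterPointIndicesDecomposer`. A sketch wiring those together (node names and topic remappings are assumptions; the surviving parameters sit where they belong, on the decomposer, which is what publishes the `yolo_` TF frames seen in the RViz config):

```
<launch>
  <!-- Nodelet manager for the point cloud pipeline. -->
  <node pkg="nodelet" type="nodelet" name="pcl_manager" args="manager" output="screen"/>

  <!-- Label image from darknet_ros -> cluster point indices (topic name assumed). -->
  <node pkg="nodelet" type="nodelet" name="label_to_cluster_indices"
        args="load jsk_pcl_utils/LabelToClusterPointIndices pcl_manager">
    <remap from="~input" to="/darknet_ros/label_image"/>
  </node>

  <!-- Decompose clusters into centroids and TF frames (yolo_output00 etc.). -->
  <node pkg="nodelet" type="nodelet" name="cluster_decomposer"
        args="load jsk_pcl/ClusterPointIndicesDecomposer pcl_manager">
    <remap from="~input" to="/camera/depth_registered/points"/>
    <remap from="~target" to="/label_to_cluster_indices/output"/>
    <rosparam>
      approximate_sync: true
      publish_tf: true
      tf_prefix: yolo_
    </rosparam>
  </node>
</launch>
```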
/package.xml:
--------------------------------------------------------------------------------
3 | hj_object_detect
4 | 0.0.0
5 | The hj_object_detect package
10 | msi-park
16 | TODO
51 | catkin
52 | darknet_ros
53 | jsk_pcl_ros
54 | rospy
55 | darknet_ros
56 | jsk_pcl_ros
57 | rospy
58 | darknet_ros
59 | jsk_pcl_ros
60 | rospy
--------------------------------------------------------------------------------
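Only the text nodes above survive; the surrounding tags were stripped. Reassembled into the stock catkin format-2 manifest that this text pattern matches (the maintainer email is the template placeholder, an assumption):

```
<?xml version="1.0"?>
<package format="2">
  <name>hj_object_detect</name>
  <version>0.0.0</version>
  <description>The hj_object_detect package</description>
  <maintainer email="msi-park@todo.todo">msi-park</maintainer>
  <license>TODO</license>

  <buildtool_depend>catkin</buildtool_depend>
  <build_depend>darknet_ros</build_depend>
  <build_depend>jsk_pcl_ros</build_depend>
  <build_depend>rospy</build_depend>
  <build_export_depend>darknet_ros</build_export_depend>
  <build_export_depend>jsk_pcl_ros</build_export_depend>
  <build_export_depend>rospy</build_export_depend>
  <exec_depend>darknet_ros</exec_depend>
  <exec_depend>jsk_pcl_ros</exec_depend>
  <exec_depend>rospy</exec_depend>

  <export>
  </export>
</package>
```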
/src/hj_object_collector.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy
3 | import struct
4 | import argparse
5 | import numpy as np
6 | import matplotlib.pyplot as plt
7 | import sensor_msgs.point_cloud2 as pc2
8 | from sensor_msgs.msg import PointCloud2
9 | from darknet_ros_msgs.msg import BoundingBox, BoundingBoxes
10 | from geometry_msgs.msg import Vector3
11 |
12 | ###
13 | # ROS CLASS IMPLEMENTATION
14 | # https://answers.ros.org/question/225008/python-class-for-subscribing-data-for-several-topics/
15 | #
16 |
17 | #############################################
18 | #             Global Variables              #
19 | #############################################
20 | bbox = BoundingBox()
21 | pre_bbox = BoundingBox()
22 | pc = PointCloud2()
23 | center = Vector3()
24 | point = Vector3()
25 | points = []
26 | depth = 0.0
27 |
28 | def bbox_callback(msg):
29 |     num_box = len(msg.bounding_boxes)
30 |     if num_box > 0:
31 |         b = msg.bounding_boxes[0]
32 |         bbox.xmin = b.xmin
33 |         bbox.xmax = b.xmax
34 |         bbox.ymin = b.ymin
35 |         bbox.ymax = b.ymax
36 |
37 | def point_callback(msg):
38 |     global points
39 |     global bbox
40 |     global pre_bbox
41 |     pc.header = msg.header
42 |     pc.height = msg.height
43 |     pc.width = msg.width
44 |     pc.fields = msg.fields  # channels and their layout
45 |     pc.is_bigendian = msg.is_bigendian
46 |     pc.point_step = msg.point_step
47 |     pc.row_step = msg.row_step
48 |     pc.data = msg.data  # actual point data, size is (row_step*height)
49 |     resolution = (pc.height, pc.width)
50 |
51 |     # Skip the cloud if the bounding box has not changed since the last frame.
52 |     if bbox.xmin == pre_bbox.xmin and \
53 |        bbox.xmax == pre_bbox.xmax and \
54 |        bbox.ymin == pre_bbox.ymin and \
55 |        bbox.ymax == pre_bbox.ymax:
56 |         pass
57 |     else:
58 |         points = []
59 |         for p in pc2.read_points(msg, skip_nans=False, field_names=("x", "y", "z")):
60 |             points.append(p[2])
61 |         if len(points) == pc.width * pc.height:
62 |             z_points = np.array(points, dtype=np.float32)
63 |             z = z_points.reshape(resolution)
64 |
65 |             if not (bbox.xmin == 0 and bbox.xmax == 0):
66 |                 print('Box: {}, {}'.format(bbox.xmin, bbox.xmax))
67 |                 z_box = z[bbox.ymin:bbox.ymax, bbox.xmin:bbox.xmax]  # rows are y, columns are x
68 |                 z_value = z_box[~np.isnan(z_box)]
69 |
70 |                 distance = min(z_value)
71 |                 print('Distance: {}'.format(distance))
72 |
73 |                 pre_bbox.xmin = bbox.xmin
74 |                 pre_bbox.xmax = bbox.xmax
75 |                 pre_bbox.ymin = bbox.ymin
76 |                 pre_bbox.ymax = bbox.ymax
77 |
78 |
79 | def main(args):
80 |     rospy.init_node('ObjectDepth', anonymous=True)
81 |
82 |     point_sub = rospy.Subscriber('camera/depth_registered/points', PointCloud2, point_callback)
83 |     bbox_sub = rospy.Subscriber('darknet_ros/bounding_boxes', BoundingBoxes, bbox_callback)
84 |
85 |     freq = 30
86 |     rate = rospy.Rate(freq)
87 |
88 |     while not rospy.is_shutdown():
89 |         rate.sleep()  # callbacks do the work; avoid a busy-wait
90 |
91 | if __name__ == '__main__':
92 |     parser = argparse.ArgumentParser(description='Output depth of a target object')
93 |     args = parser.parse_args()
94 |     main(args)
--------------------------------------------------------------------------------
/copy_paste_files/p.cfg:
--------------------------------------------------------------------------------
1 | [net]
2 | batch=64
3 | subdivisions=16
4 | height=416
5 | width=416
6 | channels=3
7 | momentum=0.9
8 | decay=0.0005
9 | angle=0
10 | saturation = 1.5
11 | exposure = 1.5
12 | hue=.1
13 |
14 | learning_rate=0.0001
15 | max_batches = 4500
16 | policy=steps
17 | steps=100,25000,35000
18 | scales=10,.1,.1
19 |
20 | [convolutional]
21 | batch_normalize=1
22 | filters=32
23 | size=3
24 | stride=1
25 | pad=1
26 | activation=leaky
27 |
28 | [maxpool]
29 | size=2
30 | stride=2
31 |
32 | [convolutional]
33 | batch_normalize=1
34 | filters=64
35 | size=3
36 | stride=1
37 | pad=1
38 | activation=leaky
39 |
40 | [maxpool]
41 | size=2
42 | stride=2
43 |
44 | [convolutional]
45 | batch_normalize=1
46 | filters=128
47 | size=3
48 | stride=1
49 | pad=1
50 | activation=leaky
51 |
52 | [convolutional]
53 | batch_normalize=1
54 | filters=64
55 | size=1
56 | stride=1
57 | pad=1
58 | activation=leaky
59 |
60 | [convolutional]
61 | batch_normalize=1
62 | filters=128
63 | size=3
64 | stride=1
65 | pad=1
66 | activation=leaky
67 |
68 | [maxpool]
69 | size=2
70 | stride=2
71 |
72 | [convolutional]
73 | batch_normalize=1
74 | filters=256
75 | size=3
76 | stride=1
77 | pad=1
78 | activation=leaky
79 |
80 | [convolutional]
81 | batch_normalize=1
82 | filters=128
83 | size=1
84 | stride=1
85 | pad=1
86 | activation=leaky
87 |
88 | [convolutional]
89 | batch_normalize=1
90 | filters=256
91 | size=3
92 | stride=1
93 | pad=1
94 | activation=leaky
95 |
96 | [maxpool]
97 | size=2
98 | stride=2
99 |
100 | [convolutional]
101 | batch_normalize=1
102 | filters=512
103 | size=3
104 | stride=1
105 | pad=1
106 | activation=leaky
107 |
108 | [convolutional]
109 | batch_normalize=1
110 | filters=256
111 | size=1
112 | stride=1
113 | pad=1
114 | activation=leaky
115 |
116 | [convolutional]
117 | batch_normalize=1
118 | filters=512
119 | size=3
120 | stride=1
121 | pad=1
122 | activation=leaky
123 |
124 | [convolutional]
125 | batch_normalize=1
126 | filters=256
127 | size=1
128 | stride=1
129 | pad=1
130 | activation=leaky
131 |
132 | [convolutional]
133 | batch_normalize=1
134 | filters=512
135 | size=3
136 | stride=1
137 | pad=1
138 | activation=leaky
139 |
140 | [maxpool]
141 | size=2
142 | stride=2
143 |
144 | [convolutional]
145 | batch_normalize=1
146 | filters=1024
147 | size=3
148 | stride=1
149 | pad=1
150 | activation=leaky
151 |
152 | [convolutional]
153 | batch_normalize=1
154 | filters=512
155 | size=1
156 | stride=1
157 | pad=1
158 | activation=leaky
159 |
160 | [convolutional]
161 | batch_normalize=1
162 | filters=1024
163 | size=3
164 | stride=1
165 | pad=1
166 | activation=leaky
167 |
168 | [convolutional]
169 | batch_normalize=1
170 | filters=512
171 | size=1
172 | stride=1
173 | pad=1
174 | activation=leaky
175 |
176 | [convolutional]
177 | batch_normalize=1
178 | filters=1024
179 | size=3
180 | stride=1
181 | pad=1
182 | activation=leaky
183 |
184 |
185 | #######
186 |
187 | [convolutional]
188 | batch_normalize=1
189 | size=3
190 | stride=1
191 | pad=1
192 | filters=1024
193 | activation=leaky
194 |
195 | [convolutional]
196 | batch_normalize=1
197 | size=3
198 | stride=1
199 | pad=1
200 | filters=1024
201 | activation=leaky
202 |
203 | [route]
204 | layers=-9
205 |
206 | [reorg]
207 | stride=2
208 |
209 | [route]
210 | layers=-1,-3
211 |
212 | [convolutional]
213 | batch_normalize=1
214 | size=3
215 | stride=1
216 | pad=1
217 | filters=1024
218 | activation=leaky
219 |
220 | [convolutional]
221 | size=1
222 | stride=1
223 | pad=1
224 | filters=40
225 | activation=linear
226 |
227 | [region]
228 | anchors = 1.08,1.19, 3.42,4.41, 6.63,11.38, 9.42,5.11, 16.62,10.52
229 | bias_match=1
230 | classes=3
231 | coords=4
232 | num=5
233 | softmax=1
234 | jitter=.2
235 | rescore=1
236 |
237 | object_scale=5
238 | noobject_scale=1
239 | class_scale=1
240 | coord_scale=1
241 |
242 | absolute=1
243 | thresh = .6
244 | random=0
245 |
--------------------------------------------------------------------------------
/src/detection_clustering.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # http://scikit-learn.org/stable/auto_examples/cluster/plot_dbscan.html
4 |
5 | import os
6 | import yaml
7 | import argparse
8 | import numpy as np
9 | import matplotlib.pyplot as plt
10 | from sklearn.cluster import DBSCAN
11 |
12 | class DetectionClustering(object):
13 |     def __init__(self, detections, eps=0.3, min_samples=10, plot=False, out_dir='out'):
14 |         self.dbscan = DBSCAN(eps=eps, min_samples=min_samples)
15 |         self.clusters = {}
16 |         for name, pos in detections.iteritems():
17 |             midpoints = self.compute(name, pos, plot=plot, out_dir=out_dir)
18 |             if midpoints: self.clusters[name] = midpoints
19 |
20 |     def compute(self, name, array, plot=False, out_dir='out'):
21 |         # Compute DBSCAN
22 |         X = np.array(array)
23 |         db = self.dbscan.fit(X)
24 |         core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
25 |         core_samples_mask[db.core_sample_indices_] = True
26 |         labels = db.labels_
27 |
28 |         # Number of clusters in labels, ignoring noise if present.
29 |         # n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)
30 |
31 |         # Compute midpoints
32 |         label_types = set(labels)
33 |         if -1 in label_types: label_types.remove(-1)
34 |         result = []
35 |         for k in label_types:
36 |             class_member_mask = (labels == k)
37 |             points = X[class_member_mask & core_samples_mask]
38 |             mean = points.mean(axis=0)
39 |             result.append(mean.tolist())
40 |
41 |         if plot:
42 |             # Black is removed and used for noise instead.
43 |             unique_labels = set(labels)
44 |             colors = [plt.cm.Spectral(each)
45 |                       for each in np.linspace(0, 1, len(unique_labels))]
46 |
47 |             for k, col in zip(unique_labels, colors):
48 |                 if k == -1: col = [0, 0, 0, 1]  # Black is used for noise.
49 |
50 |                 class_member_mask = (labels == k)
51 |
52 |                 xy = X[class_member_mask & core_samples_mask]
53 |                 plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=tuple(col),
54 |                          markeredgecolor='k', markersize=14)
55 |
56 |                 mean = xy.mean(axis=0)
57 |                 plt.plot(mean[0], mean[1], 'k+', markeredgewidth=2, markersize=10)
58 |
59 |                 xy = X[class_member_mask & ~core_samples_mask]
60 |                 plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=tuple(col),
61 |                          markeredgecolor='k', markersize=6)
62 |
63 |             output_file = out_dir + '/' + name + '.png'
64 |             print 'Writing graph to {}...'.format(output_file)
65 |             plt.title("Clustering result of class '{}'".format(name))
66 |             plt.savefig(output_file)
67 |
68 |         return result
69 |
70 | if __name__ == '__main__':
71 |     # Custom positive integer type
72 |     def positive_int(val):
73 |         i = int(val)
74 |         if i <= 0:
75 |             raise argparse.ArgumentTypeError('invalid positive_int value: {}'.format(val))
76 |         return i
77 |
78 |     # Argument parser
79 |     parser = argparse.ArgumentParser()
80 |     parser.add_argument('-i', '--input_file', type=str, default='~/.ros/detections_raw.db')
81 |     parser.add_argument('-o', '--output_file', type=str, default='detections_dbscan.db')
82 |     parser.add_argument('-p', '--plot', action='store_true')  # type=bool would treat any non-empty string as True
83 |     parser.add_argument('-d', '--output_directory', type=str, default='out',
84 |                         help='Directory where graphs are saved when --plot is set.')
85 |     parser.add_argument('--eps', type=float, default=0.3)
86 |     parser.add_argument('--min_samples', type=positive_int, default=10)
87 |
88 |     args, _ = parser.parse_known_args()
89 |
90 |     if args.plot and not os.path.exists(args.output_directory):
91 |         os.makedirs(args.output_directory)
92 |
93 |     print 'Reading from {}...'.format(args.input_file)
94 |     with open(os.path.expanduser(args.input_file), 'r') as infile:
95 |         detections = yaml.load(infile)
96 |
97 |     dc = DetectionClustering(detections, args.eps, args.min_samples,
98 |                              args.plot, args.output_directory)
99 |
100 |     print 'Writing to {}...'.format(args.output_file)
101 |     with open(os.path.expanduser(args.output_file), 'w') as outfile:
102 |         yaml.dump(dc.clusters, outfile)
--------------------------------------------------------------------------------
/copy_paste_files/YoloObjectDetector.hpp:
--------------------------------------------------------------------------------
1 | /*
2 | * YoloObjectDetector.h
3 | *
4 | * Created on: Dec 19, 2016
5 | * Author: Marko Bjelonic
6 | * Institute: ETH Zurich, Robotic Systems Lab
7 | */
8 |
9 | /*
10 | * Edited on: Aug 20, 2019
11 | * Author: Hyeonjun Park
12 | * Institute: Human-Robot Interaction LAB, Kyung Hee University, South Korea
13 | */
14 |
15 | #pragma once
16 |
17 | // c++
18 | #include <math.h>
19 | #include <string>
20 | #include <vector>
21 | #include <iostream>
22 | #include <pthread.h>
23 | #include <thread>
24 | #include <chrono>
25 |
26 | // ROS
27 | #include <ros/ros.h>
28 | #include <std_msgs/Header.h>
29 | #include <std_msgs/Int8.h>
30 | #include <std_msgs/String.h>
31 | #include <actionlib/server/simple_action_server.h>
32 | #include <sensor_msgs/image_encodings.h>
33 | #include <sensor_msgs/Image.h>
34 | #include <geometry_msgs/Point.h>
35 | #include <image_transport/image_transport.h>
36 |
37 | // OpenCv
38 | #include <cv_bridge/cv_bridge.h>
39 | #include <opencv2/objdetect/objdetect.hpp>
40 | #include <opencv2/highgui/highgui.hpp>
41 | #include <opencv2/imgproc/imgproc.hpp>
42 |
43 | // darknet_ros_msgs
44 | #include <darknet_ros_msgs/BoundingBoxes.h>
45 | #include <darknet_ros_msgs/BoundingBox.h>
46 | #include <darknet_ros_msgs/CheckForObjectsAction.h>
47 |
48 | // Darknet.
49 | #ifdef GPU
50 | #include "cuda_runtime.h"
51 | #include "curand.h"
52 | #include "cublas_v2.h"
53 | #endif
54 |
55 | extern "C" {
56 | #include "network.h"
57 | #include "detection_layer.h"
58 | #include "region_layer.h"
59 | #include "cost_layer.h"
60 | #include "utils.h"
61 | #include "parser.h"
62 | #include "box.h"
63 | #include "darknet_ros/image_interface.h"
64 | #include <sys/time.h>
65 | }
66 |
67 | extern "C" void ipl_into_image(IplImage* src, image im);
68 | extern "C" image ipl_to_image(IplImage* src);
69 | extern "C" void show_image_cv(image p, const char *name, IplImage *disp);
70 |
71 | namespace darknet_ros {
72 |
73 | //! Bounding box of the detected object.
74 | typedef struct
75 | {
76 | float x, y, w, h, prob;
77 | int num, Class;
78 | } RosBox_;
79 |
80 | class YoloObjectDetector
81 | {
82 | public:
83 | /*!
84 | * Constructor.
85 | */
86 | explicit YoloObjectDetector(ros::NodeHandle nh);
87 |
88 | /*!
89 | * Destructor.
90 | */
91 | ~YoloObjectDetector();
92 |
93 | private:
94 | /*!
95 | * Reads and verifies the ROS parameters.
96 | * @return true if successful.
97 | */
98 | bool readParameters();
99 |
100 | /*!
101 | * Initialize the ROS connections.
102 | */
103 | void init();
104 |
105 | /*!
106 | * Callback of camera.
107 | * @param[in] msg image pointer.
108 | */
109 | void cameraCallback(const sensor_msgs::ImageConstPtr& msg);
110 |
111 |
112 | /*!
113 | * Callback of targetObject.
114 | */
115 | void targetObjectCallback(const std_msgs::String::ConstPtr& msg);
116 |
117 | /*! hj editing
118 | * Callback of targetObject2.
119 | */
120 | void targetObjectCallback2(const std_msgs::String::ConstPtr& msg);
121 |
122 |
123 | /*!
124 | * Check for objects action goal callback.
125 | */
126 | void checkForObjectsActionGoalCB();
127 |
128 | /*!
129 | * Check for objects action preempt callback.
130 | */
131 | void checkForObjectsActionPreemptCB();
132 |
133 | /*!
134 | * Check if a preempt for the check for objects action has been requested.
135 | * @return false if preempt has been requested or inactive.
136 | */
137 | bool isCheckingForObjects() const;
138 |
139 | /*!
140 | * Publishes the detection image.
141 | * @return true if successful.
142 | */
143 | bool publishDetectionImage(const cv::Mat& detectionImage);
144 | bool publishLabelImage(const cv::Mat& labelImage);
145 |
146 | //! Typedefs.
147 | typedef actionlib::SimpleActionServer<darknet_ros_msgs::CheckForObjectsAction> CheckForObjectsActionServer;
148 | typedef std::shared_ptr<CheckForObjectsActionServer> CheckForObjectsActionServerPtr;
149 |
150 | //! ROS node handle.
151 | ros::NodeHandle nodeHandle_;
152 |
153 | //! Class labels.
154 | int numClasses_;
155 | std::vector<std::string> classLabels_;
156 |
157 | //! Check for objects action server.
158 | CheckForObjectsActionServerPtr checkForObjectsActionServer_;
159 |
160 | //! Advertise and subscribe to image topics.
161 | image_transport::ImageTransport imageTransport_;
162 |
163 | //! ROS subscriber and publisher.
164 | image_transport::Subscriber imageSubscriber_;
165 | ros::Subscriber targetObjectSubscriber_;
166 | //! hj editing
167 | ros::Subscriber targetObjectSubscriber2_;
168 |
169 | ros::Publisher objectPublisher_;
170 | ros::Publisher boundingBoxesPublisher_;
171 |
172 | //! Detected objects.
173 | std::vector<std::vector<RosBox_> > rosBoxes_;
174 | std::vector<int> rosBoxCounter_;
175 | darknet_ros_msgs::BoundingBoxes boundingBoxesResults_;
176 |
177 | //! Camera related parameters.
178 | int frameWidth_;
179 | int frameHeight_;
180 |
181 | //! Publisher of the bounding box image.
182 | ros::Publisher detectionImagePublisher_;
183 |
184 | //! Publisher of the label image.
185 | ros::Publisher labelImagePublisher_;
186 |
187 | // Yolo running on thread.
188 | std::thread yoloThread_;
189 |
190 | // Darknet.
191 | char **demoNames_;
192 | image **demoAlphabet_;
193 | int demoClasses_;
194 |
195 | network *net_;
196 | image buff_[3];
197 | image buffLetter_[3];
198 | int buffId_[3];
199 | int buffIndex_ = 0;
200 | IplImage * ipl_;
201 | float fps_ = 0;
202 | float demoThresh_ = 0;
203 | float demoHier_ = .5;
204 | int running_ = 0;
205 |
206 | int demoDelay_ = 0;
207 | int demoFrame_ = 3;
208 | float **predictions_;
209 | int demoIndex_ = 0;
210 | int demoDone_ = 0;
211 | float *lastAvg2_;
212 | float *lastAvg_;
213 | float *avg_;
214 | int demoTotal_ = 0;
215 | double demoTime_;
216 |
217 | RosBox_ *roiBoxes_;
218 | bool viewImage_;
219 | bool enableConsoleOutput_;
220 | int waitKeyDelay_;
221 | int fullScreen_;
222 | char *demoPrefix_;
223 |
224 | std_msgs::Header imageHeader_;
225 | cv::Mat camImageCopy_;
226 | boost::shared_mutex mutexImageCallback_;
227 |
228 | bool imageStatus_ = false;
229 | boost::shared_mutex mutexImageStatus_;
230 |
231 | bool isNodeRunning_ = true;
232 | boost::shared_mutex mutexNodeStatus_;
233 |
234 | int actionId_;
235 | boost::shared_mutex mutexActionStatus_;
236 |
237 | // double getWallTime();
238 |
239 | int sizeNetwork(network *net);
240 |
241 | void rememberNetwork(network *net);
242 |
243 | detection *avgPredictions(network *net, int *nboxes);
244 |
245 | void *detectInThread();
246 |
247 | void *fetchInThread();
248 |
249 | void *displayInThread(void *ptr);
250 |
251 | void *displayLoop(void *ptr);
252 |
253 | void *detectLoop(void *ptr);
254 |
255 | void setupNetwork(char *cfgfile, char *weightfile, char *datafile, float thresh,
256 | char **names, int classes,
257 | int delay, char *prefix, int avg_frames, float hier, int w, int h,
258 | int frames, int fullscreen);
259 |
260 | void yolo();
261 |
262 | IplImage* getIplImage();
263 |
264 | bool getImageStatus(void);
265 |
266 | bool isNodeRunning(void);
267 |
268 | void *publishInThread();
269 | };
270 |
271 | } /* namespace darknet_ros*/
272 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(hj_object_detect)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | # add_compile_options(-std=c++11)
6 |
7 | ## Find catkin macros and libraries
8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
9 | ## is used, also find other catkin packages
10 | find_package(catkin REQUIRED COMPONENTS
11 | darknet_ros
12 | jsk_pcl_ros
13 | rospy
14 | )
15 |
16 | ## System dependencies are found with CMake's conventions
17 | # find_package(Boost REQUIRED COMPONENTS system)
18 |
19 |
20 | ## Uncomment this if the package has a setup.py. This macro ensures
21 | ## modules and global scripts declared therein get installed
22 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
23 | # catkin_python_setup()
24 |
25 | ################################################
26 | ## Declare ROS messages, services and actions ##
27 | ################################################
28 |
29 | ## To declare and build messages, services or actions from within this
30 | ## package, follow these steps:
31 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
32 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
33 | ## * In the file package.xml:
34 | ## * add a build_depend tag for "message_generation"
35 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
36 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
37 | ## but can be declared for certainty nonetheless:
38 | ## * add a exec_depend tag for "message_runtime"
39 | ## * In this file (CMakeLists.txt):
40 | ## * add "message_generation" and every package in MSG_DEP_SET to
41 | ## find_package(catkin REQUIRED COMPONENTS ...)
42 | ## * add "message_runtime" and every package in MSG_DEP_SET to
43 | ## catkin_package(CATKIN_DEPENDS ...)
44 | ## * uncomment the add_*_files sections below as needed
45 | ## and list every .msg/.srv/.action file to be processed
46 | ## * uncomment the generate_messages entry below
47 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
48 |
49 | ## Generate messages in the 'msg' folder
50 | # add_message_files(
51 | # FILES
52 | # Message1.msg
53 | # Message2.msg
54 | # )
55 |
56 | ## Generate services in the 'srv' folder
57 | # add_service_files(
58 | # FILES
59 | # Service1.srv
60 | # Service2.srv
61 | # )
62 |
63 | ## Generate actions in the 'action' folder
64 | # add_action_files(
65 | # FILES
66 | # Action1.action
67 | # Action2.action
68 | # )
69 |
70 | ## Generate added messages and services with any dependencies listed here
71 | # generate_messages(
72 | # DEPENDENCIES
73 | # std_msgs # Or other packages containing msgs
74 | # )
75 |
76 | ################################################
77 | ## Declare ROS dynamic reconfigure parameters ##
78 | ################################################
79 |
80 | ## To declare and build dynamic reconfigure parameters within this
81 | ## package, follow these steps:
82 | ## * In the file package.xml:
83 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
84 | ## * In this file (CMakeLists.txt):
85 | ## * add "dynamic_reconfigure" to
86 | ## find_package(catkin REQUIRED COMPONENTS ...)
87 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
88 | ## and list every .cfg file to be processed
89 |
90 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
91 | # generate_dynamic_reconfigure_options(
92 | # cfg/DynReconf1.cfg
93 | # cfg/DynReconf2.cfg
94 | # )
95 |
96 | ###################################
97 | ## catkin specific configuration ##
98 | ###################################
99 | ## The catkin_package macro generates cmake config files for your package
100 | ## Declare things to be passed to dependent projects
101 | ## INCLUDE_DIRS: uncomment this if your package contains header files
102 | ## LIBRARIES: libraries you create in this project that dependent projects also need
103 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
104 | ## DEPENDS: system dependencies of this project that dependent projects also need
105 | catkin_package(
106 | # INCLUDE_DIRS include
107 | # LIBRARIES hj_object_detect
108 | # CATKIN_DEPENDS darknet_ros jsk_pcl_ros rospy
109 | # DEPENDS system_lib
110 | )
111 |
112 | ###########
113 | ## Build ##
114 | ###########
115 |
116 | ## Specify additional locations of header files
117 | ## Your package locations should be listed before other locations
118 | include_directories(
119 | # include
120 | ${catkin_INCLUDE_DIRS}
121 | )
122 |
123 | ## Declare a C++ library
124 | # add_library(${PROJECT_NAME}
125 | # src/${PROJECT_NAME}/hj_object_detect.cpp
126 | # )
127 |
128 | ## Add cmake target dependencies of the library
129 | ## as an example, code may need to be generated before libraries
130 | ## either from message generation or dynamic reconfigure
131 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
132 |
133 | ## Declare a C++ executable
134 | ## With catkin_make all packages are built within a single CMake context
135 | ## The recommended prefix ensures that target names across packages don't collide
136 | # add_executable(${PROJECT_NAME}_node src/hj_object_detect_node.cpp)
137 |
138 | ## Rename C++ executable without prefix
139 | ## The above recommended prefix causes long target names, the following renames the
140 | ## target back to the shorter version for ease of user use
141 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
142 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
143 |
144 | ## Add cmake target dependencies of the executable
145 | ## same as for the library above
146 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
147 |
148 | ## Specify libraries to link a library or executable target against
149 | # target_link_libraries(${PROJECT_NAME}_node
150 | # ${catkin_LIBRARIES}
151 | # )
152 |
153 | #############
154 | ## Install ##
155 | #############
156 |
157 | # all install targets should use catkin DESTINATION variables
158 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
159 |
160 | ## Mark executable scripts (Python etc.) for installation
161 | ## in contrast to setup.py, you can choose the destination
162 | # install(PROGRAMS
163 | # scripts/my_python_script
164 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
165 | # )
166 |
167 | ## Mark executables for installation
168 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
169 | # install(TARGETS ${PROJECT_NAME}_node
170 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
171 | # )
172 |
173 | ## Mark libraries for installation
174 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
175 | # install(TARGETS ${PROJECT_NAME}
176 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
177 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
178 | # RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
179 | # )
180 |
181 | ## Mark cpp header files for installation
182 | # install(DIRECTORY include/${PROJECT_NAME}/
183 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
184 | # FILES_MATCHING PATTERN "*.h"
185 | # PATTERN ".svn" EXCLUDE
186 | # )
187 |
188 | ## Mark other files for installation (e.g. launch and bag files, etc.)
189 | # install(FILES
190 | # # myfile1
191 | # # myfile2
192 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
193 | # )
194 |
195 | #############
196 | ## Testing ##
197 | #############
198 |
199 | ## Add gtest based cpp test target and link libraries
200 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_hj_object_detect.cpp)
201 | # if(TARGET ${PROJECT_NAME}-test)
202 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
203 | # endif()
204 |
205 | ## Add folders to be run by python nosetests
206 | # catkin_add_nosetests(test)
207 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # hj_object_detect (Kinetic)
2 |
3 | ## Overview
4 |
5 | This is a ROS package developed for object detection in camera images.
6 | It uses [darknet_ros (YOLO)](https://github.com/leggedrobotics/darknet_ros) for real-time object detection and [jsk_pcl](https://github.com/jsk-ros-pkg/jsk_recognition) to estimate the coordinates of detected objects, for a single class at a time. MULTIPLE CLASSES ARE NOT SUPPORTED.
7 | This package detects "plier", "hammer", and "screw_driver". For this, I referred to [open-manipulator-object-tracking-apple](https://github.com/AuTURBO/open-manipulator-object-tracking-apple).
8 |
9 | The package has been tested under ROS Kinetic and Ubuntu 16.04 with
10 | OpenCV 3.4.2 and
11 | NVIDIA driver 418.87.00 (CUDA 10.1).
12 |
13 |
14 | **Author: [Hyeonjun Park](https://www.linkedin.com/in/hyeonjun-park-41bb59125), koreaphj91@gmail.com**
15 |
16 | **Affiliation: [Human-Robot Interaction LAB](https://khu-hri.weebly.com), Kyung Hee University, South Korea**
17 |
18 | 
19 |
20 |
21 | ## Installation
22 | - Before doing this, please back up important files.
23 |
24 | ### Dependencies
25 |
26 | This software is built on the Robot Operating System ([ROS](http://wiki.ros.org/ROS/Installation)).
27 |
28 | One-line install: https://cafe.naver.com/openrt/14575
29 | ```
30 | for Desktop
31 |
32 | wget https://raw.githubusercontent.com/ROBOTIS-GIT/robotis_tools/master/install_ros_kinetic.sh && chmod 755 ./install_ros_kinetic.sh && bash ./install_ros_kinetic.sh
33 | ```
34 |
35 | Additionally, YOLO for ROS depends on the following software:
36 |
37 | - [OpenCV 3.4.x](http://opencv.org/) (computer vision library)
38 | - [OpenCV 3.4.2](https://jsh93.tistory.com/53) (how to install OpenCV, in Korean):
39 | ```
40 | $ sudo apt-get update && sudo apt-get upgrade
41 |
42 | -- install the required packages
43 | $ sudo apt-get -y purge libopencv* python-opencv
44 | $ sudo apt-get -y install build-essential cmake vim
45 | $ sudo apt-get -y install pkg-config libjpeg-dev libtiff5-dev libjasper-dev libpng12-dev libavcodec-dev
46 | $ sudo apt-get -y install libavformat-dev libswscale-dev libxvidcore-dev libx264-dev libxine2-dev libv4l-dev
47 | $ sudo apt-get -y install v4l-utils libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libqt4-dev
48 | $ sudo apt-get -y install libgtk2.0-dev libgtk-3-dev mesa-utils libgl1-mesa-dri libqt4-opengl-dev
49 | $ sudo apt-get -y install libatlas-base-dev gfortran libeigen3-dev python3-dev python3-numpy python-dev python-numpy libatlas-base-dev gfortran
50 |
51 | -- opencv 3.4.2 & opencv_contrib-3.4.2 download
52 | $ wget -O opencv.zip https://github.com/opencv/opencv/archive/3.4.2.zip
53 | $ unzip opencv.zip
54 | $ wget -O opencv_contrib.zip https://github.com/opencv/opencv_contrib/archive/3.4.2.zip
55 | $ unzip opencv_contrib.zip
56 |
57 | -- opencv build
58 | $ cd ~/opencv-3.4.2/
59 | $ mkdir build
60 | $ cd build
61 | $ cmake -D CMAKE_BUILD_TYPE=RELEASE \
62 | -D CMAKE_INSTALL_PREFIX=/usr/local \
63 | -D WITH_TBB=OFF \
64 | -D WITH_IPP=OFF \
65 | -D WITH_1394=OFF \
66 | -D BUILD_WITH_DEBUG_INFO=OFF \
67 | -D BUILD_DOCS=OFF \
68 | -D INSTALL_C_EXAMPLES=ON \
69 | -D INSTALL_PYTHON_EXAMPLES=ON \
70 | -D BUILD_EXAMPLES=OFF \
71 | -D BUILD_TESTS=OFF \
72 | -D BUILD_PERF_TESTS=OFF \
73 | -D WITH_QT=OFF \
74 | -D WITH_GTK=ON \
75 | -D WITH_OPENGL=ON \
76 | -D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-3.4.2/modules \
77 | -D WITH_V4L=ON \
78 | -D WITH_FFMPEG=ON \
79 | -D WITH_XINE=ON \
80 | -D BUILD_NEW_PYTHON_SUPPORT=ON \
81 | ../
82 |
83 | -- compile opencv
84 | $ time make
85 |
86 | -- opencv install
87 | $ sudo make install
88 | ```
89 | - [CUDA 10.0 with cuDNN 7.x](https://greedywyatt.tistory.com/106) (how to install CUDA, in Korean)
90 | - Note: install the NVIDIA driver first.
91 | The required CUDA version depends on your GPU; check the list of [supported GPUs](https://en.wikipedia.org/wiki/CUDA).
92 | - Download CUDA from the NVIDIA webpage ([link](https://developer.nvidia.com/cuda-10.0-download-archive?target_os=Linux&target_arch=x86_64&target_distro=Ubuntu&target_version=1604&target_type=deblocal))
93 | ```
94 | $ sudo dpkg -i cuda-repo-ubuntu1604-10-0-local-10.0.130-410.48_1.0-1_amd64.deb
95 | $ sudo apt-key add /var/cuda-repo-<version>/7fa2af80.pub
96 | $ sudo apt-get update
97 | $ sudo apt-get install cuda
98 |
99 | $ gedit ~/.bashrc
100 | export PATH=/usr/local/cuda-10.0/bin${PATH:+:${PATH}}
101 | export LD_LIBRARY_PATH=/usr/local/cuda-10.0/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
102 |
103 | Or, if you installed CUDA 10.1:
104 | $ gedit ~/.bashrc
105 | export PATH=/usr/local/cuda-10.1/bin${PATH:+:${PATH}}
106 | export LD_LIBRARY_PATH=/usr/local/cuda-10.1/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
107 |
108 | $ reboot
109 |
110 | - check the installation
111 | $ cat /proc/driver/nvidia/version
112 | NVRM version: NVIDIA UNIX x86_64 Kernel Module 418.87.00 Thu Aug 8 15:35:46 CDT 2019
113 | GCC version: gcc version 5.4.0 20160609 (Ubuntu 5.4.0-6ubuntu1~16.04.11)
114 | $ nvcc -V
115 | nvcc: NVIDIA (R) Cuda compiler driver
116 | Copyright (c) 2005-2018 NVIDIA Corporation
117 | Built on Sat_Aug_25_21:08:01_CDT_2018
118 | Cuda compilation tools, release 10.0, V10.0.130
119 |
120 | - After downloading cuDNN:
121 | $ sudo dpkg -i libcudnn7_7.6.2.24-1+cuda10.0_amd64.deb
122 | $ sudo dpkg -i libcudnn7-dev_7.6.2.24-1+cuda10.0_amd64.deb
123 | $ sudo dpkg -i libcudnn7-doc_7.6.2.24-1+cuda10.0_amd64.deb
124 | ```
125 |
126 | - [darknet YOLO](https://juni-94.tistory.com/9) (how to install YOLO, in Korean)
127 |
128 | - [darknet_ros](https://qiita.com/nnn112358/items/d696681d5b0577d633b6) (how to install darknet_ros, in Japanese)
129 | ```
130 | $ cd ~/catkin_ws/src
131 | $ git clone --recursive https://github.com/leggedrobotics/darknet_ros.git
132 | $ cd ..
133 | $ catkin_make darknet_ros
134 | $ cd ~/catkin_ws/devel
135 | $ source setup.bash
136 | ```
137 |
138 | - [Intel® RealSense™ Depth Camera D435 library](https://github.com/IntelRealSense/realsense-ros)
139 | ```
140 | Intel RealSense SDK 2.0 install
141 |
142 | - Register the server's public key
143 | $ sudo apt-key adv --keyserver keys.gnupg.net --recv-key F6E65AC044F831AC80A06380C8B3A55A6F3EFCDE || sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-key F6E65AC044F831AC80A06380C8B3A55A6F3EFCDE
144 |
145 | - Add the server to the list of repositories:
146 | $ sudo add-apt-repository "deb http://realsense-hw-public.s3.amazonaws.com/Debian/apt-repo xenial main" -u
147 | $ sudo apt-get install librealsense2-dkms
148 | $ sudo apt-get install librealsense2-utils
149 | $ sudo apt-get install librealsense2-dev
150 | $ sudo apt-get install librealsense2-dbg
151 |
152 | RealSense ROS
153 |
154 | $ cd ~/catkin_ws/src/realsense
155 | $ git clone https://github.com/pal-robotics/ddynamic_reconfigure.git
156 | ```
157 |
158 | - [jsk_recognition](https://github.com/jsk-ros-pkg/jsk_common)
159 | ```
160 | sudo apt-get install ros-kinetic-jsk-recognition
161 | sudo apt-get install ros-kinetic-jsk-topic-tools
162 |
163 | build the packages from source
164 | $ cd ~/catkin_ws/src
165 | $ git clone https://github.com/jsk-ros-pkg/jsk_common.git
166 | $ cd jsk_common
167 | $ rosdep install -y -r --from-path . --ignore-src
168 | $ cd ..
169 | $ catkin_make darknet_ros
170 | ```
171 |
172 | - nodelet, rtabmap-ros, navigation
173 | ```
174 | $ sudo apt-get install ros-kinetic-octomap-server
175 | $ sudo apt-get install ros-kinetic-nodelet
176 | $ sudo apt-get install ros-kinetic-depth-image-proc
177 | $ sudo apt-get install ros-kinetic-rtabmap-ros
178 | $ sudo apt-get install ros-kinetic-navigation
179 | ```
180 | [Reference page (Oroca Naver cafe, in Korean)](https://cafe.naver.com/openrt/21022)
181 |
182 | ## How to start?
183 | Copy and paste the provided files:
184 | ```
185 | $ roscd hj_object_detect/
186 | $ cd copy_paste_files
187 | $ cp -r hj_p.yaml ~/catkin_ws/src/darknet_ros/darknet_ros/config
188 |
189 | $ cp -r p.cfg ~/catkin_ws/src/darknet_ros/darknet_ros/yolo_network_config/cfg
190 |
191 | $ cp -r YoloObjectDetector.cpp ~/catkin_ws/src/darknet_ros/darknet_ros/src
192 | $ cp -r YoloObjectDetector.hpp ~/catkin_ws/src/darknet_ros/darknet_ros/include/darknet_ros
193 | ```
194 |
195 | Weight file download: [link](https://drive.google.com/file/d/1RxoWf0uWkAOB2uymEG259AuPujBKl_Oa/view?usp=sharing)
196 |
197 | ```
198 | $ cd (your download folder)
199 | $ cp -r p_final.weights ~/catkin_ws/src/darknet_ros/darknet_ros/yolo_network_config/weights
200 | ```
201 |
202 | Real RealSense camera
203 | ```
204 | $ roslaunch hj_object_detect hj_object_detect_rviz.launch
205 | $ roslaunch hj_object_detect hj_jsk_test.launch
206 | ```
207 |
208 | Gazebo simulation
209 | ```
210 | $ roslaunch hj_object_detect hj_object_detect_rviz.launch sim:=true
211 | $ roslaunch hj_object_detect hj_jsk_test.launch sim:=true
212 | ```
213 |
214 | ## Launch tree
215 |
216 | #### hj_object_detect.launch
217 | - bringup_d435.launch
218 | - rs_rgbd.launch
219 | - hj_darknet.launch
220 | - config: $(find darknet_ros)/yolo_network_config/cfg
221 | - weights: $(find darknet_ros)/yolo_network_config/weights
222 | - parameter: $(find hj_object_detect)/config/hj_darknet_config.yaml
223 | - parameter: $(find darknet_ros)/config/hj_p.yaml
224 |
225 | #### hj_jsk_test.launch
226 | - point cloud nodelet
227 | - jsk_pcl_utils/LabelToClusterPointIndices nodelet
228 | - jsk_pcl/ClusterPointIndicesDecomposer
229 |
230 | ## Note
231 | - If the coordinates of the detected hammer are not visible, try this process:
232 | ```
233 | $ cd ~/catkin_ws/src
234 | $ cd jsk_common
235 | $ rosdep install -y -r --from-path . --ignore-src
236 | ```
237 |
238 | ## Video
239 | Click the image to open the YouTube video.
240 | [](https://youtu.be/3cFitqKaLN0)
241 |
242 |
243 |
--------------------------------------------------------------------------------
/config/hj_yolo_jsk.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 0
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | - /TF1
10 | - /TF1/Frames1
11 | - /PoseArray1
12 | - /PointCloud21
13 | - /Camera1
14 | - /depth_Camera1
15 | - /PointCloud22
16 | Splitter Ratio: 0.602305472
17 | Tree Height: 493
18 | - Class: rviz/Selection
19 | Name: Selection
20 | - Class: rviz/Tool Properties
21 | Expanded:
22 | - /2D Pose Estimate1
23 | - /2D Nav Goal1
24 | - /Publish Point1
25 | Name: Tool Properties
26 | Splitter Ratio: 0.588679016
27 | - Class: rviz/Views
28 | Expanded:
29 | - /Current View1
30 | Name: Views
31 | Splitter Ratio: 0.5
32 | - Class: rviz/Time
33 | Experimental: false
34 | Name: Time
35 | SyncMode: 0
36 | SyncSource: PointCloud2
37 | Toolbars:
38 | toolButtonStyle: 2
39 | Visualization Manager:
40 | Class: ""
41 | Displays:
42 | - Alpha: 0.5
43 | Cell Size: 1
44 | Class: rviz/Grid
45 | Color: 160; 160; 164
46 | Enabled: true
47 | Line Style:
48 | Line Width: 0.0299999993
49 | Value: Lines
50 | Name: Grid
51 | Normal Cell Count: 0
52 | Offset:
53 | X: 0
54 | Y: 0
55 | Z: 0
56 | Plane: XY
57 | Plane Cell Count: 10
58 | Reference Frame:
59 | Value: true
60 | - Class: rviz/TF
61 | Enabled: true
62 | Frame Timeout: 10
63 | Frames:
64 | All Enabled: false
65 | camera_accel_frame:
66 | Value: false
67 | camera_accel_optical_frame:
68 | Value: false
69 | camera_aligned_depth_to_color_frame:
70 | Value: false
71 | camera_aligned_depth_to_infra1_frame:
72 | Value: false
73 | camera_color_frame:
74 | Value: false
75 | camera_color_optical_frame:
76 | Value: true
77 | camera_depth_frame:
78 | Value: false
79 | camera_depth_optical_frame:
80 | Value: false
81 | camera_gyro_frame:
82 | Value: false
83 | camera_gyro_optical_frame:
84 | Value: false
85 | camera_infra1_frame:
86 | Value: false
87 | camera_infra1_optical_frame:
88 | Value: false
89 | camera_infra2_frame:
90 | Value: false
91 | camera_infra2_optical_frame:
92 | Value: false
93 | camera_link:
94 | Value: true
95 | yolo_output00:
96 | Value: true
97 | Marker Scale: 0.5
98 | Name: TF
99 | Show Arrows: true
100 | Show Axes: true
101 | Show Names: true
102 | Tree:
103 | camera_link:
104 | camera_accel_frame:
105 | camera_accel_optical_frame:
106 | {}
107 | camera_aligned_depth_to_color_frame:
108 | camera_color_optical_frame:
109 | yolo_output00:
110 | {}
111 | camera_aligned_depth_to_infra1_frame:
112 | camera_infra1_optical_frame:
113 | {}
114 | camera_color_frame:
115 | {}
116 | camera_depth_frame:
117 | camera_depth_optical_frame:
118 | {}
119 | camera_gyro_frame:
120 | camera_gyro_optical_frame:
121 | {}
122 | camera_infra1_frame:
123 | {}
124 | camera_infra2_frame:
125 | camera_infra2_optical_frame:
126 | {}
127 | Update Interval: 0
128 | Value: true
129 | - Alpha: 1
130 | Arrow Length: 0.300000012
131 | Axes Length: 0.300000012
132 | Axes Radius: 0.00999999978
133 | Class: rviz/PoseArray
134 | Color: 255; 255; 0
135 | Enabled: true
136 | Head Length: 0.0700000003
137 | Head Radius: 0.0299999993
138 | Name: PoseArray
139 | Shaft Length: 0.230000004
140 | Shaft Radius: 0.00999999978
141 | Shape: Arrow (Flat)
142 | Topic: /cluster_decomposer/centroid_pose_array
143 | Unreliable: false
144 | Value: true
145 | - Alpha: 1
146 | Autocompute Intensity Bounds: true
147 | Autocompute Value Bounds:
148 | Max Value: 10
149 | Min Value: -10
150 | Value: true
151 | Axis: Z
152 | Channel Name: intensity
153 | Class: rviz/PointCloud2
154 | Color: 255; 255; 255
155 | Color Transformer: RGB8
156 | Decay Time: 0
157 | Enabled: true
158 | Invert Rainbow: false
159 | Max Color: 255; 255; 255
160 | Max Intensity: 4096
161 | Min Color: 0; 0; 0
162 | Min Intensity: 0
163 | Name: PointCloud2
164 | Position Transformer: XYZ
165 | Queue Size: 10
166 | Selectable: true
167 | Size (Pixels): 1
168 | Size (m): 0.00999999978
169 | Style: Points
170 | Topic: /cluster_decomposer/debug_output
171 | Unreliable: false
172 | Use Fixed Frame: true
173 | Use rainbow: true
174 | Value: true
175 | - Class: rviz/Camera
176 | Enabled: true
177 | Image Rendering: background and overlay
178 | Image Topic: /camera/color/image_raw
179 | Name: Camera
180 | Overlay Alpha: 0.5
181 | Queue Size: 2
182 | Transport Hint: raw
183 | Unreliable: false
184 | Value: true
185 | Visibility:
186 | Grid: true
187 | PointCloud2: true
188 | PoseArray: true
189 | TF: true
190 | Value: true
191 | depth_Camera: true
192 | Zoom Factor: 1
193 | - Class: rviz/Camera
194 | Enabled: true
195 | Image Rendering: overlay
196 | Image Topic: /camera/depth/image_rect_raw
197 | Name: depth_Camera
198 | Overlay Alpha: 0.5
199 | Queue Size: 2
200 | Transport Hint: raw
201 | Unreliable: false
202 | Value: true
203 | Visibility:
204 | Camera: true
205 | Grid: true
206 | PointCloud2: true
207 | PoseArray: true
208 | TF: true
209 | Value: true
210 | Zoom Factor: 1
211 | - Alpha: 1
212 | Autocompute Intensity Bounds: true
213 | Autocompute Value Bounds:
214 | Max Value: 10
215 | Min Value: -10
216 | Value: true
217 | Axis: Z
218 | Channel Name: intensity
219 | Class: rviz/PointCloud2
220 | Color: 255; 255; 255
221 | Color Transformer: RGB8
222 | Decay Time: 0
223 | Enabled: true
224 | Invert Rainbow: false
225 | Max Color: 255; 255; 255
226 | Max Intensity: 4096
227 | Min Color: 0; 0; 0
228 | Min Intensity: 0
229 | Name: PointCloud2
230 | Position Transformer: XYZ
231 | Queue Size: 10
232 | Selectable: true
233 | Size (Pixels): 1
234 | Size (m): 0.00999999978
235 | Style: Points
236 | Topic: /camera/depth_registered/points
237 | Unreliable: false
238 | Use Fixed Frame: true
239 | Use rainbow: true
240 | Value: true
241 | Enabled: true
242 | Global Options:
243 | Background Color: 48; 48; 48
244 | Default Light: true
245 | Fixed Frame: camera_link
246 | Frame Rate: 30
247 | Name: root
248 | Tools:
249 | - Class: rviz/Interact
250 | Hide Inactive Objects: true
251 | - Class: rviz/MoveCamera
252 | - Class: rviz/Select
253 | - Class: rviz/FocusCamera
254 | - Class: rviz/Measure
255 | - Class: rviz/SetInitialPose
256 | Topic: /initialpose
257 | - Class: rviz/SetGoal
258 | Topic: /move_base_simple/goal
259 | - Class: rviz/PublishPoint
260 | Single click: true
261 | Topic: /clicked_point
262 | Value: true
263 | Views:
264 | Current:
265 | Class: rtabmap_ros/OrbitOriented
266 | Distance: 0.297682256
267 | Enable Stereo Rendering:
268 | Stereo Eye Separation: 0.0599999987
269 | Stereo Focal Distance: 1
270 | Swap Stereo Eyes: false
271 | Value: false
272 | Focal Point:
273 | X: 0.0349433832
274 | Y: -0.125312746
275 | Z: 0.0655479133
276 | Focal Shape Fixed Size: true
277 | Focal Shape Size: 0.0500000007
278 | Invert Z Axis: false
279 | Name: Current View
280 | Near Clip Distance: 0.00999999978
281 | Pitch: 0.440401167
282 | Target Frame: base_footprint
283 | Value: OrbitOriented (rtabmap)
284 | Yaw: 3.52086997
285 | Saved: ~
286 | Window Geometry:
287 | Camera:
288 | collapsed: false
289 | Displays:
290 | collapsed: false
291 | Height: 628
292 | Hide Left Dock: false
293 | Hide Right Dock: false
294 | QMainWindow State: 000000ff00000000fd0000000400000000000001d30000022efc020000000bfb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000000280000022e000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d006100670065010000020a000001280000000000000000fb0000000c00430061006d00650072006103000002ca000000430000013b0000010cfb0000001800640065007000740068005f00430061006d006500720061030000041d0000004c0000011e0000010e000000010000010f000002edfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a005600690065007700730000000028000002ed000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004c40000003efc0100000002fb0000000800540069006d00650000000000000004c40000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000002d10000022e00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
295 | Selection:
296 | collapsed: false
297 | Time:
298 | collapsed: false
299 | Tool Properties:
300 | collapsed: false
301 | Views:
302 | collapsed: false
303 | Width: 1194
304 | X: 723
305 | Y: 317
306 | depth_Camera:
307 | collapsed: false
308 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/copy_paste_files/YoloObjectDetector.cpp:
--------------------------------------------------------------------------------
1 | /*
2 | * YoloObjectDetector.cpp
3 | *
4 | * Created on: Dec 19, 2016
5 | * Author: Marko Bjelonic
6 | * Institute: ETH Zurich, Robotic Systems Lab
7 | */
8 |
9 | /*
10 | * Edited on: Aug 20, 2019
11 | * Author: Hyeonjun Park
12 |  * Institute: Human-Robot Interaction LAB, Kyung Hee University, South Korea
13 | */
14 |
15 | // yolo object detector
16 | #include "darknet_ros/YoloObjectDetector.hpp"
17 |
18 | // Check for xServer
19 | #include <X11/Xlib.h>
20 |
21 | #ifdef DARKNET_FILE_PATH
22 | std::string darknetFilePath_ = DARKNET_FILE_PATH;
23 | #else
24 | #error Path of darknet repository is not defined in CMakeLists.txt.
25 | #endif
26 |
27 | namespace darknet_ros {
28 |
29 | char *cfg;
30 | char *weights;
31 | char *data;
32 | char **detectionNames;
33 | //std::string targetObjectList[] = {"apple"};
34 | std::string targetObjectList[] = {"hammer", "plier"};
35 |
36 | YoloObjectDetector::YoloObjectDetector(ros::NodeHandle nh)
37 | : nodeHandle_(nh),
38 | imageTransport_(nodeHandle_),
39 | numClasses_(0),
40 | classLabels_(0),
41 | rosBoxes_(0),
42 | rosBoxCounter_(0)
43 | {
44 | ROS_INFO("[YoloObjectDetector] Node started.");
45 |
46 | // Read parameters from config file.
47 | if (!readParameters()) {
48 | ros::requestShutdown();
49 | }
50 |
51 | init();
52 | }
53 |
54 | YoloObjectDetector::~YoloObjectDetector()
55 | {
56 | {
57 | boost::unique_lock<boost::shared_mutex> lockNodeStatus(mutexNodeStatus_);
58 | isNodeRunning_ = false;
59 | }
60 | yoloThread_.join();
61 | }
62 |
63 | bool YoloObjectDetector::readParameters()
64 | {
65 | // Load common parameters.
66 | nodeHandle_.param("image_view/enable_opencv", viewImage_, true);
67 | nodeHandle_.param("image_view/wait_key_delay", waitKeyDelay_, 3);
68 | nodeHandle_.param("image_view/enable_console_output", enableConsoleOutput_, false);
69 |
70 | // Check if Xserver is running on Linux.
71 | if (XOpenDisplay(NULL)) {
72 | // Do nothing!
73 | ROS_INFO("[YoloObjectDetector] Xserver is running.");
74 | } else {
75 | ROS_INFO("[YoloObjectDetector] Xserver is not running.");
76 | viewImage_ = false;
77 | }
78 |
79 | // Set vector sizes.
80 | nodeHandle_.param("yolo_model/detection_classes/names", classLabels_,
81 | std::vector(0));
82 | numClasses_ = classLabels_.size();
83 | rosBoxes_ = std::vector<std::vector<RosBox_> >(numClasses_);
84 | rosBoxCounter_ = std::vector<int>(numClasses_);
85 |
86 | return true;
87 | }
88 |
89 | void YoloObjectDetector::init()
90 | {
91 | ROS_INFO("[YoloObjectDetector] init().");
92 |
93 | // Initialize deep network of darknet.
94 | std::string weightsPath;
95 | std::string configPath;
96 | std::string dataPath;
97 | std::string configModel;
98 | std::string weightsModel;
99 |
100 | // Threshold of object detection.
101 | float thresh;
102 | nodeHandle_.param("yolo_model/threshold/value", thresh, (float) 0.3);
103 |
104 | // Path to weights file.
105 | nodeHandle_.param("yolo_model/weight_file/name", weightsModel,
106 | std::string("yolov2-tiny.weights"));
107 | nodeHandle_.param("weights_path", weightsPath, std::string("/default"));
108 | weightsPath += "/" + weightsModel;
109 | weights = new char[weightsPath.length() + 1];
110 | strcpy(weights, weightsPath.c_str());
111 |
112 | // Path to config file.
113 | nodeHandle_.param("yolo_model/config_file/name", configModel, std::string("yolov2-tiny.cfg"));
114 | nodeHandle_.param("config_path", configPath, std::string("/default"));
115 | configPath += "/" + configModel;
116 | cfg = new char[configPath.length() + 1];
117 | strcpy(cfg, configPath.c_str());
118 |
119 |
120 | // Path to data folder.
121 | dataPath = darknetFilePath_;
122 | dataPath += "/data";
123 | data = new char[dataPath.length() + 1];
124 | strcpy(data, dataPath.c_str());
125 |
126 | // Get classes.
127 | detectionNames = (char**) realloc((void*) detectionNames, (numClasses_ + 1) * sizeof(char*));
128 | for (int i = 0; i < numClasses_; i++) {
129 | detectionNames[i] = new char[classLabels_[i].length() + 1];
130 | strcpy(detectionNames[i], classLabels_[i].c_str());
131 | }
132 |
133 | // Load network.
134 | setupNetwork(cfg, weights, data, thresh, detectionNames, numClasses_,
135 | 0, 0, 1, 0.5, 0, 0, 0, 0);
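// The trailing positional arguments correspond to setupNetwork(..., delay, prefix, avg_frames,
// hier, w, h, frames, fullscreen) as declared further down: delay = 0, prefix = NULL (frames are
// published rather than saved to disk), avg_frames = 1 (no prediction averaging), hier = 0.5,
// and w, h, frames, fullscreen all 0.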
136 | yoloThread_ = std::thread(&YoloObjectDetector::yolo, this);
137 |
138 | // Initialize publisher and subscriber.
139 | std::string cameraTopicName;
140 | int cameraQueueSize;
141 | std::string objectDetectorTopicName;
142 | int objectDetectorQueueSize;
143 | bool objectDetectorLatch;
144 | std::string boundingBoxesTopicName;
145 | int boundingBoxesQueueSize;
146 | bool boundingBoxesLatch;
147 | std::string detectionImageTopicName;
148 | int detectionImageQueueSize;
149 | bool detectionImageLatch;
150 | std::string labelImageTopicName;
151 | int labelImageQueueSize;
152 | bool labelImageLatch;
153 |
154 | nodeHandle_.param("subscribers/camera_reading/topic", cameraTopicName,
155 | std::string("/camera/image_raw"));
156 | nodeHandle_.param("subscribers/camera_reading/queue_size", cameraQueueSize, 1);
157 | nodeHandle_.param("publishers/object_detector/topic", objectDetectorTopicName,
158 | std::string("found_object"));
159 | nodeHandle_.param("publishers/object_detector/queue_size", objectDetectorQueueSize, 1);
160 | nodeHandle_.param("publishers/object_detector/latch", objectDetectorLatch, false);
161 | nodeHandle_.param("publishers/bounding_boxes/topic", boundingBoxesTopicName,
162 | std::string("bounding_boxes"));
163 | nodeHandle_.param("publishers/bounding_boxes/queue_size", boundingBoxesQueueSize, 1);
164 | nodeHandle_.param("publishers/bounding_boxes/latch", boundingBoxesLatch, false);
165 | nodeHandle_.param("publishers/detection_image/topic", detectionImageTopicName,
166 | std::string("detection_image"));
167 | nodeHandle_.param("publishers/detection_image/queue_size", detectionImageQueueSize, 1);
168 | nodeHandle_.param("publishers/detection_image/latch", detectionImageLatch, true);
169 | nodeHandle_.param("publishers/label_image/topic", labelImageTopicName,
170 | std::string("label_image"));
171 | nodeHandle_.param("publishers/label_image/queue_size", labelImageQueueSize, 1);
172 | nodeHandle_.param("publishers/label_image/latch", labelImageLatch, true);
173 |
174 | imageSubscriber_ = imageTransport_.subscribe(cameraTopicName, cameraQueueSize,
175 | &YoloObjectDetector::cameraCallback, this);
176 | objectPublisher_ = nodeHandle_.advertise<std_msgs::Int8>(objectDetectorTopicName,
177 | objectDetectorQueueSize,
178 | objectDetectorLatch);
179 | boundingBoxesPublisher_ = nodeHandle_.advertise<darknet_ros_msgs::BoundingBoxes>(
180 | boundingBoxesTopicName, boundingBoxesQueueSize, boundingBoxesLatch);
181 | detectionImagePublisher_ = nodeHandle_.advertise<sensor_msgs::Image>(detectionImageTopicName,
182 | detectionImageQueueSize,
183 | detectionImageLatch);
184 | labelImagePublisher_ = nodeHandle_.advertise<sensor_msgs::Image>(labelImageTopicName,
185 | labelImageQueueSize,
186 | labelImageLatch);
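// The two topic subscriptions below allow retargeting detection at runtime: publishing a
// std_msgs/String on /targetObjectSubscriber (or /targetObjectSubscriber2) overwrites
// targetObjectList[0] (or [1]) in the callbacks further down. As an example invocation,
// assuming a running roscore and this node:
//   rostopic pub -1 /targetObjectSubscriber std_msgs/String "data: 'plier'"
// Note that detectInThread() currently compares detections against targetObjectList[0] only.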
187 | targetObjectSubscriber_ = nodeHandle_.subscribe("/targetObjectSubscriber", 10,
188 | &YoloObjectDetector::targetObjectCallback,this);
189 | //! hj editing
190 | targetObjectSubscriber2_ = nodeHandle_.subscribe("/targetObjectSubscriber2", 10,
191 | &YoloObjectDetector::targetObjectCallback2,this);
192 |
193 | // Action servers.
194 | std::string checkForObjectsActionName;
195 | nodeHandle_.param("actions/camera_reading/topic", checkForObjectsActionName,
196 | std::string("check_for_objects"));
197 | checkForObjectsActionServer_.reset(
198 | new CheckForObjectsActionServer(nodeHandle_, checkForObjectsActionName, false));
199 | checkForObjectsActionServer_->registerGoalCallback(
200 | boost::bind(&YoloObjectDetector::checkForObjectsActionGoalCB, this));
201 | checkForObjectsActionServer_->registerPreemptCallback(
202 | boost::bind(&YoloObjectDetector::checkForObjectsActionPreemptCB, this));
203 | checkForObjectsActionServer_->start();
204 | }
205 |
206 | void YoloObjectDetector::targetObjectCallback(const std_msgs::String::ConstPtr& msg)
207 | {
208 | //ROS_INFO("I heard: [%s]", msg->data.c_str());
209 | targetObjectList[0] = msg->data.c_str();
210 | return;
211 | }
212 |
213 | //! hj editing
214 | void YoloObjectDetector::targetObjectCallback2(const std_msgs::String::ConstPtr& msg)
215 | {
216 | //ROS_INFO("I heard: [%s]", msg->data.c_str());
217 | targetObjectList[1] = msg->data.c_str();
218 | return;
219 | }
220 |
221 | void YoloObjectDetector::cameraCallback(const sensor_msgs::ImageConstPtr& msg)
222 | {
223 | ROS_DEBUG("[YoloObjectDetector] USB image received.");
224 |
225 | cv_bridge::CvImagePtr cam_image;
226 |
227 | try {
228 | cam_image = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
229 | imageHeader_ = msg->header;
230 | } catch (cv_bridge::Exception& e) {
231 | ROS_ERROR("cv_bridge exception: %s", e.what());
232 | return;
233 | }
234 |
235 | if (cam_image) {
236 | {
237 | boost::unique_lock<boost::shared_mutex> lockImageCallback(mutexImageCallback_);
238 | camImageCopy_ = cam_image->image.clone();
239 | }
240 | {
241 | boost::unique_lock<boost::shared_mutex> lockImageStatus(mutexImageStatus_);
242 | imageStatus_ = true;
243 | }
244 | frameWidth_ = cam_image->image.size().width;
245 | frameHeight_ = cam_image->image.size().height;
246 | }
247 | return;
248 | }
249 |
250 | void YoloObjectDetector::checkForObjectsActionGoalCB()
251 | {
252 | ROS_DEBUG("[YoloObjectDetector] Start check for objects action.");
253 |
254 | boost::shared_ptr<const darknet_ros_msgs::CheckForObjectsGoal> imageActionPtr =
255 | checkForObjectsActionServer_->acceptNewGoal();
256 | sensor_msgs::Image imageAction = imageActionPtr->image;
257 |
258 | cv_bridge::CvImagePtr cam_image;
259 |
260 | try {
261 | cam_image = cv_bridge::toCvCopy(imageAction, sensor_msgs::image_encodings::BGR8);
262 | } catch (cv_bridge::Exception& e) {
263 | ROS_ERROR("cv_bridge exception: %s", e.what());
264 | return;
265 | }
266 |
267 | if (cam_image) {
268 | {
269 | boost::unique_lock<boost::shared_mutex> lockImageCallback(mutexImageCallback_);
270 | camImageCopy_ = cam_image->image.clone();
271 | }
272 | {
273 | boost::unique_lock<boost::shared_mutex> lockImageCallback(mutexActionStatus_);
274 | actionId_ = imageActionPtr->id;
275 | }
276 | {
277 | boost::unique_lock<boost::shared_mutex> lockImageStatus(mutexImageStatus_);
278 | imageStatus_ = true;
279 | }
280 | frameWidth_ = cam_image->image.size().width;
281 | frameHeight_ = cam_image->image.size().height;
282 | }
283 | return;
284 | }
285 |
286 | void YoloObjectDetector::checkForObjectsActionPreemptCB()
287 | {
288 | ROS_DEBUG("[YoloObjectDetector] Preempt check for objects action.");
289 | checkForObjectsActionServer_->setPreempted();
290 | }
291 |
292 | bool YoloObjectDetector::isCheckingForObjects() const
293 | {
294 | return (ros::ok() && checkForObjectsActionServer_->isActive()
295 | && !checkForObjectsActionServer_->isPreemptRequested());
296 | }
297 |
298 | bool YoloObjectDetector::publishDetectionImage(const cv::Mat& detectionImage)
299 | {
300 | if (detectionImagePublisher_.getNumSubscribers() < 1)
301 | return false;
302 | cv_bridge::CvImage cvImage;
303 | cvImage.header.stamp = ros::Time::now();
304 | cvImage.header.frame_id = "detection_image";
305 | cvImage.encoding = sensor_msgs::image_encodings::BGR8;
306 | cvImage.image = detectionImage;
307 | detectionImagePublisher_.publish(*cvImage.toImageMsg());
308 | ROS_DEBUG("Detection image has been published.");
309 | return true;
310 | }
311 |
312 | bool YoloObjectDetector::publishLabelImage(const cv::Mat& labelImage)
313 | {
314 | if (labelImagePublisher_.getNumSubscribers() < 1)
315 | return false;
316 | cv_bridge::CvImage cvImage;
317 | cvImage.header.stamp = ros::Time::now();
318 | cvImage.header.frame_id = "detection_image";
319 | cvImage.encoding = sensor_msgs::image_encodings::TYPE_32SC1;
320 | cvImage.image = labelImage;
321 | labelImagePublisher_.publish(*cvImage.toImageMsg());
322 | ROS_DEBUG("Label image has been published.");
323 | return true;
324 | }
325 |
326 | // double YoloObjectDetector::getWallTime()
327 | // {
328 | // struct timeval time;
329 | // if (gettimeofday(&time, NULL)) {
330 | // return 0;
331 | // }
332 | // return (double) time.tv_sec + (double) time.tv_usec * .000001;
333 | // }
334 |
335 | int YoloObjectDetector::sizeNetwork(network *net)
336 | {
337 | int i;
338 | int count = 0;
339 | for(i = 0; i < net->n; ++i){
340 | layer l = net->layers[i];
341 | if(l.type == YOLO || l.type == REGION || l.type == DETECTION){
342 | count += l.outputs;
343 | }
344 | }
345 | return count;
346 | }
347 |
348 | void YoloObjectDetector::rememberNetwork(network *net)
349 | {
350 | int i;
351 | int count = 0;
352 | for(i = 0; i < net->n; ++i){
353 | layer l = net->layers[i];
354 | if(l.type == YOLO || l.type == REGION || l.type == DETECTION){
355 | memcpy(predictions_[demoIndex_] + count, net->layers[i].output, sizeof(float) * l.outputs);
356 | count += l.outputs;
357 | }
358 | }
359 | }
360 |
361 | detection *YoloObjectDetector::avgPredictions(network *net, int *nboxes)
362 | {
363 | int i, j;
364 | int count = 0;
365 | fill_cpu(demoTotal_, 0, avg_, 1);
366 | for(j = 0; j < demoFrame_; ++j){
367 | axpy_cpu(demoTotal_, 1./demoFrame_, predictions_[j], 1, avg_, 1);
368 | }
369 | for(i = 0; i < net->n; ++i){
370 | layer l = net->layers[i];
371 | if(l.type == YOLO || l.type == REGION || l.type == DETECTION){
372 | memcpy(l.output, avg_ + count, sizeof(float) * l.outputs);
373 | count += l.outputs;
374 | }
375 | }
376 | detection *dets = get_network_boxes(net, buff_[0].w, buff_[0].h, demoThresh_, demoHier_, 0, 1, nboxes);
377 | return dets;
378 | }
379 |
380 | void *YoloObjectDetector::detectInThread()
381 | {
382 | running_ = 1;
383 | float nms = .4;
384 |
385 | layer l = net_->layers[net_->n - 1];
386 | float *X = buffLetter_[(buffIndex_ + 2) % 3].data;
387 | float *prediction = network_predict(net_, X);
388 |
389 | rememberNetwork(net_);
390 | detection *dets = 0;
391 | int nboxes = 0;
392 | dets = avgPredictions(net_, &nboxes);
393 |
394 | if (nms > 0) do_nms_obj(dets, nboxes, l.classes, nms);
395 |
396 | if (enableConsoleOutput_) {
397 | printf("\033[2J");
398 | printf("\033[1;1H");
399 | printf("\nFPS:%.1f\n",fps_);
400 | printf("Objects:\n\n");
401 | }
402 | image display = buff_[(buffIndex_+2) % 3];
403 | cv::Mat label_im(display.h, display.w, CV_32SC1, cv::Scalar::all(0));
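// label_im is a CV_32SC1 mask matching the frame size; each accepted bounding box below is
// filled with a unique positive id and the mask is published via publishLabelImage(), so a
// downstream consumer (e.g. jsk_pcl's ClusterPointIndicesDecomposer, see the note below) can
// map pixels back to detections.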
404 |
405 | draw_detections(display, dets, nboxes, demoThresh_, demoNames_, demoAlphabet_, demoClasses_);
406 |
407 | // extract the bounding boxes and send them to ROS
408 | int i, j;
409 | int count = 0;
410 | for (i = 0; i < nboxes; ++i) {
411 | float xmin = dets[i].bbox.x - dets[i].bbox.w / 2.;
412 | float xmax = dets[i].bbox.x + dets[i].bbox.w / 2.;
413 | float ymin = dets[i].bbox.y - dets[i].bbox.h / 2.;
414 | float ymax = dets[i].bbox.y + dets[i].bbox.h / 2.;
415 |
416 | if (xmin < 0)
417 | xmin = 0;
418 | if (ymin < 0)
419 | ymin = 0;
420 | if (xmax > 1)
421 | xmax = 1;
422 | if (ymax > 1)
423 | ymax = 1;
424 |
425 | // iterate through possible boxes and collect the bounding boxes
426 | for (j = 0; j < demoClasses_; ++j)
427 | {
428 | if (dets[i].prob[j]) {
429 | if ( strcmp( demoNames_[j],targetObjectList[0].c_str()) != 0 ) {
430 | //printf("targetObjectList: %s\n", targetObjectList[j].c_str());
431 | continue ;
432 | //printf("j: %d, demoNames: %d, targetObjects: %s\n", j, demoNames_, targetObjectList[j].c_str());
433 | }
434 | //printf("j: %d\n", j);
435 | //printf("%d, %s: %.0f%%\n", strcmp( demoNames_[j],targetObjectList[0].c_str()), demoNames_[j], dets[i].prob[j]*100);
436 | float x_center = (xmin + xmax) / 2;
437 | float y_center = (ymin + ymax) / 2;
438 | float BoundingBox_width = xmax - xmin;
439 | float BoundingBox_height = ymax - ymin;
440 |
441 | // define bounding box
442 | // BoundingBox must be at least 1% of the frame size in each dimension (3.2x2.4 pixels for a 320x240 frame)
443 | if (BoundingBox_width > 0.01 && BoundingBox_height > 0.01) {
444 | // set label image
445 | cv::rectangle(label_im,
446 | cv::Point(xmin*label_im.cols,ymin*label_im.rows),
447 | cv::Point(xmax*label_im.cols,ymax*label_im.rows),
448 | cv::Scalar::all(count + 1), CV_FILLED);
449 | // The correct approach is to use labels corresponding to the original class number, as in cv::Scalar::all(j)
450 | // However, since this causes jsk_pcl/ClusterPointIndicesDecomposer to be extremely slow, here we use count + 1
451 | // This problem has been solved in https://github.com/jsk-ros-pkg/jsk_recognition/pull/2326
452 | // Using the above and compiling jsk_recognition from source allows fast execution even with sparse labeling
453 |
454 | roiBoxes_[count].x = x_center;
455 | roiBoxes_[count].y = y_center;
456 | roiBoxes_[count].w = BoundingBox_width;
457 | roiBoxes_[count].h = BoundingBox_height;
458 | roiBoxes_[count].Class = j;
459 | roiBoxes_[count].prob = dets[i].prob[j];
460 | count++;
461 | }
462 | }
463 | }
464 | }
465 |
466 | if (!publishLabelImage(label_im)) {
467 | ROS_DEBUG("Label image has not been broadcasted.");
468 | }
469 |
470 | // create array to store found bounding boxes
471 | // if no object detected, make sure that ROS knows that num = 0
472 | if (count == 0) {
473 | roiBoxes_[0].num = 0;
474 | } else {
475 | roiBoxes_[0].num = count;
476 | }
477 |
478 | label_im.release();
479 | free_detections(dets, nboxes);
480 | demoIndex_ = (demoIndex_ + 1) % demoFrame_;
481 | running_ = 0;
482 | return 0;
483 | }
484 |
485 | void *YoloObjectDetector::fetchInThread()
486 | {
487 | IplImage* ROS_img = getIplImage();
488 | ipl_into_image(ROS_img, buff_[buffIndex_]);
489 | {
490 | boost::shared_lock<boost::shared_mutex> lock(mutexImageCallback_);
491 | buffId_[buffIndex_] = actionId_;
492 | }
493 | rgbgr_image(buff_[buffIndex_]);
494 | letterbox_image_into(buff_[buffIndex_], net_->w, net_->h, buffLetter_[buffIndex_]);
495 | return 0;
496 | }
497 |
498 | void *YoloObjectDetector::displayInThread(void *ptr)
499 | {
500 | show_image_cv(buff_[(buffIndex_ + 1)%3], "YOLO V3", ipl_);
501 | int c = cvWaitKey(waitKeyDelay_);
502 | if (c != -1) c = c%256;
503 | if (c == 27) {
504 | demoDone_ = 1;
505 | return 0;
506 | } else if (c == 82) {
507 | demoThresh_ += .02;
508 | } else if (c == 84) {
509 | demoThresh_ -= .02;
510 | if(demoThresh_ <= .02) demoThresh_ = .02;
511 | } else if (c == 83) {
512 | demoHier_ += .02;
513 | } else if (c == 81) {
514 | demoHier_ -= .02;
515 | if(demoHier_ <= .0) demoHier_ = .0;
516 | }
517 | return 0;
518 | }
519 |
520 | void *YoloObjectDetector::displayLoop(void *ptr)
521 | {
522 | while (1) {
523 | displayInThread(0);
524 | }
525 | }
526 |
527 | void *YoloObjectDetector::detectLoop(void *ptr)
528 | {
529 | while (1) {
530 | detectInThread();
531 | }
532 | }
533 |
534 | void YoloObjectDetector::setupNetwork(char *cfgfile, char *weightfile, char *datafile, float thresh,
535 | char **names, int classes,
536 | int delay, char *prefix, int avg_frames, float hier, int w, int h,
537 | int frames, int fullscreen)
538 | {
539 | demoPrefix_ = prefix;
540 | demoDelay_ = delay;
541 | demoFrame_ = avg_frames;
542 | image **alphabet = load_alphabet_with_file(datafile);
543 | demoNames_ = names;
544 | demoAlphabet_ = alphabet;
545 | demoClasses_ = classes;
546 | demoThresh_ = thresh;
547 | demoHier_ = hier;
548 | fullScreen_ = fullscreen;
549 | printf("YOLO V3\n");
550 | net_ = load_network(cfgfile, weightfile, 0);
551 | set_batch_network(net_, 1);
552 | }
553 |
554 | void YoloObjectDetector::yolo()
555 | {
556 | const auto wait_duration = std::chrono::milliseconds(2000);
557 | while (!getImageStatus()) {
558 | printf("Waiting for image.\n");
559 | if (!isNodeRunning()) {
560 | return;
561 | }
562 | std::this_thread::sleep_for(wait_duration);
563 | }
564 |
565 | std::thread detect_thread;
566 | std::thread fetch_thread;
567 |
568 | srand(2222222);
569 |
570 | int i;
571 | demoTotal_ = sizeNetwork(net_);
572 | predictions_ = (float **) calloc(demoFrame_, sizeof(float*));
573 | for (i = 0; i < demoFrame_; ++i){
574 | predictions_[i] = (float *) calloc(demoTotal_, sizeof(float));
575 | }
576 | avg_ = (float *) calloc(demoTotal_, sizeof(float));
577 |
578 | layer l = net_->layers[net_->n - 1];
579 | roiBoxes_ = (darknet_ros::RosBox_ *) calloc(l.w * l.h * l.n, sizeof(darknet_ros::RosBox_));
580 |
581 | IplImage* ROS_img = getIplImage();
582 | buff_[0] = ipl_to_image(ROS_img);
583 | buff_[1] = copy_image(buff_[0]);
584 | buff_[2] = copy_image(buff_[0]);
585 | buffLetter_[0] = letterbox_image(buff_[0], net_->w, net_->h);
586 | buffLetter_[1] = letterbox_image(buff_[0], net_->w, net_->h);
587 | buffLetter_[2] = letterbox_image(buff_[0], net_->w, net_->h);
588 | ipl_ = cvCreateImage(cvSize(buff_[0].w, buff_[0].h), IPL_DEPTH_8U, buff_[0].c);
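// buff_ / buffLetter_ form a three-slot ring buffer: on each iteration of the loop below,
// fetchInThread() writes slot buffIndex_, detectInThread() consumes slot (buffIndex_ + 2) % 3,
// and display/publishing use slot (buffIndex_ + 1) % 3, letting capture, inference, and
// output overlap across threads.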
589 |
590 | int count = 0;
591 |
592 | if (!demoPrefix_ && viewImage_) {
593 | cvNamedWindow("YOLO V3", CV_WINDOW_NORMAL);
594 | if (fullScreen_) {
595 | cvSetWindowProperty("YOLO V3", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
596 | } else {
597 | cvMoveWindow("YOLO V3", 0, 0);
598 | cvResizeWindow("YOLO V3", 640, 480);
599 | }
600 | }
601 |
602 | demoTime_ = what_time_is_it_now();
603 |
604 | while (!demoDone_) {
605 | buffIndex_ = (buffIndex_ + 1) % 3;
606 | fetch_thread = std::thread(&YoloObjectDetector::fetchInThread, this);
607 | detect_thread = std::thread(&YoloObjectDetector::detectInThread, this);
608 | if (!demoPrefix_) {
609 | fps_ = 1./(what_time_is_it_now() - demoTime_);
610 | demoTime_ = what_time_is_it_now();
611 | if (viewImage_) {
612 | displayInThread(0);
613 | }
614 | publishInThread();
615 | } else {
616 | char name[256];
617 | sprintf(name, "%s_%08d", demoPrefix_, count);
618 | save_image(buff_[(buffIndex_ + 1) % 3], name);
619 | }
620 | fetch_thread.join();
621 | detect_thread.join();
622 | ++count;
623 | if (!isNodeRunning()) {
624 | demoDone_ = true;
625 | }
626 | }
627 |
628 | }
629 |
630 | IplImage* YoloObjectDetector::getIplImage()
631 | {
632 | boost::shared_lock<boost::shared_mutex> lock(mutexImageCallback_);
633 | IplImage* ROS_img = new IplImage(camImageCopy_);
634 | return ROS_img;
635 | }
636 |
637 | bool YoloObjectDetector::getImageStatus(void)
638 | {
639 | boost::shared_lock<boost::shared_mutex> lock(mutexImageStatus_);
640 | return imageStatus_;
641 | }
642 |
643 | bool YoloObjectDetector::isNodeRunning(void)
644 | {
645 | boost::shared_lock<boost::shared_mutex> lock(mutexNodeStatus_);
646 | return isNodeRunning_;
647 | }
648 |
649 | void *YoloObjectDetector::publishInThread()
650 | {
651 | // Publish image.
652 | cv::Mat cvImage = cv::cvarrToMat(ipl_);
653 | if (!publishDetectionImage(cv::Mat(cvImage))) {
654 | ROS_DEBUG("Detection image has not been broadcasted.");
655 | }
656 |
657 | // Publish bounding boxes and detection result.
658 | int num = roiBoxes_[0].num;
659 | if (num > 0 && num <= 100) {
660 | for (int i = 0; i < num; i++) {
661 | for (int j = 0; j < numClasses_; j++) {
662 | if (roiBoxes_[i].Class == j) {
663 | rosBoxes_[j].push_back(roiBoxes_[i]);
664 | rosBoxCounter_[j]++;
665 | }
666 | }
667 | }
668 |
669 | std_msgs::Int8 msg;
670 | msg.data = num;
671 | objectPublisher_.publish(msg);
672 |
673 | for (int i = 0; i < numClasses_; i++) {
674 | if (rosBoxCounter_[i] > 0) {
675 | darknet_ros_msgs::BoundingBox boundingBox;
676 |
677 | for (int j = 0; j < rosBoxCounter_[i]; j++) {
678 | int xmin = (rosBoxes_[i][j].x - rosBoxes_[i][j].w / 2) * frameWidth_;
679 | int ymin = (rosBoxes_[i][j].y - rosBoxes_[i][j].h / 2) * frameHeight_;
680 | int xmax = (rosBoxes_[i][j].x + rosBoxes_[i][j].w / 2) * frameWidth_;
681 | int ymax = (rosBoxes_[i][j].y + rosBoxes_[i][j].h / 2) * frameHeight_;
682 |
683 | boundingBox.Class = classLabels_[i];
684 | boundingBox.probability = rosBoxes_[i][j].prob;
685 | boundingBox.xmin = xmin;
686 | boundingBox.ymin = ymin;
687 | boundingBox.xmax = xmax;
688 | boundingBox.ymax = ymax;
689 | boundingBoxesResults_.bounding_boxes.push_back(boundingBox);
690 | }
691 | }
692 | }
693 | boundingBoxesResults_.header.stamp = ros::Time::now();
694 | boundingBoxesResults_.header.frame_id = "detection";
695 | boundingBoxesResults_.image_header = imageHeader_;
696 | boundingBoxesPublisher_.publish(boundingBoxesResults_);
697 | } else {
698 | std_msgs::Int8 msg;
699 | msg.data = 0;
700 | objectPublisher_.publish(msg);
701 | }
702 | if (isCheckingForObjects()) {
703 | ROS_DEBUG("[YoloObjectDetector] check for objects in image.");
704 | darknet_ros_msgs::CheckForObjectsResult objectsActionResult;
705 | objectsActionResult.id = buffId_[0];
706 | objectsActionResult.bounding_boxes = boundingBoxesResults_;
707 | checkForObjectsActionServer_->setSucceeded(objectsActionResult, "Send bounding boxes.");
708 | }
709 | boundingBoxesResults_.bounding_boxes.clear();
710 | for (int i = 0; i < numClasses_; i++) {
711 | rosBoxes_[i].clear();
712 | rosBoxCounter_[i] = 0;
713 | }
714 |
715 | return 0;
716 | }
717 |
718 |
719 | } /* namespace darknet_ros */
720 |
--------------------------------------------------------------------------------