├── .gitignore
├── CMakeLists.txt
├── README.md
├── arduino
│   └── ros_servo_3dof.ino
├── launch
│   ├── launch_seq.txt
│   ├── object_detection.launch
│   ├── realsense_mapping.launch
│   ├── robot_mapping_seq.launch
│   ├── robot_visualize_control.launch
│   ├── servo_control.launch
│   ├── test.launch
│   └── yolo_detect.launch
├── nodes
│   ├── joint_state_servo.py
│   ├── mapping_seq.py
│   ├── mapping_seq2.py
│   ├── object_follow.py
│   ├── servo_test.py
│   └── yolo_pixel_to_3Dpoint.py
├── package.xml
├── robot_controller
│   └── robot_control.yaml
├── rviz
│   ├── urdf.rviz
│   └── urdf.vcg
├── sessions
│   └── ses.bin
├── srv
│   └── Floats_array.srv
└── urdf
    ├── realsense_bot.urdf
    ├── realsense_bot.urdf.xacro
    ├── realsense_bot.xacro
    ├── robot.urdf
    └── robot_temp.urdf
/.gitignore:
--------------------------------------------------------------------------------
1 | media
2 | maps
3 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.0.2)
2 | project(realsense_bot)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | # add_compile_options(-std=c++11)
6 |
7 | ## Find catkin macros and libraries
8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
9 | ## is used, also find other catkin packages
10 | find_package(catkin REQUIRED COMPONENTS
11 | roscpp
12 | rospy
13 | std_msgs
14 | tf
15 | )
16 |
17 | ## System dependencies are found with CMake's conventions
18 | # find_package(Boost REQUIRED COMPONENTS system)
19 |
20 |
21 | ## Uncomment this if the package has a setup.py. This macro ensures
22 | ## modules and global scripts declared therein get installed
23 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
24 | # catkin_python_setup()
25 |
26 | ################################################
27 | ## Declare ROS messages, services and actions ##
28 | ################################################
29 |
30 | ## To declare and build messages, services or actions from within this
31 | ## package, follow these steps:
32 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
33 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
34 | ## * In the file package.xml:
35 | ## * add a build_depend tag for "message_generation"
36 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
37 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
38 | ## but can be declared for certainty nonetheless:
39 | ## * add a exec_depend tag for "message_runtime"
40 | ## * In this file (CMakeLists.txt):
41 | ## * add "message_generation" and every package in MSG_DEP_SET to
42 | ## find_package(catkin REQUIRED COMPONENTS ...)
43 | ## * add "message_runtime" and every package in MSG_DEP_SET to
44 | ## catkin_package(CATKIN_DEPENDS ...)
45 | ## * uncomment the add_*_files sections below as needed
46 | ## and list every .msg/.srv/.action file to be processed
47 | ## * uncomment the generate_messages entry below
48 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
49 |
50 | ## Generate messages in the 'msg' folder
51 | # add_message_files(
52 | # FILES
53 | # Message1.msg
54 | # Message2.msg
55 | # )
56 |
57 | ## Generate services in the 'srv' folder
58 | # add_service_files(
59 | # FILES
60 | # Service1.srv
61 | # Service2.srv
62 | # )
63 |
64 | ## Generate actions in the 'action' folder
65 | # add_action_files(
66 | # FILES
67 | # Action1.action
68 | # Action2.action
69 | # )
70 |
71 | ## Generate added messages and services with any dependencies listed here
72 | # generate_messages(
73 | # DEPENDENCIES
74 | # std_msgs
75 | # )
76 |
77 | ################################################
78 | ## Declare ROS dynamic reconfigure parameters ##
79 | ################################################
80 |
81 | ## To declare and build dynamic reconfigure parameters within this
82 | ## package, follow these steps:
83 | ## * In the file package.xml:
84 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
85 | ## * In this file (CMakeLists.txt):
86 | ## * add "dynamic_reconfigure" to
87 | ## find_package(catkin REQUIRED COMPONENTS ...)
88 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
89 | ## and list every .cfg file to be processed
90 |
91 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
92 | # generate_dynamic_reconfigure_options(
93 | # cfg/DynReconf1.cfg
94 | # cfg/DynReconf2.cfg
95 | # )
96 |
97 | ###################################
98 | ## catkin specific configuration ##
99 | ###################################
100 | ## The catkin_package macro generates cmake config files for your package
101 | ## Declare things to be passed to dependent projects
102 | ## INCLUDE_DIRS: uncomment this if your package contains header files
103 | ## LIBRARIES: libraries you create in this project that dependent projects also need
104 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
105 | ## DEPENDS: system dependencies of this project that dependent projects also need
106 | catkin_package(
107 | # INCLUDE_DIRS include
108 | # LIBRARIES realsense_bot
109 | # CATKIN_DEPENDS roscpp rospy std_msgs
110 | # DEPENDS system_lib
111 | )
112 |
113 | ###########
114 | ## Build ##
115 | ###########
116 |
117 | ## Specify additional locations of header files
118 | ## Your package locations should be listed before other locations
119 | include_directories(
120 | # include
121 | ${catkin_INCLUDE_DIRS}
122 | )
123 |
124 | # add_executable(pixel_to_3Dpoint src/pixel_to_3Dpoint.cpp)
125 | # target_link_libraries(pixel_to_3Dpoint ${catkin_LIBRARIES})
126 |
127 | ## Declare a C++ library
128 | # add_library(${PROJECT_NAME}
129 | # src/${PROJECT_NAME}/realsense_bot.cpp
130 | # )
131 |
132 | ## Add cmake target dependencies of the library
133 | ## as an example, code may need to be generated before libraries
134 | ## either from message generation or dynamic reconfigure
135 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
136 |
137 | ## Declare a C++ executable
138 | ## With catkin_make all packages are built within a single CMake context
139 | ## The recommended prefix ensures that target names across packages don't collide
140 | # add_executable(${PROJECT_NAME}_node src/realsense_bot_node.cpp)
141 |
142 | ## Rename C++ executable without prefix
143 | ## The above recommended prefix causes long target names, the following renames the
144 | ## target back to the shorter version for ease of user use
145 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
146 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
147 |
148 | ## Add cmake target dependencies of the executable
149 | ## same as for the library above
150 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
151 |
152 | ## Specify libraries to link a library or executable target against
153 | # target_link_libraries(${PROJECT_NAME}_node
154 | # ${catkin_LIBRARIES}
155 | # )
156 |
157 | #############
158 | ## Install ##
159 | #############
160 |
161 | # all install targets should use catkin DESTINATION variables
162 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
163 |
164 | ## Mark executable scripts (Python etc.) for installation
165 | ## in contrast to setup.py, you can choose the destination
166 | # catkin_install_python(PROGRAMS
167 | # scripts/my_python_script
168 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
169 | # )
170 |
171 | ## Mark executables for installation
172 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
173 | # install(TARGETS ${PROJECT_NAME}_node
174 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
175 | # )
176 |
177 | ## Mark libraries for installation
178 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
179 | # install(TARGETS ${PROJECT_NAME}
180 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
181 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
182 | # RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
183 | # )
184 |
185 | ## Mark cpp header files for installation
186 | # install(DIRECTORY include/${PROJECT_NAME}/
187 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
188 | # FILES_MATCHING PATTERN "*.h"
189 | # PATTERN ".svn" EXCLUDE
190 | # )
191 |
192 | ## Mark other files for installation (e.g. launch and bag files, etc.)
193 | # install(FILES
194 | # # myfile1
195 | # # myfile2
196 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
197 | # )
198 |
199 | #############
200 | ## Testing ##
201 | #############
202 |
203 | ## Add gtest based cpp test target and link libraries
204 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_servo_3dof.cpp)
205 | # if(TARGET ${PROJECT_NAME}-test)
206 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
207 | # endif()
208 |
209 | ## Add folders to be run by python nosetests
210 | # catkin_add_nosetests(test)
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # realsense_bot
2 | This is a ROS package for an Intel RealSense D435i mounted on a self-built 3-DOF manipulator, intended for indoor mapping and for localizing objects in the world frame with the added advantage of the robot's dexterity. The URDF of the custom manipulator, including the depth sensor, is part of the package. The package covers rosserial communication with an Arduino (or I2C on a Jetson Nano) to control the robot's joint states, along with the PCL pipelines required for autonomous mapping, localization and real-time object tracking.
3 |
4 |
5 | ## Robot URDF and Actual Model
6 | The robot's URDF model is a simplified but dimensionally faithful representation of the actual build, with measured joint-to-joint offsets.
7 | The RRR robot is assembled from standard servo brackets: 3 x clamps, 1 x long U-mount and 1 x 45-degree U-mount.
8 |
9 |
10 | ## Jetson Nano Standalone Implementation
11 | ```
12 | cd catkin_ws/src
13 | git clone https://github.com/fazildgr8/realsense_bot -b jetson
14 | ```
15 | ## PC Arduino Implementation
16 | ```
17 | cd catkin_ws/src
18 | git clone https://github.com/fazildgr8/realsense_bot -b master
19 | ```
20 |
21 | ## Manual Joint Control with PCL Perception
22 | **Launch the robot in RViz with manual joint control.** This launch file starts the robot state publisher, which updates the transform tree using the joint states from the joint state publisher. The robot can then be controlled manually by publishing a JointState message to the joint state controller; a minimal example follows the launch commands below.
23 |
24 |
25 | For the **Jetson Nano** implementation, run:
26 | ```
27 | roslaunch realsense_bot robot_visualize_control_jetson.launch
28 | ```
29 | For the **PC + Arduino** implementation, run:
30 | ```
31 | roslaunch realsense_bot robot_visualize_control.launch
32 | ```
33 |
34 | **Launch the Intel RealSense camera node to start the PCL perception.** This launch file starts the RealSense camera node, which publishes all of the camera's topics (color, depth and point cloud).
35 | ```
36 | roslaunch realsense2_camera rs_camera.launch filters:=pointcloud align_depth:=true depth_width:=640 depth_height:=480 depth_fps:=30 color_width:=640 color_height:=480 color_fps:=30
37 | ```
38 |
39 |
40 | ## Autonomous Object Tracking using YOLOv3 and 2D-to-3D Deprojection with Pose Estimation
41 | Launch the YOLO detector node to publish object detections and bounding-box messages:
42 | ```
43 | roslaunch realsense_bot yolo_detect.launch
44 | ```
45 | Launch the node which deprojects the detected 2D object into 3D using rs2_deproject_pixel_to_point(...) via the **/yolo_pixel_to_3Dpoint** node.
46 | 
47 | 
48 | 
49 | ```
50 | rosrun realsense_bot yolo_pixel_to_3Dpoint.py
51 | ```
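Internally, the node builds a `pyrealsense2.intrinsics` object from `/camera/color/camera_info` and deprojects the detection's pixel centre using its aligned depth value. A condensed sketch of that step, mirroring `nodes/yolo_pixel_to_3Dpoint.py`:
```
import pyrealsense2 as rs

def pixel_to_3d(x, y, depth, cam_info):
    """Deproject an image pixel to a 3D point using a sensor_msgs/CameraInfo."""
    intr = rs.intrinsics()
    intr.width, intr.height = cam_info.width, cam_info.height
    intr.ppx, intr.ppy = cam_info.K[2], cam_info.K[5]
    intr.fx, intr.fy = cam_info.K[0], cam_info.K[4]
    intr.model = rs.distortion.none           # distortion is ignored here
    intr.coeffs = [c for c in cam_info.D]
    # Returns [right, down, forward] in the same units as `depth`
    return rs.rs2_deproject_pixel_to_point(intr, [x, y], depth)
```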
52 |
53 | Launch the node which follows and tracks the object with the 3-DOF manipulator; it keeps the detection centred in the image using a simple proportional correction (sketched after the command).
54 | ```
55 | rosrun realsense_bot object_follow.py
56 | ```
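The correction itself is small: the pixel error between the detection centre and the image centre (640x480 frames) is scaled into joint increments. A condensed version of that step from `nodes/object_follow.py`:
```
def centering_step(curr, loc, width=640, height=480, gain=0.0001):
    """Return updated [pan, tilt, wrist] joint angles that nudge the detection
    at pixel `loc` toward the image centre (gain and deadband as in object_follow.py)."""
    ex = width / 2 - loc[0]    # horizontal pixel error
    ey = height / 2 - loc[1]   # vertical pixel error
    if abs(ex) < 20 and abs(ey) < 20:
        return curr            # inside the deadband: already centred
    return [curr[0] + gain * ex, curr[1] - gain * ey, curr[2]]
```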
57 |
58 |
59 |
60 | ## 3-DOF Dexterity-based RTAB-Map Mapping with Madgwick IMU Filter
61 | Launch the following separately to begin RTAB-Map mapping with the Madgwick IMU filter and an RViz view of the cloud map.
62 | ```
63 | roslaunch realsense_bot robot_visualize_control.launch
64 | roslaunch realsense_bot realsense_mapping.launch
65 | ```
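An automated scanning sweep is also available (`rosrun realsense_bot mapping_seq.py`, listed in `launch/launch_seq.txt`); it steps the pan and tilt joints through linearly spaced angles while RTAB-Map accumulates the cloud. A condensed sketch of one pan sweep, using the same topic, step count and delay as `nodes/mapping_seq.py`:
```
import numpy as np
import rospy
from sensor_msgs.msg import JointState

rospy.init_node('mapping_sweep_sketch')
pub = rospy.Publisher('/joint_states_ct', JointState, queue_size=10)
rospy.sleep(1.0)

# Sweep the pan joint (Joint_1) from 0 to +90 degrees in small steps so
# RTAB-Map can keep tracking while the camera turns.
for angle in np.linspace(0, 1.57, 180):
    cmd = JointState()
    cmd.name = ['Joint_1', 'Joint_2', 'Joint_3']
    cmd.position = [angle, 0.0, 0.0]
    pub.publish(cmd)
    rospy.sleep(0.01)
```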
66 |
67 |
68 |
69 |
70 | ## Robot Setup
71 | - **Direct implementation** - The PC + Arduino setup uses **serial communication** (rosserial): the Arduino, which can generate PWM signals, drives the servos directly.
72 | - **Jetson Nano standalone implementation** - Setup differs when the Jetson board is used standalone: its GPIO header cannot produce the PWM signals needed by the servo motors, so a servo controller board driven over **I2C** is used instead (see the sketch below). In addition, RTAB-Map is currently unstable with the RealSense D435i on the Jetson Nano because the board cannot publish PCL data reliably.
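As an illustration only (the exact controller board and library are not specified here; a 16-channel PCA9685-style board with the `adafruit-circuitpython-servokit` package is assumed), commanding the three servos over I2C looks roughly like this:
```
# Hypothetical I2C servo sketch for a PCA9685-type driver board.
# Assumes the adafruit-circuitpython-servokit package; not part of this repo.
from adafruit_servokit import ServoKit

kit = ServoKit(channels=16)   # servo driver on the Jetson's I2C bus
kit.servo[0].angle = 90       # base servo to its 90-degree home position
kit.servo[1].angle = 90       # mid servo
kit.servo[2].angle = 90       # upper servo
```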
73 |
74 |
75 | ### Setup Info (PC + Arduino Control)
76 | #### ROS/Python Library Prerequisites
77 | ```
78 | sudo apt-get install ros-$ROS_DISTRO-realsense2-camera
79 | sudo apt-get install ros-$ROS_DISTRO-realsense2-description
80 | sudo apt install ros-$ROS_DISTRO-image-transport-plugins
81 | sudo apt install ros-$ROS_DISTRO-rtabmap-ros
82 | sudo apt install ros-$ROS_DISTRO-rosserial
83 | sudo apt install ros-$ROS_DISTRO-joint-state-publisher-gui
84 | pip install pyrealsense2
85 | pip install opencv-python
86 | cd catkin_ws/src
87 | git clone https://github.com/ccny-ros-pkg/imu_tools -b $ROS_DISTRO
88 | cd ..
89 | catkin_make
90 | ```
91 | **Install the darknet_ros package**, which provides YOLOv3 as a ROS node.
92 | On CUDA-enabled systems, make sure to edit the `CMakeLists.txt` inside the `darknet_ros` package to match your GPU's compute capability. For details, see the
93 | [darknet_ros official package](https://github.com/leggedrobotics/darknet_ros).
94 | ```
95 | cd catkin_ws/src
96 | git clone https://github.com/ccny-ros-pkg/imu_tools -b $ROS_DISTRO
97 | git clone --recursive git@github.com:leggedrobotics/darknet_ros.git
98 | cd ..
99 | catkin_make
100 | ```
101 | #### Arduino
102 | The **Arduino** sketch to flash is `arduino/ros_servo_3dof.ino`.
103 | Connect the **three servos'** signal pins to Arduino pins `9`, `10` and `11`. The pin configuration can be changed inside `ros_servo_3dof.ino`. A quick way to test the link is shown below.
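Once the board is flashed and connected through rosserial, the arm can be exercised by publishing a `std_msgs/Int16MultiArray` of three servo angles in degrees on `/servo`, mirroring `nodes/servo_test.py`:
```
import rospy
from std_msgs.msg import Int16MultiArray

rospy.init_node('servo_smoke_test')
pub = rospy.Publisher('/servo', Int16MultiArray, queue_size=10)
rospy.sleep(1.0)  # let rosserial discover the new publisher

cmd = Int16MultiArray()
cmd.data = [90, 90, 90]   # home position: all three servos at 90 degrees
pub.publish(cmd)
```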
104 |
105 |
106 |
107 | ### Setup Info - Jetson Nano
108 | **[To Be Updated]**
109 | #### Jetson Nano
110 |
111 |
112 |
--------------------------------------------------------------------------------
/arduino/ros_servo_3dof.ino:
--------------------------------------------------------------------------------
1 | /*
2 | * rosserial Servo Control Example
3 | *
4 | * This sketch demonstrates the control of hobby R/C servos
5 | * using ROS and the Arduino
6 | *
7 | * For the full tutorial write up, visit
8 | * www.ros.org/wiki/rosserial_arduino_demos
9 | *
10 | * For more information on the Arduino Servo Library
11 | * Checkout :
12 | * http://www.arduino.cc/en/Reference/Servo
13 | */
14 |
15 | #if defined(ARDUINO) && ARDUINO >= 100
16 | #include "Arduino.h"
17 | #else
18 | #include <WProgram.h>
19 | #endif
20 |
21 | #include <Servo.h>
22 | #include <ros.h>
23 | //#include
24 | #include <std_msgs/Int16MultiArray.h>
25 |
26 |
27 | ros::NodeHandle nh;
28 |
29 | Servo Bservo,Mservo,Uservo;
30 |
31 | int servo_b = 9;
32 | int servo_m = 10;
33 | int servo_u = 11;
34 |
35 |
36 | void servo_cb( const std_msgs::Int16MultiArray& cmd_msg){
37 |
38 | int b_angle = cmd_msg.data[0], m_angle = cmd_msg.data[1], u_angle = cmd_msg.data[2];
39 |
40 | Bservo.write(b_angle);
41 | Mservo.write(m_angle);
42 | Uservo.write(u_angle);
43 |
44 | digitalWrite(13, HIGH-digitalRead(13)); //toggle led
45 | }
46 |
47 |
48 | ros::Subscriber<std_msgs::Int16MultiArray> sub("servo", servo_cb);
49 |
50 | void setup(){
51 | pinMode(13, OUTPUT);
52 |
53 | nh.initNode();
54 | nh.subscribe(sub);
55 |
56 | Bservo.attach(servo_b);
57 | Mservo.attach(servo_m);
58 | Uservo.attach(servo_u);
59 | }
60 |
61 | void loop(){
62 |
63 | nh.spinOnce();
64 | delay(15);
65 | }
66 |
--------------------------------------------------------------------------------
/launch/launch_seq.txt:
--------------------------------------------------------------------------------
1 | Realsense Bot Launches/Commands
2 | ********************************
3 |
4 | roslaunch realsense2_camera rs_camera.launch filters:=pointcloud align_depth:=true depth_width:=640 depth_height:=480 depth_fps:=30 color_width:=640 color_height:=480 color_fps:=30
5 | roslaunch realsense_bot robot_visualize_control.launch
6 | roslaunch realsense_bot realsense_mapping.launch
7 | rosrun realsense_bot mapping_seq.py
8 | rosrun rqt_reconfigure rqt_reconfigure
9 |
10 |
11 | roslaunch yolo_detect.launch
12 | rosrun realsense_bot yolo_pixel_to_3Dpoint.py
13 | rosrun realsense_bot object_follow.py
--------------------------------------------------------------------------------
/launch/object_detection.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/launch/realsense_mapping.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/launch/robot_mapping_seq.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
10 |
11 |
12 | ["joint_states_ct"]
13 |
14 |
15 |
17 |
18 |
--------------------------------------------------------------------------------
/launch/robot_visualize_control.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
16 |
17 |
20 |
21 |
22 | ["joint_states_ct"]
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/launch/servo_control.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
11 |
12 |
--------------------------------------------------------------------------------
/launch/test.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/launch/yolo_detect.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/nodes/joint_state_servo.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | import roslib
3 | import rospy
4 | import tf
5 | import math
6 | import random
7 | from std_msgs.msg import Int16MultiArray
8 | import numpy as np
9 | import time
10 | from sensor_msgs.msg import JointState
11 | global joint_poses, servo
12 |
13 | joint_poses = [90,90,90]
14 |
15 |
16 |
17 | def callback(msg):
18 | global joint_poses, servo
19 | cmd = Int16MultiArray()
20 | joint_1 = msg.position[0]
21 | joint_2 = msg.position[1]
22 | joint_3 = msg.position[2]
23 | joint_poses = [int(round(math.degrees(joint_1)))+90, int(round(math.degrees(-joint_2)))+90, int(round(math.degrees(joint_3)))+90]  # radians -> integer servo degrees, 0 rad maps to 90 deg
24 | cmd.data = joint_poses
25 | servo.publish(cmd)
26 | # print(msg.name)
27 |
28 | if __name__=='__main__':
29 | rospy.init_node('JointState_Servo')
30 |
31 | rate = rospy.Rate(10.0)
32 | cmd = Int16MultiArray()
33 | servo = rospy.Publisher('/servo', Int16MultiArray,queue_size=10) # Publish Command
34 | rospy.Subscriber("joint_states", JointState, callback)
35 | rospy.spin()
36 | # cmd.data = [90,90,90] # Robot Home POSITION
37 | # servo.publish(cmd)
38 | # while not rospy.is_shutdown():
39 | # cmd.data = joint_poses
40 | # servo.publish(cmd)
41 | # # rospy.spin()
42 | # rate.sleep()
43 |
--------------------------------------------------------------------------------
/nodes/mapping_seq.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | from numpy.lib.function_base import angle
3 | import roslib
4 | import rospy
5 | import tf
6 | import math
7 | import random
8 | from std_msgs.msg import Int16MultiArray
9 | from sensor_msgs.msg import JointState
10 | import numpy as np
11 | import time
12 |
13 |
14 |
15 | def look_updown(joint_control,b=0,start=0,d='up'):
16 | cmd = JointState()
17 | cmd.position = [b,0,0]
18 | cmd.name = ['Joint_1','Joint_2','Joint_3']
19 | delay = 0.2
20 | time.sleep(0.8)
21 | if(d=='up'):
22 | angles = np.linspace(-1.57,start,180)
23 | else:
24 | angles = np.linspace(start,1.57,180)
25 |
26 | for i in range(len(angles)):
27 | cmd.position = [b,angles[i],0]
28 | joint_control.publish(cmd)
29 | time.sleep(delay)
30 |
31 | def look_rl(joint_control,angles,m=0):
32 | cmd = JointState()
33 | cmd.position = [0,m,0]
34 | cmd.name = ['Joint_1','Joint_2','Joint_3']
35 | delay = 0.01
36 | time.sleep(0.8)
37 |
38 | for i in range(len(angles)):
39 | cmd.position = [angles[i],m,0]
40 | joint_control.publish(cmd)
41 | time.sleep(delay)
42 |
43 | def look_ud(joint_control,angles,m=0):
44 | cmd = JointState()
45 | cmd.position = [m,0,0]
46 | cmd.name = ['Joint_1','Joint_2','Joint_3']
47 | delay = 0.01
48 | time.sleep(0.8)
49 |
50 | for i in range(len(angles)):
51 | cmd.position = [m,angles[i],0]
52 | joint_control.publish(cmd)
53 | time.sleep(delay)
54 |
55 |
56 |
57 |
58 | def map_sequence(joint_control):
59 | m_range = np.linspace(1.5,-1.5,5)
60 | look_rl(joint_control,np.linspace(0,1.57,180))
61 | look_rl(joint_control,np.linspace(1.57,-1.57,180))
62 | look_rl(joint_control,np.linspace(-1.57,0,180))
63 |
64 | look_ud(joint_control,np.linspace(-1.57,0,180))
65 | look_ud(joint_control,np.linspace(0,1.57,180))
66 |
67 | look_ud(joint_control,np.linspace(1.57,0,180),m=3)
68 | look_ud(joint_control,np.linspace(0,-1.57,180),m=3)
69 |
70 | look_ud(joint_control,np.linspace(-1.57,0,180),m=4)
71 | look_ud(joint_control,np.linspace(0,1.57,180),m=4)
72 |
73 | look_ud(joint_control,np.linspace(1.57,0,180),m=1)
74 | look_ud(joint_control,np.linspace(0,-1.57,180),m=1)
75 |
76 | look_ud(joint_control,np.linspace(-1.57,0,180),m=0)
77 | look_ud(joint_control,np.linspace(0,1.57,180),m=0)
78 |
79 | def robot_home(joint_control):
80 | cmd = JointState()
81 | cmd.name = ['Joint_1','Joint_2','Joint_3']
82 | cmd.position = [0,0,0] # Robot Home POSITION
83 | joint_control.publish(cmd)
84 |
85 | def move(joint_control,arr):
86 | cmd = JointState()
87 | cmd.name = ['Joint_1','Joint_2','Joint_3']
88 | cmd.position = arr # Robot Home POSITION
89 | joint_control.publish(cmd)
90 |
91 |
92 | if __name__=='__main__':
93 | rospy.init_node('look_around')
94 |
95 | rate = rospy.Rate(10.0)
96 | cmd = JointState()
97 |
98 | joint_control = rospy.Publisher('/joint_states_ct',JointState,queue_size=10) # Publish Joint States
99 | cmd.name = ['Joint_1','Joint_2','Joint_3']
100 | cmd.position = [0,0,0] # Robot Home POSITION
101 | time.sleep(5)
102 | joint_control.publish(cmd)
103 | map_sequence(joint_control)
104 | robot_home(joint_control)
105 | # while not rospy.is_shutdown():
106 | # cmd.data = [90,90,90]
107 | # servo.publish(cmd)
108 | # joint_control.publish(cmd)
109 | # rate.sleep()
110 | # rospy.spin()
--------------------------------------------------------------------------------
/nodes/mapping_seq2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | import roslib
3 | import rospy
4 | import tf
5 | import math
6 | import random
7 | from std_msgs.msg import Int16MultiArray
8 | from sensor_msgs.msg import JointState
9 | import numpy as np
10 | import time
11 |
12 | global joint_control
13 |
14 | joint_control = rospy.Publisher('/joint_states_ct',JointState,queue_size=10) # Publish Joint States
15 |
16 | def move(arr):
17 | global joint_control
18 | cmd = JointState()
19 | cmd.name = ['Joint_1','Joint_2','Joint_3']
20 | cmd.position = list(arr) # Robot Home POSITION
21 | joint_control.publish(cmd)
22 |
23 | def look_around_ccw(m,u=1.57):
24 | delay = 0.1
25 | time.sleep(0.8)
26 | move([0,m,u])
27 | angles = np.linspace(-1.57,1.57,180)
28 | for i in angles:
29 | move([i,m,u])
30 | time.sleep(delay)
31 |
32 |
33 | def look_around_cw(m,u=1.57):
34 | delay = 0.1
35 | time.sleep(0.8)
36 | move([0,m,u])
37 | angles = np.linspace(-1.57,1.57,180)
38 | angles = np.flip(angles)
39 | for i in angles:
40 | move([i,m,u])
41 | time.sleep(delay)
42 |
43 | def look_around_m():
44 | m_range = np.linspace(-1.25,1.25,10)
45 | u = 1.57
46 | for i in range(len(m_range)):
47 | # if m_range[i]<100:
48 | if i%2 == 0:
49 | look_around_ccw(m_range[i])
50 | else:
51 | look_around_cw(m_range[i])
52 | # else:
53 | # if i%2 == 0:
54 | # look_around_ccw(servo,m_range[i],m_range[i])
55 | # else:
56 | # look_around_cw(servo,m_range[i],180-m_range[i])
57 |
58 |
59 | def robot_home():
60 | move([0,0,0])
61 |
62 |
63 | if __name__=='__main__':
64 | rospy.init_node('look_around')
65 |
66 | rate = rospy.Rate(10.0)
67 | robot_home()
68 | look_around_m()
69 | robot_home()
70 | # while not rospy.is_shutdown():
71 | # # cmd.data = [90,90,90]
72 | # # servo.publish(cmd)
73 | # rate.sleep()
--------------------------------------------------------------------------------
/nodes/object_follow.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | from pickle import TRUE
3 | import roslib
4 | import rospy
5 | import tf
6 | import math
7 | import random
8 | from darknet_ros_msgs.msg import BoundingBoxes,ObjectCount
9 | import numpy as np
10 | from rospy.numpy_msg import numpy_msg
11 | import time
12 | from sensor_msgs.msg import JointState
13 | import threading
14 | import cv2
15 |
16 | global obj,loc,cnt,rect,flag
17 | global joint_control,curr
18 |
19 |
20 | curr = [0,0,0]
21 | obj = rospy.get_param('class','bottle')
22 |
23 | rect = None
24 | loc = [0,0]
25 | cnt = 0
26 | flag = False
27 |
28 | def callback(msg):
29 | global obj,loc,cnt,rect,flag
30 | bounding_boxes = msg.bounding_boxes
31 | if(cnt>0):
32 | for box in bounding_boxes:
33 | if(box.Class==obj):
34 | flag = True
35 | xmin = box.xmin
36 | ymin = box.ymin
37 | xmax = box.xmax
38 | ymax = box.ymax
39 | rect = [xmin,ymin,xmax,ymax]
40 | x = rect[2] - (rect[2]-rect[0])/2
41 | y = rect[3] - (rect[3]-rect[1])/2
42 | loc = [x,y]
43 | else:
44 | flag = False
45 | # print(obj,' loc = ',loc)
46 |
47 | def object_count(msg):
48 | global cnt
49 | cnt = msg.count
50 |
51 | def curr_pos(msg):
52 | global curr
53 | curr = list(msg.position)  # track the arm's current joint angles from /joint_states
54 |
55 | if __name__=='__main__':
56 | rospy.init_node('object_follower')
57 | rospy.Subscriber("/darknet_ros/bounding_boxes", BoundingBoxes, callback)
58 | rospy.Subscriber("/darknet_ros/found_object", ObjectCount, object_count)
59 | rospy.Subscriber("/joint_states", JointState, curr_pos)
60 | joint_control = rospy.Publisher('/joint_states_ct',JointState,queue_size=10) # Publish Joint States
61 | rate = rospy.Rate(10.0)
62 |
63 | while not rospy.is_shutdown():
64 | center_x = 640/2
65 | center_y = 480/2
66 | x = loc[0]
67 | y = loc[1]
68 | ex = center_x-x
69 | ey = center_y-y
70 | b = ex*0.0001
71 | m = -ey*0.0001
72 | arr = curr
73 | curr[0] = curr[0]+b
74 | curr[1] = curr[1]+m
75 |
76 | print(curr)
77 | if(abs(ex)<20 and abs(ey)<20):
78 | print('Centered')
79 | else:
80 |
81 | cmd = JointState()
82 | cmd.name = ['Joint_1','Joint_2','Joint_3']
83 | cmd.position = curr
84 | joint_control.publish(cmd)
85 | time.sleep(0.2)
86 | rospy.spin()
--------------------------------------------------------------------------------
/nodes/servo_test.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | import roslib
3 | import rospy
4 | import tf
5 | import math
6 | import random
7 | from std_msgs.msg import Int16MultiArray
8 | import numpy as np
9 | import time
10 |
11 |
12 | if __name__=='__main__':
13 | rospy.init_node('servo_control')
14 |
15 | rate = rospy.Rate(10.0)
16 | cmd = Int16MultiArray()
17 | servo = rospy.Publisher('/servo', Int16MultiArray,queue_size=10) # Publish Command
18 | cmd.data = [90,90,90] # Robot Home POSITION
19 | servo.publish(cmd)
20 | while not rospy.is_shutdown():
21 | # cmd.data = [90,90,90]
22 | # servo.publish(cmd)
23 | rate.sleep()
24 |
--------------------------------------------------------------------------------
/nodes/yolo_pixel_to_3Dpoint.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | import roslib
3 | import rospy
4 | import tf
5 | from darknet_ros_msgs.msg import BoundingBoxes,ObjectCount
6 | from sensor_msgs.msg import Image,CameraInfo
7 | import numpy as np
8 | from rospy.numpy_msg import numpy_msg
9 | import time
10 | import pyrealsense2
11 | from cv_bridge import CvBridge, CvBridgeError
12 | import cv2
13 | global obj,loc,cnt,depth_img,pose,rect
14 |
15 | obj = rospy.get_param('class','bottle')
16 |
17 | rect = None
18 | pose = [0,0,0]
19 | depth_img = np.zeros((480,640))
20 | loc = [None,None]
21 | cnt = 0
22 |
23 | def callback(msg):
24 | global obj,loc,cnt,pose,depth_img,rect
25 | bounding_boxes = msg.bounding_boxes
26 | if(cnt>0):
27 | for box in bounding_boxes:
28 | if(box.Class==obj):
29 | xmin = box.xmin
30 | ymin = box.ymin
31 | xmax = box.xmax
32 | ymax = box.ymax
33 | rect = [xmin,ymin,xmax,ymax]
34 | x = rect[2] - (rect[2]-rect[0])/2
35 | y = rect[3] - (rect[3]-rect[1])/2
36 | loc = [x,y]
37 | # else:
38 | # loc = [None,None]
39 | print(obj,' loc = ',loc,' 3dPose=',pose)
40 |
41 |
42 |
43 | def object_count(msg):
44 | global cnt
45 | cnt = msg.count
46 |
47 | def convert_depth_to_phys_coord_using_realsense(x, y, depth, cameraInfo):
48 | _intrinsics = pyrealsense2.intrinsics()
49 | _intrinsics.width = cameraInfo.width
50 | _intrinsics.height = cameraInfo.height
51 | _intrinsics.ppx = cameraInfo.K[2]
52 | _intrinsics.ppy = cameraInfo.K[5]
53 | _intrinsics.fx = cameraInfo.K[0]
54 | _intrinsics.fy = cameraInfo.K[4]
55 | #_intrinsics.model = cameraInfo.distortion_model
56 | _intrinsics.model = pyrealsense2.distortion.none
57 | _intrinsics.coeffs = [i for i in cameraInfo.D]
58 | result = pyrealsense2.rs2_deproject_pixel_to_point(_intrinsics, [x, y], depth)
59 | #result[0]: right, result[1]: down, result[2]: forward
60 | return result
61 |
62 | def cam_info(msg):
63 | global depth_img,loc,pose
64 | d = depth_img[loc[1]][loc[0]]  # depth image is indexed [row (y), column (x)]
65 | pose = convert_depth_to_phys_coord_using_realsense(loc[0],loc[1],d,msg)
66 |
67 |
68 | def vis_callback(msg):
69 | global depth_img
70 | track_im = np.frombuffer(msg.data, dtype=np.uint8).reshape(msg.height, msg.width, -1)
71 | bridge = CvBridge()
72 | try:
73 | im = bridge.imgmsg_to_cv2(msg, "passthrough")
74 | except CvBridgeError as e:
75 | print(e)
76 | depth_img = im
77 | show_image(im)
78 |
79 | def show_image(img):
80 | global rect,pose, obj
81 | img = cv2.normalize(img, dst=None, alpha=0, beta=65535, norm_type=cv2.NORM_MINMAX)
82 | img = cv2.rectangle(img, (rect[0],rect[1]), (rect[2],rect[3]), (255, 0, 0), 2)
83 | text = obj+' '+str(np.around(pose,decimals=3))
84 | image = cv2.putText(img, text, (rect[2] - (rect[2]-rect[0])/2,rect[3] - (rect[3]-rect[1])/2), cv2.FONT_HERSHEY_SIMPLEX,
85 | 0.8, (255, 0, 0), 1, cv2.LINE_AA)
86 | cv2.imshow("Pose Track (mm)", img)
87 | cv2.waitKey(3)
88 |
89 | if __name__=='__main__':
90 | rospy.init_node('yolo_pixel_to_3Dpoint')
91 | rospy.Subscriber("/darknet_ros/bounding_boxes", BoundingBoxes, callback)
92 | rospy.Subscriber("/darknet_ros/found_object", ObjectCount, object_count)
93 | rospy.Subscriber("/camera/aligned_depth_to_color/image_raw", Image, vis_callback)
94 | rospy.Subscriber("/camera/color/camera_info", CameraInfo, cam_info)
95 | br = tf.TransformBroadcaster()
96 | rate = rospy.Rate(10.0)
97 | while not rospy.is_shutdown():
98 | pose_tf = np.array(pose)
99 | br.sendTransform((pose_tf[2]/1000, -pose_tf[0]/1000, pose_tf[1]/1000),
100 | (0.0, 0.0, 0.0, 1.0),
101 | rospy.Time.now(),
102 | "object",
103 | "camera_link")
104 | # rate.sleep()
105 | rospy.spin()
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | realsense_bot
4 | 0.0.0
5 | The realsense_bot package
6 |
7 |
8 |
9 |
10 | Mohamed Fazil
11 |
12 |
13 |
14 |
15 |
16 | TODO
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 | catkin
52 | roscpp
53 | rospy
54 | std_msgs
55 | roscpp
56 | rospy
57 | std_msgs
58 | roscpp
59 | rospy
60 | std_msgs
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
--------------------------------------------------------------------------------
/robot_controller/robot_control.yaml:
--------------------------------------------------------------------------------
1 | realsense_bot:
2 | # Publish all joint states -----------------------------------
3 | joints_update:
4 | type: joint_state_controller/JointStateController
5 | publish_rate: 50
6 |
7 | # Position Controllers ---------------------------------------
8 | joint1:
9 | type: position_controllers/JointPositionController
10 | joint: joint1
11 | pid: {p: 1.0, i: 1.0, d: 0.0}
12 | joint2:
13 | type: position_controllers/JointPositionController
14 | joint: joint2
15 | pid: {p: 1.0, i: 1.0, d: 0.0}
16 | joint3:
17 | type: position_controllers/JointPositionController
18 | joint: joint3
19 | pid: {p: 1.0, i: 1.0, d: 0.0}
--------------------------------------------------------------------------------
/rviz/urdf.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 0
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /TF1/Frames1
8 | Splitter Ratio: 0.5
9 | Tree Height: 472
10 | - Class: rviz/Views
11 | Expanded:
12 | - /Current View1
13 | Name: Views
14 | Splitter Ratio: 0.5
15 | Preferences:
16 | PromptSaveOnExit: true
17 | Toolbars:
18 | toolButtonStyle: 2
19 | Visualization Manager:
20 | Class: ""
21 | Displays:
22 | - Alpha: 0.5
23 | Cell Size: 1
24 | Class: rviz/Grid
25 | Color: 160; 160; 164
26 | Enabled: true
27 | Line Style:
28 | Line Width: 0.029999999329447746
29 | Value: Lines
30 | Name: Grid
31 | Normal Cell Count: 0
32 | Offset:
33 | X: 0
34 | Y: 0
35 | Z: 0
36 | Plane: XY
37 | Plane Cell Count: 10
38 | Reference Frame:
39 | Value: true
40 | - Alpha: 0.800000011920929
41 | Class: rviz/RobotModel
42 | Collision Enabled: false
43 | Enabled: true
44 | Links:
45 | All Links Enabled: true
46 | Expand Joint Details: false
47 | Expand Link Details: false
48 | Expand Tree: false
49 | Link Tree Style: Links in Alphabetic Order
50 | U_Link:
51 | Alpha: 1
52 | Show Axes: false
53 | Show Trail: false
54 | Value: true
55 | base_link:
56 | Alpha: 1
57 | Show Axes: false
58 | Show Trail: false
59 | camera_bottom_screw_frame:
60 | Alpha: 1
61 | Show Axes: false
62 | Show Trail: false
63 | camera_color_frame:
64 | Alpha: 1
65 | Show Axes: false
66 | Show Trail: false
67 | camera_color_optical_frame:
68 | Alpha: 1
69 | Show Axes: false
70 | Show Trail: false
71 | camera_depth_frame:
72 | Alpha: 1
73 | Show Axes: false
74 | Show Trail: false
75 | camera_depth_optical_frame:
76 | Alpha: 1
77 | Show Axes: false
78 | Show Trail: false
79 | camera_infra1_frame:
80 | Alpha: 1
81 | Show Axes: false
82 | Show Trail: false
83 | camera_infra1_optical_frame:
84 | Alpha: 1
85 | Show Axes: false
86 | Show Trail: false
87 | camera_infra2_frame:
88 | Alpha: 1
89 | Show Axes: false
90 | Show Trail: false
91 | camera_infra2_optical_frame:
92 | Alpha: 1
93 | Show Axes: false
94 | Show Trail: false
95 | camera_link:
96 | Alpha: 1
97 | Show Axes: false
98 | Show Trail: false
99 | Value: true
100 | servo_bottom:
101 | Alpha: 1
102 | Show Axes: false
103 | Show Trail: false
104 | Value: true
105 | servo_mid:
106 | Alpha: 1
107 | Show Axes: false
108 | Show Trail: false
109 | Value: true
110 | servo_up:
111 | Alpha: 1
112 | Show Axes: false
113 | Show Trail: false
114 | Value: true
115 | up_mount:
116 | Alpha: 1
117 | Show Axes: false
118 | Show Trail: false
119 | Value: true
120 | world:
121 | Alpha: 1
122 | Show Axes: false
123 | Show Trail: false
124 | Value: true
125 | Name: RobotModel
126 | Robot Description: robot_description
127 | TF Prefix: ""
128 | Update Interval: 0
129 | Value: true
130 | Visual Enabled: true
131 | - Class: rviz/TF
132 | Enabled: true
133 | Frame Timeout: 15
134 | Frames:
135 | All Enabled: false
136 | U_Link:
137 | Value: true
138 | base_link:
139 | Value: false
140 | camera_bottom_screw_frame:
141 | Value: false
142 | camera_color_frame:
143 | Value: false
144 | camera_color_optical_frame:
145 | Value: false
146 | camera_depth_frame:
147 | Value: false
148 | camera_depth_optical_frame:
149 | Value: false
150 | camera_infra1_frame:
151 | Value: false
152 | camera_infra1_optical_frame:
153 | Value: false
154 | camera_infra2_frame:
155 | Value: false
156 | camera_infra2_optical_frame:
157 | Value: false
158 | camera_link:
159 | Value: true
160 | servo_bottom:
161 | Value: true
162 | servo_mid:
163 | Value: true
164 | servo_up:
165 | Value: true
166 | up_mount:
167 | Value: true
168 | world:
169 | Value: true
170 | Marker Scale: 0.10000000149011612
171 | Name: TF
172 | Show Arrows: true
173 | Show Axes: true
174 | Show Names: true
175 | Tree:
176 | world:
177 | servo_bottom:
178 | U_Link:
179 | servo_mid:
180 | servo_up:
181 | up_mount:
182 | base_link:
183 | camera_bottom_screw_frame:
184 | camera_link:
185 | camera_color_frame:
186 | camera_color_optical_frame:
187 | {}
188 | camera_depth_frame:
189 | camera_depth_optical_frame:
190 | {}
191 | camera_infra1_frame:
192 | camera_infra1_optical_frame:
193 | {}
194 | camera_infra2_frame:
195 | camera_infra2_optical_frame:
196 | {}
197 | Update Interval: 0
198 | Value: true
199 | - Alpha: 1
200 | Autocompute Intensity Bounds: true
201 | Autocompute Value Bounds:
202 | Max Value: 10
203 | Min Value: -10
204 | Value: true
205 | Axis: Z
206 | Channel Name: intensity
207 | Class: rviz/PointCloud2
208 | Color: 255; 255; 255
209 | Color Transformer: RGB8
210 | Decay Time: 0
211 | Enabled: true
212 | Invert Rainbow: false
213 | Max Color: 255; 255; 255
214 | Min Color: 0; 0; 0
215 | Name: PointCloud2
216 | Position Transformer: XYZ
217 | Queue Size: 10
218 | Selectable: true
219 | Size (Pixels): 3
220 | Size (m): 0.009999999776482582
221 | Style: Flat Squares
222 | Topic: /camera/depth/color/points
223 | Unreliable: false
224 | Use Fixed Frame: true
225 | Use rainbow: true
226 | Value: true
227 | Enabled: true
228 | Global Options:
229 | Background Color: 48; 48; 48
230 | Default Light: true
231 | Fixed Frame: world
232 | Frame Rate: 30
233 | Name: root
234 | Tools:
235 | - Class: rviz/MoveCamera
236 | Value: true
237 | Views:
238 | Current:
239 | Class: rviz/Orbit
240 | Distance: 0.49387338757514954
241 | Enable Stereo Rendering:
242 | Stereo Eye Separation: 0.05999999865889549
243 | Stereo Focal Distance: 1
244 | Swap Stereo Eyes: false
245 | Value: false
246 | Focal Point:
247 | X: 0.019495904445648193
248 | Y: 0.020731648430228233
249 | Z: 0.11162014305591583
250 | Focal Shape Fixed Size: false
251 | Focal Shape Size: 0.05000000074505806
252 | Invert Z Axis: false
253 | Name: Current View
254 | Near Clip Distance: 0.009999999776482582
255 | Pitch: 0.45039844512939453
256 | Target Frame:
257 | Value: Orbit (rviz)
258 | Yaw: 0.8104003667831421
259 | Saved: ~
260 | Window Geometry:
261 | Displays:
262 | collapsed: false
263 | Height: 974
264 | Hide Left Dock: false
265 | Hide Right Dock: false
266 | QMainWindow State: 000000ff00000000fd00000001000000000000023b00000374fc0200000002fb000000100044006900730070006c006100790073010000003d00000215000000c900fffffffb0000000a00560069006500770073010000025800000159000000a400ffffff0000053f0000037400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
267 | Views:
268 | collapsed: false
269 | Width: 1920
270 | X: 0
271 | Y: 27
272 |
--------------------------------------------------------------------------------
/rviz/urdf.vcg:
--------------------------------------------------------------------------------
1 | Background\ ColorR=0.0941176
2 | Background\ ColorG=0
3 | Background\ ColorB=0.466667
4 | Fixed\ Frame=/base_link
5 | Target\ Frame=
6 | Grid.Alpha=0.5
7 | Grid.Cell\ Size=0.5
8 | Grid.ColorR=0.898039
9 | Grid.ColorG=0.898039
10 | Grid.ColorB=0.898039
11 | Grid.Enabled=1
12 | Grid.Line\ Style=0
13 | Grid.Line\ Width=0.03
14 | Grid.Normal\ Cell\ Count=0
15 | Grid.OffsetX=0
16 | Grid.OffsetY=0
17 | Grid.OffsetZ=0
18 | Grid.Plane=0
19 | Grid.Plane\ Cell\ Count=10
20 | Grid.Reference\ Frame=
21 | Robot\ Model.Alpha=1
22 | Robot\ Model.Collision\ Enabled=0
23 | Robot\ Model.Enabled=1
24 | Robot\ Model.Robot\ Description=robot_description
25 | Robot\ Model.TF\ Prefix=
26 | Robot\ Model.Update\ Interval=0
27 | Robot\ Model.Visual\ Enabled=1
28 | Robot\:\ Robot\ Model\ Link\ base_linkShow\ Axes=0
29 | Robot\:\ Robot\ Model\ Link\ base_linkShow\ Trail=0
30 | Robot\:\ Robot\ Model\ Link\ legShow\ Axes=0
31 | Robot\:\ Robot\ Model\ Link\ legShow\ Trail=0
32 | Tool\ 2D\ Nav\ GoalTopic=goal
33 | Tool\ 2D\ Pose\ EstimateTopic=initialpose
34 | Camera\ Type=rviz::OrbitViewController
35 | Camera\ Config=1.15779 3.76081 2.16475 0 0 0
36 | Property\ Grid\ State=selection=.Global StatusTopStatus;expanded=.Global Options,Grid.Enabled,Robot Model.Enabled;scrollpos=0,0;splitterpos=150,285;ispageselected=1
37 | [Display0]
38 | Name=Grid
39 | Package=rviz
40 | ClassName=rviz::GridDisplay
41 | [Display1]
42 | Name=Robot Model
43 | Package=rviz
44 | ClassName=rviz::RobotModelDisplay
45 |
--------------------------------------------------------------------------------
/sessions/ses.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fazildgr8/realsense_bot/3811c3a413aafd0c3b84e3c24fe299ed9f00b4ec/sessions/ses.bin
--------------------------------------------------------------------------------
/srv/Floats_array.srv:
--------------------------------------------------------------------------------
1 | float32 req
2 | ---
3 | float32[] res
--------------------------------------------------------------------------------
/urdf/realsense_bot.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
--------------------------------------------------------------------------------
/urdf/realsense_bot.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/urdf/realsense_bot.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
--------------------------------------------------------------------------------
/urdf/robot.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
--------------------------------------------------------------------------------
/urdf/robot_temp.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
--------------------------------------------------------------------------------