├── my_aubo_i5_robot_config
├── launch
│ ├── aubo_i5_moveit_sensor_manager.launch.xml
│ ├── aubo_i5_moveit_controller_manager.launch.xml
│ ├── planning_pipeline.launch.xml
│ ├── ros_controllers.launch
│ ├── fake_moveit_controller_manager.launch.xml
│ ├── aubo_i5_moveit_controller_manager.launch
│ ├── chomp_planning_pipeline.launch.xml
│ ├── warehouse.launch
│ ├── setup_assistant.launch
│ ├── joystick_control.launch
│ ├── moveit_rviz.launch
│ ├── warehouse_settings.launch.xml
│ ├── default_warehouse_db.launch
│ ├── sensor_manager.launch.xml
│ ├── run_benchmark_ompl.launch
│ ├── ompl_planning_pipeline.launch.xml
│ ├── gazebo.launch
│ ├── planning_context.launch
│ ├── trajectory_execution.launch.xml
│ ├── moveit_planning_execution.launch
│ ├── demo.launch
│ ├── demo_gazebo.launch
│ ├── move_group.launch
│ └── moveit.rviz
├── config
│ ├── sensors_3d.yaml
│ ├── joint_names.yaml
│ ├── kinematics.yaml
│ ├── fake_controllers.yaml
│ ├── chomp_planning.yaml
│ ├── controllers.yaml
│ ├── ros_controllers.yaml
│ ├── joint_limits.yaml
│ ├── ompl_planning.yaml
│ └── aubo_i5.srdf
├── CMakeLists.txt
├── package.xml
├── src
│ ├── pick_place.cpp
│ └── pass_through.cpp
├── ur5.launch
├── preprocess.py
├── single.launch
├── darknet_ros.launch
├── static_frame.launch
├── pick_place.cpp
├── passthrough.cpp
├── aubo_i5_gripper.urdf
└── README.md
/my_aubo_i5_robot_config/launch/aubo_i5_moveit_sensor_manager.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/sensors_3d.yaml:
--------------------------------------------------------------------------------
1 | # The name of this file shouldn't be changed, or else the Setup Assistant won't detect it
2 | sensors:
3 | - {}
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/joint_names.yaml:
--------------------------------------------------------------------------------
1 | controller_joint_names: [shoulder_joint, upperArm_joint, foreArm_joint, wrist1_joint, wrist2_joint, wrist3_joint]
2 | robot_name: aubo_i5
3 |
4 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/aubo_i5_moveit_controller_manager.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/kinematics.yaml:
--------------------------------------------------------------------------------
1 | aubo_arm:
2 | kinematics_solver: kdl_kinematics_plugin/KDLKinematicsPlugin
3 | kinematics_solver_search_resolution: 0.005
4 | kinematics_solver_timeout: 0.005
5 | kinematics_solver_attempts: 3
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/fake_controllers.yaml:
--------------------------------------------------------------------------------
1 | controller_list:
2 | - name: fake_aubo_arm_controller
3 | joints:
4 | - shoulder_joint
5 | - upperArm_joint
6 | - foreArm_joint
7 | - wrist1_joint
8 | - wrist2_joint
9 | - wrist3_joint
10 | - name: fake_gripper_controller
11 | joints:
12 | - gripper_finger1_joint
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/planning_pipeline.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/ros_controllers.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/fake_moveit_controller_manager.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/aubo_i5_moveit_controller_manager.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/chomp_planning_pipeline.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/chomp_planning.yaml:
--------------------------------------------------------------------------------
1 | planning_time_limit: 10.0
2 | max_iterations: 200
3 | max_iterations_after_collision_free: 5
4 | smoothness_cost_weight: 0.1
5 | obstacle_cost_weight: 1.0
6 | learning_rate: 0.01
7 | smoothness_cost_velocity: 0.0
8 | smoothness_cost_acceleration: 1.0
9 | smoothness_cost_jerk: 0.0
10 | ridge_factor: 0.01
11 | use_pseudo_inverse: false
12 | pseudo_inverse_ridge_factor: 1e-4
13 | joint_update_limit: 0.1
14 | collision_clearence: 0.2
15 | collision_threshold: 0.07
16 | use_stochastic_descent: true
17 | enable_failure_recovery: true
18 | max_recovery_attempts: 5
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/controllers.yaml:
--------------------------------------------------------------------------------
1 | controller_list:
2 | - name: aubo_i5_controller
3 | action_ns: follow_joint_trajectory
4 | type: FollowJointTrajectory
5 | default: true
6 | joints:
7 | - shoulder_joint
8 | - upperArm_joint
9 | - foreArm_joint
10 | - wrist1_joint
11 | - wrist2_joint
12 | - wrist3_joint
13 | - name: gripper_controller
14 | action_ns: follow_joint_trajectory
15 | type: FollowJointTrajectory
16 | default: true
17 | parallel: true
18 | joints:
19 | - gripper_finger1_joint
20 | - gripper_finger2_joint
21 |
22 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/warehouse.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/setup_assistant.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/joystick_control.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/moveit_rviz.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/warehouse_settings.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/default_warehouse_db.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/sensor_manager.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/ur5.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/run_benchmark_ompl.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/ompl_planning_pipeline.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
8 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/gazebo.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/preprocess.py:
--------------------------------------------------------------------------------
1 | import glob, os
2 |
3 | #Current directory
4 | current_dir = os.path.dirname(os.path.abspath(__file__))
5 |
6 | print(current_dir)
7 |
8 | current_dir = '/home/nicholasward2/darknet/build/darknet/x64/data/obj'
9 |
10 | # Per of images to be used for the test set
11 |
12 | percentage_test = 10
13 |
14 | #Create and/or truncate train.txt and test.txt
15 |
16 | file_train = open('/home/nicholasward2/darknet/build/darknet/x64/data/train.txt', 'w')
17 | file_test = open('/home/nicholasward2/darknet/build/darknet/x64/data/test.txt', 'w')
18 |
19 |
20 | # Add images to train.txt and test.txt
21 |
22 | counter = 1
23 | index_text = round(100 / percentage_test)
24 |
25 | for path_and_filename in glob.iglob(os.path.join(current_dir, '*.jpg')):
26 | title, ext = os.path.splitext(os.path.basename(path_and_filename))
27 |
28 | if counter == index_test:
29 | counter = 1
30 | file_test.write('/home/nicholasward2/darknet/build/darknet/x64/data/obj' + '/' + title + '.jpg' + '\n')
31 |
32 | else:
33 | file_train.write('/home/nicholasward2/darknet/build/darknet/x64/data/obj' + '/' + title + '.jpg' + '\n')
34 | counter = counter + 1
35 |
--------------------------------------------------------------------------------
/single.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/planning_context.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/trajectory_execution.launch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/ros_controllers.yaml:
--------------------------------------------------------------------------------
1 | # Simulation settings for using moveit_sim_controllers
2 | moveit_sim_hw_interface:
3 | # NOTE(review): duplicate 'joint_model_group' key removed — the original listed both 'aubo_arm' and 'gripper'; YAML/rosparam keeps only the last duplicate, so 'gripper' was the effective value. Confirm 'aubo_arm' was not the intended group.
4 | joint_model_group: gripper
5 | joint_model_group_pose: arm_home
6 | # Settings for ros_control_boilerplate control loop
7 | generic_hw_control_loop:
8 | loop_hz: 300
9 | cycle_time_error_threshold: 0.01
10 | # Settings for ros_control hardware interface
11 | hardware_interface:
12 | joints:
13 | - shoulder_joint
14 | - upperArm_joint
15 | - foreArm_joint
16 | - wrist1_joint
17 | - wrist2_joint
18 | - wrist3_joint
19 | - gripper_finger1_joint
20 | - gripper_finger2_joint
21 |
22 | sim_control_mode: 1 # 0: position, 1: velocity
23 | # Publish all joint states
24 | # Creates the /joint_states topic necessary in ROS
25 | joint_state_controller:
26 | type: joint_state_controller/JointStateController
27 | publish_rate: 50
28 | controller_list:
29 | - name: aubo_i5_controller
30 | action_ns: follow_joint_trajectory
31 | default: True
32 | type: FollowJointTrajectory
33 | joints:
34 | - shoulder_joint
35 | - upperArm_joint
36 | - foreArm_joint
37 | - wrist1_joint
38 | - wrist2_joint
39 | - wrist3_joint
40 | - name: gripper_controller
41 | action_ns: follow_joint_trajectory
42 | default: True
43 | type: FollowJointTrajectory  # fixed: was 'follow_joint_trajectory'; MoveIt's simple controller manager matches the exact type string 'FollowJointTrajectory' (cf. the aubo_i5_controller entry above)
44 | joints:
45 | - gripper_finger1_joint
46 | - gripper_finger2_joint
47 |
--------------------------------------------------------------------------------
/darknet_ros.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/joint_limits.yaml:
--------------------------------------------------------------------------------
1 | # joint_limits.yaml allows the dynamics properties specified in the URDF to be overwritten or augmented as needed
2 | # Specific joint properties can be changed with the keys [max_position, min_position, max_velocity, max_acceleration]
3 | # Joint limits can be turned off with [has_velocity_limits, has_acceleration_limits]
4 | joint_limits:
5 | foreArm_joint:
6 | has_velocity_limits: false
7 | max_velocity: 0
8 | has_acceleration_limits: false
9 | max_acceleration: 0
10 | gripper_finger1_joint:
11 | has_velocity_limits: true
12 | max_velocity: 2
13 | has_acceleration_limits: false
14 | max_acceleration: 0
15 | gripper_finger2_joint:
16 | has_velocity_limits: true
17 | max_velocity: 100
18 | has_acceleration_limits: false
19 | max_acceleration: 0
20 | shoulder_joint:
21 | has_velocity_limits: false
22 | max_velocity: 0
23 | has_acceleration_limits: false
24 | max_acceleration: 0
25 | upperArm_joint:
26 | has_velocity_limits: false
27 | max_velocity: 0
28 | has_acceleration_limits: false
29 | max_acceleration: 0
30 | wrist1_joint:
31 | has_velocity_limits: false
32 | max_velocity: 0
33 | has_acceleration_limits: false
34 | max_acceleration: 0
35 | wrist2_joint:
36 | has_velocity_limits: false
37 | max_velocity: 0
38 | has_acceleration_limits: false
39 | max_acceleration: 0
40 | wrist3_joint:
41 | has_velocity_limits: false
42 | max_velocity: 0
43 | has_acceleration_limits: false
44 | max_acceleration: 0
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/moveit_planning_execution.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/demo.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 | [move_group/fake_controller_joint_states]
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/demo_gazebo.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | [/joint_states]
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(my_aubo_i5_robot_config)
3 | add_compile_options(-std=c++11)
4 |
5 | find_package(Eigen3 REQUIRED)
6 |
7 | # Eigen 3.2 (Wily) only provides EIGEN3_INCLUDE_DIR, not EIGEN3_INCLUDE_DIRS
8 | if(NOT EIGEN3_INCLUDE_DIRS)
9 | set(EIGEN3_INCLUDE_DIRS ${EIGEN3_INCLUDE_DIR})
10 | endif()
11 | find_package(PCL 1.9 REQUIRED)
12 |
13 |
14 | find_package(catkin REQUIRED COMPONENTS
15 | interactive_markers
16 | moveit_core
17 | moveit_visual_tools
18 | moveit_ros_planning
19 | moveit_ros_planning_interface
20 | pluginlib
21 | geometric_shapes
22 | pcl_ros
23 | gpd_ros
24 | pcl_conversions
25 | rosbag
26 | tf2_ros
27 | tf2_eigen
28 | darknet_ros_msgs
29 | tf2_geometry_msgs
30 | roscpp
31 | rospy
32 | std_msgs
33 | message_filters
34 | geometry_msgs
35 | message_generation
36 | )
37 |
38 | catkin_package(
39 | LIBRARIES
40 | interactivity_utils
41 | INCLUDE_DIRS
42 | ${THIS_PACKAGE_INCLUDE_DIRS}
43 | CATKIN_DEPENDS
44 | moveit_core
45 | moveit_visual_tools
46 | moveit_ros_planning_interface
47 | interactive_markers
48 | DEPENDS
49 | EIGEN3
50 | )
51 |
52 | include_directories(${catkin_INCLUDE_DIRS})
53 | ##catkin_package()
54 | find_package(Boost REQUIRED system filesystem date_time thread)
55 | add_executable(pick_place src/pick_place.cpp)
56 | target_link_libraries(pick_place ${catkin_LIBRARIES} ${Boost_LIBRARIES})
57 |
58 | include_directories(${PCL_INCLUDE_DIRS})
59 | link_directories(${PCL_LIBRARY_DIRS})
60 | add_definitions(${PCL_DEFINITIONS})
61 |
62 | add_executable (pass_through src/pass_through.cpp)
63 | target_link_libraries (pass_through ${catkin_LIBRARIES} ${PCL_LIBRARIES})
64 |
65 | include_directories(SYSTEM ${THIS_PACKAGE_INCLUDE_DIRS} ${Boost_INCLUDE_DIR} ${EIGEN3_INCLUDE_DIRS})
66 | include_directories(${catkin_INCLUDE_DIRS})
67 | link_directories(${catkin_LIBRARY_DIRS})
68 |
69 | install(TARGETS pick_place DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
70 |
71 | install(DIRECTORY launch DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
72 | PATTERN "setup_assistant.launch" EXCLUDE)
73 | install(DIRECTORY config DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})
74 |
75 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/kinematics)
76 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/robot_model_and_robot_state)
77 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/planning)
78 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/planning_scene)
79 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/planning_scene_ros_api)
80 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/motion_planning_api)
81 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/motion_planning_pipeline)
82 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/visualizing_collisions)
83 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/move_group_interface)
84 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/move_group_python_interface)
85 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/state_display)
86 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/interactivity)
87 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/pick_place)
88 | ##add_subdirectory(/home/nicholasward2/catkin_ws/src/moveit_tutorials/doc/perception_pipeline)
89 |
90 |
91 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | my_aubo_i5_robot_config
4 | 0.3.0
5 |
6 | An automatically generated package with all the configuration and launch files for using the aubo_i5 with the MoveIt! Motion Planning Framework
7 |
8 | nick
9 | nick
10 |
11 | BSD
12 |
13 | http://moveit.ros.org/
14 | https://github.com/ros-planning/moveit/issues
15 | https://github.com/ros-planning/moveit
16 |
17 | catkin
18 |
19 | pluginlib
20 | eigen
21 | moveit_core
22 | moveit_ros_planning_interface
23 | moveit_ros_perception
24 | interactive_markers
25 | geometric_shapes
26 | moveit_visual_tools
27 | pcl_ros
28 | pcl_conversions
29 | rosbag
30 | tf2_ros
31 | tf2_eigen
32 | tf2_geometry_msgs
33 | libpcl-all-dev
34 | message_filters
35 | message_filters
36 | gpd_ros
37 | gpd_ros
38 |
39 |
40 | panda_moveit_config
41 | pluginlib
42 | moveit_core
43 | moveit_commander
44 | moveit_fake_controller_manager
45 | moveit_ros_planning_interface
46 | moveit_ros_perception
47 | interactive_markers
48 | moveit_visual_tools
49 | joy
50 | pcl_ros
51 | pcl_conversions
52 | rosbag
53 | tf2_ros
54 | tf2_eigen
55 | tf2_geometry_msgs
56 | darknet_ros
57 | darknet_ros
58 | darknet_ros_msgs
59 | darknet_ros_msgs
60 |
61 | roscpp
62 | rospy
63 | std_msgs
64 | geometry_msgs
65 | message_generation
66 |
67 | roscpp
68 | rospy
69 | std_msgs
70 | geometry_msgs
71 | message_runtime
72 |
73 | moveit_ros_move_group
74 | moveit_fake_controller_manager
75 | moveit_kinematics
76 | moveit_planners_ompl
77 | moveit_ros_visualization
78 | moveit_setup_assistant
79 | joint_state_publisher
80 | robot_state_publisher
81 | xacro
82 |
83 |
84 | aubo_description
85 | aubo_description
86 | libpcl-all
87 |
88 |
89 |
90 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/move_group.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
32 |
33 |
34 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
--------------------------------------------------------------------------------
/static_frame.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/pick_place.cpp:
--------------------------------------------------------------------------------
1 | /* Author: Ioan Sucan, Ridhwan Luthra*/
2 |
// ROS
// NOTE(review): the include targets were stripped from this copy of the file;
// restored from the MoveIt pick-and-place tutorial this program is based on —
// confirm against the original source (the third MoveIt header is a guess).
#include <ros/ros.h>

// MoveIt
#include <moveit/planning_scene_interface/planning_scene_interface.h>
#include <moveit/move_group_interface/move_group_interface.h>
#include <moveit_msgs/Grasp.h>

// TF2
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
13 |
14 | /*
15 |
16 | void cloud_cb(const visualization_msgs::MarkerArrayConstPtr& grasp_pose_msg)
17 | {
18 | std::cout << "This is the grasp pose::::::::::::::::::::::::::::::::::::::::::: ";
19 | std::cout << grasp_pose_msg->pose.position.x << std::endl;
20 | }
21 | */
22 |
23 | void openGripper(trajectory_msgs::JointTrajectory& posture)
24 | {
25 | // BEGIN_SUB_TUTORIAL open_gripper
26 | /* Add both finger joints of panda robot. */
27 | posture.joint_names.resize(2);
28 | posture.joint_names[0] = "gripper_finger1_joint";
29 | posture.joint_names[1] = "gripper_finger2_joint";
30 |
31 | /* Set them as open, wide enough for the object to fit. */
32 | posture.points.resize(1);
33 | posture.points[0].positions.resize(2);
34 | posture.points[0].positions[0] = 0.04;
35 | posture.points[0].positions[1] = 0.04;
36 | posture.points[0].time_from_start = ros::Duration(0.5);
37 | // END_SUB_TUTORIAL
38 | }
39 |
40 | void closedGripper(trajectory_msgs::JointTrajectory& posture)
41 | {
42 | // BEGIN_SUB_TUTORIAL closed_gripper
43 | /* Add both finger joints of aubo robot. */
44 | posture.joint_names.resize(2);
45 | posture.joint_names[0] = "gripper_finger1_joint";
46 | posture.joint_names[1] = "gripper_finger2_joint";
47 |
48 | /* Set them as closed. */
49 | posture.points.resize(1);
50 | posture.points[0].positions.resize(2);
51 | posture.points[0].positions[0] = 0.00;
52 | posture.points[0].positions[1] = 0.00;
53 | posture.points[0].time_from_start = ros::Duration(0.5);
54 | // END_SUB_TUTORIAL
55 | }
56 |
57 | void pick(moveit::planning_interface::MoveGroupInterface& move_group)
58 | {
59 | // BEGIN_SUB_TUTORIAL pick1
60 | // Create a vector of grasps to be attempted, currently only creating single grasp.
61 | // This is essentially useful when using a grasp generator to generate and test multiple grasps.
62 | std::vector grasps;
63 | grasps.resize(1);
64 |
65 | // Setting grasp pose
66 | // ++++++++++++++++++++++
67 |
68 | grasps[0].grasp_pose.header.frame_id = "base_link";
69 | //tf2::Quaternion orientation;
70 | tf2::Quaternion orientation;
71 | orientation.setRPY(1.489, -0.059, -0.637);
72 |
73 | grasps[0].grasp_pose.pose.orientation = tf2::toMsg(orientation);
74 | grasps[0].grasp_pose.pose.position.x = -0.750;
75 | grasps[0].grasp_pose.pose.position.y = 0.041;
76 | grasps[0].grasp_pose.pose.position.z = 0.062;
77 |
78 |
79 | // Setting pre-grasp approach
80 | // ++++++++++++++++++++++++++
81 | /* Defined with respect to frame_id */
82 | grasps[0].pre_grasp_approach.direction.header.frame_id = "base_link";
83 | /* Direction is set as positive x axis */
84 | grasps[0].pre_grasp_approach.direction.vector.z = -1.0;
85 | grasps[0].pre_grasp_approach.direction.vector.y = -1.0;
86 | grasps[0].pre_grasp_approach.direction.vector.x = -1.0;
87 |
88 | //grasps[0].pre_grasp_approach.min_distance = 0.20;
89 | grasps[0].pre_grasp_approach.desired_distance = 0.10;
90 |
91 |
92 |
93 | // Set support surface as table1.
94 | move_group.setSupportSurfaceName("table1");
95 | // Call pick to pick up the object using the grasps given
96 | move_group.pick("object", grasps);
97 | // END_SUB_TUTORIAL
98 | }
99 |
100 | void place(moveit::planning_interface::MoveGroupInterface& group)
101 | {
102 | //
103 | // - Calling place function m rrently only creating single place location.
104 | std::vector place_location;
105 | place_location.resize(1);
106 |
107 | // Setting place location pose
108 | // +++++++++++++++++++++++++++
109 | place_location[0].place_pose.header.frame_id = "world";
110 | tf2::Quaternion orientation;
111 | orientation.setRPY(0, 0, M_PI / 2);
112 | place_location[0].place_pose.pose.orientation = tf2::toMsg(orientation);
113 |
114 | /* While placing it is the exact location of the center of the object. */
115 | place_location[0].place_pose.pose.position.x = 0;
116 | place_location[0].place_pose.pose.position.y = 0.5;
117 | place_location[0].place_pose.pose.position.z = 0.5;
118 |
119 | // Setting pre-place approach
120 | // ++++++++++++++++++++++++++
121 | /* Defined with respect to frame_id */
122 | place_location[0].pre_place_approach.direction.header.frame_id = "world";
123 | /* Direction is set as negative z axis */
124 | place_location[0].pre_place_approach.direction.vector.z = -1.0;
125 |
126 | place_location[0].pre_place_approach.min_distance = 0.095;
127 | place_location[0].pre_place_approach.desired_distance = 0.115;
128 |
129 | // Setting post-grasp retreat
130 | // ++++++++++++++++++++++++++
131 | /* Defined with respect to frame_id */
132 | place_location[0].post_place_retreat.direction.header.frame_id = "world";
133 | /* Direction is set as negative y axis */
134 | place_location[0].post_place_retreat.direction.vector.x = -1.0;
135 | place_location[0].post_place_retreat.min_distance = 0.1;
136 | place_location[0].post_place_retreat.desired_distance = 0.25;
137 |
138 | // Setting posture of eef after placing object
139 | // +++++++++++++++++++++++++++++++++++++++++++
140 | /* Similar to the pick case */
141 | openGripper(place_location[0].post_place_posture);
142 |
143 | // Set support surface as table2.
144 | group.setSupportSurfaceName("table2");
145 | // Call place to place the object using the place locations given.
146 | group.place("object", place_location);
147 | // END_SUB_TUTORIAL
148 | }
149 |
150 | void addCollisionObjects(moveit::planning_interface::PlanningSceneInterface& planning_scene_interface)
151 | {
152 | // BEGIN_SUB_TUTORIAL table1
153 | //
154 | // Creating Environment
155 | // ^^^^^^^^^^^^^^^^^^^^
156 | // Create vector to hold 3 collision objects.
157 | std::vector collision_objects;
158 | collision_objects.resize(1);
159 |
160 | // Add the first table where the cube will originally be kept.
161 | collision_objects[0].id = "table1";
162 | collision_objects[0].header.frame_id = "base_link";
163 |
164 | /* Define the primitive and its dimensions. */
165 | collision_objects[0].primitives.resize(1);
166 | collision_objects[0].primitives[0].type = collision_objects[0].primitives[0].BOX;
167 | collision_objects[0].primitives[0].dimensions.resize(3);
168 | collision_objects[0].primitives[0].dimensions[0] = 1.0;
169 | collision_objects[0].primitives[0].dimensions[1] = 1.0;
170 | collision_objects[0].primitives[0].dimensions[2] = 0.47;
171 |
172 | /* Define the pose of the table. */
173 | collision_objects[0].primitive_poses.resize(1);
174 | collision_objects[0].primitive_poses[0].position.x = -1.0;
175 | collision_objects[0].primitive_poses[0].position.y = 0.0;
176 | collision_objects[0].primitive_poses[0].position.z = -0.30;
177 | collision_objects[0].primitive_poses[0].orientation.x = 5;
178 | // END_SUB_TUTORIAL
179 |
180 | collision_objects[0].operation = collision_objects[0].ADD;
181 |
182 |
183 |
184 | planning_scene_interface.applyCollisionObjects(collision_objects);
185 | }
186 |
187 | int main(int argc, char** argv)
188 | {
189 | ros::init(argc, argv, "aubo_arm_pick_place");
190 | ros::NodeHandle nh;
191 | //ros::NodeHandle m_nh;
192 |
193 | // ros::Subscriber sub = m_nh.subscribe("/detect_grasps/plot_grasps", 1, cloud_cb);
194 | //ros::spin();
195 | //ros::WallDuration(10.0).sleep();
196 |
197 | ros::AsyncSpinner spinner(1);
198 | spinner.start();
199 |
200 | ros::WallDuration(1.0).sleep();
201 | moveit::planning_interface::PlanningSceneInterface planning_scene_interface;
202 | moveit::planning_interface::MoveGroupInterface group("aubo_arm");
203 | group.setPlanningTime(60.0);
204 |
205 | //addCollisionObjects(planning_scene_interface);
206 |
207 | // Wait a bit for ROS things to initialize
208 | ros::WallDuration(1.0).sleep();
209 |
210 | pick(group);
211 |
212 | ros::WallDuration(1.0).sleep();
213 |
214 | //place(group);
215 |
216 | ros::waitForShutdown();
217 | return 0;
218 | }
219 |
220 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/src/pick_place.cpp:
--------------------------------------------------------------------------------
1 | /* Author: Ioan Sucan, Ridhwan Luthra*/
2 |
3 | // ROS
4 | #include
5 |
6 | // MoveIt
7 | #include
8 | #include
9 | #include
10 |
11 | // TF2
12 | #include
13 |
14 | /*
15 |
16 | void cloud_cb(const visualization_msgs::MarkerArrayConstPtr& grasp_pose_msg)
17 | {
18 | std::cout << "This is the grasp pose::::::::::::::::::::::::::::::::::::::::::: ";
19 | std::cout << grasp_pose_msg->pose.position.x << std::endl;
20 | }
21 | */
22 |
23 | void openGripper(trajectory_msgs::JointTrajectory& posture)
24 | {
25 | // BEGIN_SUB_TUTORIAL open_gripper
26 | /* Add both finger joints of panda robot. */
27 | posture.joint_names.resize(2);
28 | posture.joint_names[0] = "gripper_finger1_joint";
29 | posture.joint_names[1] = "gripper_finger2_joint";
30 |
31 | /* Set them as open, wide enough for the object to fit. */
32 | posture.points.resize(1);
33 | posture.points[0].positions.resize(2);
34 | posture.points[0].positions[0] = 0.04;
35 | posture.points[0].positions[1] = 0.04;
36 | posture.points[0].time_from_start = ros::Duration(0.5);
37 | // END_SUB_TUTORIAL
38 | }
39 |
40 | void closedGripper(trajectory_msgs::JointTrajectory& posture)
41 | {
42 | // BEGIN_SUB_TUTORIAL closed_gripper
43 | /* Add both finger joints of aubo robot. */
44 | posture.joint_names.resize(2);
45 | posture.joint_names[0] = "gripper_finger1_joint";
46 | posture.joint_names[1] = "gripper_finger2_joint";
47 |
48 | /* Set them as closed. */
49 | posture.points.resize(1);
50 | posture.points[0].positions.resize(2);
51 | posture.points[0].positions[0] = 0.00;
52 | posture.points[0].positions[1] = 0.00;
53 | posture.points[0].time_from_start = ros::Duration(0.5);
54 | // END_SUB_TUTORIAL
55 | }
56 |
57 | void pick(moveit::planning_interface::MoveGroupInterface& move_group)
58 | {
59 | // BEGIN_SUB_TUTORIAL pick1
60 | // Create a vector of grasps to be attempted, currently only creating single grasp.
61 | // This is essentially useful when using a grasp generator to generate and test multiple grasps.
62 | std::vector grasps;
63 | grasps.resize(1);
64 |
65 | // Setting grasp pose
66 | // ++++++++++++++++++++++
67 |
68 | grasps[0].grasp_pose.header.frame_id = "base_link";
69 | //tf2::Quaternion orientation;
70 | tf2::Quaternion orientation;
71 | orientation.setRPY(1.489, -0.059, -0.637);
72 |
73 | grasps[0].grasp_pose.pose.orientation = tf2::toMsg(orientation);
74 | grasps[0].grasp_pose.pose.position.x = -0.750;
75 | grasps[0].grasp_pose.pose.position.y = 0.041;
76 | grasps[0].grasp_pose.pose.position.z = 0.062;
77 |
78 |
79 | // Setting pre-grasp approach
80 | // ++++++++++++++++++++++++++
81 | /* Defined with respect to frame_id */
82 | grasps[0].pre_grasp_approach.direction.header.frame_id = "base_link";
83 | /* Direction is set as positive x axis */
84 | grasps[0].pre_grasp_approach.direction.vector.z = -1.0;
85 | grasps[0].pre_grasp_approach.direction.vector.y = -1.0;
86 | grasps[0].pre_grasp_approach.direction.vector.x = -1.0;
87 |
88 | //grasps[0].pre_grasp_approach.min_distance = 0.20;
89 | grasps[0].pre_grasp_approach.desired_distance = 0.10;
90 |
91 |
92 |
93 | // Set support surface as table1.
94 | move_group.setSupportSurfaceName("table1");
95 | // Call pick to pick up the object using the grasps given
96 | move_group.pick("object", grasps);
97 | // END_SUB_TUTORIAL
98 | }
99 |
100 | void place(moveit::planning_interface::MoveGroupInterface& group)
101 | {
102 | //
103 | // - Calling place function m rrently only creating single place location.
104 | std::vector place_location;
105 | place_location.resize(1);
106 |
107 | // Setting place location pose
108 | // +++++++++++++++++++++++++++
109 | place_location[0].place_pose.header.frame_id = "world";
110 | tf2::Quaternion orientation;
111 | orientation.setRPY(0, 0, M_PI / 2);
112 | place_location[0].place_pose.pose.orientation = tf2::toMsg(orientation);
113 |
114 | /* While placing it is the exact location of the center of the object. */
115 | place_location[0].place_pose.pose.position.x = 0;
116 | place_location[0].place_pose.pose.position.y = 0.5;
117 | place_location[0].place_pose.pose.position.z = 0.5;
118 |
119 | // Setting pre-place approach
120 | // ++++++++++++++++++++++++++
121 | /* Defined with respect to frame_id */
122 | place_location[0].pre_place_approach.direction.header.frame_id = "world";
123 | /* Direction is set as negative z axis */
124 | place_location[0].pre_place_approach.direction.vector.z = -1.0;
125 |
126 | place_location[0].pre_place_approach.min_distance = 0.095;
127 | place_location[0].pre_place_approach.desired_distance = 0.115;
128 |
129 | // Setting post-grasp retreat
130 | // ++++++++++++++++++++++++++
131 | /* Defined with respect to frame_id */
132 | place_location[0].post_place_retreat.direction.header.frame_id = "world";
133 | /* Direction is set as negative y axis */
134 | place_location[0].post_place_retreat.direction.vector.x = -1.0;
135 | place_location[0].post_place_retreat.min_distance = 0.1;
136 | place_location[0].post_place_retreat.desired_distance = 0.25;
137 |
138 | // Setting posture of eef after placing object
139 | // +++++++++++++++++++++++++++++++++++++++++++
140 | /* Similar to the pick case */
141 | openGripper(place_location[0].post_place_posture);
142 |
143 | // Set support surface as table2.
144 | group.setSupportSurfaceName("table2");
145 | // Call place to place the object using the place locations given.
146 | group.place("object", place_location);
147 | // END_SUB_TUTORIAL
148 | }
149 |
150 | void addCollisionObjects(moveit::planning_interface::PlanningSceneInterface& planning_scene_interface)
151 | {
152 | // BEGIN_SUB_TUTORIAL table1
153 | //
154 | // Creating Environment
155 | // ^^^^^^^^^^^^^^^^^^^^
156 | // Create vector to hold 3 collision objects.
157 | std::vector collision_objects;
158 | collision_objects.resize(1);
159 |
160 | // Add the first table where the cube will originally be kept.
161 | collision_objects[0].id = "table1";
162 | collision_objects[0].header.frame_id = "base_link";
163 |
164 | /* Define the primitive and its dimensions. */
165 | collision_objects[0].primitives.resize(1);
166 | collision_objects[0].primitives[0].type = collision_objects[0].primitives[0].BOX;
167 | collision_objects[0].primitives[0].dimensions.resize(3);
168 | collision_objects[0].primitives[0].dimensions[0] = 1.0;
169 | collision_objects[0].primitives[0].dimensions[1] = 1.0;
170 | collision_objects[0].primitives[0].dimensions[2] = 0.47;
171 |
172 | /* Define the pose of the table. */
173 | collision_objects[0].primitive_poses.resize(1);
174 | collision_objects[0].primitive_poses[0].position.x = -1.0;
175 | collision_objects[0].primitive_poses[0].position.y = 0.0;
176 | collision_objects[0].primitive_poses[0].position.z = -0.30;
177 | collision_objects[0].primitive_poses[0].orientation.x = 5;
178 | // END_SUB_TUTORIAL
179 |
180 | collision_objects[0].operation = collision_objects[0].ADD;
181 |
182 |
183 |
184 | planning_scene_interface.applyCollisionObjects(collision_objects);
185 | }
186 |
// Entry point: initialize ROS, build the MoveGroup interface for the
// "aubo_arm" planning group, and run a single pick attempt.
int main(int argc, char** argv)
{
  ros::init(argc, argv, "aubo_arm_pick_place");
  ros::NodeHandle nh;
  //ros::NodeHandle m_nh;

  // ros::Subscriber sub = m_nh.subscribe("/detect_grasps/plot_grasps", 1, cloud_cb);
  //ros::spin();
  //ros::WallDuration(10.0).sleep();

  // MoveGroupInterface needs a running spinner to service ROS callbacks.
  ros::AsyncSpinner spinner(1);
  spinner.start();

  ros::WallDuration(1.0).sleep();
  moveit::planning_interface::PlanningSceneInterface planning_scene_interface;
  moveit::planning_interface::MoveGroupInterface group("aubo_arm");
  group.setPlanningTime(60.0);  // generous per-request planning budget (seconds)

  //addCollisionObjects(planning_scene_interface);

  // Wait a bit for ROS things to initialize
  ros::WallDuration(1.0).sleep();

  pick(group);

  ros::WallDuration(1.0).sleep();

  //place(group);

  ros::waitForShutdown();
  return 0;
}
219 |
220 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/ompl_planning.yaml:
--------------------------------------------------------------------------------
1 | planner_configs:
2 | SBL:
3 | type: geometric::SBL
4 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
5 | EST:
6 | type: geometric::EST
range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
8 | goal_bias: 0.05 # When close to goal select goal, with this probability. default: 0.05
9 | LBKPIECE:
10 | type: geometric::LBKPIECE
11 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
border_fraction: 0.9  # Fraction of time focused on border. default: 0.9
13 | min_valid_path_fraction: 0.5 # Accept partially valid moves above fraction. default: 0.5
14 | BKPIECE:
15 | type: geometric::BKPIECE
16 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
border_fraction: 0.9  # Fraction of time focused on border. default: 0.9
18 | failed_expansion_score_factor: 0.5 # When extending motion fails, scale score by factor. default: 0.5
19 | min_valid_path_fraction: 0.5 # Accept partially valid moves above fraction. default: 0.5
20 | KPIECE:
21 | type: geometric::KPIECE
22 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
23 | goal_bias: 0.05 # When close to goal select goal, with this probability. default: 0.05
border_fraction: 0.9  # Fraction of time focused on border. default: 0.9 (0.0,1.]
25 | failed_expansion_score_factor: 0.5 # When extending motion fails, scale score by factor. default: 0.5
26 | min_valid_path_fraction: 0.5 # Accept partially valid moves above fraction. default: 0.5
27 | RRT:
28 | type: geometric::RRT
29 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
30 | goal_bias: 0.05 # When close to goal select goal, with this probability? default: 0.05
31 | RRTConnect:
32 | type: geometric::RRTConnect
33 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
34 | RRTstar:
35 | type: geometric::RRTstar
36 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
37 | goal_bias: 0.05 # When close to goal select goal, with this probability? default: 0.05
38 | delay_collision_checking: 1 # Stop collision checking as soon as C-free parent found. default 1
39 | TRRT:
40 | type: geometric::TRRT
41 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
42 | goal_bias: 0.05 # When close to goal select goal, with this probability? default: 0.05
43 | max_states_failed: 10 # when to start increasing temp. default: 10
44 | temp_change_factor: 2.0 # how much to increase or decrease temp. default: 2.0
45 | min_temperature: 10e-10 # lower limit of temp change. default: 10e-10
46 | init_temperature: 10e-6 # initial temperature. default: 10e-6
47 | frountier_threshold: 0.0 # dist new state to nearest neighbor to disqualify as frontier. default: 0.0 set in setup()
48 | frountierNodeRatio: 0.1 # 1/10, or 1 nonfrontier for every 10 frontier. default: 0.1
k_constant: 0.0  # value used to normalize expression. default: 0.0 set in setup()
50 | PRM:
51 | type: geometric::PRM
52 | max_nearest_neighbors: 10 # use k nearest neighbors. default: 10
53 | PRMstar:
54 | type: geometric::PRMstar
55 | FMT:
56 | type: geometric::FMT
57 | num_samples: 1000 # number of states that the planner should sample. default: 1000
58 | radius_multiplier: 1.1 # multiplier used for the nearest neighbors search radius. default: 1.1
59 | nearest_k: 1 # use Knearest strategy. default: 1
60 | cache_cc: 1 # use collision checking cache. default: 1
61 | heuristics: 0 # activate cost to go heuristics. default: 0
62 | extended_fmt: 1 # activate the extended FMT*: adding new samples if planner does not finish successfully. default: 1
63 | BFMT:
64 | type: geometric::BFMT
65 | num_samples: 1000 # number of states that the planner should sample. default: 1000
66 | radius_multiplier: 1.0 # multiplier used for the nearest neighbors search radius. default: 1.0
67 | nearest_k: 1 # use the Knearest strategy. default: 1
68 | balanced: 0 # exploration strategy: balanced true expands one tree every iteration. False will select the tree with lowest maximum cost to go. default: 1
69 | optimality: 1 # termination strategy: optimality true finishes when the best possible path is found. Otherwise, the algorithm will finish when the first feasible path is found. default: 1
70 | heuristics: 1 # activates cost to go heuristics. default: 1
71 | cache_cc: 1 # use the collision checking cache. default: 1
72 | extended_fmt: 1 # Activates the extended FMT*: adding new samples if planner does not finish successfully. default: 1
73 | PDST:
74 | type: geometric::PDST
75 | STRIDE:
76 | type: geometric::STRIDE
77 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
78 | goal_bias: 0.05 # When close to goal select goal, with this probability. default: 0.05
79 | use_projected_distance: 0 # whether nearest neighbors are computed based on distances in a projection of the state rather distances in the state space itself. default: 0
degree: 16  # desired degree of a node in the Geometric Near-neighbor Access Tree (GNAT). default: 16
max_degree: 18  # max degree of a node in the GNAT. default: 18
82 | min_degree: 12 # min degree of a node in the GNAT. default: 12
83 | max_pts_per_leaf: 6 # max points per leaf in the GNAT. default: 6
84 | estimated_dimension: 0.0 # estimated dimension of the free space. default: 0.0
85 | min_valid_path_fraction: 0.2 # Accept partially valid moves above fraction. default: 0.2
86 | BiTRRT:
87 | type: geometric::BiTRRT
88 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
89 | temp_change_factor: 0.1 # how much to increase or decrease temp. default: 0.1
90 | init_temperature: 100 # initial temperature. default: 100
91 | frountier_threshold: 0.0 # dist new state to nearest neighbor to disqualify as frontier. default: 0.0 set in setup()
92 | frountier_node_ratio: 0.1 # 1/10, or 1 nonfrontier for every 10 frontier. default: 0.1
93 | cost_threshold: 1e300 # the cost threshold. Any motion cost that is not better will not be expanded. default: inf
94 | LBTRRT:
95 | type: geometric::LBTRRT
96 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
97 | goal_bias: 0.05 # When close to goal select goal, with this probability. default: 0.05
98 | epsilon: 0.4 # optimality approximation factor. default: 0.4
99 | BiEST:
100 | type: geometric::BiEST
101 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
102 | ProjEST:
103 | type: geometric::ProjEST
104 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
105 | goal_bias: 0.05 # When close to goal select goal, with this probability. default: 0.05
106 | LazyPRM:
107 | type: geometric::LazyPRM
108 | range: 0.0 # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
109 | LazyPRMstar:
110 | type: geometric::LazyPRMstar
111 | SPARS:
112 | type: geometric::SPARS
113 | stretch_factor: 3.0 # roadmap spanner stretch factor. multiplicative upper bound on path quality. It does not make sense to make this parameter more than 3. default: 3.0
114 | sparse_delta_fraction: 0.25 # delta fraction for connection distance. This value represents the visibility range of sparse samples. default: 0.25
115 | dense_delta_fraction: 0.001 # delta fraction for interface detection. default: 0.001
116 | max_failures: 1000 # maximum consecutive failure limit. default: 1000
117 | SPARStwo:
118 | type: geometric::SPARStwo
119 | stretch_factor: 3.0 # roadmap spanner stretch factor. multiplicative upper bound on path quality. It does not make sense to make this parameter more than 3. default: 3.0
120 | sparse_delta_fraction: 0.25 # delta fraction for connection distance. This value represents the visibility range of sparse samples. default: 0.25
121 | dense_delta_fraction: 0.001 # delta fraction for interface detection. default: 0.001
122 | max_failures: 5000 # maximum consecutive failure limit. default: 5000
123 | aubo_arm:
124 | default_planner_config: RRTConnect
125 | planner_configs:
126 | - SBL
127 | - EST
128 | - LBKPIECE
129 | - BKPIECE
130 | - KPIECE
131 | - RRT
132 | - RRTConnect
133 | - RRTstar
134 | - TRRT
135 | - PRM
136 | - PRMstar
137 | - FMT
138 | - BFMT
139 | - PDST
140 | - STRIDE
141 | - BiTRRT
142 | - LBTRRT
143 | - BiEST
144 | - ProjEST
145 | - LazyPRM
146 | - LazyPRMstar
147 | - SPARS
148 | - SPARStwo
149 | projection_evaluator: joints(shoulder_joint,upperArm_joint)
150 | longest_valid_segment_fraction: 0.005
151 | gripper:
152 | default_planner_config: None
153 | planner_configs:
154 | - SBL
155 | - EST
156 | - LBKPIECE
157 | - BKPIECE
158 | - KPIECE
159 | - RRT
160 | - RRTConnect
161 | - RRTstar
162 | - TRRT
163 | - PRM
164 | - PRMstar
165 | - FMT
166 | - BFMT
167 | - PDST
168 | - STRIDE
169 | - BiTRRT
170 | - LBTRRT
171 | - BiEST
172 | - ProjEST
173 | - LazyPRM
174 | - LazyPRMstar
175 | - SPARS
176 | - SPARStwo
177 | projection_evaluator: joints(gripper_finger1_joint,gripper_finger2_joint)
178 | longest_valid_segment_fraction: 0.005
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/src/pass_through.cpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 |
4 | // PCL specific includes
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 | #include
19 | #include
20 | #include
21 | #include
22 | #include
23 | #include
24 | #include
25 | #include
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 |
34 | #include
35 | //using namespace message_filters;
36 |
ros::Publisher pub;  // publishes the selected object's point-cloud cluster

// Pixel extents of the bounding box chosen from the YOLO detections.
int minX;
int minY;
int maxX;
int maxY;
// Pixel-space center of the chosen bounding box.
int YoloCenterPointX;
int YoloCenterPointY;
// Latch flags so each callback body runs only once (0 = not yet run).
int endCallback2 = 0;
int endCallback1 = 0;
47 |
48 |
49 | void cloud_cb_2(const darknet_ros_msgs::BoundingBoxesConstPtr& msg)
50 | {
51 |
52 | if(endCallback2 == 0){
53 | std::string my_choice = "";
54 | std::cout << "Choose what object you want to grasp: ";
55 | std::cin >> my_choice;
56 | std::cout << std::endl;
57 |
58 |
59 | for(int i = 0; i < 5; i++){
60 |
61 | if(msg->bounding_boxes[i].Class == my_choice)
62 | {
63 |
64 | minX = msg->bounding_boxes[i].xmin;
65 | minY = msg->bounding_boxes[i].ymin;
66 | maxX = msg->bounding_boxes[i].xmax;
67 | maxY = msg->bounding_boxes[i].ymax;
68 |
69 | std::cout << "bounding box_boxes: " << msg->bounding_boxes[i].Class << std::endl;
70 | std::cout << "bounding box minX" << minX << std::endl;
71 | std::cout << "bounding box minY" << minY << std::endl;
72 | std::cout << "bounding box maxX" << maxX << std::endl;
73 | std::cout << "bounding box maxY" << maxY << std::endl;
74 |
75 | YoloCenterPointX = (maxX + minX)/2;
76 | YoloCenterPointY = (maxY + minY)/2;
77 |
78 |
79 | }
80 |
81 | }
82 | endCallback2 = 1;
83 | }
84 |
85 |
86 | }
87 |
88 | void cloud_cb (const sensor_msgs::PointCloud2ConstPtr& cloud_msg){
89 | if(endCallback1 == 0){
90 |
91 | // Container for original & filtered data
92 | pcl::PCLPointCloud2* cloud = new pcl::PCLPointCloud2;
93 | pcl::PCLPointCloud2ConstPtr cloudPtr(cloud);
94 | pcl::PCLPointCloud2* cloud_filtered = new pcl::PCLPointCloud2;
95 | pcl::PCLPointCloud2Ptr cloudFilteredPtr (cloud_filtered);
96 |
97 |
98 | // Convert to PCL data type
99 | pcl_conversions::toPCL(*cloud_msg, *cloud);
100 |
101 |
102 | // Perform voxel grid downsampling filtering
103 | pcl::VoxelGrid sor;
104 | sor.setInputCloud (cloudPtr);
105 | sor.setLeafSize (0.01f, 0.01f, 0.01f);
106 | sor.filter (*cloudFilteredPtr);
107 |
108 |
109 | pcl::PointCloud *xyz_cloud = new pcl::PointCloud;
110 | pcl::PointCloud::Ptr xyzCloudPtr (xyz_cloud); // need a boost shared pointer for pcl function inputs
111 |
112 | // convert the pcl::PointCloud2 tpye to pcl::PointCloud
113 | pcl::fromPCLPointCloud2(*cloudFilteredPtr, *xyzCloudPtr);
114 |
115 |
116 | //perform passthrough filtering to remove table leg
117 |
118 | // create a pcl object to hold the passthrough filtered results
119 | pcl::PointCloud *xyz_cloud_filtered = new pcl::PointCloud;
120 | pcl::PointCloud::Ptr xyzCloudPtrFiltered (xyz_cloud_filtered);
121 |
122 | // Create the filtering object
123 | pcl::PassThrough pass;
124 | pass.setInputCloud (xyzCloudPtr);
125 | pass.setFilterFieldName ("z");
126 | pass.setFilterLimits (.78, 1.1);
127 | //pass.setFilterLimitsNegative (true);
128 | pass.filter (*xyzCloudPtrFiltered);
129 |
130 |
131 | // create a pcl object to hold the ransac filtered results
132 | pcl::PointCloud *xyz_cloud_ransac_filtered = new pcl::PointCloud;
133 | pcl::PointCloud::Ptr xyzCloudPtrRansacFiltered (xyz_cloud_ransac_filtered);
134 |
135 |
136 | // perform ransac planar filtration to remove table top
137 | pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients);
138 | pcl::PointIndices::Ptr inliers (new pcl::PointIndices);
139 | // Create the segmentation object
140 | pcl::SACSegmentation seg1;
141 | // Optional
142 | seg1.setOptimizeCoefficients (true);
143 | // Mandatory
144 | seg1.setModelType (pcl::SACMODEL_PLANE);
145 | seg1.setMethodType (pcl::SAC_RANSAC);
146 | seg1.setDistanceThreshold (0.005);
147 |
148 | seg1.setInputCloud (xyzCloudPtrFiltered);
149 | seg1.segment (*inliers, *coefficients);
150 |
151 |
152 | // Create the filtering object
153 | pcl::ExtractIndices extract;
154 |
155 | //extract.setInputCloud (xyzCloudPtrFiltered);
156 | extract.setInputCloud (xyzCloudPtrFiltered);
157 | extract.setIndices (inliers);
158 | extract.setNegative (true);
159 | extract.filter (*xyzCloudPtrRansacFiltered);
160 |
161 |
162 |
163 | // perform euclidean cluster segmentation to seporate individual objects
164 |
165 | // Create the KdTree object for the search method of the extraction
166 | pcl::search::KdTree::Ptr tree (new pcl::search::KdTree);
167 | tree->setInputCloud (xyzCloudPtrRansacFiltered);
168 |
169 | // create the extraction object for the clusters
170 | std::vector cluster_indices;
171 | pcl::EuclideanClusterExtraction ec;
172 | // specify euclidean cluster parameters
173 | ec.setClusterTolerance (0.02); // 2cm
174 | ec.setMinClusterSize (100);
175 | ec.setMaxClusterSize (25000);
176 | ec.setSearchMethod (tree);
177 | ec.setInputCloud (xyzCloudPtrRansacFiltered);
178 | // exctract the indices pertaining to each cluster and store in a vector of pcl::PointIndices
179 | ec.extract (cluster_indices);
180 | sensor_msgs::PointCloud2 output;
181 |
182 |
183 | pcl::PCLPointCloud2 outputPCL;
184 |
185 | int EuclideanDistance, distance_x, distance_y;
186 | int threshold = 30;
187 |
188 | pcl::PointCloud::Ptr cloud_cluster (new pcl::PointCloud);
189 | for (std::vector::const_iterator it = cluster_indices.begin (); it != cluster_indices.end (); ++it)
190 | {
191 | Eigen::Vector4f centroid3D;
192 | Eigen::Vector2i centroid2D;
193 | Eigen::Matrix3f camera_matrix;
194 | camera_matrix << 547.471175, 0.000000, 313.045026, 0.000000, 547.590335, 237.016225, 0.000000, 0.000000, 1.000000;
195 |
196 | for (std::vector::const_iterator pit = it->indices.begin (); pit != it->indices.end (); ++pit)
197 | {
198 | cloud_cluster->points.push_back (xyzCloudPtrRansacFiltered->points[*pit]);
199 | }
200 | cloud_cluster->width = cloud_cluster->points.size ();
201 | cloud_cluster->height = 1;
202 | cloud_cluster->is_dense = true;
203 | //std::cout << "PointCloud representing the Cluster: " << cloud_cluster->points.size () << " data points." << std::endl;
204 | pcl::compute3DCentroid(*xyzCloudPtrRansacFiltered, *it, centroid3D);
205 |
206 | Eigen::Vector2i pixel_position;
207 |
208 | pixel_position(0) = (int)(centroid3D(0)*camera_matrix(0,0)/centroid3D(2) + camera_matrix(0,2));
209 | pixel_position(1) = (int)(centroid3D(1)*camera_matrix(1,1)/centroid3D(2) + camera_matrix(1,2));
210 |
211 | centroid2D = pixel_position;
212 |
213 |
214 | distance_x = abs(YoloCenterPointX - centroid2D(0));
215 | std::cout << "YOlo centerx " << YoloCenterPointX << std::endl;
216 | std::cout << "controidx " << centroid2D(0) << std::endl;
217 | std::cout << "The distance in x axis: " << distance_x << std::endl;
218 | distance_y = abs(YoloCenterPointY - centroid2D(1));
219 | std::cout << "The distance in y axis: " << distance_y << std::endl;
220 | EuclideanDistance = sqrt(pow(distance_x, 2) + pow(distance_y, 2));
221 | std::cout << "The aggregated distance: " << EuclideanDistance << std::endl;
222 |
223 | if(EuclideanDistance < threshold){
224 | pcl::toPCLPointCloud2 (*cloud_cluster, outputPCL);
225 | pcl_conversions::fromPCL(outputPCL, output);
226 | output.header.frame_id = "/camera_rgb_optical_frame";
227 |
228 | ros::Rate loop_rate(1);
229 | while(ros::ok()){
230 | pub.publish(output);
231 | ros::spinOnce();
232 | loop_rate.sleep();
233 | }
234 |
235 | }
236 |
237 | //centroid2D = point3Dto_pixel(centroid3D, camera_matrix);
238 |
239 |
240 | }
241 | endCallback1++;
242 |
243 | }
244 | }
245 |
246 |
247 |
248 |
249 | int main (int argc, char** argv){
250 | // Initialize ROS
251 | ros::init (argc, argv, "my_pcl_tutorial");
252 | ros::NodeHandle nh;
253 | ros::NodeHandle m_nh;
254 |
255 | //subscribe to the pointcloud
256 | ros::Subscriber sub = nh.subscribe ("/camera_remote/depth_registered/points", 1, cloud_cb);
257 | //subscribe to the bouding boxes from darknet_ros
258 | ros::Subscriber object_detection = m_nh.subscribe("/darknet_ros/bounding_boxes", 1, cloud_cb_2);
259 |
260 | pub = nh.advertise ("output_filtered_cloud", 10);
261 |
262 | ros::spin();
263 |
264 |
265 |
266 |
267 | }
268 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/config/aubo_i5.srdf:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
--------------------------------------------------------------------------------
/passthrough.cpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 |
4 | // PCL specific includes
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 | #include
19 | #include
20 | #include
21 | #include
22 | #include
23 | #include
24 | #include
25 | #include
26 | #include
27 | #include
28 | #include
29 | #include
30 | #include
31 | #include
32 | #include
33 |
34 | #include
35 | //using namespace message_filters;
36 |
ros::Publisher pub;  // publishes the selected object's point-cloud cluster
// Min and max pixel coordinates of the bounding box chosen from the YOLO
// detections, plus the box's pixel-space center.
int minX;
int minY;
int maxX;
int maxY;
int YoloCenterPointX;
int YoloCenterPointY;
// Latch flags so each callback body runs only once (0 = not yet run).
int endCallback2 = 0;
int endCallback1 = 0;
48 |
49 | //callback function to get the bounding boxe coordinates
50 | void cloud_cb_2(const darknet_ros_msgs::BoundingBoxesConstPtr& msg)
51 | {
52 |
53 | if(endCallback2 == 0){
54 | //gets user input to choose what object they want
55 | std::string my_choice = "";
56 | std::cout << "Choose what object you want to grasp: ";
57 | std::cin >> my_choice;
58 | std::cout << std::endl;
59 |
60 |
61 | for(int i = 0; i < 5; i++){
62 |
63 | if(msg->bounding_boxes[i].Class == my_choice)
64 | {
65 |
66 |
67 | gets the minx and y and max x and y from the darknet_ros topic
68 | minX = msg->bounding_boxes[i].xmin;
69 | minY = msg->bounding_boxes[i].ymin;
70 | maxX = msg->bounding_boxes[i].xmax;
71 | maxY = msg->bounding_boxes[i].ymax;
72 |
73 | std::cout << "bounding box_boxes: " << msg->bounding_boxes[i].Class << std::endl;
74 | std::cout << "bounding box minX" << minX << std::endl;
75 | std::cout << "bounding box minY" << minY << std::endl;
76 | std::cout << "bounding box maxX" << maxX << std::endl;
77 | std::cout << "bounding box maxY" << maxY << std::endl;
78 |
79 | //Finds the center point of x and y.
80 | YoloCenterPointX = (maxX + minX)/2;
81 | YoloCenterPointY = (maxY + minY)/2;
82 |
83 |
84 | }
85 |
86 | }
87 | endCallback2 = 1;
88 | }
89 |
90 |
91 | }
92 |
93 | //callback function to get the pointcloud2 from the rostopic of /camera_remote/depth_registered/points
94 | void cloud_cb (const sensor_msgs::PointCloud2ConstPtr& cloud_msg){
95 | if(endCallback1 == 0){
96 |
97 | // Container for original & filtered data
98 | pcl::PCLPointCloud2* cloud = new pcl::PCLPointCloud2;
99 | pcl::PCLPointCloud2ConstPtr cloudPtr(cloud);
100 | pcl::PCLPointCloud2* cloud_filtered = new pcl::PCLPointCloud2;
101 | pcl::PCLPointCloud2Ptr cloudFilteredPtr (cloud_filtered);
102 |
103 |
104 | // Convert to PCL data type
105 | pcl_conversions::toPCL(*cloud_msg, *cloud);
106 |
107 |
108 | // Perform voxel grid downsampling filtering
109 | pcl::VoxelGrid sor;
110 | sor.setInputCloud (cloudPtr);
111 | sor.setLeafSize (0.01f, 0.01f, 0.01f);
112 | sor.filter (*cloudFilteredPtr);
113 |
114 |
115 | pcl::PointCloud *xyz_cloud = new pcl::PointCloud;
116 | pcl::PointCloud::Ptr xyzCloudPtr (xyz_cloud); // need a boost shared pointer for pcl function inputs
117 |
118 | // convert the pcl::PointCloud2 tpye to pcl::PointCloud
119 | pcl::fromPCLPointCloud2(*cloudFilteredPtr, *xyzCloudPtr);
120 |
121 |
122 | //perform passthrough filtering to remove table leg
123 |
124 | // create a pcl object to hold the passthrough filtered results
125 | pcl::PointCloud *xyz_cloud_filtered = new pcl::PointCloud;
126 | pcl::PointCloud::Ptr xyzCloudPtrFiltered (xyz_cloud_filtered);
127 |
128 | // Create the filtering object
129 | pcl::PassThrough pass;
130 | pass.setInputCloud (xyzCloudPtr);
131 | pass.setFilterFieldName ("z");
132 | pass.setFilterLimits (.78, 1.1);
133 | //pass.setFilterLimitsNegative (true);
134 | pass.filter (*xyzCloudPtrFiltered);
135 |
136 |
137 | // create a pcl object to hold the ransac filtered results
138 | pcl::PointCloud *xyz_cloud_ransac_filtered = new pcl::PointCloud;
139 | pcl::PointCloud::Ptr xyzCloudPtrRansacFiltered (xyz_cloud_ransac_filtered);
140 |
141 |
142 | // perform ransac planar filtration to remove table top
143 | pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients);
144 | pcl::PointIndices::Ptr inliers (new pcl::PointIndices);
145 | // Create the segmentation object
146 | pcl::SACSegmentation seg1;
147 | // Optional
148 | seg1.setOptimizeCoefficients (true);
149 | // Mandatory
150 | seg1.setModelType (pcl::SACMODEL_PLANE);
151 | seg1.setMethodType (pcl::SAC_RANSAC);
152 | seg1.setDistanceThreshold (0.005);
153 |
154 | seg1.setInputCloud (xyzCloudPtrFiltered);
155 | seg1.segment (*inliers, *coefficients);
156 |
157 |
158 | // Create the filtering object
159 | pcl::ExtractIndices extract;
160 |
161 | //extract.setInputCloud (xyzCloudPtrFiltered);
162 | extract.setInputCloud (xyzCloudPtrFiltered);
163 | extract.setIndices (inliers);
164 | extract.setNegative (true);
165 | extract.filter (*xyzCloudPtrRansacFiltered);
166 |
167 |
168 |
169 | // perform euclidean cluster segmentation to seporate individual objects
170 |
171 | // Create the KdTree object for the search method of the extraction
172 | pcl::search::KdTree::Ptr tree (new pcl::search::KdTree);
173 | tree->setInputCloud (xyzCloudPtrRansacFiltered);
174 |
175 | // create the extraction object for the clusters
176 | std::vector cluster_indices;
177 | pcl::EuclideanClusterExtraction ec;
178 | // specify euclidean cluster parameters
179 | ec.setClusterTolerance (0.02); // 2cm
180 | ec.setMinClusterSize (100);
181 | ec.setMaxClusterSize (25000);
182 | ec.setSearchMethod (tree);
183 | ec.setInputCloud (xyzCloudPtrRansacFiltered);
184 | // exctract the indices pertaining to each cluster and store in a vector of pcl::PointIndices
185 | ec.extract (cluster_indices);
186 | sensor_msgs::PointCloud2 output;
187 |
188 |
189 | pcl::PCLPointCloud2 outputPCL;
190 |
191 | int EuclideanDistance, distance_x, distance_y;
192 | int threshold = 30;
193 |
194 | pcl::PointCloud::Ptr cloud_cluster (new pcl::PointCloud);
195 | for (std::vector::const_iterator it = cluster_indices.begin (); it != cluster_indices.end (); ++it)
196 | {
197 | Eigen::Vector4f centroid3D;
198 | Eigen::Vector2i centroid2D;
199 | Eigen::Matrix3f camera_matrix;
200 | //camera matrix that was created by the intrisic camera calibration step. This will be needed to translate from 3D centroid to 2D centroid
201 | camera_matrix << 547.471175, 0.000000, 313.045026, 0.000000, 547.590335, 237.016225, 0.000000, 0.000000, 1.000000;
202 |
203 | //puts the clusters into cloud_cluster
204 | for (std::vector::const_iterator pit = it->indices.begin (); pit != it->indices.end (); ++pit)
205 | {
206 | cloud_cluster->points.push_back (xyzCloudPtrRansacFiltered->points[*pit]);
207 | }
208 | cloud_cluster->width = cloud_cluster->points.size ();
209 | cloud_cluster->height = 1;
210 | cloud_cluster->is_dense = true;
211 | //std::cout << "PointCloud representing the Cluster: " << cloud_cluster->points.size () << " data points." << std::endl;
212 | //a pcl funciton to comput the 3D centroid and save it as centroid3D
213 | pcl::compute3DCentroid(*xyzCloudPtrRansacFiltered, *it, centroid3D);
214 |
215 | Eigen::Vector2i pixel_position;
216 | //next two lines convert centroid3D to pixel cordinates, and saves into variable pixel_position
217 | pixel_position(0) = (int)(centroid3D(0)*camera_matrix(0,0)/centroid3D(2) + camera_matrix(0,2));
218 | pixel_position(1) = (int)(centroid3D(1)*camera_matrix(1,1)/centroid3D(2) + camera_matrix(1,2));
219 |
220 | centroid2D = pixel_position;
221 |
222 | //This is the calculation to find the Euclidean Distance between centroids and yolo bounding box center points
223 | distance_x = abs(YoloCenterPointX - centroid2D(0));
224 | std::cout << "YOlo centerx " << YoloCenterPointX << std::endl;
225 | std::cout << "controidx " << centroid2D(0) << std::endl;
226 | std::cout << "The distance in x axis: " << distance_x << std::endl;
227 | distance_y = abs(YoloCenterPointY - centroid2D(1));
228 | std::cout << "The distance in y axis: " << distance_y << std::endl;
229 | EuclideanDistance = sqrt(pow(distance_x, 2) + pow(distance_y, 2));
230 | std::cout << "The aggregated distance: " << EuclideanDistance << std::endl;
231 | //if object euclidean distance is less then threshhold the cluster will be published as a rostopic that we can now visualize in rviz.
232 | if(EuclideanDistance < threshold){
233 | pcl::toPCLPointCloud2 (*cloud_cluster, outputPCL);
234 | pcl_conversions::fromPCL(outputPCL, output);
235 | output.header.frame_id = "/camera_rgb_optical_frame";
236 |
237 | ros::Rate loop_rate(1);
238 | while(ros::ok()){
239 | pub.publish(output);
240 | ros::spinOnce();
241 | loop_rate.sleep();
242 | }
243 |
244 | }
245 |
246 | //centroid2D = point3Dto_pixel(centroid3D, camera_matrix);
247 |
248 |
249 | }
250 | //end of call back function
251 | endCallback1++;
252 |
253 | }
254 | }
255 |
256 |
257 |
258 |
259 | int main (int argc, char** argv){
260 | // Initialize ROS
261 | ros::init (argc, argv, "my_pcl_tutorial");
262 | ros::NodeHandle nh;
263 | ros::NodeHandle m_nh;
264 |
265 | //subscribe to the pointcloud
266 | ros::Subscriber sub = nh.subscribe ("/camera_remote/depth_registered/points", 1, cloud_cb);
267 | //subscribe to the bouding boxes from darknet_ros
268 | ros::Subscriber object_detection = m_nh.subscribe("/darknet_ros/bounding_boxes", 1, cloud_cb_2);
269 | //publishes the filterd point cloud clusterd of the selected object and publish is as rostopic /output_filtered_cloud.
270 | pub = nh.advertise ("output_filtered_cloud", 10);
271 |
272 | ros::spin();
273 |
274 |
275 |
276 |
277 | }
278 |
--------------------------------------------------------------------------------
/my_aubo_i5_robot_config/launch/moveit.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 84
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /MotionPlanning1
9 | - /TF1
10 | - /TF1/Frames1
11 | Splitter Ratio: 0.742560029
12 | Tree Height: 403
13 | - Class: rviz/Help
14 | Name: Help
15 | - Class: rviz/Views
16 | Expanded:
17 | - /Current View1
18 | Name: Views
19 | Splitter Ratio: 0.5
20 | Toolbars:
21 | toolButtonStyle: 2
22 | Visualization Manager:
23 | Class: ""
24 | Displays:
25 | - Alpha: 0.5
26 | Cell Size: 1
27 | Class: rviz/Grid
28 | Color: 160; 160; 164
29 | Enabled: true
30 | Line Style:
31 | Line Width: 0.0299999993
32 | Value: Lines
33 | Name: Grid
34 | Normal Cell Count: 0
35 | Offset:
36 | X: 0
37 | Y: 0
38 | Z: 0
39 | Plane: XY
40 | Plane Cell Count: 10
41 | Reference Frame:
42 | Value: true
43 | - Acceleration_Scaling_Factor: 1
44 | Class: moveit_rviz_plugin/MotionPlanning
45 | Enabled: true
46 | Move Group Namespace: ""
47 | MoveIt_Allow_Approximate_IK: false
48 | MoveIt_Allow_External_Program: false
49 | MoveIt_Allow_Replanning: false
50 | MoveIt_Allow_Sensor_Positioning: false
51 | MoveIt_Goal_Tolerance: 0
52 | MoveIt_Planning_Attempts: 10
53 | MoveIt_Planning_Time: 5
54 | MoveIt_Use_Constraint_Aware_IK: true
55 | MoveIt_Warehouse_Host: 127.0.0.1
56 | MoveIt_Warehouse_Port: 33829
57 | MoveIt_Workspace:
58 | Center:
59 | X: 0
60 | Y: 0
61 | Z: 0
62 | Size:
63 | X: 2
64 | Y: 2
65 | Z: 2
66 | Name: MotionPlanning
67 | Planned Path:
68 | Color Enabled: false
69 | Interrupt Display: false
70 | Links:
71 | All Links Enabled: true
72 | Expand Joint Details: false
73 | Expand Link Details: false
74 | Expand Tree: false
75 | Link Tree Style: Links in Alphabetic Order
76 | base_link:
77 | Alpha: 1
78 | Show Axes: false
79 | Show Trail: false
80 | Value: true
81 | foreArm_Link:
82 | Alpha: 1
83 | Show Axes: false
84 | Show Trail: false
85 | Value: true
86 | gripper_base_link:
87 | Alpha: 1
88 | Show Axes: false
89 | Show Trail: false
90 | Value: true
91 | gripper_finger1_finger_link:
92 | Alpha: 1
93 | Show Axes: false
94 | Show Trail: false
95 | Value: true
96 | gripper_finger1_finger_tip_link:
97 | Alpha: 1
98 | Show Axes: false
99 | Show Trail: false
100 | Value: true
101 | gripper_finger1_inner_knuckle_link:
102 | Alpha: 1
103 | Show Axes: false
104 | Show Trail: false
105 | Value: true
106 | gripper_finger1_knuckle_link:
107 | Alpha: 1
108 | Show Axes: false
109 | Show Trail: false
110 | Value: true
111 | gripper_finger2_finger_link:
112 | Alpha: 1
113 | Show Axes: false
114 | Show Trail: false
115 | Value: true
116 | gripper_finger2_finger_tip_link:
117 | Alpha: 1
118 | Show Axes: false
119 | Show Trail: false
120 | Value: true
121 | gripper_finger2_inner_knuckle_link:
122 | Alpha: 1
123 | Show Axes: false
124 | Show Trail: false
125 | Value: true
126 | gripper_finger2_knuckle_link:
127 | Alpha: 1
128 | Show Axes: false
129 | Show Trail: false
130 | Value: true
131 | pedestal_Link:
132 | Alpha: 1
133 | Show Axes: false
134 | Show Trail: false
135 | Value: true
136 | shoulder_Link:
137 | Alpha: 1
138 | Show Axes: false
139 | Show Trail: false
140 | Value: true
141 | upperArm_Link:
142 | Alpha: 1
143 | Show Axes: false
144 | Show Trail: false
145 | Value: true
146 | world:
147 | Alpha: 1
148 | Show Axes: false
149 | Show Trail: false
150 | wrist1_Link:
151 | Alpha: 1
152 | Show Axes: false
153 | Show Trail: false
154 | Value: true
155 | wrist2_Link:
156 | Alpha: 1
157 | Show Axes: false
158 | Show Trail: false
159 | Value: true
160 | wrist3_Link:
161 | Alpha: 1
162 | Show Axes: false
163 | Show Trail: false
164 | Value: true
165 | Loop Animation: true
166 | Robot Alpha: 0.5
167 | Robot Color: 150; 50; 150
168 | Show Robot Collision: false
169 | Show Robot Visual: true
170 | Show Trail: false
171 | State Display Time: 0.05 s
172 | Trail Step Size: 1
173 | Trajectory Topic: move_group/display_planned_path
174 | Planning Metrics:
175 | Payload: 1
176 | Show Joint Torques: false
177 | Show Manipulability: false
178 | Show Manipulability Index: false
179 | Show Weight Limit: false
180 | TextHeight: 0.0799999982
181 | Planning Request:
182 | Colliding Link Color: 255; 0; 0
183 | Goal State Alpha: 1
184 | Goal State Color: 250; 128; 0
185 | Interactive Marker Size: 0
186 | Joint Violation Color: 255; 0; 255
187 | Planning Group: aubo_arm
188 | Query Goal State: true
189 | Query Start State: false
190 | Show Workspace: false
191 | Start State Alpha: 1
192 | Start State Color: 0; 255; 0
193 | Planning Scene Topic: move_group/monitored_planning_scene
194 | Robot Description: robot_description
195 | Scene Geometry:
196 | Scene Alpha: 1
197 | Scene Color: 50; 230; 50
198 | Scene Display Time: 0.200000003
199 | Show Scene Geometry: true
200 | Voxel Coloring: Z-Axis
201 | Voxel Rendering: Occupied Voxels
202 | Scene Robot:
203 | Attached Body Color: 150; 50; 150
204 | Links:
205 | All Links Enabled: true
206 | Expand Joint Details: false
207 | Expand Link Details: false
208 | Expand Tree: false
209 | Link Tree Style: Links in Alphabetic Order
210 | base_link:
211 | Alpha: 1
212 | Show Axes: false
213 | Show Trail: false
214 | Value: true
215 | foreArm_Link:
216 | Alpha: 1
217 | Show Axes: false
218 | Show Trail: false
219 | Value: true
220 | gripper_base_link:
221 | Alpha: 1
222 | Show Axes: false
223 | Show Trail: false
224 | Value: true
225 | gripper_finger1_finger_link:
226 | Alpha: 1
227 | Show Axes: false
228 | Show Trail: false
229 | Value: true
230 | gripper_finger1_finger_tip_link:
231 | Alpha: 1
232 | Show Axes: false
233 | Show Trail: false
234 | Value: true
235 | gripper_finger1_inner_knuckle_link:
236 | Alpha: 1
237 | Show Axes: false
238 | Show Trail: false
239 | Value: true
240 | gripper_finger1_knuckle_link:
241 | Alpha: 1
242 | Show Axes: false
243 | Show Trail: false
244 | Value: true
245 | gripper_finger2_finger_link:
246 | Alpha: 1
247 | Show Axes: false
248 | Show Trail: false
249 | Value: true
250 | gripper_finger2_finger_tip_link:
251 | Alpha: 1
252 | Show Axes: false
253 | Show Trail: false
254 | Value: true
255 | gripper_finger2_inner_knuckle_link:
256 | Alpha: 1
257 | Show Axes: false
258 | Show Trail: false
259 | Value: true
260 | gripper_finger2_knuckle_link:
261 | Alpha: 1
262 | Show Axes: false
263 | Show Trail: false
264 | Value: true
265 | pedestal_Link:
266 | Alpha: 1
267 | Show Axes: false
268 | Show Trail: false
269 | Value: true
270 | shoulder_Link:
271 | Alpha: 1
272 | Show Axes: false
273 | Show Trail: false
274 | Value: true
275 | upperArm_Link:
276 | Alpha: 1
277 | Show Axes: false
278 | Show Trail: false
279 | Value: true
280 | world:
281 | Alpha: 1
282 | Show Axes: false
283 | Show Trail: false
284 | wrist1_Link:
285 | Alpha: 1
286 | Show Axes: false
287 | Show Trail: false
288 | Value: true
289 | wrist2_Link:
290 | Alpha: 1
291 | Show Axes: false
292 | Show Trail: false
293 | Value: true
294 | wrist3_Link:
295 | Alpha: 1
296 | Show Axes: false
297 | Show Trail: false
298 | Value: true
299 | Robot Alpha: 0.5
300 | Show Robot Collision: false
301 | Show Robot Visual: true
302 | Value: true
303 | Velocity_Scaling_Factor: 1
304 | - Class: rviz/TF
305 | Enabled: true
306 | Frame Timeout: 15
307 | Frames:
308 | All Enabled: false
309 | base_link:
310 | Value: true
311 | base_link_calculated:
312 | Value: false
313 | camera_depth_frame:
314 | Value: false
315 | camera_depth_optical_frame:
316 | Value: false
317 | camera_link:
318 | Value: false
319 | camera_rgb_frame:
320 | Value: false
321 | camera_rgb_optical_frame:
322 | Value: true
323 | foreArm_Link:
324 | Value: false
325 | grasp_pose_frame:
326 | Value: true
327 | gripper_base_link:
328 | Value: false
329 | gripper_finger1_finger_link:
330 | Value: false
331 | gripper_finger1_finger_tip_link:
332 | Value: false
333 | gripper_finger1_inner_knuckle_link:
334 | Value: false
335 | gripper_finger1_knuckle_link:
336 | Value: false
337 | gripper_finger2_finger_link:
338 | Value: false
339 | gripper_finger2_finger_tip_link:
340 | Value: false
341 | gripper_finger2_inner_knuckle_link:
342 | Value: false
343 | gripper_finger2_knuckle_link:
344 | Value: false
345 | pedestal_Link:
346 | Value: false
347 | shoulder_Link:
348 | Value: false
349 | upperArm_Link:
350 | Value: false
351 | world:
352 | Value: false
353 | wrist1_Link:
354 | Value: false
355 | wrist2_Link:
356 | Value: false
357 | wrist3_Link:
358 | Value: false
359 | Marker Scale: 1
360 | Name: TF
361 | Show Arrows: true
362 | Show Axes: true
363 | Show Names: true
364 | Tree:
365 | world:
366 | pedestal_Link:
367 | base_link:
368 | camera_rgb_optical_frame:
369 | base_link_calculated:
370 | {}
371 | grasp_pose_frame:
372 | {}
373 | shoulder_Link:
374 | upperArm_Link:
375 | foreArm_Link:
376 | wrist1_Link:
377 | wrist2_Link:
378 | wrist3_Link:
379 | gripper_base_link:
380 | gripper_finger1_inner_knuckle_link:
381 | gripper_finger1_finger_tip_link:
382 | {}
383 | gripper_finger1_knuckle_link:
384 | gripper_finger1_finger_link:
385 | {}
386 | gripper_finger2_inner_knuckle_link:
387 | gripper_finger2_finger_tip_link:
388 | {}
389 | gripper_finger2_knuckle_link:
390 | gripper_finger2_finger_link:
391 | {}
392 | Update Interval: 0
393 | Value: true
394 | - Alpha: 1
395 | Autocompute Intensity Bounds: true
396 | Autocompute Value Bounds:
397 | Max Value: 10
398 | Min Value: -10
399 | Value: true
400 | Axis: Z
401 | Channel Name: intensity
402 | Class: rviz/PointCloud2
403 | Color: 255; 255; 255
404 | Color Transformer: RGB8
405 | Decay Time: 0
406 | Enabled: true
407 | Invert Rainbow: false
408 | Max Color: 255; 255; 255
409 | Max Intensity: 4096
410 | Min Color: 0; 0; 0
411 | Min Intensity: 0
412 | Name: PointCloud2
413 | Position Transformer: XYZ
414 | Queue Size: 10
415 | Selectable: true
416 | Size (Pixels): 3
417 | Size (m): 0.00999999978
418 | Style: Flat Squares
419 | Topic: /camera_remote/depth_registered_quater/points
420 | Unreliable: false
421 | Use Fixed Frame: true
422 | Use rainbow: true
423 | Value: true
424 | Enabled: true
425 | Global Options:
426 | Background Color: 48; 48; 48
427 | Default Light: true
428 | Fixed Frame: camera_rgb_optical_frame
429 | Frame Rate: 30
430 | Name: root
431 | Tools:
432 | - Class: rviz/Interact
433 | Hide Inactive Objects: true
434 | - Class: rviz/MoveCamera
435 | - Class: rviz/Select
436 | Value: true
437 | Views:
438 | Current:
439 | Class: rviz/XYOrbit
440 | Distance: 5.4215889
441 | Enable Stereo Rendering:
442 | Stereo Eye Separation: 0.0599999987
443 | Stereo Focal Distance: 1
444 | Swap Stereo Eyes: false
445 | Value: false
446 | Focal Point:
447 | X: 0.800676584
448 | Y: 0.129632354
449 | Z: 2.23518001e-07
450 | Focal Shape Fixed Size: true
451 | Focal Shape Size: 0.0500000007
452 | Invert Z Axis: false
453 | Name: Current View
454 | Near Clip Distance: 0.00999999978
455 | Pitch: -1.29979658
456 | Target Frame: world
457 | Value: XYOrbit (rviz)
458 | Yaw: 4.55494547
459 | Saved: ~
460 | Window Geometry:
461 | Displays:
462 | collapsed: false
463 | Height: 1026
464 | Help:
465 | collapsed: false
466 | Hide Left Dock: false
467 | Hide Right Dock: false
468 | MotionPlanning:
469 | collapsed: false
470 | MotionPlanning - Trajectory Slider:
471 | collapsed: false
472 | QMainWindow State: 000000ff00000000fd0000000100000000000002ad000003bcfc0200000007fb000000100044006900730070006c006100790073010000002800000228000000d700fffffffb0000000800480065006c00700000000342000000bb0000007300fffffffb0000000a0056006900650077007300000003b0000000b0000000ad00fffffffb0000000c00430061006d00650072006100000002ff000001610000000000000000fb0000001e004d006f00740069006f006e00200050006c0061006e006e0069006e00670100000374000001890000000000000000fb00000044004d006f00740069006f006e0050006c0061006e006e0069006e00670020002d0020005400720061006a006500630074006f0072007900200053006c00690064006500720000000000ffffffff0000004a00fffffffb0000001c004d006f00740069006f006e0050006c0061006e006e0069006e006701000002560000018e0000018300ffffff0000039c000003bc00000001000000020000000100000002fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
473 | Views:
474 | collapsed: false
475 | Width: 1615
476 | X: 2625
477 | Y: 24
478 |
--------------------------------------------------------------------------------
/aubo_i5_gripper.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
234 |
237 |
239 |
241 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
323 |
324 |
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 |
338 |
339 |
340 |
341 |
342 |
343 |
344 |
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 |
353 |
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 |
369 |
370 |
371 |
372 |
373 |
374 |
375 |
376 |
377 |
378 |
379 |
380 |
381 |
382 |
383 |
384 |
385 |
386 |
387 |
388 |
389 |
390 |
391 |
392 |
393 |
394 |
395 |
396 |
397 |
398 |
399 |
400 |
401 |
402 |
403 |
404 |
405 |
406 |
407 |
408 |
409 |
410 |
411 |
412 |
413 |
414 |
415 |
416 |
417 |
418 |
419 |
420 |
421 |
422 |
423 |
424 |
425 |
426 |
427 |
428 |
429 |
430 |
431 |
432 |
433 |
434 |
435 |
436 |
437 |
438 |
439 |
440 |
441 |
442 |
443 |
444 |
445 |
446 |
447 |
448 |
449 |
450 |
451 |
452 |
453 |
454 |
455 |
456 |
458 |
459 |
460 |
462 |
463 |
464 |
465 |
466 |
469 |
470 |
471 |
472 |
473 |
474 |
475 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Aubo-Deep-Object-Grasping
2 | A way to perform pick and place operations using deep learning algorithms GPD and YOLO.
3 |
4 | ## Required Installations
5 |
6 | The links to where one can install each program are provided.
7 |
8 | - Ubuntu 16.04
9 | - [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu)
10 | - Python 2.7
11 | - [Darknet_ros](https://github.com/leggedrobotics/darknet_ros) -V: YOLOv4
12 | - [OpenCV](https://www.pyimagesearch.com/2016/10/24/ubuntu-16-04-how-to-install-opencv/) >= 3.4
13 | - [GPD](https://github.com/atenpas/gpd)
14 | - [gpd_ros](https://github.com/atenpas/gpd_ros)
15 | - [Moveit](https://moveit.ros.org/install/)
16 | - [PCL](https://www.programmersought.com/article/52981999118/) >= 1.7
17 | - [Aruco_Ros](https://github.com/pal-robotics/aruco_ros)
18 | - [Aubo_I5_Driver](https://github.com/AuboRobot/aubo_robot)
19 | - [Aubo I5 Model](https://github.com/hai-h-nguyen/aubo-i5-full)
20 | ## Hardware
21 | - Asus Xtion Pro RGB-D Camera
22 | - Aubo I5 Robot
23 | - Robotiq 85 Gripper
24 | ## Running the Programs to Pick Up Object
25 |
26 | 1. Run the camera calibration launch files in separate terminals
27 |
28 | ```
29 | roslaunch jsk_pcl_ros openni2_local.launch
30 | roslaunch jsk_pcl_ros openni2_remote.launch
31 | ```
32 |
33 | 2. In a new terminal run object detection through darknet_ros by command:
34 |
35 | ```
36 | roslaunch darknet_ros darknet_ros.launch
37 | ```
38 |
39 | 3. In a new terminal run the program to filter out all point cloud data except the object that was chosen:
40 |
41 | ```
42 | rosrun my_aubo_i5_robot_config pass_through
43 | ```
44 |
45 | 4. To find the transformation of the Aubo Robot base link with respect to the camera frame run:
46 |
47 | ```
48 | roslaunch static_frame.launch
49 | ```
50 |
51 | 5. Launch the Aubo Driver in a new terminal:
52 |
53 | ```
54 | roslaunch aubo_i5_moveit_config moveit_planning_execution.launch sim:=false robot_ip:=your-robot-ip
55 | ```
56 |
57 | 6. Grasp the object by running in a new terminal:
58 |
59 | ```
60 | rosrun my_aubo_i5_robot_config pick_place
61 | ```
62 |
63 | 
64 | 
65 |
66 |
67 | ## Inner Workings of the Algorithm
68 | **Camera Calibration and Running Camera Drivers**
69 |
70 | Camera calibration is a very important step. Follow the steps on how to [calibrate](http://wiki.ros.org/camera_calibration/Tutorials/MonocularCalibration) a Monocular Camera with raw images over ROS (this is called Camera Intrinsic Calibration). Once done with the tutorial provided on the ROS wiki, follow the [Depth Camera Calibration](https://jsk-docs.readthedocs.io/projects/jsk_recognition/en/latest/jsk_pcl_ros/calibration.html) tutorial.
71 |
72 | When the camera calibration is finished we should have a depth error of about 1-2 cm between the ROS topics /camera_remote/depth_registered/points and /camera_remote_uncalibrated/depth_registered/points. The error can be visualized in rviz as such:
73 |
74 | 
75 |
76 | Open two terminals, in one terminal launch `roslaunch jsk_pcl_ros openni2_local.launch`, and in the other `roslaunch jsk_pcl_ros openni2_remote.launch`. This will run the Asus camera driver and load all the calibration files that were found in the camera calibration steps.
77 |
78 | **Running Aubo Driver**
79 |
80 | First make sure that the computer can connect to Aubo Robot. To do this go onto the Aubo ipad, go to network settings->tap ifconfig:
81 |
82 | 
83 |
84 | In the image above, insert the inet addr number as shown in the highlighted section into the space below Network debugging (this is the robot's IP address). Then press ping and it will show 0% packet loss.
85 |
86 | Run the next command in the terminal to see if you can connect via the robot ip (in my case the ip is 192.168.1.101; this number will be different for everybody):
87 |
88 | ```
89 | ping 192.168.1.101
90 | ```
91 |
92 | If it shows 0% packet loss in the terminal then the robot can be connected to via the robot's IP.
93 |
94 | To connect to the real robot through moveit in the same terminal run:
95 |
96 | ```
97 | roslaunch aubo_i5_moveit_config moveit_planning_execution.launch sim:=false robot_ip:=your-robot-ip
98 | ```
99 |
100 | Rviz will run with your robot model and the real robot will move just by dragging the end effector around and clicking Plan and Execute in the Planning tab in Rviz.
101 |
102 | 
103 |
104 |
105 | **Cordinate Frames and Transformations**
106 |
107 | ROS provides a very useful package to keep track of multiple coordinate frames over time called [tf](http://wiki.ros.org/tf). The position of the robot base frame relative to the camera frame will be needed in order to perform any grasping. To do this, Aruco Tags will be used. First, stick the aruco tag onto the end effector of the robot. Have the tag visible to the camera and run in a terminal:
108 | ```
109 | roslaunch aruco_ros single.launch
110 | ```
111 | To view the results of the aruco marker launch file run in a new terminal:
112 | ```
113 | rosrun image_view image_view image:=/aruco_single/result
114 | ```
115 | 
116 |
117 | The aruco marker will create the transformation from the camera frame to the marker frame. This is where the static_transform_publisher will be useful. We can then find the transformation from the end effector frame to the base frame of the robot to get its position relative to the camera frame. To do this, edit the node with name="robot_base_broadcaster" in the file [static_frame.launch](https://github.com/aralab-unr/Aubo-Deep-Object-Grasping/blob/main/static_frame.launch) and insert the x y z rz ry rx where the numbers are in `args`. These values can be found directly from the teach pendant on the Aubo ipad. Then run the command in a new terminal:
118 |
119 | ```
120 | roslaunch static_frame.launch
121 | ```
122 |
123 | One can visualize the transformed frames in Rviz:
124 |
125 | 
126 |
127 | To find the transformation between the camera frame and robot base frame type in the a terminal:
128 |
129 | ```
130 | rosrun tf tf_echo camera_rgb_optical_frame base_link_calculated
131 | ```
132 |
133 | Then create a node for static_transform_publisher in the static_frame.launch file using the values provided by the tf_echo command (translation[x, y, z] and Quaternion[x, y, z, w]):
134 |
135 | ```
136 |
137 | ```
138 | **Object Detection State of the Art (YOLOv4)**
139 |
140 | YOLOv4 will allow the computer to determine what objects are in the point cloud. Darknet Ros is a convenient way to get bounding box pixel coordinates that are published as a rostopic. Subscribing to the topic will allow easy access to continuous bounding boxes even if the object is moved. To run YOLOv4 with ROS, run in a terminal the command:
141 |
142 | ```roslaunch darknet_ros darknet_ros.launch```
143 |
144 | A new window will pop up with bounding boxes around the objects YOLO has been trained on. A full guide on how to train your own custom weights can be found at [this](https://github.com/AlexeyAB/darknet) GitHub repository.
145 |
146 | 
147 |
148 |
149 |
150 | **Cloud Clustering and Centroids**
151 |
152 | The .cpp file [passthrough.cpp](https://github.com/aralab-unr/Aubo-Deep-Object-Grasping/blob/main/passthrough.cpp) filters out the point cloud. It uses a voxel filter, a statistical filter, and a table top filter. Then, in order to obtain only the objects, I used a function called Euclidean Cluster Extraction. This will cluster each object, and we can compute the 3D centroids of each object, then convert the centroids into 2D. The 2D centroids will be compared to the center point of the bounding box from the rostopic (/darknet_ros/bounding_boxes). The object that is selected will have the shortest euclidean distance between the bounding box center point and the 2D centroid.
153 |
154 | 
155 |
156 | To run the point cloud clustering and object selection; in a new terminal run:
157 |
158 | ```
159 | rosrun my_aubo_i5_robot_config pass_through
160 | ```
161 |
162 | This will allow you to enter the object name you want to cluster. To visualize the point cloud in rviz, add the topic /output_filtered_cloud (the topic advertised by pass_through). You should see something similar to the images below.
163 |
164 | 
165 | 
166 |
167 | **GPD_ROS**
168 |
169 | We will be inserting the point cloud that is being published from pass_through.cpp on the rostopic named /output_filtered_cloud.
170 |
171 | Run in a new terminal:
172 |
173 | ```
174 | roslaunch gpd_ros ur5.launch
175 | ```
176 |
177 | This will generate the best grasps on the selected object like in the image below.
178 |
179 | 
180 |
181 |
182 |
183 |
184 | **Picking Up Object**
185 |
186 | With the Aubo Driver running from the command `roslaunch my_aubo_i5_robot_config moveit_planning_execution.launch robot_ip:=` in a new terminal run:
187 |
188 | ```
189 | rosrun my_aubo_i5_robot_config pick_place
190 | ```
191 |
192 | The real robot will then try to go to the grasp pose by finding ik solutions. If it says failed-No IK_solutions found just restart [pick_place](https://github.com/aralab-unr/Aubo-Deep-Object-Grasping/blob/main/pick_place.cpp) until it works. If you have tried to restart it more than five times you may just have an unreachable grasp for your robot and will have to find another grasp that is reachable.
193 |
194 | An error with the robot controller may occur that says:
195 |
196 | ```
197 | [ERROR] [1415027195.809289492]: Controller is taking too long to execute trajectory (the expected upper bound for the trajectory execution was 4.992567 seconds). Stopping trajectory.
198 | ```
199 |
200 | To fix this issue go into the file [trajectory_execution.launch](https://github.com/aralab-unr/Aubo-Deep-Object-Grasping/blob/main/my_aubo_i5_robot_config/launch/trajectory_execution.launch.xml). Here, change `value="1.2"` -> `value="3"` in the parameter:
201 |
202 | ```
203 |
204 | ```
205 | Keep increasing the value until the error doesn't occur and the robot motion will be smoother as well.
206 |
207 | ## Installation Possible Problems and their Solutions
208 |
209 | **Camera Calibration**
210 |
211 | In order to get the ROS driver for the Asus Xtion Camera run the two commands:
212 |
213 | ```
214 | sudo apt-get install ros-kinetic-openni-launch
215 | sudo apt-get install ros-kinetic-openni2-launch
216 | ```
217 |
218 | So that openni can see the Asus camera, uncomment the line `UsbInterface = 2` in the file GlobalDefaults.ini. This file cannot be edited by simply clicking on the file's icon; you will need to open it with root privileges:
219 |
220 | ```
221 | sudo nano /etc/openni/GlobalDefaults.ini
222 | ```
223 |
224 | To connect to the camera run command:
225 |
226 | ```
227 | roslaunch openni2_launch openni2.launch
228 | ```
229 |
230 |
231 | **Aubo_Driver**
232 |
233 | Make sure to install the ros controllers and controller manager:
234 | ```
235 | sudo apt-get install ros-kinetic-ros-controllers
236 | sudo apt-get install ros-kinetic-controller-manager
237 | ```
238 |
239 | One error when trying to launch the Aubo_Driver was:
240 | ```
241 | error while loading shared libraries: libmoveit_robot_trajectory.so.0.9.15: cannot open shared object file: No such file or directory
242 | ```
243 | The above error means that some of the moveit libraries are not seen by the Aubo driver. To fix this run the following commands in the terminal(make sure to run each command in order):
244 |
245 | ```
246 | cd /opt/ros/kinetic/lib
247 |
248 | sudo cp -r libmoveit_robot_trajectory.so.0.9.17 ..
249 |
250 | sudo cp -r libmoveit_robot_model.so.0.9.17 ..
251 |
252 | sudo cp -r libmoveit_robot_state.so.0.9.17 ..
253 |
254 | cd ..
255 |
256 | sudo mv libmoveit_robot_state.so.0.9.17 libmoveit_robot_state.so.0.9.15
257 |
258 | sudo mv libmoveit_robot_model.so.0.9.17 libmoveit_robot_model.so.0.9.15
259 |
260 | sudo mv libmoveit_robot_trajectory.so.0.9.17 libmoveit_robot_trajectory.so.0.9.15
261 |
262 | sudo cp -r libmoveit_robot_state.so.0.9.15 lib/
263 |
264 | sudo cp -r libmoveit_robot_model.so.0.9.15 lib/
265 |
266 | sudo cp -r libmoveit_robot_trajectory.so.0.9.15 lib/
267 | ```
268 |
269 | If you have a different robot from the Aubo Robot used in this repository I suggest looking at the moveit [setup assistant](http://docs.ros.org/en/kinetic/api/moveit_tutorials/html/doc/setup_assistant/setup_assistant_tutorial.html). Here you just enter your robots urdf and follow the instructions, and then you can run the exact same steps but with a different robot arm.
270 |
271 | **Darknet_ros**
272 |
273 | In order to run git clone with the darknet_ros repository you have to create a ssh key for your GitHub account, otherwise you will get an error saying you do not have permission to download the repository.
274 |
275 | I had a little trouble figuring out how to set up the ssh key so here were my steps. Run in a terminal:
276 |
277 | ```
278 | ssh-keygen
279 | ```
280 | Press ENTER to save the key to the default location. It should be at ~/.ssh/id_rsa.pub. Enter a password or not... it's up to you, and then press ENTER again. This should set up the key in the default location. To view the key run:
281 |
282 | ```
283 | cat ~/.ssh/id_rsa.pub
284 | ```
285 |
286 | Copy the generated key from your terminal then go to your GitHub profile. Follow the links: `Settings -> SSH and GPG keys`. Click "New ssh key", paste the ssh key into the key field. Click "Add key". Now you should be able to clone the darknet ros repository with no problems.
287 |
288 | If training a YOLO model on custom objects is necessary follow [this](https://github.com/AlexeyAB/darknet) guide. I ran into an issue where opencv couldn't find my images. To fix this, I ran the program [preprocess.py](https://github.com/aralab-unr/Aubo-Deep-Object-Grasping/blob/main/preprocess.py) with the command:
289 |
290 | ```
291 | python preprocess.py
292 | ```
293 |
294 | This will create a test.txt file with 10% of the images and 90% of the images will be put into a train.txt file. This should fix the error with opencv and also save you a lot of time of copying and pasting hundreds of paths in the train.txt and test.txt files.
295 |
296 | Training will take a very long time to run if done with CPU only. This is why training on GPU is pretty much a requirement. To do this I downloaded CUDA 10.2 and Cudnn 7.6.5. The downloading is a little tricky and a tutorial can be found [here](https://medium.com/@chami.soufiane/installation-guide-for-cuda-10-x-on-ubuntu-16-04-18-04-20-04-3a826a110ff5). The only issue that I ran into when installing CUDA was when trying to run `make` in the directory /cudnn_samples_v7/mnistCUDNN I received the error:
297 |
298 | ```
299 | /usr/local/cuda/include/cuda_runtime_api.h:0110:60: error: 'pGraphExec' was not declared in this scope extern __host__ cudaError_t CUDARTAPI cudaGraphInstantiate{cudaGraphExec_t *pGrapheExec, cudaGraph_t graph, cudaGraphNode_t *pErrorNode, char *pLogBuffer...
300 |
301 | /usr/local/cuda/include/cuda_runtime_api.h:0110:60: error: 'CudaGraph_t' was not declared in this scope extern __host__ cudaError_t CUDARTAPI cudaGraphInstantiate{cudaGraphExec_t *pGrapheExec, cudaGraph_t graph, cudaGraphNode_t *pErrorNode, char *pLogBuffer...
302 |
303 | /usr/local/cuda/include/cuda_runtime_api.h:0110:60: error: 'CudaGraphNode_t' was not declared in this scope extern __host__ cudaError_t CUDARTAPI cudaGraphInstantiate{cudaGraphExec_t *pGrapheExec, cudaGraph_t graph, cudaGraphNode_t *pErrorNode, char *pLogBuffer...
304 | ```
305 | To fix this error open cudnn.h file that should be located in path /usr/include/cudnn.h. To edit run command:
306 | ```
307 | sudo gedit /usr/include/cudnn.h
308 | ```
309 |
310 | Then change the line `#include "driver_types.h"` to `#include <driver_types.h>`. Now make should run fine.
311 |
312 |
313 | You may run into an issue of the opencv version being too low, as python 2.7 is most likely being used. For example, it was showing opencv-dev 3.34. I installed opencv from source; then, in order for python 2.7 to see the correct version of opencv, I had to run the command:
314 |
315 | ```
316 | sudo cp /home/nicholas/opencv-4.5.2/build/lib/cv2.so /opt/ros/kinetic/lib/python2.7/dist-packages/
317 | ```
318 |
319 |
320 |
321 | **GPD and gpd_ros**
322 |
323 | GPD needs to be installed because it is required for gpd_ros as a library. Gpd_ros is just a ros wrapper for GPD.
324 | When installing GPD in order to successfully make the project, go to the CMakeList.txt file and comment out the line:
325 |
326 | ```
327 | set(CMAKE_CXX_FLAGS "-O3 -march=native -mtune=intel -msse4.2 -mavx2 -mfma -flto -fopenmp -fPIC -Wno-deprecated -Wenum-compare -Wno-ignored-attributes -std=c++17")
328 | ```
329 |
330 | Uncomment the line:
331 |
332 | ```
333 | set(CMAKE_CXX_FLAGS "-fopenmp -fPIC -Wno-deprecated -Wenum-compare -Wno-ignored-attributes -std=c++17")
334 | ```
335 |
336 | When making GPD I suggest having pcl version 1.9 installed from source. If done correctly, the CMakeList.txt will find your version of PCL. In the GPD CMakeList.txt I changed the line `find_package(PCL 1.8 REQUIRED)` -> `find_package(PCL 1.9 REQUIRED)`. A common error I got was that the CMakeList couldn't find the `PCLConfig.cmake` file. This can be fixed by adding the following line above the find_package line that was changed: `set(PCL_DIR "/path/to/PCL/1.9.1/build/PCLConfig.cmake")`. You may have to do the same thing with the opencv installation as sometimes the CMakeList will not be able to find the OpenCvConfig.cmake file. This can be fixed by using the same function as used for pcl: `set(OpenCV_DIR "/path/to/opencv/build/OpenCVConfig.cmake")`.
337 |
338 | GPD_ROS can be run using the command `roslaunch gpd_ros ur5.launch`; it will not work right away unfortunately.
339 |
340 | In the ur5.launch file change the cloud_type to the point cloud type that will be used. Then change the cloud_topic to the correct rostopic that is needed to be used. The config_file value needs to be changed from:
341 | 
342 | ```
343 | <param name="config_file" value="/home/andreas/projects/gpd/cfg/ros_eigen_params.cfg" />
344 | ```
345 | To:
346 | ```
347 | <param name="config_file" value="/path/to/gpd/cfg/ros_eigen_params.cfg" />
348 | ```
349 |
350 | The file ros_eigen_params.cfg (path to .cfg file = ~/gpd/cfg/ros_eigen_params.cfg) is where most of the functionality parameters are, such as setting the gripper dimensions, grasp workspaces, how many grasp samples are considered, etc. The only thing that needs to be changed in order for the launch file to work is the line:
351 |
352 | ```
353 | weights_file = /home/andreas/projects/gpd/lenet/15channels/params/
354 | ```
355 |
356 | To:
357 |
358 | ```
359 | weights_file = /path/to/gpd/lenet/15channels/params/
360 | ```
361 | If GPD is running slow on producing grasps for your robot, I suggest lowering the parameter `num_samples = 500` to a number lower than 100 and changing the parameter `workspace_grasps = -1 1 -1 1 -1 1` to fit exactly the areas you need to grasp.
362 |
363 | **Moveit Tutorials**
364 |
365 | Here are some helpful Moveit tutorials:
366 | - [Pick and Place Tutorial](http://docs.ros.org/en/kinetic/api/moveit_tutorials/html/doc/pick_place/pick_place_tutorial.html)
367 | - [URDF and SRDF Tutorials](http://docs.ros.org/en/kinetic/api/moveit_tutorials/html/doc/urdf_srdf/urdf_srdf_tutorial.html)
368 | - [Motion Planning API Tutorial](http://docs.ros.org/en/kinetic/api/moveit_tutorials/html/doc/motion_planning_api/motion_planning_api_tutorial.html)
369 | - [Motion Planning Pipeline Tutorial](http://docs.ros.org/en/kinetic/api/moveit_tutorials/html/doc/controller_configuration/controller_configuration_tutorial.html)
370 | - [Grasp Tutorial](https://ros-planning.github.io/moveit_tutorials/doc/moveit_grasps/moveit_grasps_tutorial.html)
371 | - [Deep Grasp Tutorial](https://ros-planning.github.io/moveit_tutorials/doc/moveit_deep_grasps/moveit_deep_grasps_tutorial.html)
372 |
373 | **ROS Tutorials**
374 |
375 | Here are some helpful ROS tutorials:
376 | - [Different Levels of ROS Tutorials](http://wiki.ros.org/ROS/Tutorials) (For the basics of ROS)
377 | - [Aubo and Gripper Integration Tutorial](http://wiki.ros.org/aubo_robot/Tutorials/How%20to%20combine%20AUBO%20robot%20and%20gripper)
378 | - [ROS Controllers Tutorials](http://wiki.ros.org/ros_control)
379 |
--------------------------------------------------------------------------------