├── CMakeLists.txt
├── README.md
├── assets
│   ├── POVNav_framework.png
│   ├── POVNav_full.png
│   ├── POVNav_planning_image.png
│   └── rqt_params.png
├── cfg
│   └── pov_nav_dyparam.cfg
├── include
│   ├── common.h
│   ├── global_variables.h
│   ├── pid_IBVS.h
│   ├── pov_nav.h
│   └── pov_plan.h
├── launch
│   └── sim_pov_nav.launch
├── package.xml
├── params
│   └── params.yaml
├── rviz
│   └── pov_nav_viz.rviz
└── src
    ├── pid_IBVS.cpp
    ├── pov_nav.cpp
    ├── pov_nav_node.cpp
    └── pov_plan.cpp

/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(pov_nav)
3 | 
4 | set (MALLOC_CHECK_ 2)
5 | set (OpenCV_DIR /usr/lib/x86_64-linux-gnu/cmake/opencv4)
6 | 
7 | ## Find catkin and any catkin packages
8 | find_package(catkin REQUIRED COMPONENTS
9 |   OpenCV REQUIRED
10 |   roscpp
11 |   rospy
12 |   sensor_msgs
13 |   geometry_msgs
14 |   std_msgs
15 |   cv_bridge
16 |   image_transport
17 |   dynamic_reconfigure
18 | )
19 | 
20 | 
21 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
22 | generate_dynamic_reconfigure_options(
23 |   cfg/pov_nav_dyparam.cfg
24 | )
25 | ###################################
26 | ## catkin specific configuration ##
27 | ###################################
28 | ## The catkin_package macro generates cmake config files for your package
29 | ## Declare things to be passed to dependent projects
30 | ## INCLUDE_DIRS: uncomment this if your package contains header files
31 | ## LIBRARIES: libraries you create in this project that dependent projects also need
32 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
33 | ## DEPENDS: system dependencies of this project that dependent projects also need
34 | catkin_package(
35 | # INCLUDE_DIRS include
36 | # LIBRARIES dynamic_tutorials
37 |   CATKIN_DEPENDS dynamic_reconfigure roscpp rospy
38 | # DEPENDS system_lib
39 | )
40 | 
41 | 
42 | ## Build pov_nav
43 | include_directories( include ${catkin_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS} )
44 | 
45 | file(GLOB UTILITY_FILE
46 |   src/pov_nav.cpp
47 |   src/pov_plan.cpp
48 |   src/pid_IBVS.cpp
49 | )
50 | 
51 | add_executable(sim_pov_nav
52 |   ${UTILITY_FILE}
53 |   src/pov_nav_node.cpp)
54 | # make sure configure headers are built before any node using them
55 | add_dependencies(sim_pov_nav ${PROJECT_NAME}_gencfg)
56 | target_link_libraries(sim_pov_nav
57 |   ${catkin_LIBRARIES}
58 |   ${OpenCV_LIBRARIES})
59 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # POVNav: A Pareto-Optimal Mapless Visual Navigator
2 | 
3 | POVNav is a planning and control framework that navigates a robot to a specified goal point or direction using only a monocular camera, without relying on a map. It builds a novel image-based local representation of the environment on top of existing image segmentation methods. Beyond collision-free motion, it also exhibits selective navigation behavior (such as "Do not walk on the grass!") that is not possible with an occupancy-grid representation.
4 | The navigation task is formulated as a visual servoing problem in which the robot directly generates efficient motion from the visual features that represent navigability. Navigability is encoded as a binary image produced by any existing segmentation technique, which makes POVNav a handy plug-and-play suite for any segmentation method.
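As a concrete illustration of this plug-and-play interface, the sketch below (not part of the repository) shows one way to turn a per-pixel class-label image from an arbitrary segmentation model into the 0/255 navigability image that POVNav consumes; the function name and class IDs are placeholders for whatever your segmentation stack provides.
```
// Hypothetical helper: convert a class-label image into POVNav's binary
// navigability image (255 = navigable, 0 = non-navigable).
#include <opencv2/opencv.hpp>

cv::Mat toNavigabilityImage(const cv::Mat& labels)        // CV_8UC1 class IDs
{
    const uchar ROAD = 0, SIDEWALK = 1;                   // placeholder class IDs
    cv::Mat navigability = cv::Mat::zeros(labels.size(), CV_8UC1);
    for (int y = 0; y < labels.rows; ++y)
        for (int x = 0; x < labels.cols; ++x) {
            const uchar c = labels.at<uchar>(y, x);
            if (c == ROAD || c == SIDEWALK)                // the classes you deem navigable
                navigability.at<uchar>(y, x) = 255;        // navigable pixel
        }
    return navigability;                                   // everything else stays 0
}
```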
5 | 
6 | ![POVNav overview](assets/POVNav_full.png)
7 | 
8 | ## This paper has been accepted for publication in ISER 2023.
9 | - **Paper Link:** [https://arxiv.org/abs/2310.14065](https://arxiv.org/abs/2310.14065)
10 | 
11 | ### Cite as:
12 | ```
13 | @article{pushp2023povnav,
14 |   title = {POVNav: A Pareto-Optimal Mapless Visual Navigator},
15 |   author = {Durgakant Pushp and Zheng Chen and Chaomin Luo and Jason M. Gregory and Lantao Liu},
16 |   journal = {arXiv},
17 |   year = {2023},
18 |   eprint = {2310.14065},
19 |   archivePrefix = {arXiv},
20 |   primaryClass = {cs.RO},
21 | }
22 | ```
23 | 
24 | ## Requirements
25 | - Any robot simulator that provides control of its linear and angular velocities (`v, w`), e.g. Jackal, Husky, TurtleBot, etc.
26 |   Use the following instruction to set up the Jackal simulator: `sudo apt-get install ros-<distro>-jackal-simulator ros-<distro>-jackal-desktop ros-<distro>-jackal-navigation` (replace `<distro>` with your ROS distribution, e.g. `noetic`).
27 | - Image segmentation method: use any segmentation method of your choice, define the navigability vector, and produce a binary segmentation image in which navigable segments get the value `255` and non-navigable segments get `0`.
28 | 
29 | ### Required ROS topics
30 | | Subscribe | Description | Example |
31 | | -------------------- | ------------------------------------------------------------------------------ | :-----------------------: |
32 | | ~odom_topic | Name of the topic that provides the robot's state. | "/odom" |
33 | | ~binary_segmented_image | Name of the topic that provides the binary segmented image based on the defined navigability. | "/ground_segmentation" |
34 | 
35 | - The node publishes linear and angular velocities to the `/cmd_vel` topic.
36 | 
37 | # Setup
38 | ## Install and run the dependencies
39 | - Launch the Jackal simulator (or any other UGV simulator) with a RealSense camera on it. Verify that you are able to subscribe to the above-mentioned topics.
40 | ```
41 | export JACKAL_URDF_EXTRAS=/path_to_src/camera_urdf/realsense.urdf.xacro
42 | roslaunch jackal_gazebo empty_world.launch
43 | ```
44 | 
45 | 
46 | - Set up the segmentation module from [depth2surface_normals_seg](https://github.com/Dpushp/depth2surface_normals_seg). Alternatively, you can run any image segmentation algorithm and implement the *Visual Horizon*. Launch the segmentation node.
47 | 
48 | ## Set up the `POVNav` repository
49 | ### How to Build
50 | Clone the repository into your workspace and build it.
51 | ```
52 | mkdir catkin_ws/src -p
53 | cd catkin_ws/src
54 | ```
55 | ```
56 | git clone https://github.com/Dpushp/POVNav.git
57 | ```
58 | ```
59 | cd ..
60 | catkin_make
61 | ```
62 | 
63 | ### Launch the sim_pov_nav node
64 | ```
65 | roslaunch pov_nav sim_pov_nav.launch
66 | ```
67 | ### Open RViz to visualise the topics and give a goal to the robot
68 | ```
69 | rosrun rviz rviz -d path_to_pov_nav_src/rviz/pov_nav_viz.rviz
70 | ```
71 | ![POVNav Planning Image with Gazebo](assets/POVNav_planning_image.png)
72 | 
73 | The robot will navigate to the goal given from RViz.
74 | 
75 | ## Parameter Tuning
76 | Run Dynamic Reconfigure to change the parameters.
77 | ``` 78 | rosrun rqt_reconfigure rqt_reconfigure 79 | ``` 80 | ![Dynamic Reconfigure Window](assets/rqt_params.png) 81 | 82 | -------------------------------------------------------------------------------- /assets/POVNav_framework.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dpushp/POVNav/e304429b98e36a83388a16ee509410ac846da405/assets/POVNav_framework.png -------------------------------------------------------------------------------- /assets/POVNav_full.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dpushp/POVNav/e304429b98e36a83388a16ee509410ac846da405/assets/POVNav_full.png -------------------------------------------------------------------------------- /assets/POVNav_planning_image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dpushp/POVNav/e304429b98e36a83388a16ee509410ac846da405/assets/POVNav_planning_image.png -------------------------------------------------------------------------------- /assets/rqt_params.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dpushp/POVNav/e304429b98e36a83388a16ee509410ac846da405/assets/rqt_params.png -------------------------------------------------------------------------------- /cfg/pov_nav_dyparam.cfg: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | PACKAGE = "pov_nav" 3 | 4 | from dynamic_reconfigure.parameter_generator_catkin import * 5 | 6 | gen = ParameterGenerator() 7 | 8 | # Controller parameters 9 | gen.add("Kp", double_t, 0, "Kp to find V_max", 0.4, 0, 1.0) 10 | gen.add("Kp1", double_t, 0, "Kp to find IBVS", 0.015, 0, 0.050) 11 | gen.add("Kp2", double_t, 0, "Kp to omega", 0.07, 0, 0.1) 12 | gen.add("Kd", double_t, 0, "Kd to find V_max", 0.7, 0, 1.0) 13 | gen.add("Kd1", double_t, 0, "Kd to find IBVS", 0.0011, 0, 0.0050) 14 | gen.add("Kd2", double_t, 0, "Kd to omega", 0.02, 0, 0.1) 15 | gen.add("w1", double_t, 0, "weight assigned to objective 1", 1, 0, 200) 16 | gen.add("w2", double_t, 0, "weight assigned to objective 2", 100, 0, 200) 17 | 18 | exit(gen.generate(PACKAGE, "pov_nav", "pov_nav_dyparam")) 19 | -------------------------------------------------------------------------------- /include/common.h: -------------------------------------------------------------------------------- 1 | #ifndef __COMMON_H 2 | #define __COMMON_H 3 | 4 | #include 5 | 6 | #include 7 | #include 8 | 9 | #include 10 | #include 11 | 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | #include 20 | #include 21 | 22 | #include 23 | 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | 30 | #define C_EARTH (double)6378137.0 31 | #define C_PI (double)3.141592653589793 32 | #define DEG2RAD(DEG) ((DEG) * ((C_PI) / (180.0))) 33 | #define RAD2DEG(RAD) ((RAD) * ((180.0) / (C_PI))) 34 | 35 | #define DEBUG true 36 | 37 | #endif // __COMMON_H -------------------------------------------------------------------------------- /include/global_variables.h: -------------------------------------------------------------------------------- 1 | #ifndef __GLOBAL_VARIABLE_H 2 | #define __GLOBAL_VARIABLE_H 3 | 4 | #include 5 | 6 | /* Goal in World Frame */ 7 | geometry_msgs::PoseStamped goal; 8 | 9 | /* Robot State in World Frame */ 10 | geometry_msgs::PoseStamped 
robot_state; 11 | double robot_heading; 12 | 13 | /* Camera Info */ 14 | sensor_msgs::CameraInfo cam_info; 15 | 16 | #endif // __GLOBAL_VARIABLE_H -------------------------------------------------------------------------------- /include/pid_IBVS.h: -------------------------------------------------------------------------------- 1 | #ifndef _pid_IBVS_H 2 | #define _pid_IBVS_H 3 | 4 | #include 5 | #include 6 | 7 | class PID_IBVS 8 | { 9 | private: 10 | double _Kp; 11 | double _Kd; 12 | double _Ki; 13 | double _alpha; 14 | double _pre_error; 15 | double _integral; 16 | public: 17 | PID_IBVS(); 18 | PID_IBVS(double _alpha, double Kp, double Ki, double Kd); 19 | ~PID_IBVS(); 20 | 21 | // Setters 22 | void set_alpha(double _alpha); 23 | void set_Kp(double Kp); 24 | void set_Ki(double Ki); 25 | void set_Kd(double Kd); 26 | 27 | // Utils 28 | double getControl(double error, double dt); 29 | 30 | }; 31 | 32 | #endif // _pid_IBVS_H -------------------------------------------------------------------------------- /include/pov_nav.h: -------------------------------------------------------------------------------- 1 | #ifndef __POV_NAV_H 2 | #define __POV_NAV_H 3 | 4 | #include 5 | #include 6 | #include 7 | 8 | class POVNav 9 | { 10 | private: 11 | POVPlan optic_path; 12 | cv::Mat rgb_image; 13 | PID_IBVS ibvs; 14 | PID_IBVS ibvs_v; 15 | PID_IBVS ibvs_w; 16 | 17 | image_transport::ImageTransport it_; 18 | image_transport::Subscriber image_sub_; 19 | image_transport::Subscriber rgb_image_sub_; 20 | image_transport::Publisher optic_path_pub_; 21 | ros::Subscriber goal_sub; 22 | ros::Subscriber odom_sub; 23 | 24 | double t_1; // previous time stamp 25 | double start_time; 26 | 27 | // Smooth control params 28 | bool SMOOTH_CTRL = true; 29 | float b_previous_ctrl_v; 30 | float b_previous_ctrl_omega; 31 | float b_MAX_CTRL_RATE; 32 | ros::Publisher ctrlVelYawPub; 33 | 34 | // Dynamic reconfigure parameters 35 | double Kp, Kd; 36 | double Kp1, Kd1; 37 | double Kp2, Kd2; 38 | double w1 = 100, w2 = 1; // default values 39 | 40 | public: 41 | POVNav(ros::NodeHandle nh_); 42 | ~POVNav(); 43 | 44 | /* Callback Functions */ 45 | /* Goal Subscriber : Rviz */ 46 | void goalCallback(const geometry_msgs::PoseStampedConstPtr& msg); 47 | /* Odom Subscriber */ 48 | void odomCallback(const nav_msgs::OdometryConstPtr& msg); 49 | /* Segmented Image subscriber */ 50 | void imageCallback(const sensor_msgs::ImageConstPtr& msg); 51 | /* RGB Image subscriber */ 52 | void RGBimageCallback(const sensor_msgs::ImageConstPtr& msg); 53 | /* Dynamic Reconfigure */ 54 | void updateConfig(double kp, double kd, double kp1, double kd1, double kp2, double kd2, double w1, double w2); 55 | 56 | // Publish control 57 | void publish(float ctrl_v, float ctrl_omega); 58 | }; 59 | 60 | #endif // __POV_NAV_H -------------------------------------------------------------------------------- /include/pov_plan.h: -------------------------------------------------------------------------------- 1 | #ifndef __POV__PLAN_H 2 | #define __POV__PLAN_H 3 | 4 | #include 5 | 6 | class POVPlan 7 | { 8 | private: 9 | cv::Mat image; // Segmented Navigable Space 10 | cv::Mat optic_path_image; 11 | std::vector path; 12 | std::vector lpath; 13 | std::vector rpath; 14 | 15 | /* Proximity feature */ 16 | int obs_dist; 17 | 18 | float goal_distance; 19 | 20 | /* Assigned weights to the object functions. 
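   Here w1 weights objective 1 (deviation from the goal direction) and w2 weights
   objective 2 (progress towards the goal in pixels); get_local_optic_goal() picks the
   navigable pixel that minimizes w1*f1 + w2*f2, and both weights can be tuned at
   runtime via dynamic_reconfigure.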
*/ 21 | double w1, w2; 22 | 23 | bool VISUALIZATION; 24 | 25 | public: 26 | POVPlan(); 27 | ~POVPlan(); 28 | 29 | // features (s1, s3) 30 | cv::Point lp_node, rp_node; 31 | 32 | // Getters 33 | cv::Mat get_image(); 34 | cv::Mat get_optic_path_image(); 35 | std::vector get_path(); 36 | std::vector get_lpath(); 37 | std::vector get_rpath(); 38 | 39 | int get_obs_dist(); 40 | 41 | float get_goal_distance(); 42 | 43 | double get_w1(); 44 | double get_w2(); 45 | 46 | // Setters 47 | void set_image(cv::Mat image); 48 | void set_optic_path_image(cv::Mat optic_path_image); 49 | void set_path(std::vector path); 50 | void set_lpath(std::vector lpath); 51 | void set_rpath(std::vector rpath); 52 | 53 | void set_w1(double w1); 54 | void set_w2(double w2); 55 | 56 | void set_visualization(bool); 57 | 58 | // Objective functions 59 | // Objective 1 : Deviation from goal direction (theta) 60 | float objective1(cv::Point p, cv::Point global_optic_goal, cv::Point start); 61 | // Objective 2 : Distance travelled towards the goal (pixels) 62 | float objective2(cv::Point p, cv::Point global_optic_goal); 63 | 64 | // POG 65 | cv::Point get_global_optic_goal(geometry_msgs::PoseStamped goal, geometry_msgs::PoseStamped robot_state); 66 | // HOG 67 | cv::Point get_local_optic_goal(cv::Point global_optic_goal, cv::Point start); 68 | // Image based path planning 69 | cv::Point find_optic_path(int n, cv::Point global_optic_goal, cv::Point local_optic_goal); 70 | }; 71 | 72 | #endif // __POV__PLAN_H 73 | -------------------------------------------------------------------------------- /launch/sim_pov_nav.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | pov_nav 4 | 2.2.8 5 | POVNav: A Pareto-Optimal Mapless Visual Navigator - is an image based local planner 6 | Durgakant Pushp 7 | Durgakant Pushp 8 | Apache 2.0 9 | 10 | catkin 11 | image_transport 12 | cv_bridge 13 | opencv2 14 | nav_msgs 15 | nodelet 16 | genmsg 17 | roscpp 18 | rospy 19 | sensor_msgs 20 | std_msgs 21 | message_runtime 22 | tf 23 | dynamic_reconfigure 24 | diagnostic_updater 25 | librealsense2 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /params/params.yaml: -------------------------------------------------------------------------------- 1 | # select the frame for publishing the images 2 | frame_id: odom 3 | 4 | # subscribers info 5 | goal_from_rviz_topic: "/move_base_simple/goal" 6 | odom_topic: "/jackal_velocity_controller/odom" 7 | navigability_map_topic: "/ground_segmentation" 8 | rgb_image_topic: "/realsense/color/image_raw" 9 | 10 | # publishers info 11 | pov_plan_image: "/optic_path" 12 | -------------------------------------------------------------------------------- /rviz/pov_nav_viz.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Status1 9 | - /Grid1 10 | - /Odometry1 11 | - /Odometry1/Shape1 12 | Splitter Ratio: 0.5 13 | Tree Height: 363 14 | - Class: rviz/Selection 15 | Name: Selection 16 | - Class: rviz/Tool Properties 17 | Expanded: 18 | - /2D Pose Estimate1 19 | - /2D Nav Goal1 20 | - /Publish Point1 21 | Name: Tool Properties 22 | Splitter 
Ratio: 0.5886790156364441 23 | - Class: rviz/Views 24 | Expanded: 25 | - /Current View1 26 | Name: Views 27 | Splitter Ratio: 0.5 28 | - Class: rviz/Time 29 | Experimental: false 30 | Name: Time 31 | SyncMode: 0 32 | SyncSource: RGB_Image 33 | Preferences: 34 | PromptSaveOnExit: true 35 | Toolbars: 36 | toolButtonStyle: 2 37 | Visualization Manager: 38 | Class: "" 39 | Displays: 40 | - Alpha: 0.5 41 | Cell Size: 1 42 | Class: rviz/Grid 43 | Color: 160; 160; 164 44 | Enabled: true 45 | Line Style: 46 | Line Width: 0.029999999329447746 47 | Value: Lines 48 | Name: Grid 49 | Normal Cell Count: 0 50 | Offset: 51 | X: 0 52 | Y: 0 53 | Z: 0 54 | Plane: XY 55 | Plane Cell Count: 100 56 | Reference Frame: 57 | Value: true 58 | - Alpha: 1 59 | Class: rviz/RobotModel 60 | Collision Enabled: false 61 | Enabled: false 62 | Links: 63 | All Links Enabled: true 64 | Expand Joint Details: false 65 | Expand Link Details: false 66 | Expand Tree: false 67 | Link Tree Style: Links in Alphabetic Order 68 | Name: RobotModel 69 | Robot Description: robot_description 70 | TF Prefix: "" 71 | Update Interval: 0 72 | Value: false 73 | Visual Enabled: true 74 | - Angle Tolerance: 0.10000000149011612 75 | Class: rviz/Odometry 76 | Covariance: 77 | Orientation: 78 | Alpha: 0.5 79 | Color: 255; 255; 127 80 | Color Style: Unique 81 | Frame: Local 82 | Offset: 1 83 | Scale: 1 84 | Value: true 85 | Position: 86 | Alpha: 0.30000001192092896 87 | Color: 204; 51; 204 88 | Scale: 1 89 | Value: true 90 | Value: false 91 | Enabled: true 92 | Keep: 100 93 | Name: Odometry 94 | Position Tolerance: 0.10000000149011612 95 | Queue Size: 10 96 | Shape: 97 | Alpha: 1 98 | Axes Length: 0.20000000298023224 99 | Axes Radius: 0.10000000149011612 100 | Color: 255; 25; 0 101 | Head Length: 0.30000001192092896 102 | Head Radius: 0.10000000149011612 103 | Shaft Length: 1 104 | Shaft Radius: 0.05000000074505806 105 | Value: Axes 106 | Topic: /jackal_velocity_controller/odom 107 | Unreliable: false 108 | Value: true 109 | - Alpha: 1 110 | Axes Length: 1 111 | Axes Radius: 0.10000000149011612 112 | Class: rviz/Pose 113 | Color: 255; 25; 0 114 | Enabled: true 115 | Head Length: 0.30000001192092896 116 | Head Radius: 0.10000000149011612 117 | Name: Pose 118 | Queue Size: 10 119 | Shaft Length: 0.5 120 | Shaft Radius: 0.05000000074505806 121 | Shape: Axes 122 | Topic: /move_base_simple/goal 123 | Unreliable: false 124 | Value: true 125 | - Class: rviz/Image 126 | Enabled: true 127 | Image Topic: /realsense/color/image_raw 128 | Max Value: 1 129 | Median window: 5 130 | Min Value: 0 131 | Name: RGB_Image 132 | Normalize Range: true 133 | Queue Size: 2 134 | Transport Hint: raw 135 | Unreliable: false 136 | Value: true 137 | - Class: rviz/Image 138 | Enabled: true 139 | Image Topic: /ground_segmentation 140 | Max Value: 1 141 | Median window: 5 142 | Min Value: 0 143 | Name: Navigability_Image 144 | Normalize Range: true 145 | Queue Size: 2 146 | Transport Hint: raw 147 | Unreliable: false 148 | Value: true 149 | - Class: rviz/Image 150 | Enabled: true 151 | Image Topic: /normals 152 | Max Value: 1 153 | Median window: 5 154 | Min Value: 0 155 | Name: Surface_Normals_Image 156 | Normalize Range: true 157 | Queue Size: 2 158 | Transport Hint: raw 159 | Unreliable: false 160 | Value: true 161 | - Alpha: 1 162 | Axes Length: 0.4000000059604645 163 | Axes Radius: 0.10000000149011612 164 | Class: rviz/Pose 165 | Color: 255; 25; 0 166 | Enabled: true 167 | Head Length: 0.30000001192092896 168 | Head Radius: 0.10000000149011612 169 | Name: Pose 170 | Queue 
Size: 10 171 | Shaft Length: 1 172 | Shaft Radius: 0.05000000074505806 173 | Shape: Axes 174 | Topic: /subgoal 175 | Unreliable: false 176 | Value: true 177 | - Class: rviz/Image 178 | Enabled: true 179 | Image Topic: /optic_path 180 | Max Value: 1 181 | Median window: 5 182 | Min Value: 0 183 | Name: POVNav_Planning_Image 184 | Normalize Range: true 185 | Queue Size: 2 186 | Transport Hint: raw 187 | Unreliable: false 188 | Value: true 189 | Enabled: true 190 | Global Options: 191 | Background Color: 48; 48; 48 192 | Default Light: true 193 | Fixed Frame: odom 194 | Frame Rate: 30 195 | Name: root 196 | Tools: 197 | - Class: rviz/Interact 198 | Hide Inactive Objects: true 199 | - Class: rviz/MoveCamera 200 | - Class: rviz/Select 201 | - Class: rviz/FocusCamera 202 | - Class: rviz/Measure 203 | - Class: rviz/SetInitialPose 204 | Theta std deviation: 0.2617993950843811 205 | Topic: /initialpose 206 | X std deviation: 0.5 207 | Y std deviation: 0.5 208 | - Class: rviz/SetGoal 209 | Topic: /move_base_simple/goal 210 | - Class: rviz/PublishPoint 211 | Single click: true 212 | Topic: /clicked_point 213 | Value: true 214 | Views: 215 | Current: 216 | Class: rviz/Orbit 217 | Distance: 19.42459487915039 218 | Enable Stereo Rendering: 219 | Stereo Eye Separation: 0.05999999865889549 220 | Stereo Focal Distance: 1 221 | Swap Stereo Eyes: false 222 | Value: false 223 | Field of View: 0.7853981852531433 224 | Focal Point: 225 | X: -0.32339975237846375 226 | Y: 0.1459265798330307 227 | Z: -0.41687288880348206 228 | Focal Shape Fixed Size: true 229 | Focal Shape Size: 0.05000000074505806 230 | Invert Z Axis: false 231 | Name: Current View 232 | Near Clip Distance: 0.009999999776482582 233 | Pitch: 1.5697963237762451 234 | Target Frame: 235 | Yaw: 4.708565711975098 236 | Saved: ~ 237 | Window Geometry: 238 | Displays: 239 | collapsed: true 240 | Height: 1016 241 | Hide Left Dock: true 242 | Hide Right Dock: true 243 | Navigability_Image: 244 | collapsed: false 245 | POVNav_Planning_Image: 246 | collapsed: false 247 | QMainWindow State: 
000000ff00000000fd0000000400000000000001560000020cfc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073000000018b0000020c000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f0000035afc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d0000035a000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000002000007380000014afc0100000005fb00000012005200470042005f0049006d0061006700650100000000000001c70000008300fffffffb0000002a0053007500720066006100630065005f004e006f0072006d0061006c0073005f0049006d00610067006501000001cd000001c6000000d900fffffffb00000024004e0061007600690067006100620069006c006900740079005f0049006d0061006700650100000399000001cd000000b600fffffffb0000002a0050004f0056004e00610076005f0050006c0061006e006e0069006e0067005f0049006d006100670065010000056c000001cc000000e100fffffffb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007380000003efc0100000002fb0000000800540069006d0065010000000000000738000002eb00fffffffb0000000800540069006d00650100000000000004500000000000000000000007380000020a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 248 | RGB_Image: 249 | collapsed: false 250 | Selection: 251 | collapsed: false 252 | Surface_Normals_Image: 253 | collapsed: false 254 | Time: 255 | collapsed: false 256 | Tool Properties: 257 | collapsed: false 258 | Views: 259 | collapsed: true 260 | Width: 1848 261 | X: 72 262 | Y: 27 263 | -------------------------------------------------------------------------------- /src/pid_IBVS.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | // Implementation of above defined class definition 4 | PID_IBVS::PID_IBVS() 5 | { 6 | 7 | } 8 | 9 | PID_IBVS::PID_IBVS(double _alpha, double Kp, double Ki, double Kd) 10 | { 11 | this->_Kp = Kp; 12 | this->_Ki = Ki; 13 | this->_Kd = Kd; 14 | this->_alpha = _alpha; 15 | this->_pre_error = 0; 16 | this->_integral = 0; 17 | } 18 | 19 | PID_IBVS::~PID_IBVS() 20 | { 21 | } 22 | 23 | /*! Setters */ 24 | void PID_IBVS::set_alpha(double _alpha){ 25 | this->_alpha = _alpha; 26 | } 27 | void PID_IBVS::set_Kp(double Kp){ 28 | this->_Kp = Kp; 29 | } 30 | void PID_IBVS::set_Ki(double Ki){ 31 | this->_Ki = Ki; 32 | } 33 | void PID_IBVS::set_Kd(double Kd){ 34 | this->_Kd = Kd; 35 | } 36 | 37 | double PID_IBVS::getControl(double error, double _dt){ 38 | double _intrgral_MAX = 2.0; 39 | 40 | /*! Discard the first value. 
*/ 41 | // Calculate rate of change of error with low pass filter 42 | double e_dot = _alpha*(_pre_error/_dt) + (1 - _alpha)*((error - _pre_error)/_dt); 43 | 44 | // Proportional term 45 | double Pout = _Kp * error; 46 | 47 | // Integral term 48 | _integral += error * _dt; 49 | double Iout = _Ki * _integral; 50 | // Limit integral value 51 | if(Iout > _intrgral_MAX) 52 | Iout = _intrgral_MAX; 53 | 54 | // Derivative term 55 | double Dout = _Kd * e_dot; 56 | 57 | // Calculate total output 58 | double output = Pout + Iout + Dout; 59 | 60 | // Save error to previous error 61 | _pre_error = error; 62 | 63 | // std::cout << "dt: " << _dt << std::endl; 64 | return output; 65 | } 66 | -------------------------------------------------------------------------------- /src/pov_nav.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | POVNav::POVNav(ros::NodeHandle nh_) 5 | :it_(nh_) 6 | { 7 | // Read the topics from the launch file params 8 | std::string goal_from_rviz_topic; 9 | std::string odom_topic; 10 | std::string navigability_map_topic; 11 | std::string rgb_image_topic; 12 | std::string frame_id; 13 | std::string pov_plan_image; 14 | 15 | if (nh_.getParam("/sim_pov_nav/goal_from_rviz_topic", goal_from_rviz_topic) && 16 | nh_.getParam("/sim_pov_nav/odom_topic", odom_topic) && 17 | nh_.getParam("/sim_pov_nav/navigability_map_topic", navigability_map_topic) && 18 | nh_.getParam("/sim_pov_nav/rgb_image_topic", rgb_image_topic) && 19 | nh_.getParam("/sim_pov_nav/frame_id", frame_id) && 20 | nh_.getParam("/sim_pov_nav/pov_plan_image", pov_plan_image)) 21 | { 22 | //this->frame_id = frame_id; 23 | //this->is_realsense = is_realsense; 24 | ROS_INFO("Received all params!"); 25 | } 26 | else 27 | { 28 | ROS_ERROR("Failed to get param!"); 29 | } 30 | 31 | // Subscribe goal from rviz 32 | goal_sub = nh_.subscribe(goal_from_rviz_topic, 1, &POVNav::goalCallback, this); 33 | 34 | // Subscribe robot's odom 35 | odom_sub = nh_.subscribe(odom_topic, 1, &POVNav::odomCallback, this); 36 | // Subscribe to the segmented image 37 | image_sub_ = it_.subscribe(navigability_map_topic, 1, &POVNav::imageCallback, this); 38 | // Subscribe to the rgb image 39 | rgb_image_sub_ = it_.subscribe(rgb_image_topic, 1, &POVNav::RGBimageCallback, this); 40 | 41 | // Optic Path image publisher 42 | optic_path_pub_ = it_.advertise(pov_plan_image, 1); 43 | 44 | // Control publisher 45 | ctrlVelYawPub = nh_.advertise("cmd_vel", 1); 46 | 47 | // Timer 48 | t_1 = ros::Time::now().nsec; 49 | start_time = ros::Time::now().nsec; 50 | 51 | // Smooth control params 52 | SMOOTH_CTRL = true; 53 | b_previous_ctrl_v = 0.0; 54 | b_previous_ctrl_omega = 0.0; 55 | b_MAX_CTRL_RATE = 1; 56 | } 57 | 58 | POVNav::~POVNav() 59 | { 60 | } 61 | 62 | /* Callback Functions */ 63 | /* Goal Subscriber : Rviz */ 64 | void POVNav::goalCallback(const geometry_msgs::PoseStampedConstPtr& msg){ 65 | goal = *msg; 66 | tf::Quaternion q( 67 | msg->pose.orientation.x, 68 | msg->pose.orientation.y, 69 | msg->pose.orientation.z, 70 | msg->pose.orientation.w 71 | ); 72 | 73 | tf::Matrix3x3 m(q); 74 | 75 | double roll, pitch, yaw; 76 | m.getRPY(roll, pitch, yaw); 77 | 78 | if (DEBUG) 79 | { 80 | std::cout << "Goal_position: \n" << goal.pose.position << std::endl; 81 | std::cout << "Goal_orientation: " << RAD2DEG(yaw) << std::endl; 82 | } 83 | } 84 | 85 | /* Odom Subscriber */ 86 | void POVNav::odomCallback(const nav_msgs::OdometryConstPtr& msg){ 87 | robot_state.header = msg->header; 88 | robot_state.pose = 
msg->pose.pose; 89 | 90 | tf::Quaternion q( 91 | msg->pose.pose.orientation.x, 92 | msg->pose.pose.orientation.y, 93 | msg->pose.pose.orientation.z, 94 | msg->pose.pose.orientation.w 95 | ); 96 | 97 | tf::Matrix3x3 m(q); 98 | 99 | double roll, pitch, yaw; 100 | m.getRPY(roll, pitch, yaw); 101 | 102 | robot_heading = yaw; 103 | 104 | if (false) 105 | { 106 | std::cout << "Robot State : \n" << robot_state << std::endl; 107 | } 108 | } 109 | 110 | /* RGB Image subscriber */ 111 | void POVNav::RGBimageCallback(const sensor_msgs::ImageConstPtr& msg){ 112 | cv_bridge::CvImagePtr cv_ptr; 113 | try{ 114 | // Read rgb images from robot simulator 115 | cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::TYPE_8UC3); 116 | } 117 | catch (cv_bridge::Exception& e){ 118 | ROS_ERROR("cv_bridge exception: %s", e.what()); 119 | return; 120 | } 121 | 122 | if (DEBUG) 123 | { 124 | std::cout << "Received image." << std::endl; 125 | } 126 | 127 | /*! Copy image. */ 128 | rgb_image = cv_ptr->image.clone(); 129 | } 130 | 131 | /* Visual Horizon */ 132 | cv::Mat visualHorizon(cv::Mat Ir){ 133 | /* 134 | Implement the logic to get the visual horizon. 135 | 1. Define the navigable and non-navigable classes. 136 | 2. See the visual horizon sub-section in the POVNav paper. 137 | for all pixels in each colunm 138 | implement visual horizon eq. 139 | */ 140 | return Ir; 141 | } 142 | 143 | /* Segmented Image subscriber */ 144 | void POVNav::imageCallback(const sensor_msgs::ImageConstPtr& msg){ 145 | cv_bridge::CvImagePtr cv_ptr; 146 | try{ 147 | // Read depth images from Turtlebot3 gazebo 148 | cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::TYPE_8UC1); 149 | } 150 | catch (cv_bridge::Exception& e){ 151 | ROS_ERROR("cv_bridge exception: %s", e.what()); 152 | return; 153 | } 154 | 155 | if (DEBUG) 156 | { 157 | std::cout << "Received image." << std::endl; 158 | } 159 | 160 | /* Set optic path image for visualization. */ 161 | optic_path.set_optic_path_image(rgb_image); 162 | 163 | /* POVNav : steps */ 164 | /*! 0 :: Get Ir : Copy the binary segmented image. */ 165 | cv::Mat Ir = cv_ptr->image.clone(); 166 | 167 | /*! 1 :: Visual Horizon : Eleminate unreachable navigable segments. */ 168 | Ir = visualHorizon(Ir); 169 | 170 | /*! 2 :: from pov planner - Optic Path Object */ 171 | optic_path.set_image(Ir); 172 | optic_path.set_visualization(true); 173 | 174 | /*! 3 :: POG - Get sense of direction */ 175 | cv::Point global_optic_goal = optic_path.get_global_optic_goal(goal, robot_state); 176 | 177 | /*! 4 :: HOG - Local sense of direction */ 178 | // Start point 179 | cv::Point start; 180 | start.x = Ir.cols/2; 181 | start.y = Ir.rows; 182 | 183 | // Update the weights 184 | optic_path.set_w1(w1); 185 | optic_path.set_w2(w2); 186 | cv::Point local_optic_goal = optic_path.get_local_optic_goal(global_optic_goal, start); 187 | 188 | /*! 5 :: Find Optimized Optic Path & get the first node as guider node */ 189 | int n = 5; // Number of nodes in beam element path 190 | cv::Point guider_node = optic_path.find_optic_path(n, global_optic_goal, local_optic_goal); 191 | 192 | /*! 
6 :: Control - Get the control velocity and yaw */ 193 | // Get feature 194 | double e_alignment_t = atan2(start.x - guider_node.x, start.y - guider_node.y); 195 | double e_proximity_t = optic_path.get_obs_dist() - 50; // 50 is the threshold 196 | double distance_to_goal = optic_path.get_goal_distance(); 197 | 198 | // Control 199 | // Partitioned Visual Servoin 200 | double proximity_feature = e_proximity_t; 201 | double alignment_feature = e_alignment_t; 202 | 203 | // partitioned_visual_servoing(e_alignment_t, e_proximity_t, distance_to_goal); 204 | /*! PID Implementation for Visual Servoing. 205 | * Function Signature: 206 | * PID_visual_servoing::PID_visual_servoing(double _alpha, double Kp, double Ki, double Kd) 207 | * alpha : low pass filter constant 208 | */ 209 | double alpha = 0.8, alpha1 = 0.8, alpha2 = 0.8; 210 | double Ki = 0.0; 211 | double Ki1 = 0.0; 212 | double Ki2 = 0.0; 213 | 214 | // V_max control 215 | ibvs.set_alpha(alpha); 216 | ibvs.set_Kp(Kp); 217 | ibvs.set_Kd(Kd); 218 | ibvs.set_Ki(Ki); 219 | 220 | // proximity control 221 | ibvs_v.set_alpha(alpha1); 222 | ibvs_v.set_Kp(Kp1); 223 | ibvs_v.set_Kd(Kd1); 224 | ibvs_v.set_Ki(Ki1); 225 | 226 | // alignment control 227 | ibvs_w.set_alpha(alpha2); 228 | ibvs_w.set_Kp(Kp2); 229 | ibvs_w.set_Kd(Kd2); 230 | ibvs_w.set_Ki(Ki2); 231 | 232 | double dt = ros::Time::now().toSec() - t_1; 233 | t_1 = ros::Time::now().toSec(); 234 | 235 | double v_max = ibvs.getControl(distance_to_goal, dt); 236 | 237 | double v_proxy = ibvs_v.getControl(proximity_feature, dt); 238 | double w = ibvs_w.getControl(alignment_feature, dt); 239 | 240 | double v = v_max; 241 | 242 | // switch linear control velocity 243 | if (proximity_feature < 0) 244 | { 245 | // Prevent the robot from moving forward 246 | v = v_proxy; 247 | std::cout << "Obstacle detected!" << std::endl; 248 | } 249 | else if (proximity_feature < 20) 250 | { 251 | // Slow down the robot 252 | v = v_proxy; 253 | std::cout << "Slowing down! Velocity : " << v << std::endl; 2*sin(robot_heading + alignment_feature); 254 | } 255 | else 256 | { 257 | // Move forward with maximum velocity 258 | v = v_max; 259 | std::cout << "Moving with maximum velocity! Velocity : " << v << std::endl; 260 | } 261 | 262 | // Publish the control velocity 263 | double goal_tolerance = 0.5; 264 | if (optic_path.get_goal_distance() < goal_tolerance) 265 | { 266 | // Stop the robot 267 | v = 0; 268 | w = 0; 269 | std::cout << "Goal reached!" << std::endl; 270 | } 271 | publish(v, w); 272 | 273 | /* Publish optic_path_image */ 274 | std_msgs::Header header; 275 | header.frame_id = "odom"; 276 | sensor_msgs::ImagePtr optic_path_msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", optic_path.get_optic_path_image()).toImageMsg(); 277 | optic_path_pub_.publish(optic_path_msg); 278 | 279 | } 280 | 281 | // Control publisher 282 | void POVNav::publish(float ctrl_v, float ctrl_omega){ 283 | if(SMOOTH_CTRL){ 284 | /*! Find rate of change of ctrl values. */ 285 | float ctrl_er_v = b_previous_ctrl_v - ctrl_v; 286 | 287 | /*! Limit ctrl values. */ 288 | float speed_limit_v = 0.5; 289 | float speed_limit_omega = 3.14; 290 | if(std::abs(ctrl_v) > speed_limit_v) 291 | ctrl_v = speed_limit_v * (std::abs(ctrl_v)/ctrl_v + 0.000000000001); 292 | if(std::abs(ctrl_omega) > speed_limit_omega) 293 | ctrl_omega = speed_limit_omega * (std::abs(ctrl_omega)/ctrl_omega + 0.000000000001); 294 | } 295 | 296 | if(true){ 297 | /*! 
INFO ctrl command.*/ 298 | ROS_INFO("ctrl_vf : %f", ctrl_v); 299 | ROS_INFO("ctrl_wf : %f", ctrl_omega); 300 | } 301 | 302 | /*! Populate the sensor_msgs::Joy with the current value. */ 303 | geometry_msgs::Twist control_v_omega; 304 | control_v_omega.linear.x = ctrl_v; 305 | control_v_omega.angular.z = ctrl_omega; 306 | 307 | /*! Publish to the Velocity Yaw rate topic. */ 308 | ctrlVelYawPub.publish(control_v_omega); 309 | 310 | if(SMOOTH_CTRL){ 311 | /*! Update previous_ctrl. */ 312 | b_previous_ctrl_v = ctrl_v; 313 | b_previous_ctrl_omega = ctrl_omega; 314 | } 315 | } 316 | 317 | // Update configuration 318 | void POVNav::updateConfig(double _Kp, double _Kd, 319 | double _Kp1, double _Kd1, 320 | double _Kp2, double _Kd2, 321 | double _w1, double _w2){ 322 | // Dynamic reconfigure parameters 323 | Kp = _Kp, Kd = _Kd; 324 | Kp1 = _Kp1, Kd1 = _Kd1; 325 | Kp2 = _Kp2, Kd2 = _Kd2; 326 | w1 = _w1, w2 = _w2; 327 | // ROS_INFO("Reconfigure Request: %f %f %f %f %f %f %f %f", 328 | // Kp, Kp1, Kd2, Kd, Kd1, Kd2, w1, w2); 329 | } -------------------------------------------------------------------------------- /src/pov_nav_node.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | /* DyParams */ 4 | double kp, kp1, kp2, kd, kd1, kd2, w1, w2; 5 | 6 | void callback(pov_nav::pov_nav_dyparamConfig &config, uint32_t level) { 7 | kp = config.Kp; 8 | kp1 = config.Kp1; 9 | kp2 = config.Kp2; 10 | kd = config.Kd; 11 | kd1 = config.Kd1; 12 | kd2 = config.Kd2; 13 | w1 = config.w1; 14 | w2 = config.w2; 15 | } 16 | 17 | int main(int argc, char **argv) { 18 | ros::init(argc, argv, "pov_nav"); 19 | 20 | dynamic_reconfigure::Server server; 21 | dynamic_reconfigure::Server::CallbackType f; 22 | 23 | f = boost::bind(&callback, _1, _2); 24 | server.setCallback(f); 25 | 26 | ros::NodeHandle nh; 27 | POVNav local_navigator(nh); 28 | 29 | while(ros::ok()) { 30 | local_navigator.updateConfig(kp, kd, kp1, kd1, kp2, kd2, w1, w2); 31 | ros::spinOnce(); 32 | } 33 | 34 | return 0; 35 | } 36 | -------------------------------------------------------------------------------- /src/pov_plan.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | POVPlan::POVPlan(){} 4 | POVPlan::~POVPlan(){} 5 | 6 | /* Getters */ 7 | cv::Mat POVPlan::get_image(){ 8 | return image; 9 | } 10 | 11 | cv::Mat POVPlan::get_optic_path_image(){ 12 | return optic_path_image; 13 | } 14 | 15 | std::vector POVPlan::get_path(){ 16 | return path; 17 | } 18 | 19 | std::vector POVPlan::get_lpath(){ 20 | return lpath; 21 | } 22 | 23 | std::vector POVPlan::get_rpath(){ 24 | return rpath; 25 | } 26 | 27 | double POVPlan::get_w1(){ 28 | return w1; 29 | } 30 | 31 | double POVPlan::get_w2(){ 32 | return w2; 33 | } 34 | 35 | int POVPlan::get_obs_dist(){ 36 | return obs_dist; 37 | } 38 | 39 | float POVPlan::get_goal_distance(){ 40 | return goal_distance; 41 | } 42 | 43 | /* setters */ 44 | void POVPlan::set_image(cv::Mat image){ 45 | this->image = image; 46 | } 47 | 48 | void POVPlan::set_optic_path_image(cv::Mat optic_path_image){ 49 | this->optic_path_image = optic_path_image; 50 | } 51 | 52 | void POVPlan::set_path(std::vector path){ 53 | this->path = path; 54 | } 55 | 56 | void POVPlan::set_lpath(std::vector lpath){ 57 | this->lpath = lpath; 58 | } 59 | 60 | void POVPlan::set_rpath(std::vector rpath){ 61 | this->rpath = rpath; 62 | } 63 | 64 | void POVPlan::set_w1(double w1){ 65 | this->w1 = w1; 66 | } 67 | 68 | void POVPlan::set_w2(double w2){ 69 | this->w2 = w2; 70 
| } 71 | 72 | void POVPlan::set_visualization(bool VISUALIZATION){ 73 | this->VISUALIZATION = VISUALIZATION; 74 | } 75 | 76 | /* Objective functions */ 77 | // Objective 1 : Deviation from goal direction (theta) 78 | float POVPlan::objective1(cv::Point p, cv::Point global_optic_goal, cv::Point start){ 79 | float alpha = atan2(start.x - global_optic_goal.x, start.y - global_optic_goal.y); 80 | float beta = atan2(start.x - p.x, start.y - p.y); 81 | float f1 = abs(beta-alpha); 82 | return f1; 83 | } 84 | 85 | 86 | // Objective 2 : Distance travelled towards the goal (pixels) 87 | // Maximization problem converted into minimization by changing the objective function as - 88 | // || global_optic_goal - p ||. 89 | float POVPlan::objective2(cv::Point p, cv::Point global_optic_goal){ 90 | // Ideally : sqrt(pow((global_optic_goal.x - p.x), 2) + pow((global_optic_goal.y - p.y), 2)); 91 | // Here, only y coordinate matters as x represents objective 1. 92 | float f2 = sqrt(pow((global_optic_goal.y - p.y), 2)); 93 | return f2; 94 | } 95 | 96 | 97 | /* Find optic goal */ 98 | // POG - goal direction on the image boundary 99 | cv::Point POVPlan::get_global_optic_goal(geometry_msgs::PoseStamped goal, geometry_msgs::PoseStamped robot_state) 100 | { 101 | /* Get the robot direction in the World Frame */ 102 | float robot_orientation; 103 | tf::Quaternion q( 104 | robot_state.pose.orientation.x, 105 | robot_state.pose.orientation.y, 106 | robot_state.pose.orientation.z, 107 | robot_state.pose.orientation.w 108 | ); 109 | 110 | tf::Matrix3x3 m(q); 111 | 112 | double roll, pitch, yaw; 113 | m.getRPY(roll, pitch, yaw); 114 | robot_orientation = yaw; 115 | 116 | /* Get the goal vector in world frame */ 117 | cv::Mat goal_w = (cv::Mat_(3, 1) << goal.pose.position.x - robot_state.pose.position.x, goal.pose.position.y - robot_state.pose.position.y, goal.pose.position.z - robot_state.pose.position.z); 118 | // Rotation from Frame F1=World to Frame F2=Robot : alpha -> Yaw, beta->pitch, gamma->roll 119 | double alpha = -robot_orientation, beta = DEG2RAD(0), gamma = DEG2RAD(0); 120 | cv::Mat R_F1_F2 = 121 | (cv::Mat_(3, 3) << cos(alpha)*cos(beta), cos(alpha)*sin(beta)*sin(gamma)-sin(alpha)*cos(gamma), cos(alpha)*sin(beta)*cos(gamma)+sin(alpha)*sin(gamma), 122 | sin(alpha)*cos(beta), sin(alpha)*sin(beta)*sin(gamma)+cos(alpha)*cos(gamma), sin(alpha)*sin(beta)*cos(gamma)-cos(alpha)*sin(gamma), 123 | -sin(beta), cos(beta)*sin(gamma), cos(beta)*cos(gamma) 124 | ); 125 | cv::Mat goal_R = R_F1_F2*goal_w; 126 | // Set goal distance 127 | goal_distance = cv::norm(goal_R, cv::NORM_L2); 128 | 129 | /* Get the goal direction in robot's body frame */ 130 | float goal_direction = atan2(goal_R.at(1,0), goal_R.at(0,0)); 131 | // std::cout << "Theta : " << RAD2DEG(goal_direction) << std::endl; 132 | 133 | /* Get goal direction in Image */ 134 | int l = image.cols; 135 | int b = image.rows; 136 | 137 | // Solution of |y| = l/2, |x| = b and y = mx 138 | float m_ = tan(goal_direction); 139 | cv::Point p1(l/(2*m_), l/2), p2(-l/(2*m_), -l/2), p3(b, m_*b), p4(-b, -m_*b); 140 | 141 | // p = min{Pi} 142 | cv::Point min_p1, min_p2; 143 | float d1 = cv::norm(p1), d3 = cv::norm(p3); 144 | if (d1 < d3) 145 | { 146 | min_p1 = p1; 147 | min_p2 = p2; 148 | } 149 | else 150 | { 151 | min_p1 = p3; 152 | min_p2 = p4; 153 | } 154 | 155 | // Pick the final point based on the unit vector along goal direction 156 | cv::Point p; 157 | int n_ = goal_direction/abs(goal_direction); 158 | int n1 = min_p1.y/abs(min_p1.y); 159 | if (n_*n1 > 0) 160 | { 161 | p = 
min_p1; 162 | } 163 | else 164 | { 165 | p = min_p2; 166 | } 167 | 168 | // Apply Heaviside step function 169 | p.x = std::max(0, p.x); 170 | cv::Point global_optic_goal; 171 | global_optic_goal.x = l/2 - p.y; 172 | global_optic_goal.y = b - p.x; 173 | return global_optic_goal; 174 | } 175 | 176 | // HOG - Pareto-optimal sub goal in the image 177 | cv::Point POVPlan::get_local_optic_goal(cv::Point global_optic_goal, cv::Point start){ 178 | // Robot Footprint 179 | int robot_footprint = 20; // unit = pixel 180 | cv::Point optic_goal; 181 | float min_distance = std::numeric_limits::max(); 182 | int _d = std::numeric_limits::max(); 183 | 184 | // Rewrite : By iterating over H_t 185 | for(int x = 1; x < image.cols - 1; ++x){ 186 | for(int y = 1; y < image.rows - 1; ++y){ 187 | if(image.at(y, x) > 0){ 188 | if (image.rows - y < _d) 189 | { 190 | _d = image.rows - y; 191 | } 192 | break; 193 | } 194 | } 195 | } 196 | obs_dist = _d; 197 | 198 | cv::Point p_star(1,1); 199 | float p_star_value = std::numeric_limits::max(); 200 | for(int x = 1; x < image.cols - 1; ++x){ 201 | for(int y = 1; y < image.rows - 1; ++y){ 202 | if(image.at(y, x) > 0 && 203 | image.at(y, x - robot_footprint/2) > 0 && 204 | image.at(y, x + robot_footprint/2) > 0){ 205 | cv::Point p(x, y); 206 | float f1 = objective1(p, global_optic_goal, start); 207 | float f2 = objective2(p, global_optic_goal); 208 | float optimal_value = w1*f1 + w2*f2; 209 | 210 | if(optimal_value < p_star_value){ 211 | p_star_value = optimal_value; 212 | p_star = p; 213 | } 214 | 215 | } 216 | } 217 | } 218 | return p_star; 219 | } 220 | 221 | /*! Generate Features - Find n-2D Beam optimized path elements and return next point to follow 222 | Note: Any path planning algorithm can be used if we can consider the robot's size during planning. 
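   In this implementation the footprint is approximated by also requiring the pixels
   m*sum_y to the left and right of each candidate node to be navigable before the
   node is accepted.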
223 | */ 224 | cv::Point POVPlan::find_optic_path(int n, cv::Point global_optic_goal, cv::Point local_optic_goal){ 225 | // Size of image 226 | int rows = optic_path_image.rows; 227 | int cols = optic_path_image.cols; 228 | 229 | // Start point 230 | cv::Point start; 231 | start.x = cols/2; 232 | start.y = rows; 233 | 234 | // N 2D finite elements points 235 | std::vector pts_fe; 236 | std::vector l_path; 237 | std::vector r_path; 238 | 239 | if (VISUALIZATION) 240 | { 241 | // Show POG 242 | cv::circle(optic_path_image, cv::Point(global_optic_goal.x, global_optic_goal.y), 30, CV_RGB(0, 0, 255), -1); 243 | // Local optic goal 244 | cv::circle(optic_path_image, cv::Point(local_optic_goal.x, local_optic_goal.y), 10, CV_RGB(0, 255, 0), -1); 245 | // Connect with a line 246 | cv::line(optic_path_image, cv::Point(local_optic_goal.x, local_optic_goal.y), cv::Point(global_optic_goal.x, global_optic_goal.y), CV_RGB(255, 0, 0), 2); 247 | 248 | // Show start point 249 | cv::circle(optic_path_image, cv::Point(start.x, start.y), 50, CV_RGB(0, 255, 0), -1); 250 | // Initial Path : Connect with a line -> Start to optic goal 251 | //cv::line(optic_path_image, cv::Point(local_optic_goal.x, local_optic_goal.y), cv::Point(start.x, start.y), CV_RGB(255, 0, 0), 2, cv::LINE_8); 252 | 253 | } 254 | 255 | // Path Optimization 256 | // Parameters 257 | int gamma = 5; 258 | // optic behabour {Homogeneous factor} 259 | int alpha = 1, delta = 50; 260 | int m = 4.5; // default : 2.66; 261 | 262 | // int sigma = cols/2; // Maximum allowed movement of nodes 263 | int saftey = 10; // pixels 264 | // Push the goal 265 | pts_fe.push_back(cv::Point(local_optic_goal.x, local_optic_goal.y)); 266 | // std::cout << "Pushed : " << local_optic_goal << std::endl; 267 | // Initiate reference node 268 | cv::Point ref_node(local_optic_goal.x, local_optic_goal.y); 269 | int dy = alpha*gamma; 270 | // int sum_y = local_optic_goal.y -140 - start.y/2 + dy; 271 | // std::cout << local_optic_goal.y - start.y/2; 272 | int sum_y = dy; 273 | 274 | // Path Optimization 275 | for(int y = local_optic_goal.y; y < start.y - 10; y = y + dy){ 276 | // std::cout << "Inside for loop \n"; 277 | float theta = atan2(ref_node.x - start.x, ref_node.y - start.y); 278 | dy = alpha*gamma; 279 | sum_y = sum_y + dy; 280 | int dx = dy * tan(theta); 281 | int x = ref_node.x + dx; 282 | // Move the node within the boundary 283 | int i = 1; 284 | // for(int x_ = x; x_ > x - sigma && x_ < x + sigma; x_ = x + pow(-1, i) * i){ 285 | for(int x_ = x; x_ > 0 && x_ < image.cols; x_ = x + pow(-1, i) * i){ 286 | // Limit left x and right x 287 | int left_x_ = (x_- m*sum_y < 0) ? 0 : x_- m*sum_y; 288 | int right_x_ = (x_+ m*sum_y >= cols) ? 
cols - 1 : x_+ m*sum_y; 289 | cv::Point l(left_x_, y); 290 | cv::Point c(x_, y); 291 | cv::Point r(right_x_, y); 292 | 293 | // Check left and right 294 | int offset = 0; //370; 295 | if(image.at(y, left_x_) > 0 && 296 | image.at(y, x_) > 0 && 297 | image.at(y, right_x_) > 0){ 298 | // Add to path 299 | // l.x = c.x - (m*sum_y + offset) + saftey; 300 | l_path.push_back(l); 301 | // Push center 302 | pts_fe.push_back(c); 303 | // Right 304 | // r.x = c.x + (m*sum_y + offset) - saftey; 305 | r_path.push_back(r); 306 | // Update reference node 307 | ref_node = c; 308 | if (VISUALIZATION) 309 | { 310 | //cv::circle(optic_path_image, ref_node, 17, CV_RGB(0, 0, 0), -1); 311 | } 312 | break; 313 | } 314 | else{ 315 | i++; 316 | } 317 | } 318 | } 319 | pts_fe.push_back(cv::Point(start.x, start.y)); 320 | 321 | // Add to path : left and right features 322 | cv::Point l_f, r_f; 323 | l_f.x = start.x - m*(rows+10-local_optic_goal.y) + saftey; 324 | l_f.y = rows; 325 | l_path.push_back(l_f); 326 | 327 | // Right 328 | r_f.x = start.x + m*(rows+10-local_optic_goal.y) - saftey; 329 | r_f.y = rows; 330 | r_path.push_back(r_f); 331 | 332 | 333 | if (VISUALIZATION) 334 | { 335 | // draws the curve using polylines and line width (GREEN) 336 | int lineWidth = 10; 337 | //cv::polylines(optic_path_image, l_path, false, cv::Scalar(255, 0, 0), lineWidth/2); 338 | cv::polylines(optic_path_image, pts_fe, false, cv::Scalar(0, 255, 0), lineWidth); 339 | //cv::polylines(optic_path_image, r_path, false, cv::Scalar(255, 0, 0), lineWidth/2); 340 | } 341 | // Set optic_path 342 | set_path(pts_fe); 343 | set_lpath(l_path); 344 | set_rpath(r_path); 345 | 346 | // Find the first node to guide the robot towards goal 347 | cv::Point guider_node; 348 | 349 | //pts_fe.pop_back(); // Start node 350 | //std::cout << "After 1st pop : " << *(pts_fe.rbegin()) << std::endl; 351 | pts_fe.pop_back(); // Start node 352 | guider_node = *(pts_fe.rbegin()); 353 | lp_node = *(l_path.rbegin()); 354 | rp_node = *(r_path.rbegin()); 355 | // std::cout << "Guider Node : " << guider_node << std::endl; 356 | if (VISUALIZATION) 357 | { 358 | cv::circle(optic_path_image, guider_node, 5, CV_RGB(255, 0, 0), -1); 359 | cv::line(optic_path_image, start, guider_node, CV_RGB(255, 0, 0), 5); 360 | } 361 | return guider_node; 362 | } 363 | --------------------------------------------------------------------------------
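For orientation, the sketch below is a hypothetical, standalone usage of the POVPlan API shown above; it is not part of the repository and simply mirrors the call sequence that POVNav::imageCallback performs in src/pov_nav.cpp (the weights are the defaults from cfg/pov_nav_dyparam.cfg).
```
// Hypothetical standalone helper (not part of the repository): drives POVPlan the
// same way POVNav::imageCallback does in src/pov_nav.cpp.
#include <pov_plan.h>
#include <geometry_msgs/PoseStamped.h>
#include <opencv2/opencv.hpp>

cv::Point planOnce(POVPlan& plan,
                   const cv::Mat& navigability,                    // binary 0/255 image (Ir)
                   const cv::Mat& rgb,                             // canvas for visualization
                   const geometry_msgs::PoseStamped& goal,
                   const geometry_msgs::PoseStamped& robot_state)
{
    plan.set_optic_path_image(rgb.clone());                        // image the path is drawn on
    plan.set_image(navigability);                                  // segmented navigable space
    plan.set_visualization(true);

    plan.set_w1(1.0);                                              // weight on objective 1 (heading deviation)
    plan.set_w2(100.0);                                            // weight on objective 2 (progress to goal)

    cv::Point start(navigability.cols / 2, navigability.rows);     // robot position in the image
    cv::Point pog = plan.get_global_optic_goal(goal, robot_state); // POG: goal direction on the image boundary
    cv::Point hog = plan.get_local_optic_goal(pog, start);         // HOG: Pareto-optimal sub-goal
    return plan.find_optic_path(5, pog, hog);                      // first node of the optic path (guider node)
}
```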