├── .circleci
└── config.yml
├── .github
├── FUNDING.yml
├── ISSUE_TEMPLATE
│ └── bug_report.md
├── codeql
│ └── codeql-config.yml
├── dependabot.yml
├── pull_request_template.md
└── workflows
│ ├── Linux_CI.yml
│ ├── MacOS_CI.yml
│ ├── Windows_CI.yml
│ ├── circleci-artifacts-redirector.yml
│ ├── codeql.yml
│ └── gh-pages.yml
├── .gitignore
├── AerialNavigation
├── __init__.py
├── drone_3d_trajectory_following
│ ├── Quadrotor.py
│ ├── TrajectoryGenerator.py
│ ├── __init__.py
│ └── drone_3d_trajectory_following.py
└── rocket_powered_landing
│ └── rocket_powered_landing.py
├── ArmNavigation
├── __init__.py
├── arm_obstacle_navigation
│ ├── arm_obstacle_navigation.py
│ └── arm_obstacle_navigation_2.py
├── n_joint_arm_3d
│ ├── NLinkArm3d.py
│ ├── __init__.py
│ ├── random_forward_kinematics.py
│ └── random_inverse_kinematics.py
├── n_joint_arm_to_point_control
│ ├── NLinkArm.py
│ ├── __init__.py
│ └── n_joint_arm_to_point_control.py
├── rrt_star_seven_joint_arm_control
│ └── rrt_star_seven_joint_arm_control.py
└── two_joint_arm_to_point_control
│ └── two_joint_arm_to_point_control.py
├── Bipedal
├── __init__.py
└── bipedal_planner
│ ├── __init__.py
│ └── bipedal_planner.py
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── InvertedPendulum
├── inverted_pendulum_lqr_control.py
└── inverted_pendulum_mpc_control.py
├── LICENSE
├── Localization
├── __init__.py
├── cubature_kalman_filter
│ └── cubature_kalman_filter.py
├── ensemble_kalman_filter
│ └── ensemble_kalman_filter.py
├── extended_kalman_filter
│ ├── ekf_with_velocity_correction.py
│ └── extended_kalman_filter.py
├── histogram_filter
│ └── histogram_filter.py
├── particle_filter
│ └── particle_filter.py
└── unscented_kalman_filter
│ └── unscented_kalman_filter.py
├── Mapping
├── DistanceMap
│ └── distance_map.py
├── __init__.py
├── circle_fitting
│ └── circle_fitting.py
├── gaussian_grid_map
│ └── gaussian_grid_map.py
├── grid_map_lib
│ ├── __init__.py
│ └── grid_map_lib.py
├── kmeans_clustering
│ └── kmeans_clustering.py
├── lidar_to_grid_map
│ ├── lidar01.csv
│ └── lidar_to_grid_map.py
├── ndt_map
│ └── ndt_map.py
├── normal_vector_estimation
│ └── normal_vector_estimation.py
├── point_cloud_sampling
│ └── point_cloud_sampling.py
├── ray_casting_grid_map
│ └── ray_casting_grid_map.py
└── rectangle_fitting
│ ├── __init_.py
│ ├── rectangle_fitting.py
│ └── simulator.py
├── MissionPlanning
├── BehaviorTree
│ ├── behavior_tree.py
│ ├── robot_behavior_case.py
│ └── robot_behavior_tree.xml
└── StateMachine
│ ├── robot_behavior_case.py
│ └── state_machine.py
├── PathPlanning
├── AStar
│ ├── a_star.py
│ ├── a_star_searching_from_two_side.py
│ └── a_star_variants.py
├── BSplinePath
│ └── bspline_path.py
├── BatchInformedRRTStar
│ └── batch_informed_rrt_star.py
├── BezierPath
│ └── bezier_path.py
├── BidirectionalAStar
│ └── bidirectional_a_star.py
├── BidirectionalBreadthFirstSearch
│ └── bidirectional_breadth_first_search.py
├── BreadthFirstSearch
│ └── breadth_first_search.py
├── BugPlanning
│ └── bug.py
├── Catmull_RomSplinePath
│ ├── blending_functions.py
│ └── catmull_rom_spline_path.py
├── ClosedLoopRRTStar
│ ├── closed_loop_rrt_star_car.py
│ ├── pure_pursuit.py
│ └── unicycle_model.py
├── ClothoidPath
│ └── clothoid_path_planner.py
├── CubicSpline
│ ├── __init__.py
│ ├── cubic_spline_planner.py
│ └── spline_continuity.py
├── DStar
│ └── dstar.py
├── DStarLite
│ └── d_star_lite.py
├── DepthFirstSearch
│ └── depth_first_search.py
├── Dijkstra
│ └── dijkstra.py
├── DubinsPath
│ ├── __init__.py
│ └── dubins_path_planner.py
├── DynamicMovementPrimitives
│ └── dynamic_movement_primitives.py
├── DynamicWindowApproach
│ └── dynamic_window_approach.py
├── ElasticBands
│ ├── elastic_bands.py
│ ├── obstacles.npy
│ └── path.npy
├── Eta3SplinePath
│ └── eta3_spline_path.py
├── Eta3SplineTrajectory
│ └── eta3_spline_trajectory.py
├── FlowField
│ └── flowfield.py
├── FrenetOptimalTrajectory
│ ├── cartesian_frenet_converter.py
│ └── frenet_optimal_trajectory.py
├── GreedyBestFirstSearch
│ └── greedy_best_first_search.py
├── GridBasedSweepCPP
│ └── grid_based_sweep_coverage_path_planner.py
├── HybridAStar
│ ├── __init__.py
│ ├── car.py
│ ├── dynamic_programming_heuristic.py
│ └── hybrid_a_star.py
├── InformedRRTStar
│ └── informed_rrt_star.py
├── LQRPlanner
│ └── lqr_planner.py
├── LQRRRTStar
│ └── lqr_rrt_star.py
├── ModelPredictiveTrajectoryGenerator
│ ├── __init__.py
│ ├── lookup_table_generator.py
│ ├── motion_model.py
│ └── trajectory_generator.py
├── PotentialFieldPlanning
│ └── potential_field_planning.py
├── ProbabilisticRoadMap
│ └── probabilistic_road_map.py
├── QuinticPolynomialsPlanner
│ └── quintic_polynomials_planner.py
├── RRT
│ ├── __init__.py
│ ├── rrt.py
│ ├── rrt_with_pathsmoothing.py
│ ├── rrt_with_sobol_sampler.py
│ └── sobol
│ │ ├── __init__.py
│ │ └── sobol.py
├── RRTDubins
│ ├── __init__.py
│ └── rrt_dubins.py
├── RRTStar
│ ├── __init__.py
│ └── rrt_star.py
├── RRTStarDubins
│ └── rrt_star_dubins.py
├── RRTStarReedsShepp
│ └── rrt_star_reeds_shepp.py
├── ReedsSheppPath
│ └── reeds_shepp_path_planning.py
├── SpiralSpanningTreeCPP
│ ├── map
│ │ ├── test.png
│ │ ├── test_2.png
│ │ └── test_3.png
│ └── spiral_spanning_tree_coverage_path_planner.py
├── StateLatticePlanner
│ ├── __init__.py
│ ├── lookup_table.csv
│ └── state_lattice_planner.py
├── TimeBasedPathPlanning
│ ├── GridWithDynamicObstacles.py
│ ├── SafeInterval.py
│ ├── SpaceTimeAStar.py
│ └── __init__.py
├── VisibilityRoadMap
│ ├── __init__.py
│ ├── geometry.py
│ └── visibility_road_map.py
├── VoronoiRoadMap
│ ├── __init__.py
│ ├── dijkstra_search.py
│ └── voronoi_road_map.py
├── WavefrontCPP
│ ├── map
│ │ ├── test.png
│ │ ├── test_2.png
│ │ └── test_3.png
│ └── wavefront_coverage_path_planner.py
└── __init__.py
├── PathTracking
├── __init__.py
├── cgmres_nmpc
│ └── cgmres_nmpc.py
├── lqr_speed_steer_control
│ └── lqr_speed_steer_control.py
├── lqr_steer_control
│ ├── __init__.py
│ └── lqr_steer_control.py
├── model_predictive_speed_and_steer_control
│ └── model_predictive_speed_and_steer_control.py
├── move_to_pose
│ ├── __init__.py
│ ├── move_to_pose.py
│ └── move_to_pose_robot.py
├── pure_pursuit
│ └── pure_pursuit.py
├── rear_wheel_feedback_control
│ └── rear_wheel_feedback_control.py
└── stanley_control
│ └── stanley_control.py
├── README.md
├── SECURITY.md
├── SLAM
├── EKFSLAM
│ └── ekf_slam.py
├── FastSLAM1
│ └── fast_slam1.py
├── FastSLAM2
│ └── fast_slam2.py
├── GraphBasedSLAM
│ ├── data
│ │ ├── README.rst
│ │ └── input_INTEL.g2o
│ ├── graph_based_slam.py
│ └── graphslam
│ │ ├── __init__.py
│ │ ├── edge
│ │ ├── __init__.py
│ │ └── edge_odometry.py
│ │ ├── graph.py
│ │ ├── load.py
│ │ ├── pose
│ │ ├── __init__.py
│ │ └── se2.py
│ │ ├── util.py
│ │ └── vertex.py
├── ICPMatching
│ └── icp_matching.py
└── __init__.py
├── __init__.py
├── _config.yml
├── appveyor.yml
├── docs
├── Makefile
├── README.md
├── _static
│ ├── .gitkeep
│ ├── custom.css
│ └── img
│ │ ├── doc_ci.png
│ │ ├── source_link_1.png
│ │ └── source_link_2.png
├── _templates
│ └── layout.html
├── conf.py
├── doc_requirements.txt
├── index_main.rst
├── make.bat
└── modules
│ ├── 0_getting_started
│ ├── 0_getting_started_main.rst
│ ├── 1_what_is_python_robotics_main.rst
│ ├── 2_how_to_run_sample_codes_main.rst
│ ├── 3_how_to_contribute_main.rst
│ └── 4_how_to_read_textbook_main.rst
│ ├── 10_inverted_pendulum
│ ├── inverted-pendulum.png
│ └── inverted_pendulum_main.rst
│ ├── 11_utils
│ ├── plot
│ │ ├── curvature_plot.png
│ │ └── plot_main.rst
│ └── utils_main.rst
│ ├── 12_appendix
│ ├── Kalmanfilter_basics_2_files
│ │ └── Kalmanfilter_basics_2_5_0.png
│ ├── Kalmanfilter_basics_2_main.rst
│ ├── Kalmanfilter_basics_files
│ │ ├── Kalmanfilter_basics_14_1.png
│ │ ├── Kalmanfilter_basics_16_0.png
│ │ ├── Kalmanfilter_basics_19_1.png
│ │ ├── Kalmanfilter_basics_21_1.png
│ │ ├── Kalmanfilter_basics_22_0.png
│ │ └── Kalmanfilter_basics_28_1.png
│ ├── Kalmanfilter_basics_main.rst
│ ├── appendix_main.rst
│ ├── external_sensors_main.rst
│ ├── internal_sensors_main.rst
│ ├── steering_motion_model
│ │ ├── steering_model.png
│ │ ├── turning_radius_calc1.png
│ │ └── turning_radius_calc2.png
│ └── steering_motion_model_main.rst
│ ├── 13_mission_planning
│ ├── behavior_tree
│ │ ├── behavior_tree_main.rst
│ │ └── robot_behavior_case.svg
│ ├── mission_planning_main.rst
│ └── state_machine
│ │ ├── robot_behavior_case.png
│ │ └── state_machine_main.rst
│ ├── 1_introduction
│ ├── 1_definition_of_robotics
│ │ └── definition_of_robotics_main.rst
│ ├── 2_python_for_robotics
│ │ └── python_for_robotics_main.rst
│ ├── 3_technologies_for_robotics
│ │ └── technologies_for_robotics_main.rst
│ └── introduction_main.rst
│ ├── 2_localization
│ ├── ensamble_kalman_filter_localization_files
│ │ └── ensamble_kalman_filter_localization_main.rst
│ ├── extended_kalman_filter_localization_files
│ │ ├── ekf_with_velocity_correction_1_0.png
│ │ ├── extended_kalman_filter_localization_1_0.png
│ │ └── extended_kalman_filter_localization_main.rst
│ ├── histogram_filter_localization
│ │ ├── 1.png
│ │ ├── 2.png
│ │ ├── 3.png
│ │ ├── 4.png
│ │ └── histogram_filter_localization_main.rst
│ ├── localization_main.rst
│ ├── particle_filter_localization
│ │ └── particle_filter_localization_main.rst
│ └── unscented_kalman_filter_localization
│ │ └── unscented_kalman_filter_localization_main.rst
│ ├── 3_mapping
│ ├── circle_fitting
│ │ └── circle_fitting_main.rst
│ ├── distance_map
│ │ ├── distance_map.png
│ │ └── distance_map_main.rst
│ ├── gaussian_grid_map
│ │ └── gaussian_grid_map_main.rst
│ ├── k_means_object_clustering
│ │ └── k_means_object_clustering_main.rst
│ ├── lidar_to_grid_map_tutorial
│ │ ├── grid_map_example.png
│ │ ├── lidar_to_grid_map_tutorial_12_0.png
│ │ ├── lidar_to_grid_map_tutorial_14_1.png
│ │ ├── lidar_to_grid_map_tutorial_5_0.png
│ │ ├── lidar_to_grid_map_tutorial_7_0.png
│ │ ├── lidar_to_grid_map_tutorial_8_0.png
│ │ └── lidar_to_grid_map_tutorial_main.rst
│ ├── mapping_main.rst
│ ├── ndt_map
│ │ ├── grid_clustering.png
│ │ ├── ndt_map1.png
│ │ ├── ndt_map2.png
│ │ ├── ndt_map_main.rst
│ │ └── raw_observations.png
│ ├── normal_vector_estimation
│ │ ├── normal_vector_calc.png
│ │ ├── normal_vector_estimation_main.rst
│ │ └── ransac_normal_vector_estimation.png
│ ├── point_cloud_sampling
│ │ ├── farthest_point_sampling.png
│ │ ├── point_cloud_sampling_main.rst
│ │ ├── poisson_disk_sampling.png
│ │ └── voxel_point_sampling.png
│ ├── ray_casting_grid_map
│ │ └── ray_casting_grid_map_main.rst
│ └── rectangle_fitting
│ │ └── rectangle_fitting_main.rst
│ ├── 4_slam
│ ├── FastSLAM1
│ │ ├── FastSLAM1_12_0.png
│ │ ├── FastSLAM1_12_1.png
│ │ ├── FastSLAM1_1_0.png
│ │ └── FastSLAM1_main.rst
│ ├── FastSLAM2
│ │ └── FastSLAM2_main.rst
│ ├── ekf_slam
│ │ ├── ekf_slam_1_0.png
│ │ └── ekf_slam_main.rst
│ ├── graph_slam
│ │ ├── graphSLAM_SE2_example.rst
│ │ ├── graphSLAM_SE2_example_files
│ │ │ ├── graphSLAM_SE2_example_13_0.png
│ │ │ ├── graphSLAM_SE2_example_15_0.png
│ │ │ ├── graphSLAM_SE2_example_16_0.png
│ │ │ ├── graphSLAM_SE2_example_4_0.png
│ │ │ ├── graphSLAM_SE2_example_8_0.png
│ │ │ └── graphSLAM_SE2_example_9_0.png
│ │ ├── graphSLAM_doc.rst
│ │ ├── graphSLAM_doc_files
│ │ │ ├── graphSLAM_doc_11_1.png
│ │ │ ├── graphSLAM_doc_11_2.png
│ │ │ ├── graphSLAM_doc_2_0.png
│ │ │ ├── graphSLAM_doc_2_2.png
│ │ │ ├── graphSLAM_doc_4_0.png
│ │ │ └── graphSLAM_doc_9_1.png
│ │ ├── graphSLAM_formulation.rst
│ │ └── graph_slam_main.rst
│ ├── iterative_closest_point_matching
│ │ └── iterative_closest_point_matching_main.rst
│ └── slam_main.rst
│ ├── 5_path_planning
│ ├── bezier_path
│ │ ├── Figure_1.png
│ │ ├── Figure_2.png
│ │ └── bezier_path_main.rst
│ ├── bspline_path
│ │ ├── approx_and_curvature.png
│ │ ├── approximation1.png
│ │ ├── basis_functions.png
│ │ ├── bspline_path_main.rst
│ │ ├── bspline_path_planning.png
│ │ ├── interp_and_curvature.png
│ │ └── interpolation1.png
│ ├── bugplanner
│ │ └── bugplanner_main.rst
│ ├── catmull_rom_spline
│ │ ├── blending_functions.png
│ │ ├── catmull_rom_path_planning.png
│ │ └── catmull_rom_spline_main.rst
│ ├── clothoid_path
│ │ └── clothoid_path_main.rst
│ ├── coverage_path
│ │ └── coverage_path_main.rst
│ ├── cubic_spline
│ │ ├── cubic_spline_1d.png
│ │ ├── cubic_spline_2d_curvature.png
│ │ ├── cubic_spline_2d_path.png
│ │ ├── cubic_spline_2d_yaw.png
│ │ ├── cubic_spline_main.rst
│ │ ├── spline.png
│ │ └── spline_continuity.png
│ ├── dubins_path
│ │ ├── RLR.jpg
│ │ ├── RSR.jpg
│ │ ├── dubins_path.jpg
│ │ └── dubins_path_main.rst
│ ├── dynamic_window_approach
│ │ └── dynamic_window_approach_main.rst
│ ├── elastic_bands
│ │ └── elastic_bands_main.rst
│ ├── eta3_spline
│ │ └── eta3_spline_main.rst
│ ├── frenet_frame_path
│ │ └── frenet_frame_path_main.rst
│ ├── grid_base_search
│ │ └── grid_base_search_main.rst
│ ├── hybridastar
│ │ └── hybridastar_main.rst
│ ├── lqr_path
│ │ └── lqr_path_main.rst
│ ├── model_predictive_trajectory_generator
│ │ ├── lookup_table.png
│ │ └── model_predictive_trajectory_generator_main.rst
│ ├── path_planning_main.rst
│ ├── prm_planner
│ │ └── prm_planner_main.rst
│ ├── quintic_polynomials_planner
│ │ └── quintic_polynomials_planner_main.rst
│ ├── reeds_shepp_path
│ │ ├── LR_L.png
│ │ ├── LR_LR.png
│ │ ├── LSL.png
│ │ ├── LSL90xR.png
│ │ ├── LSR.png
│ │ ├── LSR90_L.png
│ │ ├── L_R90SL.png
│ │ ├── L_R90SL90_R.png
│ │ ├── L_R90SR.png
│ │ ├── L_RL.png
│ │ ├── L_RL_R.png
│ │ ├── L_R_L.png
│ │ └── reeds_shepp_path_main.rst
│ ├── rrt
│ │ ├── closed_loop_rrt_star_car
│ │ │ ├── Figure_1.png
│ │ │ ├── Figure_3.png
│ │ │ ├── Figure_4.png
│ │ │ └── Figure_5.png
│ │ ├── figure_1.png
│ │ ├── rrt_main.rst
│ │ ├── rrt_star.rst
│ │ ├── rrt_star
│ │ │ └── rrt_star_1_0.png
│ │ └── rrt_star_reeds_shepp
│ │ │ └── figure_1.png
│ ├── state_lattice_planner
│ │ ├── Figure_1.png
│ │ ├── Figure_2.png
│ │ ├── Figure_3.png
│ │ ├── Figure_4.png
│ │ ├── Figure_5.png
│ │ ├── Figure_6.png
│ │ └── state_lattice_planner_main.rst
│ ├── time_based_grid_search
│ │ └── time_based_grid_search_main.rst
│ ├── visibility_road_map_planner
│ │ ├── step0.png
│ │ ├── step1.png
│ │ ├── step2.png
│ │ ├── step3.png
│ │ └── visibility_road_map_planner_main.rst
│ └── vrm_planner
│ │ └── vrm_planner_main.rst
│ ├── 6_path_tracking
│ ├── cgmres_nmpc
│ │ ├── cgmres_nmpc_1_0.png
│ │ ├── cgmres_nmpc_2_0.png
│ │ ├── cgmres_nmpc_3_0.png
│ │ ├── cgmres_nmpc_4_0.png
│ │ └── cgmres_nmpc_main.rst
│ ├── lqr_speed_and_steering_control
│ │ ├── lqr_speed_and_steering_control_main.rst
│ │ ├── lqr_steering_control_model.jpg
│ │ ├── speed.png
│ │ ├── x-y.png
│ │ └── yaw.png
│ ├── lqr_steering_control
│ │ ├── lqr_steering_control_main.rst
│ │ └── lqr_steering_control_model.jpg
│ ├── model_predictive_speed_and_steering_control
│ │ └── model_predictive_speed_and_steering_control_main.rst
│ ├── move_to_a_pose
│ │ └── move_to_a_pose_main.rst
│ ├── path_tracking_main.rst
│ ├── pure_pursuit_tracking
│ │ └── pure_pursuit_tracking_main.rst
│ ├── rear_wheel_feedback_control
│ │ └── rear_wheel_feedback_control_main.rst
│ └── stanley_control
│ │ └── stanley_control_main.rst
│ ├── 7_arm_navigation
│ ├── Planar_Two_Link_IK_files
│ │ ├── Planar_Two_Link_IK_12_0.png
│ │ ├── Planar_Two_Link_IK_5_0.png
│ │ ├── Planar_Two_Link_IK_7_0.png
│ │ └── Planar_Two_Link_IK_9_0.png
│ ├── arm_navigation_main.rst
│ ├── n_joint_arm_to_point_control_main.rst
│ ├── obstacle_avoidance_arm_navigation_main.rst
│ └── planar_two_link_ik_main.rst
│ ├── 8_aerial_navigation
│ ├── aerial_navigation_main.rst
│ ├── drone_3d_trajectory_following
│ │ └── drone_3d_trajectory_following_main.rst
│ └── rocket_powered_landing
│ │ └── rocket_powered_landing_main.rst
│ └── 9_bipedal
│ ├── bipedal_main.rst
│ └── bipedal_planner
│ └── bipedal_planner_main.rst
├── icon.png
├── mypy.ini
├── requirements
├── environment.yml
└── requirements.txt
├── ruff.toml
├── runtests.sh
├── tests
├── __init__.py
├── conftest.py
├── test_LQR_planner.py
├── test_a_star.py
├── test_a_star_searching_two_side.py
├── test_a_star_variants.py
├── test_batch_informed_rrt_star.py
├── test_behavior_tree.py
├── test_bezier_path.py
├── test_bipedal_planner.py
├── test_breadth_first_search.py
├── test_bspline_path.py
├── test_bug.py
├── test_catmull_rom_spline.py
├── test_cgmres_nmpc.py
├── test_circle_fitting.py
├── test_closed_loop_rrt_star_car.py
├── test_clothoidal_paths.py
├── test_codestyle.py
├── test_cubature_kalman_filter.py
├── test_d_star_lite.py
├── test_depth_first_search.py
├── test_dijkstra.py
├── test_distance_map.py
├── test_drone_3d_trajectory_following.py
├── test_dstar.py
├── test_dubins_path_planning.py
├── test_dynamic_movement_primitives.py
├── test_dynamic_window_approach.py
├── test_ekf_slam.py
├── test_elastic_bands.py
├── test_eta3_spline_path.py
├── test_extended_kalman_filter.py
├── test_fast_slam1.py
├── test_fast_slam2.py
├── test_flow_field.py
├── test_frenet_optimal_trajectory.py
├── test_gaussian_grid_map.py
├── test_graph_based_slam.py
├── test_greedy_best_first_search.py
├── test_grid_based_sweep_coverage_path_planner.py
├── test_grid_map_lib.py
├── test_histogram_filter.py
├── test_hybrid_a_star.py
├── test_informed_rrt_star.py
├── test_inverted_pendulum_lqr_control.py
├── test_inverted_pendulum_mpc_control.py
├── test_iterative_closest_point.py
├── test_kmeans_clustering.py
├── test_lqr_rrt_star.py
├── test_lqr_speed_steer_control.py
├── test_lqr_steer_control.py
├── test_model_predictive_speed_and_steer_control.py
├── test_move_to_pose.py
├── test_move_to_pose_robot.py
├── test_mypy_type_check.py
├── test_n_joint_arm_to_point_control.py
├── test_normal_vector_estimation.py
├── test_particle_filter.py
├── test_point_cloud_sampling.py
├── test_potential_field_planning.py
├── test_probabilistic_road_map.py
├── test_pure_pursuit.py
├── test_quintic_polynomials_planner.py
├── test_ray_casting_grid_map.py
├── test_rear_wheel_feedback.py
├── test_rectangle_fitting.py
├── test_reeds_shepp_path_planning.py
├── test_rocket_powered_landing.py
├── test_rrt.py
├── test_rrt_dubins.py
├── test_rrt_star.py
├── test_rrt_star_dubins.py
├── test_rrt_star_reeds_shepp.py
├── test_rrt_star_seven_joint_arm.py
├── test_rrt_with_sobol_sampler.py
├── test_safe_interval_path_planner.py
├── test_space_time_astar.py
├── test_spiral_spanning_tree_coverage_path_planner.py
├── test_stanley_controller.py
├── test_state_lattice_planner.py
├── test_state_machine.py
├── test_two_joint_arm_to_point_control.py
├── test_unscented_kalman_filter.py
├── test_utils.py
├── test_visibility_road_map_planner.py
├── test_voronoi_road_map_planner.py
└── test_wavefront_coverage_path_planner.py
├── users_comments.md
└── utils
├── __init__.py
├── angle.py
└── plot.py
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.1
2 |
3 | orbs:
4 | python: circleci/python@1.4.0
5 |
6 | jobs:
7 | build_doc:
8 | docker:
9 | - image: cimg/python:3.13
10 | steps:
11 | - checkout
12 | - run:
13 | name: doc_build
14 | command: |
15 | python --version
16 | python -m venv venv
17 | . venv/bin/activate
18 | pip install -r requirements/requirements.txt
19 | pip install -r docs/doc_requirements.txt
20 | cd docs;make html
21 | - store_artifacts:
22 | path: docs/_build/html/
23 | destination: html
24 |
25 | workflows:
26 | main:
27 | jobs:
28 | - build_doc
29 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 | github: AtsushiSakai
3 | patreon: myenigma
4 | custom: https://www.paypal.com/paypalme/myenigmapay/
5 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **Expected behavior**
14 | A clear and concise description of what you expected to happen.
15 |
16 | **Screenshots**
17 | If applicable, add screenshots to help explain your problem.
18 |
19 | **Desktop (please complete the following information):**
20 | - Python version (This repo only supports Python 3.9.x or higher).
21 | - Each library version
22 | - OS version
23 |
--------------------------------------------------------------------------------
/.github/codeql/codeql-config.yml:
--------------------------------------------------------------------------------
1 | name: "Extended CodeQL Config"
2 |
3 | # This file adds additional queries to the default configuration to make it equivalent to what LGTM checks.
4 | queries:
5 | - name: Security and quality queries
6 | uses: security-and-quality
7 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: pip
4 | directory: "/requirements"
5 | schedule:
6 | interval: weekly
7 | time: "20:00"
8 | open-pull-requests-limit: 10
9 |
10 | - package-ecosystem: github-actions
11 | directory: "/"
12 | schedule:
13 | interval: weekly
14 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | #### Reference issue
13 |
14 |
15 | #### What does this implement/fix?
16 |
17 |
18 | #### Additional information
19 |
20 |
21 | #### CheckList
22 | - [ ] Did you add a unit test for your new example or defect fix?
23 | - [ ] Did you add documents for your new example?
24 | - [ ] All CIs are green? (You can check it after submitting)
25 |
--------------------------------------------------------------------------------
/.github/workflows/Linux_CI.yml:
--------------------------------------------------------------------------------
1 | name: Linux_CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 |
9 | jobs:
10 | build:
11 |
12 | runs-on: ubuntu-latest
13 | strategy:
14 | matrix:
15 | python-version: [ '3.13' ]
16 |
17 | name: Python ${{ matrix.python-version }} CI
18 |
19 | steps:
20 | - uses: actions/checkout@v4
21 | - run: git fetch --prune --unshallow
22 |
23 | - name: Setup python
24 | uses: actions/setup-python@v5
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 | - name: Install dependencies
28 | run: |
29 | python --version
30 | python -m pip install --upgrade pip
31 | python -m pip install -r requirements/requirements.txt
32 | - name: run all unit tests
33 | run: bash runtests.sh
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/.github/workflows/MacOS_CI.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: MacOS_CI
4 |
5 | # Controls when the action will run. Triggers the workflow on push or pull request
6 | # events but only for the master branch
7 | on:
8 | push:
9 | branches:
10 | - master
11 | pull_request:
12 |
13 |
14 | jobs:
15 | build:
16 | runs-on: macos-latest
17 | strategy:
18 | matrix:
19 | python-version: [ '3.13' ]
20 | name: Python ${{ matrix.python-version }} CI
21 | steps:
22 | - uses: actions/checkout@v4
23 | - run: git fetch --prune --unshallow
24 |
25 | - name: Update bash
26 | run: brew install bash
27 |
28 | - name: Setup python
29 | uses: actions/setup-python@v5
30 | with:
31 | python-version: ${{ matrix.python-version }}
32 |
33 | - name: Install dependencies
34 | run: |
35 | python --version
36 | python -m pip install --upgrade pip
37 | pip install -r requirements/requirements.txt
38 | - name: run all unit tests
39 | run: bash runtests.sh
40 |
--------------------------------------------------------------------------------
/.github/workflows/Windows_CI.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: Windows_CI
4 |
5 | # Controls when the action will run. Triggers the workflow on push or pull request
6 | # events but only for the master branch
7 | on:
8 | push:
9 | branches:
10 | - master
11 | pull_request:
12 |
13 |
14 | jobs:
15 | build:
16 | runs-on: windows-latest
17 | strategy:
18 | matrix:
19 | python-version: [ '3.13' ]
20 | name: Python ${{ matrix.python-version }} CI
21 | steps:
22 | - uses: actions/checkout@v4
23 | - run: git fetch --prune --unshallow
24 |
25 | - name: Setup python
26 | uses: actions/setup-python@v5
27 | with:
28 | python-version: ${{ matrix.python-version }}
29 |
30 | - name: Install dependencies
31 | run: |
32 | python --version
33 | python -m pip install --upgrade pip
34 | pip install -r requirements/requirements.txt
35 | - name: run all unit tests
36 | run: bash runtests.sh
37 |
--------------------------------------------------------------------------------
/.github/workflows/circleci-artifacts-redirector.yml:
--------------------------------------------------------------------------------
1 | name: circleci-artifacts-redirector-action
2 | on: [status]
3 | jobs:
4 | circleci_artifacts_redirector_job:
5 | runs-on: ubuntu-latest
6 | name: Run CircleCI artifacts redirector!!
7 | steps:
8 | - name: run-circleci-artifacts-redirector
9 | uses: larsoner/circleci-artifacts-redirector-action@v1.1.0
10 | with:
11 | repo-token: ${{ secrets.GITHUB_TOKEN }}
12 | api-token: ${{ secrets.CIRCLECI_TOKEN }}
13 | artifact-path: 0/html/index.html
14 | circleci-jobs: build_doc
15 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | name: "Code scanning - action"
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 | schedule:
9 | - cron: '0 19 * * 0'
10 |
11 | jobs:
12 | CodeQL-Build:
13 |
14 | # CodeQL runs on ubuntu-latest and windows-latest
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - name: Checkout repository
19 | uses: actions/checkout@v4
20 | with:
21 | # We must fetch at least the immediate parents so that if this is
22 | # a pull request then we can checkout the head.
23 | fetch-depth: 2
24 |
25 | # Initializes the CodeQL tools for scanning.
26 | - name: Initialize CodeQL
27 | uses: github/codeql-action/init@v3
28 | with:
29 | config-file: ./.github/codeql/codeql-config.yml
30 | # Override language selection by uncommenting this and choosing your languages
31 | # with:
32 | # languages: go, javascript, csharp, python, cpp, java
33 |
34 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
35 | # If this step fails, then you should remove it and run the build manually (see below)
36 | - name: Autobuild
37 | uses: github/codeql-action/autobuild@v3
38 |
39 | # ℹ️ Command-line programs to run using the OS shell.
40 | # 📚 https://git.io/JvXDl
41 |
42 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
43 | # and modify them (or add more) to build your code if your project
44 | # uses a compiled language
45 |
46 | #- run: |
47 | # make bootstrap
48 | # make release
49 |
50 | - name: Perform CodeQL Analysis
51 | uses: github/codeql-action/analyze@v3
52 |
--------------------------------------------------------------------------------
/.github/workflows/gh-pages.yml:
--------------------------------------------------------------------------------
1 | name: GitHub Pages site update
2 | on:
3 | push:
4 | branches:
5 | - master
6 | jobs:
7 | build:
8 | runs-on: ubuntu-latest
9 | environment:
10 | name: github-pages
11 | url: ${{ steps.deployment.outputs.page_url }}
12 | permissions:
13 | id-token: write
14 | pages: write
15 | steps:
16 | - name: Setup python
17 | uses: actions/setup-python@v5
18 | - name: Checkout
19 | uses: actions/checkout@master
20 | with:
21 | fetch-depth: 0 # otherwise, you will fail to push refs to dest repo
22 | - name: Install dependencies
23 | run: |
24 | python --version
25 | python -m pip install --upgrade pip
26 | python -m pip install -r requirements/requirements.txt
27 | - name: Build and Deploy
28 | uses: sphinx-notes/pages@v3
29 | with:
30 | requirements_path: ./docs/doc_requirements.txt
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.csv
2 | *.gif
3 | *.g2o
4 |
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 | _build/
10 | .idea/
11 |
12 | # C extensions
13 | *.so
14 |
15 | # Distribution / packaging
16 | .Python
17 | env/
18 | build/
19 | develop-eggs/
20 | dist/
21 | downloads/
22 | eggs/
23 | .eggs/
24 | lib/
25 | lib64/
26 | parts/
27 | sdist/
28 | var/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 |
33 | .DS_Store
34 |
35 | # PyInstaller
36 | # Usually these files are written by a python script from a template
37 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
38 | *.manifest
39 | *.spec
40 |
41 | # Installer logs
42 | pip-log.txt
43 | pip-delete-this-directory.txt
44 |
45 | # Unit test / coverage reports
46 | htmlcov/
47 | .tox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *,cover
54 | .hypothesis/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | #Ipython Notebook
70 | .ipynb_checkpoints
71 |
72 | matplotrecorder/*
73 | .vscode/settings.json
74 |
--------------------------------------------------------------------------------
/AerialNavigation/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/AerialNavigation/__init__.py
--------------------------------------------------------------------------------
/AerialNavigation/drone_3d_trajectory_following/TrajectoryGenerator.py:
--------------------------------------------------------------------------------
1 | """
2 | Generates a quintic polynomial trajectory.
3 |
4 | Author: Daniel Ingram (daniel-s-ingram)
5 | """
6 |
7 | import numpy as np
8 |
9 | class TrajectoryGenerator():
10 | def __init__(self, start_pos, des_pos, T, start_vel=[0,0,0], des_vel=[0,0,0], start_acc=[0,0,0], des_acc=[0,0,0]):
11 | self.start_x = start_pos[0]
12 | self.start_y = start_pos[1]
13 | self.start_z = start_pos[2]
14 |
15 | self.des_x = des_pos[0]
16 | self.des_y = des_pos[1]
17 | self.des_z = des_pos[2]
18 |
19 | self.start_x_vel = start_vel[0]
20 | self.start_y_vel = start_vel[1]
21 | self.start_z_vel = start_vel[2]
22 |
23 | self.des_x_vel = des_vel[0]
24 | self.des_y_vel = des_vel[1]
25 | self.des_z_vel = des_vel[2]
26 |
27 | self.start_x_acc = start_acc[0]
28 | self.start_y_acc = start_acc[1]
29 | self.start_z_acc = start_acc[2]
30 |
31 | self.des_x_acc = des_acc[0]
32 | self.des_y_acc = des_acc[1]
33 | self.des_z_acc = des_acc[2]
34 |
35 | self.T = T
36 |
37 | def solve(self):
38 | A = np.array(
39 | [[0, 0, 0, 0, 0, 1],
40 | [self.T**5, self.T**4, self.T**3, self.T**2, self.T, 1],
41 | [0, 0, 0, 0, 1, 0],
42 | [5*self.T**4, 4*self.T**3, 3*self.T**2, 2*self.T, 1, 0],
43 | [0, 0, 0, 2, 0, 0],
44 | [20*self.T**3, 12*self.T**2, 6*self.T, 2, 0, 0]
45 | ])
46 |
47 | b_x = np.array(
48 | [[self.start_x],
49 | [self.des_x],
50 | [self.start_x_vel],
51 | [self.des_x_vel],
52 | [self.start_x_acc],
53 | [self.des_x_acc]
54 | ])
55 |
56 | b_y = np.array(
57 | [[self.start_y],
58 | [self.des_y],
59 | [self.start_y_vel],
60 | [self.des_y_vel],
61 | [self.start_y_acc],
62 | [self.des_y_acc]
63 | ])
64 |
65 | b_z = np.array(
66 | [[self.start_z],
67 | [self.des_z],
68 | [self.start_z_vel],
69 | [self.des_z_vel],
70 | [self.start_z_acc],
71 | [self.des_z_acc]
72 | ])
73 |
74 | self.x_c = np.linalg.solve(A, b_x)
75 | self.y_c = np.linalg.solve(A, b_y)
76 | self.z_c = np.linalg.solve(A, b_z)
--------------------------------------------------------------------------------
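A minimal usage sketch for the TrajectoryGenerator above (illustrative only, not a file in the repository). It assumes the repository root is on sys.path; per the A matrix in solve(), x_c[0] is the t^5 coefficient and x_c[5] the constant term, and likewise for y_c and z_c.

    # Illustrative sketch; assumes the repository root is on sys.path.
    from AerialNavigation.drone_3d_trajectory_following.TrajectoryGenerator import \
        TrajectoryGenerator

    traj = TrajectoryGenerator(start_pos=[0.0, 0.0, 0.0],
                               des_pos=[1.0, 2.0, 3.0],
                               T=5.0)
    traj.solve()

    # Evaluate x(t) at t = 2.5 s: coefficients are ordered t^5 ... t^0.
    t = 2.5
    x_t = sum(traj.x_c[i, 0] * t ** (5 - i) for i in range(6))
    print(x_t)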
/AerialNavigation/drone_3d_trajectory_following/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import pathlib
3 | sys.path.append(str(pathlib.Path(__file__).parent))
4 |
--------------------------------------------------------------------------------
/ArmNavigation/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/ArmNavigation/__init__.py
--------------------------------------------------------------------------------
/ArmNavigation/n_joint_arm_3d/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import pathlib
3 | sys.path.append(str(pathlib.Path(__file__).parent))
4 |
--------------------------------------------------------------------------------
/ArmNavigation/n_joint_arm_3d/random_forward_kinematics.py:
--------------------------------------------------------------------------------
1 | """
2 | Forward Kinematics for an n-link arm in 3D
3 | Author: Takayuki Murooka (takayuki5168)
4 | """
5 | import math
6 | from NLinkArm3d import NLinkArm
7 | import random
8 |
9 |
10 | def random_val(min_val, max_val):
11 | return min_val + random.random() * (max_val - min_val)
12 |
13 |
14 | def main():
15 | print("Start solving Forward Kinematics 10 times")
16 | # init NLinkArm with Denavit-Hartenberg parameters of PR2
17 | n_link_arm = NLinkArm([[0., -math.pi / 2, .1, 0.],
18 | [math.pi / 2, math.pi / 2, 0., 0.],
19 | [0., -math.pi / 2, 0., .4],
20 | [0., math.pi / 2, 0., 0.],
21 | [0., -math.pi / 2, 0., .321],
22 | [0., math.pi / 2, 0., 0.],
23 | [0., 0., 0., 0.]])
24 | # execute FK 10 times
25 | for _ in range(10):
26 | n_link_arm.set_joint_angles(
27 | [random_val(-1, 1) for _ in range(len(n_link_arm.link_list))])
28 |
29 | n_link_arm.forward_kinematics(plot=True)
30 |
31 |
32 | if __name__ == "__main__":
33 | main()
34 |
--------------------------------------------------------------------------------
/ArmNavigation/n_joint_arm_3d/random_inverse_kinematics.py:
--------------------------------------------------------------------------------
1 | """
2 | Inverse Kinematics for an n-link arm in 3D
3 | Author: Takayuki Murooka (takayuki5168)
4 | """
5 | import math
6 | from NLinkArm3d import NLinkArm
7 | import random
8 |
9 |
10 | def random_val(min_val, max_val):
11 | return min_val + random.random() * (max_val - min_val)
12 |
13 |
14 | def main():
15 | print("Start solving Inverse Kinematics 10 times")
16 | # init NLinkArm with Denavit-Hartenberg parameters of PR2
17 | n_link_arm = NLinkArm([[0., -math.pi / 2, .1, 0.],
18 | [math.pi / 2, math.pi / 2, 0., 0.],
19 | [0., -math.pi / 2, 0., .4],
20 | [0., math.pi / 2, 0., 0.],
21 | [0., -math.pi / 2, 0., .321],
22 | [0., math.pi / 2, 0., 0.],
23 | [0., 0., 0., 0.]])
24 | # execute IK 10 times
25 | for _ in range(10):
26 | n_link_arm.inverse_kinematics([random_val(-0.5, 0.5),
27 | random_val(-0.5, 0.5),
28 | random_val(-0.5, 0.5),
29 | random_val(-0.5, 0.5),
30 | random_val(-0.5, 0.5),
31 | random_val(-0.5, 0.5)], plot=True)
32 |
33 |
34 | if __name__ == "__main__":
35 | main()
36 |
--------------------------------------------------------------------------------
/ArmNavigation/n_joint_arm_to_point_control/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/ArmNavigation/n_joint_arm_to_point_control/__init__.py
--------------------------------------------------------------------------------
/Bipedal/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/Bipedal/__init__.py
--------------------------------------------------------------------------------
/Bipedal/bipedal_planner/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/Bipedal/bipedal_planner/__init__.py
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | :+1::tada: First of all, thank you very much for taking the time to contribute! :tada::+1:
4 |
5 | Please check this document for contribution: [How to contribute — PythonRobotics documentation](https://atsushisakai.github.io/PythonRobotics/modules/0_getting_started/3_how_to_contribute.html)
6 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 - now Atsushi Sakai and other contributors:
4 | https://github.com/AtsushiSakai/PythonRobotics/contributors
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/Localization/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/Localization/__init__.py
--------------------------------------------------------------------------------
/Mapping/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/Mapping/__init__.py
--------------------------------------------------------------------------------
/Mapping/gaussian_grid_map/gaussian_grid_map.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | 2D gaussian grid map sample
4 |
5 | author: Atsushi Sakai (@Atsushi_twi)
6 |
7 | """
8 |
9 | import math
10 | import numpy as np
11 | import matplotlib.pyplot as plt
12 | from scipy.stats import norm
13 |
14 | EXTEND_AREA = 10.0  # [m] grid map extension length
15 |
16 | show_animation = True
17 |
18 |
19 | def generate_gaussian_grid_map(ox, oy, xyreso, std):
20 |
21 | minx, miny, maxx, maxy, xw, yw = calc_grid_map_config(ox, oy, xyreso)
22 |
23 | gmap = [[0.0 for i in range(yw)] for i in range(xw)]
24 |
25 | for ix in range(xw):
26 | for iy in range(yw):
27 |
28 | x = ix * xyreso + minx
29 | y = iy * xyreso + miny
30 |
31 | # Search minimum distance
32 | mindis = float("inf")
33 | for (iox, ioy) in zip(ox, oy):
34 | d = math.hypot(iox - x, ioy - y)
35 | if mindis >= d:
36 | mindis = d
37 |
38 | pdf = (1.0 - norm.cdf(mindis, 0.0, std))
39 | gmap[ix][iy] = pdf
40 |
41 | return gmap, minx, maxx, miny, maxy
42 |
43 |
44 | def calc_grid_map_config(ox, oy, xyreso):
45 | minx = round(min(ox) - EXTEND_AREA / 2.0)
46 | miny = round(min(oy) - EXTEND_AREA / 2.0)
47 | maxx = round(max(ox) + EXTEND_AREA / 2.0)
48 | maxy = round(max(oy) + EXTEND_AREA / 2.0)
49 | xw = int(round((maxx - minx) / xyreso))
50 | yw = int(round((maxy - miny) / xyreso))
51 |
52 | return minx, miny, maxx, maxy, xw, yw
53 |
54 |
55 | def draw_heatmap(data, minx, maxx, miny, maxy, xyreso):
56 | x, y = np.mgrid[slice(minx - xyreso / 2.0, maxx + xyreso / 2.0, xyreso),
57 | slice(miny - xyreso / 2.0, maxy + xyreso / 2.0, xyreso)]
58 | plt.pcolor(x, y, data, vmax=1.0, cmap=plt.cm.Blues)
59 | plt.axis("equal")
60 |
61 |
62 | def main():
63 | print(__file__ + " start!!")
64 |
65 | xyreso = 0.5 # xy grid resolution
66 | STD = 5.0  # standard deviation for gaussian distribution
67 |
68 | for i in range(5):
69 | ox = (np.random.rand(4) - 0.5) * 10.0
70 | oy = (np.random.rand(4) - 0.5) * 10.0
71 | gmap, minx, maxx, miny, maxy = generate_gaussian_grid_map(
72 | ox, oy, xyreso, STD)
73 |
74 | if show_animation: # pragma: no cover
75 | plt.cla()
76 | # for stopping simulation with the esc key.
77 | plt.gcf().canvas.mpl_connect('key_release_event',
78 | lambda event: [exit(0) if event.key == 'escape' else None])
79 | draw_heatmap(gmap, minx, maxx, miny, maxy, xyreso)
80 | plt.plot(ox, oy, "xr")
81 | plt.plot(0.0, 0.0, "ob")
82 | plt.pause(1.0)
83 |
84 |
85 | if __name__ == '__main__':
86 | main()
87 |
--------------------------------------------------------------------------------
/Mapping/grid_map_lib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/Mapping/grid_map_lib/__init__.py
--------------------------------------------------------------------------------
/Mapping/rectangle_fitting/__init_.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import pathlib
3 | sys.path.append(str(pathlib.Path(__file__).parent))
4 |
--------------------------------------------------------------------------------
/PathPlanning/Catmull_RomSplinePath/blending_functions.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 |
4 | def blending_function_1(t):
5 | return -t + 2*t**2 - t**3
6 |
7 | def blending_function_2(t):
8 | return 2 - 5*t**2 + 3*t**3
9 |
10 | def blending_function_3(t):
11 | return t + 4*t**2 - 3*t**3
12 |
13 | def blending_function_4(t):
14 | return -t**2 + t**3
15 |
16 | def plot_blending_functions():
17 | t = np.linspace(0, 1, 100)
18 |
19 | plt.plot(t, blending_function_1(t), label='b1')
20 | plt.plot(t, blending_function_2(t), label='b2')
21 | plt.plot(t, blending_function_3(t), label='b3')
22 | plt.plot(t, blending_function_4(t), label='b4')
23 |
24 | plt.title("Catmull-Rom Blending Functions")
25 | plt.xlabel("t")
26 | plt.ylabel("Value")
27 | plt.legend()
28 | plt.grid(True)
29 | plt.axhline(y=0, color='k', linestyle='--')
30 | plt.axvline(x=0, color='k', linestyle='--')
31 | plt.show()
32 |
33 | if __name__ == "__main__":
34 | plot_blending_functions()
--------------------------------------------------------------------------------
/PathPlanning/Catmull_RomSplinePath/catmull_rom_spline_path.py:
--------------------------------------------------------------------------------
1 | """
2 | Path Planner with Catmull-Rom Spline
3 | Author: Surabhi Gupta (@this_is_surabhi)
4 | Source: http://graphics.cs.cmu.edu/nsp/course/15-462/Fall04/assts/catmullRom.pdf
5 | """
6 |
7 | import sys
8 | import pathlib
9 | sys.path.append(str(pathlib.Path(__file__).parent.parent.parent))
10 |
11 | import numpy as np
12 | import matplotlib.pyplot as plt
13 |
14 | def catmull_rom_point(t, p0, p1, p2, p3):
15 | """
16 | Parameters
17 | ----------
18 | t : float
19 | Parameter value (0 <= t <= 1)
20 | p0, p1, p2, p3 : np.ndarray
21 | Control points for the spline segment
22 |
23 | Returns
24 | -------
25 | np.ndarray
26 | Calculated point on the spline
27 | """
28 | return 0.5 * ((2 * p1) +
29 | (-p0 + p2) * t +
30 | (2 * p0 - 5 * p1 + 4 * p2 - p3) * t**2 +
31 | (-p0 + 3 * p1 - 3 * p2 + p3) * t**3)
32 |
33 |
34 | def catmull_rom_spline(control_points, num_points):
35 | """
36 | Parameters
37 | ----------
38 | control_points : list
39 | List of control points
40 | num_points : int
41 | Number of points to generate on the spline
42 |
43 | Returns
44 | -------
45 | tuple
46 | x and y coordinates of the spline points
47 | """
48 | t_vals = np.linspace(0, 1, num_points)
49 | spline_points = []
50 |
51 | control_points = np.array(control_points)
52 |
53 | for i in range(len(control_points) - 1):
54 | if i == 0:
55 | p1, p2, p3 = control_points[i:i+3]
56 | p0 = p1
57 | elif i == len(control_points) - 2:
58 | p0, p1, p2 = control_points[i-1:i+2]
59 | p3 = p2
60 | else:
61 | p0, p1, p2, p3 = control_points[i-1:i+3]
62 |
63 | for t in t_vals:
64 | point = catmull_rom_point(t, p0, p1, p2, p3)
65 | spline_points.append(point)
66 |
67 | return np.array(spline_points).T
68 |
69 |
70 | def main():
71 | print(__file__ + " start!!")
72 |
73 | way_points = [[-1.0, -2.0], [1.0, -1.0], [3.0, -2.0], [4.0, -1.0], [3.0, 1.0], [1.0, 2.0], [0.0, 2.0]]
74 | n_course_point = 100
75 | spline_x, spline_y = catmull_rom_spline(way_points, n_course_point)
76 |
77 | plt.plot(spline_x,spline_y, '-r', label="Catmull-Rom Spline Path")
78 | plt.plot(np.array(way_points).T[0], np.array(way_points).T[1], '-og', label="Way points")
79 | plt.title("Catmull-Rom Spline Path")
80 | plt.grid(True)
81 | plt.legend()
82 | plt.axis("equal")
83 | plt.show()
84 |
85 | if __name__ == '__main__':
86 | main()
--------------------------------------------------------------------------------
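The blending functions defined in blending_functions.py are twice the polynomials that catmull_rom_point above multiplies by 0.5, so the resulting spline weights sum to one for every t. A quick consistency check (illustrative only, not a repository file; assumes the repository root is on sys.path so both modules import):

    import numpy as np
    from PathPlanning.Catmull_RomSplinePath.blending_functions import (
        blending_function_1, blending_function_2,
        blending_function_3, blending_function_4)

    t = np.linspace(0.0, 1.0, 11)
    total = (blending_function_1(t) + blending_function_2(t)
             + blending_function_3(t) + blending_function_4(t))
    # After the 0.5 factor in catmull_rom_point the weights form a partition of unity.
    assert np.allclose(0.5 * total, 1.0)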
/PathPlanning/ClosedLoopRRTStar/unicycle_model.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | Unicycle model class
4 |
5 | author Atsushi Sakai
6 |
7 | """
8 |
9 | import math
10 | import numpy as np
11 | from utils.angle import angle_mod
12 |
13 | dt = 0.05 # [s]
14 | L = 0.9 # [m]
15 | steer_max = np.deg2rad(40.0)
16 | curvature_max = math.tan(steer_max) / L
17 | curvature_max = 1.0 / curvature_max + 1.0
18 |
19 | accel_max = 5.0
20 |
21 |
22 | class State:
23 |
24 | def __init__(self, x=0.0, y=0.0, yaw=0.0, v=0.0):
25 | self.x = x
26 | self.y = y
27 | self.yaw = yaw
28 | self.v = v
29 |
30 |
31 | def update(state, a, delta):
32 |
33 | state.x = state.x + state.v * math.cos(state.yaw) * dt
34 | state.y = state.y + state.v * math.sin(state.yaw) * dt
35 | state.yaw = state.yaw + state.v / L * math.tan(delta) * dt
36 | state.yaw = pi_2_pi(state.yaw)
37 | state.v = state.v + a * dt
38 |
39 | return state
40 |
41 |
42 | def pi_2_pi(angle):
43 | return angle_mod(angle)
44 |
45 |
46 | if __name__ == '__main__': # pragma: no cover
47 | print("start unicycle simulation")
48 | import matplotlib.pyplot as plt
49 |
50 | T = 100
51 | a = [1.0] * T
52 | delta = [np.deg2rad(1.0)] * T
53 | # print(delta)
54 | # print(a, delta)
55 |
56 | state = State()
57 |
58 | x = []
59 | y = []
60 | yaw = []
61 | v = []
62 |
63 | for (ai, di) in zip(a, delta):
64 | state = update(state, ai, di)
65 |
66 | x.append(state.x)
67 | y.append(state.y)
68 | yaw.append(state.yaw)
69 | v.append(state.v)
70 |
71 | plt.subplots(1)
72 | plt.plot(x, y)
73 | plt.axis("equal")
74 | plt.grid(True)
75 |
76 | plt.subplots(1)
77 | plt.plot(v)
78 | plt.grid(True)
79 |
80 | plt.show()
81 |
--------------------------------------------------------------------------------
/PathPlanning/CubicSpline/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/CubicSpline/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/CubicSpline/spline_continuity.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from scipy import interpolate
5 |
6 |
7 | class Spline2D:
8 |
9 | def __init__(self, x, y, kind="cubic"):
10 | self.s = self.__calc_s(x, y)
11 | self.sx = interpolate.interp1d(self.s, x, kind=kind)
12 | self.sy = interpolate.interp1d(self.s, y, kind=kind)
13 |
14 | def __calc_s(self, x, y):
15 | self.ds = np.hypot(np.diff(x), np.diff(y))
16 | s = [0.0]
17 | s.extend(np.cumsum(self.ds))
18 | return s
19 |
20 | def calc_position(self, s):
21 | x = self.sx(s)
22 | y = self.sy(s)
23 | return x, y
24 |
25 |
26 | def main():
27 | x = [-2.5, 0.0, 2.5, 5.0, 7.5, 3.0, -1.0]
28 | y = [0.7, -6, -5, -3.5, 0.0, 5.0, -2.0]
29 | ds = 0.1  # [m] distance between interpolated points
30 |
31 | plt.subplots(1)
32 | plt.plot(x, y, "xb", label="Data points")
33 |
34 | for (kind, label) in [("linear", "C0 (Linear spline)"),
35 | ("quadratic", "C0 & C1 (Quadratic spline)"),
36 | ("cubic", "C0 & C1 & C2 (Cubic spline)")]:
37 | rx, ry = [], []
38 | sp = Spline2D(x, y, kind=kind)
39 | s = np.arange(0, sp.s[-1], ds)
40 | for i_s in s:
41 | ix, iy = sp.calc_position(i_s)
42 | rx.append(ix)
43 | ry.append(iy)
44 | plt.plot(rx, ry, "-", label=label)
45 |
46 | plt.grid(True)
47 | plt.axis("equal")
48 | plt.xlabel("x[m]")
49 | plt.ylabel("y[m]")
50 | plt.legend()
51 | plt.show()
52 |
53 |
54 | if __name__ == '__main__':
55 | main()
56 |
--------------------------------------------------------------------------------
/PathPlanning/DubinsPath/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/DubinsPath/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/ElasticBands/obstacles.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/ElasticBands/obstacles.npy
--------------------------------------------------------------------------------
/PathPlanning/ElasticBands/path.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/ElasticBands/path.npy
--------------------------------------------------------------------------------
/PathPlanning/HybridAStar/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | sys.path.append(os.path.dirname(os.path.abspath(__file__)))
5 |
--------------------------------------------------------------------------------
/PathPlanning/ModelPredictiveTrajectoryGenerator/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/ModelPredictiveTrajectoryGenerator/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/ModelPredictiveTrajectoryGenerator/motion_model.py:
--------------------------------------------------------------------------------
1 | import math
2 | import numpy as np
3 | from scipy.interpolate import interp1d
4 | from utils.angle import angle_mod
5 |
6 | # motion parameter
7 | L = 1.0 # wheel base
8 | ds = 0.1 # course distance
9 | v = 10.0 / 3.6 # velocity [m/s]
10 |
11 |
12 | class State:
13 |
14 | def __init__(self, x=0.0, y=0.0, yaw=0.0, v=0.0):
15 | self.x = x
16 | self.y = y
17 | self.yaw = yaw
18 | self.v = v
19 |
20 |
21 | def pi_2_pi(angle):
22 | return angle_mod(angle)
23 |
24 |
25 | def update(state, v, delta, dt, L):
26 | state.v = v
27 | state.x = state.x + state.v * math.cos(state.yaw) * dt
28 | state.y = state.y + state.v * math.sin(state.yaw) * dt
29 | state.yaw = state.yaw + state.v / L * math.tan(delta) * dt
30 | state.yaw = pi_2_pi(state.yaw)
31 |
32 | return state
33 |
34 |
35 | def generate_trajectory(s, km, kf, k0):
36 | n = s / ds
37 | time = s / v # [s]
38 |
39 | if isinstance(time, type(np.array([]))):
40 | time = time[0]
41 | if isinstance(km, type(np.array([]))):
42 | km = km[0]
43 | if isinstance(kf, type(np.array([]))):
44 | kf = kf[0]
45 |
46 | tk = np.array([0.0, time / 2.0, time])
47 | kk = np.array([k0, km, kf])
48 | t = np.arange(0.0, time, time / n)
49 | fkp = interp1d(tk, kk, kind="quadratic")
50 | kp = [fkp(ti) for ti in t]
51 | dt = float(time / n)
52 |
53 | # plt.plot(t, kp)
54 | # plt.show()
55 |
56 | state = State()
57 | x, y, yaw = [state.x], [state.y], [state.yaw]
58 |
59 | for ikp in kp:
60 | state = update(state, v, ikp, dt, L)
61 | x.append(state.x)
62 | y.append(state.y)
63 | yaw.append(state.yaw)
64 |
65 | return x, y, yaw
66 |
67 |
68 | def generate_last_state(s, km, kf, k0):
69 | n = s / ds
70 | time = s / v # [s]
71 |
72 | if isinstance(n, type(np.array([]))):
73 | n = n[0]
74 | if isinstance(time, type(np.array([]))):
75 | time = time[0]
76 | if isinstance(km, type(np.array([]))):
77 | km = km[0]
78 | if isinstance(kf, type(np.array([]))):
79 | kf = kf[0]
80 |
81 | tk = np.array([0.0, time / 2.0, time])
82 | kk = np.array([k0, km, kf])
83 | t = np.arange(0.0, time, time / n)
84 | fkp = interp1d(tk, kk, kind="quadratic")
85 | kp = [fkp(ti) for ti in t]
86 | dt = time / n
87 |
88 | # plt.plot(t, kp)
89 | # plt.show()
90 |
91 | state = State()
92 |
93 | _ = [update(state, v, ikp, dt, L) for ikp in kp]
94 |
95 | return state.x, state.y, state.yaw
96 |
--------------------------------------------------------------------------------
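A minimal usage sketch for motion_model.generate_trajectory above (illustrative only, not a repository file). It assumes the repository root is on sys.path so that the module's utils.angle import resolves; the curvature values are arbitrary example inputs.

    import matplotlib.pyplot as plt
    from PathPlanning.ModelPredictiveTrajectoryGenerator import motion_model

    # Integrate a 5 m path whose curvature is interpolated through k0 -> km -> kf.
    x, y, yaw = motion_model.generate_trajectory(s=5.0, km=0.1, kf=0.0, k0=0.0)

    plt.plot(x, y)
    plt.axis("equal")
    plt.grid(True)
    plt.show()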
/PathPlanning/RRT/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/RRT/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/RRT/sobol/__init__.py:
--------------------------------------------------------------------------------
1 | from .sobol import i4_sobol as sobol_quasirand
2 |
--------------------------------------------------------------------------------
/PathPlanning/RRTDubins/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/RRTDubins/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/RRTStar/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/RRTStar/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/SpiralSpanningTreeCPP/map/test.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/SpiralSpanningTreeCPP/map/test.png
--------------------------------------------------------------------------------
/PathPlanning/SpiralSpanningTreeCPP/map/test_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/SpiralSpanningTreeCPP/map/test_2.png
--------------------------------------------------------------------------------
/PathPlanning/SpiralSpanningTreeCPP/map/test_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/SpiralSpanningTreeCPP/map/test_3.png
--------------------------------------------------------------------------------
/PathPlanning/StateLatticePlanner/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/StateLatticePlanner/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/TimeBasedPathPlanning/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/TimeBasedPathPlanning/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/VisibilityRoadMap/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/VisibilityRoadMap/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/VisibilityRoadMap/geometry.py:
--------------------------------------------------------------------------------
1 | class Geometry:
2 |
3 | class Point:
4 | def __init__(self, x, y):
5 | self.x = x
6 | self.y = y
7 |
8 | @staticmethod
9 | def is_seg_intersect(p1, q1, p2, q2):
10 |
11 | def on_segment(p, q, r):
12 | if ((q.x <= max(p.x, r.x)) and (q.x >= min(p.x, r.x)) and
13 | (q.y <= max(p.y, r.y)) and (q.y >= min(p.y, r.y))):
14 | return True
15 | return False
16 |
17 | def orientation(p, q, r):
18 | val = (float(q.y - p.y) * (r.x - q.x)) - (
19 | float(q.x - p.x) * (r.y - q.y))
20 | if val > 0:
21 | return 1
22 | if val < 0:
23 | return 2
24 | return 0
25 |
26 | # Find the 4 orientations required for
27 | # the general and special cases
28 | o1 = orientation(p1, q1, p2)
29 | o2 = orientation(p1, q1, q2)
30 | o3 = orientation(p2, q2, p1)
31 | o4 = orientation(p2, q2, q1)
32 |
33 | if (o1 != o2) and (o3 != o4):
34 | return True
35 | if (o1 == 0) and on_segment(p1, p2, q1):
36 | return True
37 | if (o2 == 0) and on_segment(p1, q2, q1):
38 | return True
39 | if (o3 == 0) and on_segment(p2, p1, q2):
40 | return True
41 | if (o4 == 0) and on_segment(p2, q1, q2):
42 | return True
43 |
44 | return False
45 |
--------------------------------------------------------------------------------
/PathPlanning/VoronoiRoadMap/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/VoronoiRoadMap/__init__.py
--------------------------------------------------------------------------------
/PathPlanning/WavefrontCPP/map/test.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/WavefrontCPP/map/test.png
--------------------------------------------------------------------------------
/PathPlanning/WavefrontCPP/map/test_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/WavefrontCPP/map/test_2.png
--------------------------------------------------------------------------------
/PathPlanning/WavefrontCPP/map/test_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/WavefrontCPP/map/test_3.png
--------------------------------------------------------------------------------
/PathPlanning/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathPlanning/__init__.py
--------------------------------------------------------------------------------
/PathTracking/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathTracking/__init__.py
--------------------------------------------------------------------------------
/PathTracking/lqr_steer_control/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathTracking/lqr_steer_control/__init__.py
--------------------------------------------------------------------------------
/PathTracking/move_to_pose/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/PathTracking/move_to_pose/__init__.py
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | In this project, we only support the latest code.
6 |
7 | | Version | Supported |
8 | | ------- | ------------------ |
9 | | latest | :white_check_mark: |
10 |
11 | ## Reporting a Vulnerability
12 |
13 | If you find any security-related problem, let us know by creating an issue about it.
14 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/data/README.rst:
--------------------------------------------------------------------------------
1 | Acknowledgments and References
2 | ==============================
3 |
4 | Thanks to Luca Carlone for allowing inclusion of the `Intel dataset `_ in this repo.
5 |
6 | 1. Carlone, L. and Censi, A., 2014. `From angular manifolds to the integer lattice: Guaranteed orientation estimation with application to pose graph optimization `_. IEEE Transactions on Robotics, 30(2), pp.475-492.
7 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/graphslam/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2020 Jeff Irion and contributors
2 | #
3 | # This file originated from the `graphslam` package:
4 | #
5 | # https://github.com/JeffLIrion/python-graphslam
6 |
7 | """Graph SLAM solver in Python.
8 |
9 | """
10 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/graphslam/edge/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2020 Jeff Irion and contributors
2 | #
3 | # This file originated from the `graphslam` package:
4 | #
5 | # https://github.com/JeffLIrion/python-graphslam
6 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/graphslam/load.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2020 Jeff Irion and contributors
2 | #
3 | # This file originated from the `graphslam` package:
4 | #
5 | # https://github.com/JeffLIrion/python-graphslam
6 |
7 | """Functions for loading graphs.
8 |
9 | """
10 |
11 | import logging
12 |
13 | import numpy as np
14 |
15 | from .edge.edge_odometry import EdgeOdometry
16 | from .graph import Graph
17 | from .pose.se2 import PoseSE2
18 | from .util import upper_triangular_matrix_to_full_matrix
19 | from .vertex import Vertex
20 |
21 | _LOGGER = logging.getLogger(__name__)
22 |
23 |
24 | def load_g2o_se2(infile):
25 | """Load an :math:`SE(2)` graph from a .g2o file.
26 |
27 | Parameters
28 | ----------
29 | infile : str
30 | The path to the .g2o file
31 |
32 | Returns
33 | -------
34 | Graph
35 | The loaded graph
36 |
37 | """
38 | edges = []
39 | vertices = []
40 |
41 | with open(infile) as f:
42 | for line in f.readlines():
43 | if line.startswith("VERTEX_SE2"):
44 | numbers = line[10:].split()
45 | arr = np.array([float(number) for number in numbers[1:]],
46 | dtype=float)
47 | p = PoseSE2(arr[:2], arr[2])
48 | v = Vertex(int(numbers[0]), p)
49 | vertices.append(v)
50 | continue
51 |
52 | if line.startswith("EDGE_SE2"):
53 | numbers = line[9:].split()
54 | arr = np.array([float(number) for number in numbers[2:]],
55 | dtype=float)
56 | vertex_ids = [int(numbers[0]), int(numbers[1])]
57 | estimate = PoseSE2(arr[:2], arr[2])
58 | information = upper_triangular_matrix_to_full_matrix(arr[3:], 3)
59 | e = EdgeOdometry(vertex_ids, information, estimate)
60 | edges.append(e)
61 | continue
62 |
63 | if line.strip():
64 | _LOGGER.warning("Line not supported -- '%s'", line.rstrip())
65 |
66 | return Graph(edges, vertices)
67 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/graphslam/pose/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2020 Jeff Irion and contributors
2 | #
3 | # This file originated from the `graphslam` package:
4 | #
5 | # https://github.com/JeffLIrion/python-graphslam
6 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/graphslam/util.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2020 Jeff Irion and contributors
2 | #
3 | # This file originated from the `graphslam` package:
4 | #
5 | # https://github.com/JeffLIrion/python-graphslam
6 |
7 | """Utility functions used throughout the package.
8 |
9 | """
10 |
11 | import numpy as np
12 |
13 |
14 | TWO_PI = 2 * np.pi
15 |
16 |
17 | def neg_pi_to_pi(angle):
18 | r"""Normalize ``angle`` to be in :math:`[-\pi, \pi)`.
19 |
20 | Parameters
21 | ----------
22 | angle : float
23 | An angle (in radians)
24 |
25 | Returns
26 | -------
27 | float
28 | The angle normalized to :math:`[-\pi, \pi)`
29 |
30 | """
31 | return (angle + np.pi) % (TWO_PI) - np.pi
32 |
33 |
34 | def solve_for_edge_dimensionality(n):
35 | r"""Solve for the dimensionality of an edge.
36 |
37 | In a .g2o file, an edge is specified as `` ``, where only the upper triangular portion of the matrix is provided.
38 |
39 | This solves the problem:
40 |
41 | .. math::
42 |
43 | d + \frac{d (d + 1)}{2} = n
44 |
45 | Returns
46 | -------
47 | int
48 | The dimensionality of the edge
49 |
50 | """
51 | return int(round(np.sqrt(2 * n + 2.25) - 1.5))
52 |
53 |
54 | def upper_triangular_matrix_to_full_matrix(arr, n):
55 | """Given an upper triangular matrix, return the full matrix.
56 |
57 | Parameters
58 | ----------
59 | arr : np.ndarray
60 | The upper triangular portion of the matrix
61 | n : int
62 | The size of the matrix
63 |
64 | Returns
65 | -------
66 | mat : np.ndarray
67 | The full matrix
68 |
69 | """
70 | triu0 = np.triu_indices(n, 0)
71 | tril1 = np.tril_indices(n, -1)
72 |
73 | mat = np.zeros((n, n), dtype=float)
74 | mat[triu0] = arr
75 | mat[tril1] = mat.T[tril1]
76 |
77 | return mat
78 |
--------------------------------------------------------------------------------
/SLAM/GraphBasedSLAM/graphslam/vertex.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2020 Jeff Irion and contributors
2 | #
3 | # This file originated from the `graphslam` package:
4 | #
5 | # https://github.com/JeffLIrion/python-graphslam
6 |
7 | """A ``Vertex`` class.
8 |
9 | """
10 |
11 | import matplotlib.pyplot as plt
12 |
13 |
14 | # pylint: disable=too-few-public-methods
15 | class Vertex:
16 | """A class for representing a vertex in Graph SLAM.
17 |
18 | Parameters
19 | ----------
20 | vertex_id : int
21 | The vertex's unique ID
22 | pose : graphslam.pose.se2.PoseSE2
23 | The pose associated with the vertex
24 | vertex_index : int, None
25 | The vertex's index in the graph's ``vertices`` list
26 |
27 | Attributes
28 | ----------
29 | id : int
30 | The vertex's unique ID
31 | index : int, None
32 | The vertex's index in the graph's ``vertices`` list
33 | pose : graphslam.pose.se2.PoseSE2
34 | The pose associated with the vertex
35 |
36 | """
37 | def __init__(self, vertex_id, pose, vertex_index=None):
38 | self.id = vertex_id
39 | self.pose = pose
40 | self.index = vertex_index
41 |
42 | def to_g2o(self):
43 | """Export the vertex to the .g2o format.
44 |
45 | Returns
46 | -------
47 | str
48 | The vertex in .g2o format
49 |
50 | """
51 | return "VERTEX_SE2 {} {} {} {}\n".format(self.id, self.pose[0], self.pose[1], self.pose[2])
52 |
53 | def plot(self, color='r', marker='o', markersize=3):
54 | """Plot the vertex.
55 |
56 | Parameters
57 | ----------
58 | color : str
59 | The color that will be used to plot the vertex
60 | marker : str
61 | The marker that will be used to plot the vertex
62 | markersize : int
63 | The size of the plotted vertex
64 |
65 | """
66 | x, y = self.pose.position
67 | plt.plot(x, y, color=color, marker=marker, markersize=markersize)
68 |
--------------------------------------------------------------------------------
/SLAM/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/SLAM/__init__.py
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/__init__.py
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-slate
2 | show_downloads: true
3 |
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | os: Visual Studio 2022
2 |
3 | environment:
4 | global:
5 | # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
6 | # /E:ON and /V:ON options are not enabled in the batch script intepreter
7 | # See: https://stackoverflow.com/a/13751649/163740
8 | CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"
9 |
10 | matrix:
11 | - PYTHON_DIR: C:\Python313-x64
12 |
13 | branches:
14 | only:
15 | - master
16 |
17 | init:
18 | - "ECHO %PYTHON_DIR%"
19 |
20 | install:
21 | # If there is a newer build queued for the same PR, cancel this one.
22 | # The AppVeyor 'rollout builds' option is supposed to serve the same
23 | # purpose but it is problematic because it tends to cancel builds pushed
24 | # directly to master instead of just PR builds (or the converse).
25 | # credits: JuliaLang developers.
26 | - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
27 | https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
28 | Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
29 | throw "There are newer queued builds for this pull request, failing early." }
30 | - ECHO "Filesystem root:"
31 | - ps: "ls \"C:/\""
32 |
33 | # Prepend newly installed Python to the PATH of this build (this cannot be
34 | # done from inside the powershell script as it would require to restart
35 | # the parent CMD process).
36 | - SET PATH=%PYTHON_DIR%;%PYTHON_DIR%\\Scripts;%PATH%
37 | - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PYTHON%\Library\bin;%PATH%
38 | - SET PATH=%PATH%;C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin
39 | - "python -m pip install --upgrade pip"
40 | - "python -m pip install -r requirements/requirements.txt"
41 | - "python -m pip install pytest-xdist"
42 |
43 | # Check that we have the expected version and architecture for Python
44 | - "python --version"
45 | - "python -c \"import struct; print(struct.calcsize('P') * 8)\""
46 |
47 | build: off
48 |
49 | test_script:
50 | - "pytest tests -n auto -Werror --durations=0"
51 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | # SPHINXOPTS with -W means turn warnings into errors to fail the build when warnings are present.
6 | SPHINXOPTS = -W
7 | SPHINXBUILD = sphinx-build
8 | SPHINXPROJ = PythonRobotics
9 | SOURCEDIR = .
10 | BUILDDIR = _build
11 |
12 | # Put it first so that "make" without argument is like "make help".
13 | help:
14 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
15 |
16 | .PHONY: help Makefile
17 |
18 | # Catch-all target: route all unknown targets to Sphinx using the new
19 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
20 | %: Makefile
21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | ## Python Robotics Documentation
2 |
3 | This folder contains documentation for the Python Robotics project.
4 |
5 |
6 | ### Build the Documentation
7 |
8 | #### Install Sphinx and Theme
9 |
10 | ```
11 | pip install sphinx sphinx-autobuild sphinx-rtd-theme sphinx_rtd_dark_mode sphinx_copybutton
12 | ```
13 |
14 | #### Building the Docs
15 |
16 | In the `docs/` folder:
17 | ```
18 | make html
19 | ```
20 |
21 | If you want to rebuild the docs each time a file is changed:
22 |
23 | ```
24 | sphinx-autobuild . _build/html
25 | ```
26 |
27 | #### Check the generated doc
28 |
29 | Open the index.html file under docs/_build/
30 |
--------------------------------------------------------------------------------
/docs/_static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/_static/.gitkeep
--------------------------------------------------------------------------------
/docs/_static/custom.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Necessary parts from
3 | * Sphinx stylesheet -- basic theme
4 | * absent from sphinx_rtd_theme
5 | * (see https://github.com/readthedocs/sphinx_rtd_theme/issues/301)
6 | * Ref https://github.com/PyAbel/PyAbel/commit/7e4dee81eac3f0a6955a85a4a42cf04a4e0d995c
7 | */
8 |
9 | /* -- math display ---------------------------------------------------------- */
10 |
11 | span.eqno {
12 | float: right;
13 | }
14 |
15 | span.eqno a.headerlink {
16 | position: absolute;
17 | z-index: 1;
18 | visibility: hidden;
19 | }
20 |
21 | div.math:hover a.headerlink {
22 | visibility: visible;
23 | }
24 |
--------------------------------------------------------------------------------
/docs/_static/img/doc_ci.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/_static/img/doc_ci.png
--------------------------------------------------------------------------------
/docs/_static/img/source_link_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/_static/img/source_link_1.png
--------------------------------------------------------------------------------
/docs/_static/img/source_link_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/_static/img/source_link_2.png
--------------------------------------------------------------------------------
/docs/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {% extends "!layout.html" %}
2 |
3 | {% block sidebartitle %}
4 | {{ super() }}
5 |
7 |
8 |
14 |
17 | {% endblock %}
18 |
--------------------------------------------------------------------------------
/docs/doc_requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx == 7.2.6 # For sphinx documentation
2 | sphinx_rtd_theme == 2.0.0
3 | IPython == 8.20.0 # For sphinx documentation
4 | sphinxcontrib-napoleon == 0.7 # For auto doc
5 | sphinx-copybutton
6 | sphinx-rtd-dark-mode
7 |
--------------------------------------------------------------------------------
/docs/index_main.rst:
--------------------------------------------------------------------------------
1 | .. PythonRobotics documentation master file, created by
2 | sphinx-quickstart on Sat Sep 15 13:15:55 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to PythonRobotics's documentation!
7 | ==========================================
8 |
9 | "PythonRobotics" is a Python code collections and textbook
10 | (This document) for robotics algorithm, which is developed on `GitHub`_.
11 |
12 | See this section (:ref:`What is PythonRobotics?`) for more details of this project.
13 |
14 | This project is `one of the most popular open-source software (OSS) projects in
15 | the field of robotics on GitHub`_.
16 | Here are `user comments about this project`_, and
17 | this graph shows the GitHub star history of this project:
18 |
19 | .. image:: https://api.star-history.com/svg?repos=AtsushiSakai/PythonRobotics&type=Date
20 | :alt: Star History
21 | :width: 80%
22 | :align: center
23 |
24 |
25 | .. _GitHub: https://github.com/AtsushiSakai/PythonRobotics
26 | .. _`user comments about this project`: https://github.com/AtsushiSakai/PythonRobotics/blob/master/users_comments.md
27 | .. _`MIT license`: https://github.com/AtsushiSakai/PythonRobotics/blob/master/LICENSE
28 | .. _`one of the most popular open-source software (OSS) projects in the field of robotics on GitHub`: https://github.com/topics/robotics
29 |
30 | ----------------------------------
31 |
32 | .. toctree::
33 | :maxdepth: 2
34 | :caption: Table of Contents
35 |
36 | modules/0_getting_started/0_getting_started
37 | modules/1_introduction/introduction
38 | modules/2_localization/localization
39 | modules/3_mapping/mapping
40 | modules/4_slam/slam
41 | modules/5_path_planning/path_planning
42 | modules/6_path_tracking/path_tracking
43 | modules/7_arm_navigation/arm_navigation
44 | modules/8_aerial_navigation/aerial_navigation
45 | modules/9_bipedal/bipedal
46 | modules/10_inverted_pendulum/inverted_pendulum
47 | modules/13_mission_planning/mission_planning
48 | modules/11_utils/utils
49 | modules/12_appendix/appendix
50 |
51 |
52 | Indices and tables
53 | ==================
54 |
55 | * :ref:`genindex`
56 | * :ref:`modindex`
57 | * :ref:`search`
58 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=PythonRobotics
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.https://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/docs/modules/0_getting_started/0_getting_started_main.rst:
--------------------------------------------------------------------------------
1 | .. _`getting started`:
2 |
3 | Getting Started
4 | ===============
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :caption: Contents
9 |
10 | 1_what_is_python_robotics
11 | 2_how_to_run_sample_codes
12 | 3_how_to_contribute
13 | 4_how_to_read_textbook
14 |
--------------------------------------------------------------------------------
/docs/modules/0_getting_started/4_how_to_read_textbook_main.rst:
--------------------------------------------------------------------------------
1 | .. _`How to read this textbook`:
2 |
3 | How to read this textbook
4 | --------------------------
5 |
6 | This document is structured to help you learn the fundamental concepts
7 | behind each sample code in PythonRobotics.
8 |
9 | If you already have some knowledge of robotics technologies, you can start
10 | by reading any document that interests you.
11 |
12 | However, if you have no prior knowledge of robotics technologies, it is
13 | recommended that you first read the :ref:`Introduction` section and then proceed
14 | to the documents related to the technical fields that interest you.
--------------------------------------------------------------------------------
/docs/modules/10_inverted_pendulum/inverted-pendulum.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/10_inverted_pendulum/inverted-pendulum.png
--------------------------------------------------------------------------------
/docs/modules/11_utils/plot/curvature_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/11_utils/plot/curvature_plot.png
--------------------------------------------------------------------------------
/docs/modules/11_utils/plot/plot_main.rst:
--------------------------------------------------------------------------------
1 | .. _plot_utils:
2 |
3 | Plotting Utilities
4 | -------------------
5 |
6 | This module contains a number of utility functions for plotting data.
7 |
8 | .. _plot_curvature:
9 |
10 | plot_curvature
11 | ~~~~~~~~~~~~~~~
12 |
13 | .. autofunction:: utils.plot.plot_curvature
14 |
15 | .. image:: curvature_plot.png
16 |
17 |
--------------------------------------------------------------------------------
/docs/modules/11_utils/utils_main.rst:
--------------------------------------------------------------------------------
1 | .. _`utils`:
2 |
3 | Utilities
4 | ==========
5 |
6 | Common utilities for PythonRobotics.
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 | :caption: Contents
11 |
12 | plot/plot
13 |
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_2_files/Kalmanfilter_basics_2_5_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_2_files/Kalmanfilter_basics_2_5_0.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_14_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_14_1.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_16_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_16_0.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_19_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_19_1.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_21_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_21_1.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_22_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_22_0.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_28_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/Kalmanfilter_basics_files/Kalmanfilter_basics_28_1.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/appendix_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Appendix`:
2 |
3 | Appendix
4 | ==============
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :caption: Contents
9 |
10 | steering_motion_model
11 | Kalmanfilter_basics
12 | Kalmanfilter_basics_2
13 | internal_sensors
14 | external_sensors
15 |
16 |
--------------------------------------------------------------------------------
/docs/modules/12_appendix/steering_motion_model/steering_model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/steering_motion_model/steering_model.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/steering_motion_model/turning_radius_calc1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/steering_motion_model/turning_radius_calc1.png
--------------------------------------------------------------------------------
/docs/modules/12_appendix/steering_motion_model/turning_radius_calc2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/12_appendix/steering_motion_model/turning_radius_calc2.png
--------------------------------------------------------------------------------
/docs/modules/13_mission_planning/mission_planning_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Mission Planning`:
2 |
3 | Mission Planning
4 | ================
5 |
6 | Mission planning includes tools such as finite state machines and behavior trees, which are used to describe robot behavior and high-level task planning.
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 | :caption: Contents
11 |
12 | state_machine/state_machine
13 | behavior_tree/behavior_tree
14 |
--------------------------------------------------------------------------------
/docs/modules/13_mission_planning/state_machine/robot_behavior_case.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/13_mission_planning/state_machine/robot_behavior_case.png
--------------------------------------------------------------------------------
/docs/modules/13_mission_planning/state_machine/state_machine_main.rst:
--------------------------------------------------------------------------------
1 | State Machine
2 | -------------
3 |
4 | A state machine is a model used to describe the transitions of an object between different states. It clearly shows how an object changes state based on events and may trigger corresponding actions.
5 |
6 | Core Concepts
7 | ~~~~~~~~~~~~~
8 |
9 | - **State**: A distinct mode or condition of the system (e.g. "Idle", "Running"). Managed by State class with optional on_enter/on_exit callbacks
10 | - **Event**: A trigger signal that may cause state transitions (e.g. "start", "stop")
11 | - **Transition**: A state change path from source to destination state triggered by an event
12 | - **Action**: An operation executed during transition (before entering new state)
13 | - **Guard**: A precondition that must be satisfied to allow transition
14 |
15 | Code Link
16 | ~~~~~~~~~~~
17 |
18 | .. autoclass:: MissionPlanning.StateMachine.state_machine.StateMachine
19 | :members: add_transition, process, register_state
20 | :special-members: __init__
21 |
22 | PlantUML Support
23 | ~~~~~~~~~~~~~~~~
24 |
25 | The ``generate_plantuml()`` method creates diagrams showing:
26 |
27 | - Current state (marked with [*] arrow)
28 | - All possible transitions
29 | - Guard conditions in [brackets]
30 | - Actions prefixed with /
31 |
32 | Example
33 | ~~~~~~~
34 |
35 | state machine diagram:
36 | +++++++++++++++++++++++
37 | .. image:: robot_behavior_case.png
38 |
39 | state transition table:
40 | +++++++++++++++++++++++
41 | .. list-table:: State Transitions
42 | :header-rows: 1
43 | :widths: 20 15 20 20 20
44 |
45 | * - Source State
46 | - Event
47 | - Target State
48 | - Guard
49 | - Action
50 | * - patrolling
51 | - detect_task
52 | - executing_task
53 | -
54 | -
55 | * - executing_task
56 | - task_complete
57 | - patrolling
58 | -
59 | - reset_task
60 | * - executing_task
61 | - low_battery
62 | - returning_to_base
63 | - is_battery_low
64 | -
65 | * - returning_to_base
66 | - reach_base
67 | - charging
68 | -
69 | -
70 | * - charging
71 | - charge_complete
72 | - patrolling
73 | -
74 | -
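
The transition table above can also be reproduced as a small, self-contained sketch of these
concepts (state, event, transition, guard, action). This is only an illustration; it does not
use the project's ``StateMachine`` API, and names such as ``battery`` are made up:

.. code-block:: python

    battery = 15  # hypothetical battery level in percent

    def is_battery_low():
        return battery < 20

    def reset_task():
        print("task reset")

    # (source state, event) -> (target state, guard, action)
    transitions = {
        ("patrolling", "detect_task"): ("executing_task", None, None),
        ("executing_task", "task_complete"): ("patrolling", None, reset_task),
        ("executing_task", "low_battery"): ("returning_to_base", is_battery_low, None),
        ("returning_to_base", "reach_base"): ("charging", None, None),
        ("charging", "charge_complete"): ("patrolling", None, None),
    }

    def process(state, event):
        target, guard, action = transitions.get((state, event), (state, None, None))
        if guard is not None and not guard():
            return state  # guard rejected the transition
        if action is not None:
            action()      # the action runs before entering the new state
        return target

    state = "patrolling"
    for event in ["detect_task", "low_battery", "reach_base", "charge_complete"]:
        state = process(state, event)
        print(event, "->", state)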
--------------------------------------------------------------------------------
/docs/modules/1_introduction/3_technologies_for_robotics/technologies_for_robotics_main.rst:
--------------------------------------------------------------------------------
1 | Technologies for Robotics
2 | -------------------------
3 |
4 | The field of robotics requires a wide range of technologies, such as mechanical engineering,
5 | electrical engineering, computer science, and artificial intelligence (AI).
6 | This project, `PythonRobotics`, focuses only on computer science and artificial intelligence.
7 | 
8 | The technologies for robotics are categorized into the following 3 categories:
9 |
10 | #. `Autonomous Navigation`_
11 | #. `Manipulation`_
12 | #. `Robot type specific technologies`_
13 |
14 | .. _`Autonomous Navigation`:
15 |
16 | Autonomous Navigation
17 | ^^^^^^^^^^^^^^^^^^^^^^^^
18 | Autonomous navigation is the capability of moving to a goal over long
19 | periods of time without any external control by an operator.
20 | 
21 | To achieve autonomous navigation, the robot needs the following technologies:
22 | - It needs to know where it is (localization)
23 | - Where it is safe (mapping)
24 | - Where it is safe and where the robot is in the map at the same time (Simultaneous Localization and Mapping (SLAM))
25 | - Where and how to move (path planning)
26 | - How to control its motion along the planned path (path tracking).
27 |
28 | The autonomous system would not work correctly if any of these technologies is missing.
29 |
30 | In recent years, autonomous navigation technologies have received huge
31 | attention in many fields,
32 | for example self-driving cars, drones, and autonomous mobile robots in indoor and outdoor environments.
33 | 
34 | In this project, we provide many algorithms, sample code,
35 | and documentation for autonomous navigation.
36 |
37 | #. :ref:`Localization`
38 | #. :ref:`Mapping`
39 | #. :ref:`SLAM`
40 | #. :ref:`Path planning`
41 | #. :ref:`Path tracking`
42 |
43 |
44 |
45 | .. _`Manipulation`:
46 |
47 | Manipulation
48 | ^^^^^^^^^^^^^^^^^^^^^^^^
49 |
50 | #. :ref:`Arm Navigation`
51 |
52 | .. _`Robot type specific technologies`:
53 |
54 | Robot type specific technologies
55 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
56 |
57 | #. :ref:`Aerial Navigation`
58 | #. :ref:`Bipedal`
59 | #. :ref:`Inverted Pendulum`
60 |
61 |
62 | Additional Information
63 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
64 |
65 | #. :ref:`utils`
66 | #. :ref:`Appendix`
67 |
68 |
69 |
--------------------------------------------------------------------------------
/docs/modules/1_introduction/introduction_main.rst:
--------------------------------------------------------------------------------
1 | .. _Introduction:
2 |
3 | Introduction
4 | ============
5 |
6 | PythonRobotics is composed of two words: "Python" and "Robotics".
7 | Therefore, I will first explain these two topics, Robotics and Python.
8 | After that, I will provide an overview of the robotics technologies
9 | covered in PythonRobotics.
10 |
11 | .. toctree::
12 | :maxdepth: 2
13 | :caption: Table of Contents
14 |
15 | 1_definition_of_robotics/definition_of_robotics
16 | 2_python_for_robotics/python_for_robotics
17 | 3_technologies_for_robotics/technologies_for_robotics
18 |
19 |
--------------------------------------------------------------------------------
/docs/modules/2_localization/ensamble_kalman_filter_localization_files/ensamble_kalman_filter_localization_main.rst:
--------------------------------------------------------------------------------
1 | Ensemble Kalman Filter Localization
2 | -----------------------------------
3 |
4 | .. figure:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Localization/ensamble_kalman_filter/animation.gif
5 |
6 | This is a sensor fusion localization with an Ensemble Kalman Filter (EnKF).
7 |
8 | Code Link
9 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
10 |
11 | .. autofunction:: Localization.ensemble_kalman_filter.ensemble_kalman_filter.enkf_localization
12 |
13 |
--------------------------------------------------------------------------------
/docs/modules/2_localization/extended_kalman_filter_localization_files/ekf_with_velocity_correction_1_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/2_localization/extended_kalman_filter_localization_files/ekf_with_velocity_correction_1_0.png
--------------------------------------------------------------------------------
/docs/modules/2_localization/extended_kalman_filter_localization_files/extended_kalman_filter_localization_1_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/2_localization/extended_kalman_filter_localization_files/extended_kalman_filter_localization_1_0.png
--------------------------------------------------------------------------------
/docs/modules/2_localization/histogram_filter_localization/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/2_localization/histogram_filter_localization/1.png
--------------------------------------------------------------------------------
/docs/modules/2_localization/histogram_filter_localization/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/2_localization/histogram_filter_localization/2.png
--------------------------------------------------------------------------------
/docs/modules/2_localization/histogram_filter_localization/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/2_localization/histogram_filter_localization/3.png
--------------------------------------------------------------------------------
/docs/modules/2_localization/histogram_filter_localization/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/2_localization/histogram_filter_localization/4.png
--------------------------------------------------------------------------------
/docs/modules/2_localization/localization_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Localization`:
2 |
3 | Localization
4 | ============
5 | Localization is the ability of a robot to know its position and orientation with sensors such as a Global Navigation Satellite System (GNSS). In localization, Bayesian filters such as Kalman filters, histogram filters, and particle filters are widely used[31]. Fig.2 shows localization simulations using a histogram filter and a particle filter.
6 |
7 | .. toctree::
8 | :maxdepth: 2
9 | :caption: Contents
10 |
11 | extended_kalman_filter_localization_files/extended_kalman_filter_localization
12 | ensamble_kalman_filter_localization_files/ensamble_kalman_filter_localization
13 | unscented_kalman_filter_localization/unscented_kalman_filter_localization
14 | histogram_filter_localization/histogram_filter_localization
15 | particle_filter_localization/particle_filter_localization
16 |
17 |
18 |
--------------------------------------------------------------------------------
/docs/modules/2_localization/particle_filter_localization/particle_filter_localization_main.rst:
--------------------------------------------------------------------------------
1 | Particle filter localization
2 | ----------------------------
3 |
4 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Localization/particle_filter/animation.gif
5 |
6 | This is a sensor fusion localization with Particle Filter(PF).
7 |
8 | The blue line is the true trajectory, the black line is the dead reckoning
9 | trajectory,
10 | 
11 | and the red line is the trajectory estimated with PF.
12 | 
13 | It is assumed that the robot can measure distances from landmarks
14 | (RFID).
15 | 
16 | These measurements are used for PF localization.
17 |
18 | Code Link
19 | ~~~~~~~~~~~~~
20 |
21 | .. autofunction:: Localization.particle_filter.particle_filter.pf_localization
22 |
23 |
24 | How to calculate covariance matrix from particles
25 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
26 |
27 | The covariance matrix :math:`\Xi` from particle information is calculated by the following equation:
28 |
29 | .. math:: \Xi_{j,k}=\frac{1}{1-\sum^N_{i=1}(w^i)^2}\sum^N_{i=1}w^i(x^i_j-\mu_j)(x^i_k-\mu_k)
30 |
31 | - :math:`\Xi_{j,k}` is the covariance matrix element at row :math:`j` and column :math:`k`.
32 |
33 | - :math:`w^i` is the weight of the :math:`i` th particle.
34 |
35 | - :math:`x^i_j` is the :math:`j` th state of the :math:`i` th particle.
36 |
37 | - :math:`\mu_j` is the :math:`j` th mean state of particles.
38 |
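
As a rough illustration of this equation (a NumPy sketch, not the project's ``pf_localization``
code; the array shapes below are assumptions), the covariance can be computed as:

.. code-block:: python

    import numpy as np

    def particle_covariance(particles, weights):
        """Weighted covariance of particles, following the equation above.

        particles: (N, n_state) array, weights: (N,) normalized particle weights.
        """
        mu = weights @ particles                  # weighted mean of the particles
        dx = particles - mu                       # deviations from the mean
        cov = (weights[:, None] * dx).T @ dx      # sum_i w^i (x^i - mu)(x^i - mu)^T
        return cov / (1.0 - np.sum(weights ** 2))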
39 | Reference
40 | ~~~~~~~~~~~
41 |
42 | - `PROBABILISTIC ROBOTICS `_
43 | - `Improving the particle filter in high dimensions using conjugate artificial process noise `_
44 |
--------------------------------------------------------------------------------
/docs/modules/2_localization/unscented_kalman_filter_localization/unscented_kalman_filter_localization_main.rst:
--------------------------------------------------------------------------------
1 | Unscented Kalman Filter localization
2 | ------------------------------------
3 |
4 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Localization/unscented_kalman_filter/animation.gif
5 |
6 | This is a sensor fusion localization with Unscented Kalman Filter(UKF).
7 |
8 | The lines and points have the same meaning as in the EKF simulation.
9 |
10 | Code Link
11 | ~~~~~~~~~~~~~
12 |
13 | .. autofunction:: Localization.unscented_kalman_filter.unscented_kalman_filter.ukf_estimation
14 |
15 |
16 | Reference
17 | ~~~~~~~~~~~
18 |
19 | - `Discriminatively Trained Unscented Kalman Filter for Mobile Robot Localization `_
20 |
--------------------------------------------------------------------------------
/docs/modules/3_mapping/circle_fitting/circle_fitting_main.rst:
--------------------------------------------------------------------------------
1 | Object shape recognition using circle fitting
2 | ---------------------------------------------
3 |
4 | This is an object shape recognition using circle fitting.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Mapping/circle_fitting/animation.gif
7 |
8 | The blue circle is the true object shape.
9 |
10 | The red crosses are observations from a ranging sensor.
11 |
12 | The red circle is the estimated object shape using circle fitting.
13 |
14 | Code Link
15 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
16 |
17 | .. autofunction:: Mapping.circle_fitting.circle_fitting.circle_fitting
18 |
--------------------------------------------------------------------------------
/docs/modules/3_mapping/distance_map/distance_map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/distance_map/distance_map.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/distance_map/distance_map_main.rst:
--------------------------------------------------------------------------------
1 | Distance Map
2 | ------------
3 |
4 | This is an implementation of the Distance Map algorithm for path planning.
5 |
6 | The Distance Map algorithm computes the unsigned distance field (UDF) and signed distance field (SDF) from a boolean field representing obstacles.
7 |
8 | The UDF gives the distance from each point to the nearest obstacle. The SDF gives positive distances for points outside obstacles and negative distances for points inside obstacles.
9 |
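
For intuition, the same UDF/SDF idea can be sketched with SciPy's Euclidean distance transform
(this is only an illustration, not the repository's implementation):

.. code-block:: python

    import numpy as np
    from scipy.ndimage import distance_transform_edt

    obstacles = np.zeros((50, 50), dtype=bool)
    obstacles[20:30, 20:30] = True                  # a square obstacle

    udf = distance_transform_edt(~obstacles)        # distance to the nearest obstacle cell
    sdf = udf - distance_transform_edt(obstacles)   # negative inside obstacles, positive outside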
10 | Example
11 | ~~~~~~~
12 |
13 | The algorithm is demonstrated on a simple 2D grid with obstacles:
14 |
15 | .. image:: distance_map.png
16 |
17 | Code Link
18 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
19 |
20 | .. autofunction:: Mapping.DistanceMap.distance_map.compute_sdf
21 |
22 | .. autofunction:: Mapping.DistanceMap.distance_map.compute_udf
23 |
24 | References
25 | ~~~~~~~~~~
26 |
27 | - `Distance Transforms of Sampled Functions `_ paper by Pedro F. Felzenszwalb and Daniel P. Huttenlocher.
--------------------------------------------------------------------------------
/docs/modules/3_mapping/gaussian_grid_map/gaussian_grid_map_main.rst:
--------------------------------------------------------------------------------
1 | .. _gaussian_grid_map:
2 |
3 | Gaussian grid map
4 | -----------------
5 |
6 | This is a 2D Gaussian grid mapping example.
7 |
8 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Mapping/gaussian_grid_map/animation.gif
9 |
10 | Code Link
11 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
12 |
13 | .. autofunction:: Mapping.gaussian_grid_map.gaussian_grid_map.generate_gaussian_grid_map
14 |
15 |
--------------------------------------------------------------------------------
/docs/modules/3_mapping/k_means_object_clustering/k_means_object_clustering_main.rst:
--------------------------------------------------------------------------------
1 | k-means object clustering
2 | -------------------------
3 |
4 | This is a 2D object clustering with k-means algorithm.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Mapping/kmeans_clustering/animation.gif
7 |
8 | Code Link
9 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
10 |
11 | .. autofunction:: Mapping.kmeans_clustering.kmeans_clustering.kmeans_clustering
12 |
13 |
--------------------------------------------------------------------------------
/docs/modules/3_mapping/lidar_to_grid_map_tutorial/grid_map_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/lidar_to_grid_map_tutorial/grid_map_example.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_12_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_12_0.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_14_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_14_1.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_5_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_5_0.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_7_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_7_0.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_8_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial_8_0.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/mapping_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Mapping`:
2 |
3 | Mapping
4 | =======
5 | Mapping is the ability of a robot to understand its surroundings with external sensors such as LIDAR and cameras. Robots have to recognize the position and shape of obstacles to avoid them. In mapping, grid mapping and machine learning algorithms are widely used[31][18]. Fig.3 shows mapping simulation results using grid mapping with 2D ray casting and 2D object clustering with the k-means algorithm.
6 |
7 | .. toctree::
8 | :maxdepth: 2
9 | :caption: Contents
10 |
11 | gaussian_grid_map/gaussian_grid_map
12 | ndt_map/ndt_map
13 | ray_casting_grid_map/ray_casting_grid_map
14 | lidar_to_grid_map_tutorial/lidar_to_grid_map_tutorial
15 | point_cloud_sampling/point_cloud_sampling
16 | k_means_object_clustering/k_means_object_clustering
17 | circle_fitting/circle_fitting
18 | rectangle_fitting/rectangle_fitting
19 | normal_vector_estimation/normal_vector_estimation
20 | distance_map/distance_map
21 |
--------------------------------------------------------------------------------
/docs/modules/3_mapping/ndt_map/grid_clustering.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/ndt_map/grid_clustering.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/ndt_map/ndt_map1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/ndt_map/ndt_map1.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/ndt_map/ndt_map2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/ndt_map/ndt_map2.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/ndt_map/ndt_map_main.rst:
--------------------------------------------------------------------------------
1 | .. _ndt_map:
2 |
3 | Normal Distributions Transform (NDT) map
4 | -----------------------------------------
5 |
6 | This is an NDT mapping example.
7 | 
8 | The Normal Distributions Transform (NDT) is a map representation that uses normal distributions to model observation points.
9 |
10 | Normal Distribution
11 | ~~~~~~~~~~~~~~~~~~~~~
12 |
13 | A normal distribution is characterized by two parameters: the mean :math:`\mu` and the covariance :math:`\Sigma`.
14 |
15 | :math:`\mathbf{X} \sim \mathcal{N}(\boldsymbol{\mu}, \boldsymbol{\Sigma})`
16 |
17 | In the 2D case, :math:`\boldsymbol{\mu}` is a 2D vector and :math:`\boldsymbol{\Sigma}` is a 2x2 matrix.
18 |
19 | In matrix form, the probability density function of the normal distribution is:
20 | 
21 | :math:`p(x)=\frac{1}{\sqrt{(2 \pi)^2|\Sigma|}} \exp \left\{-\frac{1}{2}(x-\mu)^T \Sigma^{-1}(x-\mu)\right\}`
22 |
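
As a quick numerical check of this density (a sketch using SciPy, separate from the NDT code
linked below):

.. code-block:: python

    import numpy as np
    from scipy.stats import multivariate_normal

    mu = np.array([1.0, 2.0])
    sigma = np.array([[0.5, 0.1],
                      [0.1, 0.3]])

    print(multivariate_normal(mean=mu, cov=sigma).pdf([1.2, 1.8]))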
23 | Normal Distributions Transform mapping steps
24 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25 |
26 | NDT mapping consists of two steps:
27 |
28 | When we have a new observation like this:
29 |
30 | .. figure:: raw_observations.png
31 |
32 | First, we need to cluster the observation points.
33 | This is done by using a grid based clustering algorithm.
34 |
35 | The result is:
36 |
37 | .. figure:: grid_clustering.png
38 |
39 | Then, we need to fit a normal distribution to each grid cluster.
40 |
41 | Black ellipse shows each NDT grid like this:
42 |
43 | .. figure:: ndt_map1.png
44 |
45 | .. figure:: ndt_map2.png
46 |
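
As a minimal sketch of the fitting step (independent of the ``NDTMap`` class below), the mean and
covariance of the points in one grid cell can be computed with NumPy:

.. code-block:: python

    import numpy as np

    def fit_ndt_cell(points):
        """Fit a 2D normal distribution to the points assigned to one grid cell.

        points: (N, 2) array of observation points, with N >= 2.
        """
        mu = points.mean(axis=0)     # 2D mean vector
        sigma = np.cov(points.T)     # 2x2 covariance matrix
        return mu, sigma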
47 | API
48 | ~~~~~
49 |
50 | .. autoclass:: Mapping.ndt_map.ndt_map.NDTMap
51 | :members:
52 | :class-doc-from: class
53 |
54 |
--------------------------------------------------------------------------------
/docs/modules/3_mapping/ndt_map/raw_observations.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/ndt_map/raw_observations.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/normal_vector_estimation/normal_vector_calc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/normal_vector_estimation/normal_vector_calc.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/normal_vector_estimation/normal_vector_estimation_main.rst:
--------------------------------------------------------------------------------
1 | Normal vector estimation
2 | -------------------------
3 |
4 |
5 | Normal vector calculation of a 3D triangle
6 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
7 |
8 | A 3D point is represented as a vector:
9 |
10 | .. math:: p = [x, y, z]
11 |
12 | When there are 3 points in 3D space, :math:`p_1, p_2, p_3`,
13 |
14 | we can calculate a normal vector n of the 3D triangle formed by these points.
15 |
16 | .. math:: n = \frac{v_1 \times v_2}{|v_1 \times v_2|}
17 |
18 | where
19 |
20 | .. math:: v_1 = p_2 - p_1
21 |
22 | .. math:: v_2 = p_3 - p_1
23 |
24 | This is an example of normal vector calculation:
25 |
26 | .. figure:: normal_vector_calc.png
27 |
28 | Code Link
29 | ==========
30 |
31 | .. autofunction:: Mapping.normal_vector_estimation.normal_vector_estimation.calc_normal_vector
32 |
33 | Normal vector estimation with RANdom SAmple Consensus (RANSAC)
34 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
35 |
36 | Consider the problem of estimating the normal vector of a plane from a
37 | set of N 3D points observed on that plane.
38 |
39 | One approach uses all of the point cloud data to estimate a plane and
40 | its normal vector with the `least-squares method `_.
41 |
42 | However, this method is vulnerable to noise in the point cloud.
43 |
44 | In this document, we use a method based on
45 | `RANdom SAmple Consensus (RANSAC) `_
46 | to estimate a plane and a normal vector.
47 |
48 | RANSAC is a robust estimation method for data sets with outliers.
49 |
50 |
51 |
52 | This RANSAC based normal vector estimation method is as follows:
53 |
54 | #. Select 3 points randomly from the point cloud.
55 |
56 | #. Calculate the normal vector of the plane formed by the 3 sampled points.
57 |
58 | #. Calculate the distance between the calculated plane and all points in the point cloud.
59 |
60 | #. If a point's distance is less than a threshold, that point is considered to be an inlier.
61 |
62 | #. Repeat the above steps until the inlier ratio is greater than a threshold.
63 |
64 |
65 |
66 | This is an example of RANSAC based normal vector estimation:
67 |
68 | .. figure:: ransac_normal_vector_estimation.png
69 |
70 | Code Link
71 | ==========
72 |
73 | .. autofunction:: Mapping.normal_vector_estimation.normal_vector_estimation.ransac_normal_vector_estimation
74 |
75 |
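76 | Example
77 | ~~~~~~~~~
78 |
79 | The triangle-normal formula above translates directly into NumPy. The sketch
80 | below is only an illustration, not the repository implementation, and the
81 | function name is chosen just for this example.
82 |
83 | .. code-block:: python
84 |
85 |     import numpy as np
86 |
87 |     def triangle_normal(p1, p2, p3):
88 |         """Unit normal vector of the triangle (p1, p2, p3)."""
89 |         v1 = np.asarray(p2) - np.asarray(p1)
90 |         v2 = np.asarray(p3) - np.asarray(p1)
91 |         n = np.cross(v1, v2)
92 |         return n / np.linalg.norm(n)
93 |
94 |     # three points on the z = 0 plane give the +z axis as the normal
95 |     print(triangle_normal([0, 0, 0], [1, 0, 0], [0, 1, 0]))  # [0. 0. 1.]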
--------------------------------------------------------------------------------
/docs/modules/3_mapping/normal_vector_estimation/ransac_normal_vector_estimation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/normal_vector_estimation/ransac_normal_vector_estimation.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/point_cloud_sampling/farthest_point_sampling.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/point_cloud_sampling/farthest_point_sampling.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/point_cloud_sampling/point_cloud_sampling_main.rst:
--------------------------------------------------------------------------------
1 | .. _point_cloud_sampling:
2 |
3 | Point Cloud Sampling
4 | ----------------------
5 |
6 | This section explains point cloud sampling algorithms in PythonRobotics.
7 |
8 | Point clouds are two- or three-dimensional data acquired by external
9 | sensors such as LiDAR or cameras.
10 | In general, point cloud data contains a very large number of points,
11 | so processing all of them can make computation time an issue.
12 |
13 | Point cloud sampling is a technique for addressing this computational
14 | issue by extracting only representative points, thinning the point
15 | cloud data without compromising the performance of the processing that
16 | uses it.
17 |
18 | Voxel Point Sampling
19 | ~~~~~~~~~~~~~~~~~~~~~~~~
20 | .. figure:: voxel_point_sampling.png
21 |
22 | Voxel grid sampling is a method of reducing point cloud data by using
23 | `Voxel grids `_, which are regular grids
24 | in three-dimensional space.
25 |
26 | This method determines which voxel each point falls into, and replaces all
27 | points in the same voxel with their average to reduce the number of
28 | points.
29 |
30 | Code Link
31 | ==========
32 |
33 | .. autofunction:: Mapping.point_cloud_sampling.point_cloud_sampling.voxel_point_sampling
34 |
35 |
36 | Farthest Point Sampling
37 | ~~~~~~~~~~~~~~~~~~~~~~~~~
38 | .. figure:: farthest_point_sampling.png
39 |
40 | Farthest Point Sampling is a point cloud sampling method that selects a
41 | specified number of points so that the sampled points are as far apart
42 | from each other as possible.
43 |
44 | This method is useful for machine learning and other situations where
45 | you want to obtain a specified number of points from a point cloud.
46 |
47 | Code Link
48 | ==========
49 |
50 | .. autofunction:: Mapping.point_cloud_sampling.point_cloud_sampling.farthest_point_sampling
51 |
52 | Poisson Disk Sampling
53 | ~~~~~~~~~~~~~~~~~~~~~~~~~
54 | .. figure:: poisson_disk_sampling.png
55 |
56 | Poisson disk sampling is a point cloud sampling method that selects a
57 | specified number of points such that each selected point is farther
58 | than a certain distance from every previously selected point.
59 |
60 | Although it does not spread the points as evenly as Farthest Point
61 | Sampling, it is suitable for real-time processing because of its fast
62 | computation time.
63 |
64 | Code Link
65 | ==========
66 |
67 | .. autofunction:: Mapping.point_cloud_sampling.point_cloud_sampling.poisson_disk_sampling
68 |
69 |
70 |
71 |
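72 | Example
73 | ~~~~~~~~~
74 |
75 | As a rough illustration of the voxel sampling idea above, the sketch below
76 | groups points by voxel index and replaces each group with its centroid. It is
77 | a simplified stand-in for the repository functions; the voxel size and the
78 | function name are assumptions made for this example.
79 |
80 | .. code-block:: python
81 |
82 |     import numpy as np
83 |
84 |     def voxel_downsample(points, voxel_size=0.5):
85 |         """Replace all points falling in the same voxel with their centroid.
86 |
87 |         points: (N, 3) array. Returns an (M, 3) array with M <= N.
88 |         """
89 |         voxel_ids = np.floor(points / voxel_size).astype(int)
90 |         groups = {}
91 |         for vid, p in zip(map(tuple, voxel_ids), points):
92 |             groups.setdefault(vid, []).append(p)
93 |         return np.array([np.mean(ps, axis=0) for ps in groups.values()])
94 |
95 |     rng = np.random.default_rng(0)
96 |     cloud = rng.random((1000, 3)) * 10.0
97 |     print(voxel_downsample(cloud).shape)  # far fewer than 1000 points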
--------------------------------------------------------------------------------
/docs/modules/3_mapping/point_cloud_sampling/poisson_disk_sampling.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/point_cloud_sampling/poisson_disk_sampling.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/point_cloud_sampling/voxel_point_sampling.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/3_mapping/point_cloud_sampling/voxel_point_sampling.png
--------------------------------------------------------------------------------
/docs/modules/3_mapping/ray_casting_grid_map/ray_casting_grid_map_main.rst:
--------------------------------------------------------------------------------
1 | Ray casting grid map
2 | --------------------
3 |
4 | This is a 2D ray casting grid mapping example.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Mapping/raycasting_grid_map/animation.gif
7 |
8 | Code Link
9 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
10 |
11 | .. autofunction:: Mapping.ray_casting_grid_map.ray_casting_grid_map.generate_ray_casting_grid_map
12 |
--------------------------------------------------------------------------------
/docs/modules/4_slam/FastSLAM1/FastSLAM1_12_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/FastSLAM1/FastSLAM1_12_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/FastSLAM1/FastSLAM1_12_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/FastSLAM1/FastSLAM1_12_1.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/FastSLAM1/FastSLAM1_1_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/FastSLAM1/FastSLAM1_1_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/FastSLAM2/FastSLAM2_main.rst:
--------------------------------------------------------------------------------
1 | FastSLAM 2.0
2 | ------------
3 |
4 | This is a feature based SLAM example using FastSLAM 2.0.
5 |
6 | The animation has the same meaning as that of FastSLAM 1.0.
7 |
8 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/SLAM/FastSLAM2/animation.gif
9 |
10 | Code Link
11 | ~~~~~~~~~~~
12 |
13 | .. autofunction:: SLAM.FastSLAM2.fast_slam2.fast_slam2
14 |
15 |
16 | References
17 | ~~~~~~~~~~
18 |
19 | - `PROBABILISTIC ROBOTICS `_
20 |
21 | - `SLAM simulations by Tim Bailey `_
22 |
23 |
--------------------------------------------------------------------------------
/docs/modules/4_slam/ekf_slam/ekf_slam_1_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/ekf_slam/ekf_slam_1_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_13_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_13_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_15_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_15_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_16_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_16_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_4_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_4_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_8_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_8_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_9_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_SE2_example_files/graphSLAM_SE2_example_9_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_11_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_11_1.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_11_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_11_2.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_2_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_2_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_2_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_2_2.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_4_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_4_0.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_9_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/4_slam/graph_slam/graphSLAM_doc_files/graphSLAM_doc_9_1.png
--------------------------------------------------------------------------------
/docs/modules/4_slam/graph_slam/graph_slam_main.rst:
--------------------------------------------------------------------------------
1 | Graph based SLAM
2 | ----------------
3 |
4 | This is a graph based SLAM example.
5 |
6 | The blue line is ground truth.
7 |
8 | The black line is dead reckoning.
9 |
10 | The red line is the estimated trajectory with Graph based SLAM.
11 |
12 | The black stars are landmarks for graph edge generation.
13 |
14 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/SLAM/GraphBasedSLAM/animation.gif
15 |
16 | Code Link
17 | ~~~~~~~~~~~~
18 |
19 | .. autofunction:: SLAM.GraphBasedSLAM.graph_based_slam.graph_based_slam
20 |
21 |
22 | .. include:: graphSLAM_doc.rst
23 | .. include:: graphSLAM_formulation.rst
24 | .. include:: graphSLAM_SE2_example.rst
25 |
26 | Reference
27 | ~~~~~~~~~~~
28 |
29 | - `A Tutorial on Graph-Based SLAM `_
30 |
31 |
--------------------------------------------------------------------------------
/docs/modules/4_slam/iterative_closest_point_matching/iterative_closest_point_matching_main.rst:
--------------------------------------------------------------------------------
1 | .. _iterative-closest-point-(icp)-matching:
2 |
3 | Iterative Closest Point (ICP) Matching
4 | --------------------------------------
5 |
6 | This is a 2D ICP matching example with singular value decomposition.
7 |
8 | It can calculate a rotation matrix and a translation vector between
9 | two point sets.
10 |
11 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/SLAM/iterative_closest_point/animation.gif
12 |
13 | Code Link
14 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
15 |
16 | .. autofunction:: SLAM.ICPMatching.icp_matching.icp_matching
17 |
18 |
19 | References
20 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
21 |
22 | - `Introduction to Mobile Robotics: Iterative Closest Point Algorithm `_
23 |
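24 | Example
25 | ^^^^^^^^^
26 |
27 | The singular value decomposition step mentioned above can be illustrated in a
28 | few lines of NumPy. The sketch below aligns two already-matched 2D point sets
29 | in one shot; it is not the repository's ``icp_matching`` function, and the
30 | function name is chosen just for this example.
31 |
32 | .. code-block:: python
33 |
34 |     import numpy as np
35 |
36 |     def svd_rigid_transform(previous_points, current_points):
37 |         """Rotation R and translation t mapping previous_points onto current_points.
38 |
39 |         Both inputs are (N, 2) arrays whose rows are already matched.
40 |         """
41 |         mu_prev = previous_points.mean(axis=0)
42 |         mu_curr = current_points.mean(axis=0)
43 |         # cross-covariance of the centered point sets
44 |         W = (previous_points - mu_prev).T @ (current_points - mu_curr)
45 |         U, _, Vt = np.linalg.svd(W)
46 |         R = (U @ Vt).T  # a determinant check for reflections is omitted for brevity
47 |         t = mu_curr - R @ mu_prev
48 |         return R, t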
--------------------------------------------------------------------------------
/docs/modules/4_slam/slam_main.rst:
--------------------------------------------------------------------------------
1 | .. _`SLAM`:
2 |
3 | SLAM
4 | ====
5 |
6 | Simultaneous Localization and Mapping (SLAM) examples.
7 | Simultaneous Localization and Mapping (SLAM) is the ability to estimate the pose of a robot and the map of the environment at the same time. The SLAM problem is hard to
8 | solve, because a map is needed for localization and localization is needed for mapping. In this way, SLAM is often said to be a ‘chicken-and-egg’ problem. Popular SLAM solution methods include the extended Kalman filter, the particle filter, and the FastSLAM algorithm [31]. Simulation results using the extended Kalman filter and FastSLAM 2.0 [31] are shown in the sections below.
9 |
10 | .. toctree::
11 | :maxdepth: 2
12 | :caption: Contents
13 |
14 | iterative_closest_point_matching/iterative_closest_point_matching
15 | ekf_slam/ekf_slam
16 | FastSLAM1/FastSLAM1
17 | FastSLAM2/FastSLAM2
18 | graph_slam/graph_slam
19 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bezier_path/Figure_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bezier_path/Figure_1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bezier_path/Figure_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bezier_path/Figure_2.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bezier_path/bezier_path_main.rst:
--------------------------------------------------------------------------------
1 | Bezier path planning
2 | --------------------
3 |
4 | This is sample code for Bezier path planning.
5 |
6 | It is based on a Bezier path with 4 control points.
7 |
8 | .. image:: Figure_1.png
9 |
10 | If you change the offset distance from the start and end points,
11 |
12 | you can get a different Bezier course:
13 |
14 | .. image:: Figure_2.png
15 |
16 | Code Link
17 | ~~~~~~~~~~~~~~~
18 |
19 | .. autofunction:: PathPlanning.BezierPath.bezier_path.calc_4points_bezier_path
20 |
21 |
22 | Reference
23 | ~~~~~~~~~~~~~~~
24 |
25 | - `Continuous Curvature Path Generation Based on Bezier Curves for
26 | Autonomous
27 | Vehicles `__
28 |
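29 | Example
30 | ~~~~~~~~~~~~~~~
31 |
32 | A 4 control point (cubic) Bezier curve can be evaluated directly from its
33 | Bernstein form. The sketch below is only an illustration, not the
34 | repository's ``calc_4points_bezier_path``; the control points are arbitrary
35 | values chosen for this example.
36 |
37 | .. code-block:: python
38 |
39 |     import numpy as np
40 |
41 |     def cubic_bezier(t, p0, p1, p2, p3):
42 |         """Point on the cubic Bezier curve for parameter t in [0, 1]."""
43 |         p0, p1, p2, p3 = map(np.asarray, (p0, p1, p2, p3))
44 |         return ((1 - t) ** 3 * p0 + 3 * (1 - t) ** 2 * t * p1
45 |                 + 3 * (1 - t) * t ** 2 * p2 + t ** 3 * p3)
46 |
47 |     control_points = ([0.0, 0.0], [1.0, 2.0], [3.0, 2.0], [4.0, 0.0])
48 |     path = np.array([cubic_bezier(t, *control_points)
49 |                      for t in np.linspace(0.0, 1.0, 50)])
50 |     print(path.shape)  # (50, 2)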
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bspline_path/approx_and_curvature.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bspline_path/approx_and_curvature.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bspline_path/approximation1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bspline_path/approximation1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bspline_path/basis_functions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bspline_path/basis_functions.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bspline_path/bspline_path_planning.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bspline_path/bspline_path_planning.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bspline_path/interp_and_curvature.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bspline_path/interp_and_curvature.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bspline_path/interpolation1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/bspline_path/interpolation1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/bugplanner/bugplanner_main.rst:
--------------------------------------------------------------------------------
1 | Bug planner
2 | -----------
3 |
4 | This is 2D path planning with the Bug algorithm.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/BugPlanner/animation.gif
7 |
8 | Code Link
9 | ~~~~~~~~~~~~~~~
10 |
11 | .. autofunction:: PathPlanning.BugPlanning.bug.main
12 |
13 | Reference
14 | ~~~~~~~~~~~~
15 |
16 | - `ECE452 Bug Algorithms `_
17 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/catmull_rom_spline/blending_functions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/catmull_rom_spline/blending_functions.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/catmull_rom_spline/catmull_rom_path_planning.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/catmull_rom_spline/catmull_rom_path_planning.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/coverage_path/coverage_path_main.rst:
--------------------------------------------------------------------------------
1 | Coverage path planner
2 | ---------------------
3 |
4 | Grid based sweep
5 | ~~~~~~~~~~~~~~~~
6 |
7 | This is a 2D grid based sweep coverage path planner simulation:
8 |
9 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/GridBasedSweepCPP/animation.gif
10 |
11 | Code Link
12 | +++++++++++++
13 |
14 | .. autofunction:: PathPlanning.GridBasedSweepCPP.grid_based_sweep_coverage_path_planner.planning
15 |
16 | Spiral Spanning Tree
17 | ~~~~~~~~~~~~~~~~~~~~
18 |
19 | This is a 2D grid based spiral spanning tree coverage path planner simulation:
20 |
21 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/SpiralSpanningTreeCPP/animation1.gif
22 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/SpiralSpanningTreeCPP/animation2.gif
23 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/SpiralSpanningTreeCPP/animation3.gif
24 |
25 | Code Link
26 | +++++++++++++
27 |
28 | .. autofunction:: PathPlanning.SpiralSpanningTreeCPP.spiral_spanning_tree_coverage_path_planner.main
29 |
30 | Reference
31 | +++++++++++++
32 |
33 | - `Spiral-STC: An On-Line Coverage Algorithm of Grid Environments by a Mobile Robot `_
34 |
35 |
36 | Wavefront path
37 | ~~~~~~~~~~~~~~
38 |
39 | This is a 2D grid based wavefront coverage path planner simulation:
40 |
41 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/WavefrontCPP/animation1.gif
42 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/WavefrontCPP/animation2.gif
43 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/WavefrontCPP/animation3.gif
44 |
45 | Code Link
46 | +++++++++++++
47 |
48 | .. autofunction:: PathPlanning.WavefrontCPP.wavefront_coverage_path_planner.wavefront
49 |
50 | Reference
51 | +++++++++++++
52 |
53 | - `Planning paths of complete coverage of an unstructured environment by a mobile robot `_
54 |
55 |
56 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/cubic_spline/cubic_spline_1d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/cubic_spline/cubic_spline_1d.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/cubic_spline/cubic_spline_2d_curvature.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/cubic_spline/cubic_spline_2d_curvature.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/cubic_spline/cubic_spline_2d_path.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/cubic_spline/cubic_spline_2d_path.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/cubic_spline/cubic_spline_2d_yaw.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/cubic_spline/cubic_spline_2d_yaw.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/cubic_spline/spline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/cubic_spline/spline.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/cubic_spline/spline_continuity.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/cubic_spline/spline_continuity.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/dubins_path/RLR.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/dubins_path/RLR.jpg
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/dubins_path/RSR.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/dubins_path/RSR.jpg
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/dubins_path/dubins_path.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/dubins_path/dubins_path.jpg
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/dynamic_window_approach/dynamic_window_approach_main.rst:
--------------------------------------------------------------------------------
1 | .. _dynamic_window_approach:
2 |
3 | Dynamic Window Approach
4 | -----------------------
5 |
6 | This is a 2D navigation sample code with Dynamic Window Approach.
7 |
8 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/DynamicWindowApproach/animation.gif
9 |
10 | Code Link
11 | +++++++++++++
12 |
13 | .. autofunction:: PathPlanning.DynamicWindowApproach.dynamic_window_approach.dwa_control
14 |
15 |
16 | Reference
17 | ~~~~~~~~~~~~
18 |
19 | - `The Dynamic Window Approach to Collision
20 | Avoidance `__
21 |
22 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/elastic_bands/elastic_bands_main.rst:
--------------------------------------------------------------------------------
1 | Elastic Bands
2 | -------------
3 |
4 | This is path planning with Elastic Bands.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/ElasticBands/animation.gif
7 |
8 | Code Link
9 | +++++++++++++
10 |
11 | .. autoclass:: PathPlanning.ElasticBands.elastic_bands.ElasticBands
12 |
13 |
14 | Core Concept
15 | ~~~~~~~~~~~~
16 | - **Elastic Band**: A dynamically deformable collision-free path initialized by a global planner.
17 | - **Objective**:
18 |
19 | * Shorten and smooth the path.
20 | * Maximize obstacle clearance.
21 | * Maintain global path connectivity.
22 |
23 | Bubble Representation
24 | ~~~~~~~~~~~~~~~~~~~~~~~~
25 | - **Definition**: A local free-space region around configuration :math:`b`:
26 |
27 | .. math::
28 | B(b) = \{ q: \|q - b\| < \rho(b) \},
29 |
30 | where :math:`\rho(b)` is the radius of the bubble.
31 |
32 |
33 | Force-Based Deformation
34 | ~~~~~~~~~~~~~~~~~~~~~~~
35 | The elastic band deforms under artificial forces:
36 |
37 | Internal Contraction Force
38 | ++++++++++++++++++++++++++
39 | - **Purpose**: Reduces path slack and length.
40 | - **Formula**: For node :math:`b_i`:
41 |
42 | .. math::
43 | f_c(b_i) = k_c \left( \frac{b_{i-1} - b_i}{\|b_{i-1} - b_i\|} + \frac{b_{i+1} - b_i}{\|b_{i+1} - b_i\|} \right)
44 |
45 | where :math:`k_c` is the contraction gain.
46 |
47 | External Repulsion Force
48 | +++++++++++++++++++++++++
49 | - **Purpose**: Pushes the path away from obstacles.
50 | - **Formula**: For node :math:`b_i`:
51 |
52 | .. math::
53 | f_r(b_i) = \begin{cases}
54 | k_r (\rho_0 - \rho(b_i)) \nabla \rho(b_i) & \text{if } \rho(b_i) < \rho_0, \\
55 | 0 & \text{otherwise}.
56 | \end{cases}
57 |
58 | where :math:`k_r` is the repulsion gain, :math:`\rho_0` is the maximum distance for applying repulsion force, and :math:`\nabla \rho(b_i)` is approximated via finite differences:
59 |
60 | .. math::
61 | \frac{\partial \rho}{\partial x} \approx \frac{\rho(b_i + h) - \rho(b_i - h)}{2h}.
62 |
63 | Dynamic Path Maintenance
64 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
65 | 1. **Node Update**:
66 |
67 | .. math::
68 | b_i^{\text{new}} = b_i^{\text{old}} + \alpha (f_c + f_r),
69 |
70 |    where :math:`\alpha` is a step-size parameter, which is often proportional to :math:`\rho(b_i^{\text{old}})`.
71 |
72 | 2. **Overlap Enforcement**:
73 | - Insert new nodes if adjacent nodes are too far apart
74 | - Remove redundant nodes if adjacent nodes are too close
75 |
76 | References
77 | +++++++++++++
78 |
79 | - `Elastic Bands: Connecting Path Planning and Control `__
80 |
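81 | Example
82 | ~~~~~~~~~
83 |
84 | The contraction force and node update described above can be sketched as
85 | follows. Obstacle distances, and therefore the repulsion force, are omitted
86 | for brevity, so this is only an illustration of the update rule, not the
87 | ``ElasticBands`` class; the gain and step size are arbitrary values chosen
88 | for this example.
89 |
90 | .. code-block:: python
91 |
92 |     import numpy as np
93 |
94 |     def contraction_force(b_prev, b_i, b_next, k_c=1.0):
95 |         """Internal contraction force f_c(b_i) pulling a node toward its neighbors."""
96 |         d_prev = b_prev - b_i
97 |         d_next = b_next - b_i
98 |         return k_c * (d_prev / np.linalg.norm(d_prev)
99 |                       + d_next / np.linalg.norm(d_next))
100 |
101 |     # one smoothing pass over the interior nodes (start and goal stay fixed)
102 |     band = np.array([[0.0, 0.0], [1.0, 1.5], [2.0, 0.5], [3.0, 0.0]])
103 |     alpha = 0.2
104 |     for i in range(1, len(band) - 1):
105 |         band[i] += alpha * contraction_force(band[i - 1], band[i], band[i + 1])
106 |     print(band)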
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/eta3_spline/eta3_spline_main.rst:
--------------------------------------------------------------------------------
1 | .. _eta^3-spline-path-planning:
2 |
3 | Eta^3 Spline path planning
4 | --------------------------
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/Eta3SplinePath/animation.gif
7 |
8 | This is path planning with an Eta^3 spline.
9 |
10 | Code Link
11 | ~~~~~~~~~~~~~~~
12 |
13 | .. autoclass:: PathPlanning.Eta3SplineTrajectory.eta3_spline_trajectory.Eta3SplineTrajectory
14 |
15 |
16 | Reference
17 | ~~~~~~~~~~~~~~~
18 |
19 | - `\\eta^3-Splines for the Smooth Path Generation of Wheeled Mobile
20 | Robots `__
21 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/frenet_frame_path/frenet_frame_path_main.rst:
--------------------------------------------------------------------------------
1 | Optimal Trajectory in a Frenet Frame
2 | ------------------------------------
3 |
4 | This is optimal trajectory generation in a Frenet Frame.
5 |
6 | The cyan line is the target course and black crosses are obstacles.
7 |
8 | The red line is the predicted path.
9 |
10 | Code Link
11 | ~~~~~~~~~~~~~~
12 |
13 | .. autofunction:: PathPlanning.FrenetOptimalTrajectory.frenet_optimal_trajectory.main
14 |
15 |
16 | High Speed and Velocity Keeping Scenario
17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
18 |
19 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/FrenetOptimalTrajectory/high_speed_and_velocity_keeping_frenet_path.gif
20 |
21 | This scenario shows how the trajectory is maintained at high speeds while keeping a consistent velocity.
22 |
23 | High Speed and Merging and Stopping Scenario
24 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25 |
26 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/FrenetOptimalTrajectory/high_speed_and_merging_and_stopping_frenet_path.gif
27 |
28 | This scenario demonstrates the trajectory planning at high speeds with merging and stopping behaviors.
29 |
30 | Low Speed and Velocity Keeping Scenario
31 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
32 |
33 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/FrenetOptimalTrajectory/low_speed_and_velocity_keeping_frenet_path.gif
34 |
35 | This scenario demonstrates how the trajectory is managed at low speeds while maintaining a steady velocity.
36 |
37 | Low Speed and Merging and Stopping Scenario
38 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
39 |
40 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/FrenetOptimalTrajectory/low_speed_and_merging_and_stopping_frenet_path.gif
41 |
42 | This scenario illustrates the trajectory planning at low speeds with merging and stopping behaviors.
43 |
44 | Reference
45 | ~~~~~~~~~~~
46 | - `Optimal Trajectory Generation for Dynamic Street Scenarios in a
47 | Frenet
48 | Frame `__
49 |
50 | - `Optimal trajectory generation for dynamic street scenarios in a
51 | Frenet Frame `__
52 |
53 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/hybridastar/hybridastar_main.rst:
--------------------------------------------------------------------------------
1 | Hybrid A\*
2 | ---------------------
3 |
4 | This is a simple vehicle model based hybrid A\* path planner.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/HybridAStar/animation.gif
7 |
8 | Code Link
9 | +++++++++++++
10 |
11 | .. autofunction:: PathPlanning.HybridAStar.hybrid_a_star.hybrid_a_star_planning
12 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/lqr_path/lqr_path_main.rst:
--------------------------------------------------------------------------------
1 | LQR based path planning
2 | -----------------------
3 |
4 | This is sample code for LQR based path planning with a double integrator model.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/LQRPlanner/animation.gif?raw=true
7 |
8 | Code Link
9 | +++++++++++++
10 |
11 | .. autoclass:: PathPlanning.LQRPlanner.lqr_planner.LQRPlanner
12 |
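13 | Example
14 | +++++++++++++
15 |
16 | As a rough sketch of the underlying idea, the discrete-time LQR gain for a 2D
17 | double integrator can be computed with SciPy as shown below. This only
18 | illustrates how the feedback gain is obtained; it is not the ``LQRPlanner``
19 | class, and the weights and time step are arbitrary values for this example.
20 |
21 | .. code-block:: python
22 |
23 |     import numpy as np
24 |     import scipy.linalg
25 |
26 |     dt = 0.1
27 |     # double integrator: state [x, y, vx, vy], input [ax, ay]
28 |     A = np.array([[1.0, 0.0, dt, 0.0],
29 |                   [0.0, 1.0, 0.0, dt],
30 |                   [0.0, 0.0, 1.0, 0.0],
31 |                   [0.0, 0.0, 0.0, 1.0]])
32 |     B = np.array([[0.0, 0.0],
33 |                   [0.0, 0.0],
34 |                   [dt, 0.0],
35 |                   [0.0, dt]])
36 |     Q = np.eye(4)
37 |     R = np.eye(2)
38 |
39 |     # solve the discrete-time algebraic Riccati equation and form the LQR gain
40 |     P = scipy.linalg.solve_discrete_are(A, B, Q, R)
41 |     K = np.linalg.inv(B.T @ P @ B + R) @ (B.T @ P @ A)
42 |     u = -K @ np.array([5.0, 5.0, 0.0, 0.0])  # feedback that drives the state to the origin
43 |     print(u)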
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/model_predictive_trajectory_generator/lookup_table.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/model_predictive_trajectory_generator/lookup_table.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/model_predictive_trajectory_generator/model_predictive_trajectory_generator_main.rst:
--------------------------------------------------------------------------------
1 | Model Predictive Trajectory Generator
2 | -------------------------------------
3 |
4 | This is a path optimization sample using the model predictive trajectory
5 | generator.
6 |
7 | This algorithm is used for the state lattice planner.
8 |
9 | Code Link
10 | ~~~~~~~~~~~~~
11 |
12 | .. autofunction:: PathPlanning.ModelPredictiveTrajectoryGenerator.trajectory_generator.optimize_trajectory
13 |
14 |
15 | Path optimization sample
16 | ~~~~~~~~~~~~~~~~~~~~~~~~
17 |
18 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/ModelPredictiveTrajectoryGenerator/kn05animation.gif
19 |
20 | Lookup table generation sample
21 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
22 |
23 | .. image:: lookup_table.png
24 |
25 | Reference
26 | ~~~~~~~~~~~~
27 |
28 | - `Optimal rough terrain trajectory generation for wheeled mobile
29 | robots `__
30 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/path_planning_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Path Planning`:
2 |
3 | Path Planning
4 | =============
5 |
6 | Path planning is the ability of a robot to search for a feasible and efficient path to the goal. The path has to satisfy some constraints based on the robot’s motion model and obstacle positions, and optimize some objective function such as time to goal and distance to obstacles. In path planning, dynamic programming based approaches and sampling based approaches are widely used [22]. Simulation results of potential field path planning and LQR-RRT* path planning [27] are shown in the sections below.
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 | :caption: Contents
11 |
12 | dynamic_window_approach/dynamic_window_approach
13 | bugplanner/bugplanner
14 | grid_base_search/grid_base_search
15 | time_based_grid_search/time_based_grid_search
16 | model_predictive_trajectory_generator/model_predictive_trajectory_generator
17 | state_lattice_planner/state_lattice_planner
18 | prm_planner/prm_planner
19 | visibility_road_map_planner/visibility_road_map_planner
20 | vrm_planner/vrm_planner
21 | rrt/rrt
22 | cubic_spline/cubic_spline
23 | bspline_path/bspline_path
24 | catmull_rom_spline/catmull_rom_spline
25 | clothoid_path/clothoid_path
26 | eta3_spline/eta3_spline
27 | bezier_path/bezier_path
28 | quintic_polynomials_planner/quintic_polynomials_planner
29 | dubins_path/dubins_path
30 | reeds_shepp_path/reeds_shepp_path
31 | lqr_path/lqr_path
32 | hybridastar/hybridastar
33 | frenet_frame_path/frenet_frame_path
34 | coverage_path/coverage_path
35 | elastic_bands/elastic_bands
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/prm_planner/prm_planner_main.rst:
--------------------------------------------------------------------------------
1 | .. _probabilistic-road-map-(prm)-planning:
2 |
3 | Probabilistic Road-Map (PRM) planning
4 | -------------------------------------
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/ProbabilisticRoadMap/animation.gif
7 |
8 | This PRM planner uses the Dijkstra method for graph search.
9 |
10 | In the animation, the blue points are sampled points,
11 |
12 | the cyan crosses are points searched with the Dijkstra method,
13 |
14 | and the red line is the final path of PRM.
15 |
16 | Code Link
17 | ~~~~~~~~~~~~~~~
18 |
19 | .. autofunction:: PathPlanning.ProbabilisticRoadMap.probabilistic_road_map.prm_planning
20 |
21 |
22 | Reference
23 | ~~~~~~~~~~~
24 |
25 | - `Probabilistic roadmap -
26 | Wikipedia `__
27 |
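28 | Example
29 | ~~~~~~~~~~~
30 |
31 | The roadmap construction part of PRM can be sketched as sampling random points
32 | and connecting each one to its nearest neighbors. The sketch below only builds
33 | candidate edges (no collision check and no Dijkstra search), and the sample
34 | count and neighbor count are arbitrary values chosen for this example; it is
35 | not the repository's ``prm_planning`` function.
36 |
37 | .. code-block:: python
38 |
39 |     import numpy as np
40 |     from scipy.spatial import cKDTree
41 |
42 |     rng = np.random.default_rng(0)
43 |     samples = rng.uniform(0.0, 60.0, size=(200, 2))  # random free-space samples
44 |
45 |     tree = cKDTree(samples)
46 |     edges = []
47 |     for i, p in enumerate(samples):
48 |         # connect each sample to its 5 nearest neighbors (index 0 is the point itself)
49 |         _, neighbor_ids = tree.query(p, k=6)
50 |         edges.extend((i, j) for j in neighbor_ids[1:])
51 |     print(len(edges))  # 200 * 5 candidate roadmap edges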
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/LR_L.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/LR_L.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/LR_LR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/LR_LR.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/LSL.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/LSL.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/LSL90xR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/LSL90xR.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/LSR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/LSR.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/LSR90_L.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/LSR90_L.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/L_R90SL.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/L_R90SL.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/L_R90SL90_R.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/L_R90SL90_R.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/L_R90SR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/L_R90SR.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/L_RL.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/L_RL.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/L_RL_R.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/L_RL_R.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/reeds_shepp_path/L_R_L.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/reeds_shepp_path/L_R_L.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_3.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_4.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/closed_loop_rrt_star_car/Figure_5.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/figure_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/figure_1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/rrt_star.rst:
--------------------------------------------------------------------------------
1 | RRT\*
2 | ~~~~~
3 |
4 | .. figure:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/RRTstar/animation.gif
5 |
6 | This is a path planning code with RRT\*.
7 |
8 | Black circles are obstacles, the green lines form the searched tree, and the red crosses are the start and goal positions.
9 |
10 | Code Link
11 | ^^^^^^^^^^
12 |
13 | .. autoclass:: PathPlanning.RRTStar.rrt_star.RRTStar
14 |
15 |
16 | Simulation
17 | ^^^^^^^^^^
18 |
19 | .. image:: rrt_star/rrt_star_1_0.png
20 | :width: 600px
21 |
22 |
23 | References
24 | ^^^^^^^^^^
25 | - `Sampling-based Algorithms for Optimal Motion Planning `__
26 | - `Incremental Sampling-based Algorithms for Optimal Motion Planning `__
27 |
28 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/rrt_star/rrt_star_1_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/rrt_star/rrt_star_1_0.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/rrt/rrt_star_reeds_shepp/figure_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/rrt/rrt_star_reeds_shepp/figure_1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/Figure_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/state_lattice_planner/Figure_1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/Figure_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/state_lattice_planner/Figure_2.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/Figure_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/state_lattice_planner/Figure_3.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/Figure_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/state_lattice_planner/Figure_4.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/Figure_5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/state_lattice_planner/Figure_5.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/Figure_6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/state_lattice_planner/Figure_6.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/state_lattice_planner/state_lattice_planner_main.rst:
--------------------------------------------------------------------------------
1 | State Lattice Planning
2 | ----------------------
3 |
4 | This script is a path planning code with state lattice planning.
5 |
6 | This code uses the model predictive trajectory generator to solve the
7 | boundary value problem.
8 |
9 |
10 | Uniform polar sampling
11 | ~~~~~~~~~~~~~~~~~~~~~~
12 |
13 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/StateLatticePlanner/UniformPolarSampling.gif
14 |
15 | Code Link
16 | ^^^^^^^^^^^^^
17 |
18 | .. autofunction:: PathPlanning.StateLatticePlanner.state_lattice_planner.calc_uniform_polar_states
19 |
20 |
21 | Biased polar sampling
22 | ~~~~~~~~~~~~~~~~~~~~~
23 |
24 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/StateLatticePlanner/BiasedPolarSampling.gif
25 |
26 | Code Link
27 | ^^^^^^^^^^^^^
28 | .. autofunction:: PathPlanning.StateLatticePlanner.state_lattice_planner.calc_biased_polar_states
29 |
30 |
31 | Lane sampling
32 | ~~~~~~~~~~~~~
33 |
34 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/StateLatticePlanner/LaneSampling.gif
35 |
36 | Code Link
37 | ^^^^^^^^^^^^^
38 | .. autofunction:: PathPlanning.StateLatticePlanner.state_lattice_planner.calc_lane_states
39 |
40 |
41 | Reference
42 | ~~~~~~~~~~~~~~~
43 |
44 | - `Optimal rough terrain trajectory generation for wheeled mobile
45 | robots `__
46 |
47 | - `State Space Sampling of Feasible Motions for High-Performance Mobile
48 | Robot Navigation in Complex
49 | Environments `__
50 |
51 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/visibility_road_map_planner/step0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/visibility_road_map_planner/step0.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/visibility_road_map_planner/step1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/visibility_road_map_planner/step1.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/visibility_road_map_planner/step2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/visibility_road_map_planner/step2.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/visibility_road_map_planner/step3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/5_path_planning/visibility_road_map_planner/step3.png
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/visibility_road_map_planner/visibility_road_map_planner_main.rst:
--------------------------------------------------------------------------------
1 | Visibility Road-Map planner
2 | ---------------------------
3 |
4 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/VisibilityRoadMap/animation.gif
5 |
6 | `[Code] `_
7 |
8 | This visibility road-map planner uses the Dijkstra method for graph search.
9 |
10 | In the animation, the black lines are polygon obstacles,
11 |
12 | the red crosses are visibility nodes, and the blue lines are the collision-free visibility graph edges.
13 |
14 | The red line is the final path searched by the Dijkstra algorithm from the visibility graph.
15 |
16 | Code Link
17 | ~~~~~~~~~~~~
18 | .. autoclass:: PathPlanning.VisibilityRoadMap.visibility_road_map.VisibilityRoadMap
19 |
20 |
21 | Algorithms
22 | ~~~~~~~~~~
23 |
24 | This chapter describes how the visibility road-map planner searches for a path.
25 |
26 | We assume the planner is provided with the information shown in the figure below:
27 |
28 | - 1. Start point (Red point)
29 | - 2. Goal point (Blue point)
30 | - 3. Obstacle polygons (Black lines)
31 |
32 | .. image:: step0.png
33 | :width: 400px
34 |
35 |
36 | Step1: Generate visibility nodes based on polygon obstacles
37 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
38 |
39 | The nodes are generated by expanding the polygon vertices outward, as in the figure below:
40 |
41 | .. image:: step1.png
42 | :width: 400px
43 |
44 | Each polygon vertex is expanded outward along the direction computed from its adjacent vertices.
45 |
46 | The start and goal points are included as nodes as well.
47 |
48 | Step2: Generate visibility graphs connecting the nodes.
49 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
50 |
51 | When connecting two nodes, the edge between them is checked for collision against each obstacle.
52 |
53 | If the edge collides with an obstacle, it is removed from the graph.
54 |
55 | The blue lines in the figure are the generated visibility graph edges:
56 |
57 | .. image:: step2.png
58 | :width: 400px
59 |
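A minimal sketch of that collision test (illustrative names, not the planner's API): an edge is kept only if it does not properly cross any obstacle edge, using the standard orientation-based segment intersection check.

.. code-block:: python

    def _ccw(a, b, c):
        # Twice the signed area of triangle (a, b, c); its sign gives the turn direction.
        return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0])

    def segments_intersect(p1, p2, q1, q2):
        """True if segment p1-p2 properly crosses q1-q2 (touching/collinear cases ignored)."""
        d1, d2 = _ccw(q1, q2, p1), _ccw(q1, q2, p2)
        d3, d4 = _ccw(p1, p2, q1), _ccw(p1, p2, q2)
        return d1 * d2 < 0 and d3 * d4 < 0

    def edge_is_visible(n1, n2, obstacle_edges):
        """Keep the visibility edge only if it crosses no obstacle edge."""
        return not any(segments_intersect(n1, n2, a, b) for a, b in obstacle_edges)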
60 |
61 | Step3: Search the shortest path in the graphs using Dijkstra algorithm
62 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
63 |
64 | The red line in the figure is the searched path:
65 |
66 | .. image:: step3.png
67 | :width: 400px
68 |
69 | You can find the details of the Dijkstra algorithm in :ref:`dijkstra`.
70 |
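For reference, a compact Dijkstra over such a visibility graph could look like the sketch below (the adjacency-dict representation and names are assumptions, not the repository API):

.. code-block:: python

    import heapq

    def dijkstra(graph, start, goal):
        """Shortest path on {node: [(neighbour, edge_length), ...]}.

        Nodes are assumed hashable and comparable (e.g. coordinate tuples).
        """
        queue = [(0.0, start, [start])]
        visited = set()
        while queue:
            cost, node, path = heapq.heappop(queue)
            if node == goal:
                return cost, path
            if node in visited:
                continue
            visited.add(node)
            for neighbour, edge_length in graph.get(node, []):
                if neighbour not in visited:
                    heapq.heappush(queue, (cost + edge_length, neighbour, path + [neighbour]))
        return float("inf"), []
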
71 | References
72 | ~~~~~~~~~~~~
73 |
74 | - `Visibility graph - Wikipedia `_
75 |
76 |
77 |
--------------------------------------------------------------------------------
/docs/modules/5_path_planning/vrm_planner/vrm_planner_main.rst:
--------------------------------------------------------------------------------
1 | Voronoi Road-Map planning
2 | -------------------------
3 |
4 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathPlanning/VoronoiRoadMap/animation.gif
5 |
6 | This Voronoi road-map planner uses the Dijkstra method for graph search.
7 |
8 | In the animation, the blue points are Voronoi points,
9 |
10 | the cyan crosses are the points searched with the Dijkstra method,
11 |
12 | and the red line is the final path of the Voronoi road-map.
13 |
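As a rough sketch of where the roadmap nodes come from (assuming SciPy is available; this is not the planner's exact API), the Voronoi vertices of sampled obstacle points can be computed directly and used as candidate collision-free nodes:

.. code-block:: python

    import numpy as np
    from scipy.spatial import Voronoi

    # Illustrative obstacle points sampled along two walls.
    obstacle_xy = np.array([[x, 0.0] for x in range(10)] +
                           [[x, 10.0] for x in range(10)], dtype=float)

    vor = Voronoi(obstacle_xy)
    # Voronoi vertices lie between the obstacle points, so they are
    # natural candidates for roadmap nodes.
    roadmap_nodes = vor.vertices
    print(roadmap_nodes.shape)
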
14 | Code Link
15 | ~~~~~~~~~~~~~~~
16 | .. autoclass:: PathPlanning.VoronoiRoadMap.voronoi_road_map.VoronoiRoadMapPlanner
17 |
18 |
19 | Reference
20 | ~~~~~~~~~~~~
21 |
22 | - `Robotic Motion Planning `__
23 |
24 |
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_1_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_1_0.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_2_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_2_0.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_3_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_3_0.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_4_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_4_0.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/cgmres_nmpc/cgmres_nmpc_main.rst:
--------------------------------------------------------------------------------
1 |
2 | Nonlinear Model Predictive Control with C-GMRES
3 | -----------------------------------------------
4 |
5 | .. image:: cgmres_nmpc_1_0.png
6 | :width: 600px
7 |
8 | .. image:: cgmres_nmpc_2_0.png
9 | :width: 600px
10 |
11 | .. image:: cgmres_nmpc_3_0.png
12 | :width: 600px
13 |
14 | .. image:: cgmres_nmpc_4_0.png
15 | :width: 600px
16 |
17 | .. figure:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathTracking/cgmres_nmpc/animation.gif
18 | :alt: gif
19 |
20 | Code Link
21 | ~~~~~~~~~~~~
22 |
23 | .. autofunction:: PathTracking.cgmres_nmpc.cgmres_nmpc.NMPCControllerCGMRES
24 |
25 | Mathematical Formulation
26 | ~~~~~~~~~~~~~~~~~~~~~~~~
27 |
28 | The motion model is
29 |
30 | .. math:: \dot{x}=v\cos\theta
31 |
32 | .. math:: \dot{y}=v\sin\theta
33 |
34 | .. math:: \dot{\theta}=\frac{v}{WB}\sin(u_{\delta})
35 |
36 | (:math:`\sin` is used instead of :math:`\tan` because :math:`\tan` is not good for optimization)
37 |
38 | .. math:: \dot{v}=u_a
39 |
40 | The cost function is
41 |
42 | .. math:: J=\frac{1}{2}(u_a^2+u_{\delta}^2)-\phi_a d_a-\phi_\delta d_\delta
43 |
44 | The input constraints are
45 |
46 | .. math:: |u_a| \leq u_{a,max}
47 |
48 | .. math:: |u_\delta| \leq u_{\delta,max}
49 |
50 | So the Hamiltonian is
51 |
52 | .. math::
53 |
54 | H=\frac{1}{2}(u_a^2+u_{\delta}^2)-\phi_a d_a-\phi_\delta d_\delta\\ +\lambda_1v\cos\theta+\lambda_2v\sin\theta+\lambda_3\frac{v}{WB}\sin(u_{\delta})+\lambda_4u_a\\
55 | +\rho_1(u_a^2+d_a^2-u_{a,max}^2)+\rho_2(u_\delta^2+d_\delta^2-u_{\delta,max}^2)
56 |
57 | The partial derivatives of the Hamiltonian are:
58 |
59 | :math:`\begin{equation*} \frac{\partial H}{\partial \bf{x}}=\\ \begin{bmatrix} \frac{\partial H}{\partial x}= 0&\\ \frac{\partial H}{\partial y}= 0&\\ \frac{\partial H}{\partial \theta}= -\lambda_1v\sin\theta+\lambda_2v\cos\theta&\\ \frac{\partial H}{\partial v}=\lambda_1\cos\theta+\lambda_2\sin\theta+\lambda_3\frac{1}{WB}\sin(u_{\delta})&\\ \end{bmatrix} \\ \end{equation*}`
60 |
61 | :math:`\begin{equation*} \frac{\partial H}{\partial \bf{u}}=\\ \begin{bmatrix} \frac{\partial H}{\partial u_a}= u_a+\lambda_4+2\rho_1u_a&\\ \frac{\partial H}{\partial u_\delta}= u_\delta+\lambda_3\frac{v}{WB}\cos(u_{\delta})+2\rho_2u_\delta&\\ \frac{\partial H}{\partial d_a}= -\phi_a+2\rho_1d_a&\\ \frac{\partial H}{\partial d_\delta}=-\phi_\delta+2\rho_2d_\delta&\\ \end{bmatrix} \\ \end{equation*}`
62 |
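A small numeric sketch of these expressions (the variable names, costate ordering, and wheelbase value are illustrative assumptions, not the repository implementation):

.. code-block:: python

    import numpy as np

    WB = 2.5  # wheelbase [m], illustrative value

    def motion_model(x, y, theta, v, u_a, u_delta):
        """Right-hand side of the kinematic model above: (x_dot, y_dot, theta_dot, v_dot)."""
        return (v * np.cos(theta),
                v * np.sin(theta),
                v / WB * np.sin(u_delta),
                u_a)

    def dHdu(v, u_a, u_delta, d_a, d_delta, lam, phi_a, phi_delta, rho1, rho2):
        """Partials of the Hamiltonian w.r.t. (u_a, u_delta, d_a, d_delta), as written above."""
        return np.array([
            u_a + lam[3] + 2.0 * rho1 * u_a,
            u_delta + lam[2] * v / WB * np.cos(u_delta) + 2.0 * rho2 * u_delta,
            -phi_a + 2.0 * rho1 * d_a,
            -phi_delta + 2.0 * rho2 * d_delta,
        ])

    print(dHdu(1.0, 0.1, 0.05, 0.2, 0.2, [0.0, 0.0, 0.1, 0.1], 0.01, 0.01, 1.0, 1.0))
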
63 | Ref
64 | ~~~
65 |
66 | - `Shunichi09/nonlinear_control: Implementing the nonlinear model
67 | predictive control, sliding mode
68 | control `__
69 |
70 | - `非線形モデル予測制御におけるCGMRES法をpythonで実装する -
71 | Qiita `__
72 |
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/lqr_speed_and_steering_control/lqr_steering_control_model.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/lqr_speed_and_steering_control/lqr_steering_control_model.jpg
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/lqr_speed_and_steering_control/speed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/lqr_speed_and_steering_control/speed.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/lqr_speed_and_steering_control/x-y.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/lqr_speed_and_steering_control/x-y.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/lqr_speed_and_steering_control/yaw.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/lqr_speed_and_steering_control/yaw.png
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/lqr_steering_control/lqr_steering_control_model.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/6_path_tracking/lqr_steering_control/lqr_steering_control_model.jpg
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/path_tracking_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Path Tracking`:
2 |
3 | Path Tracking
4 | =============
5 |
6 | Path tracking is the ability of a robot to follow the reference path generated by a path planner while simultaneously stabilizing the robot. The path tracking controller may need to account for modeling error and other forms of uncertainty. In path tracking, feedback control techniques and optimization-based control techniques are widely used [22]. Fig.6 shows simulations using rear wheel feedback steering control with PID speed control, and iterative linear model predictive path tracking control [27].
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 | :caption: Contents
11 |
12 | pure_pursuit_tracking/pure_pursuit_tracking
13 | stanley_control/stanley_control
14 | rear_wheel_feedback_control/rear_wheel_feedback_control
15 | lqr_steering_control/lqr_steering_control
16 | lqr_speed_and_steering_control/lqr_speed_and_steering_control
17 | model_predictive_speed_and_steering_control/model_predictive_speed_and_steering_control
18 | cgmres_nmpc/cgmres_nmpc
19 | move_to_a_pose/move_to_a_pose
20 |
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/pure_pursuit_tracking/pure_pursuit_tracking_main.rst:
--------------------------------------------------------------------------------
1 | Pure pursuit tracking
2 | ---------------------
3 |
4 | Path tracking simulation with pure pursuit steering control and PID
5 | speed control.
6 |
7 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathTracking/pure_pursuit/animation.gif
8 |
9 | The red line is the target course, the green cross is the target point
10 | for pure pursuit control, and the blue line is the tracked trajectory.
11 |
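The core of pure pursuit is geometric: pick a look-ahead point on the course some distance ahead of the rear axle and steer so the vehicle's arc passes through it. A minimal sketch under a bicycle-model assumption (names and values are illustrative, not the module's exact API):

.. code-block:: python

    import math

    def pure_pursuit_steer(rear_x, rear_y, yaw, target_x, target_y, wheelbase):
        """Steering angle that puts the look-ahead (target) point on the vehicle's turning arc."""
        alpha = math.atan2(target_y - rear_y, target_x - rear_x) - yaw
        look_ahead = math.hypot(target_x - rear_x, target_y - rear_y)
        # Classic pure pursuit law: delta = atan(2 * WB * sin(alpha) / Ld)
        return math.atan2(2.0 * wheelbase * math.sin(alpha), look_ahead)

    delta = pure_pursuit_steer(0.0, 0.0, 0.0, 5.0, 1.0, wheelbase=2.9)
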
12 | Code Link
13 | ~~~~~~~~~~~~~~~
14 |
15 | .. autofunction:: PathTracking.pure_pursuit.pure_pursuit.pure_pursuit_steer_control
16 |
17 |
18 | Reference
19 | ~~~~~~~~~~~
20 |
21 | - `A Survey of Motion Planning and Control Techniques for Self-driving
22 | Urban Vehicles `_
23 |
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/rear_wheel_feedback_control/rear_wheel_feedback_control_main.rst:
--------------------------------------------------------------------------------
1 | Rear wheel feedback control
2 | ---------------------------
3 |
4 | Path tracking simulation with rear wheel feedback steering control and
5 | PID speed control.
6 |
7 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathTracking/rear_wheel_feedback/animation.gif
8 |
9 | Code Link
10 | ~~~~~~~~~~~~~~~
11 |
12 | .. autofunction:: PathTracking.rear_wheel_feedback_control.rear_wheel_feedback_control.rear_wheel_feedback_control
13 |
14 |
15 | Reference
16 | ~~~~~~~~~~~
17 | - `A Survey of Motion Planning and Control Techniques for Self-driving
18 | Urban Vehicles `__
19 |
--------------------------------------------------------------------------------
/docs/modules/6_path_tracking/stanley_control/stanley_control_main.rst:
--------------------------------------------------------------------------------
1 | Stanley control
2 | ---------------
3 |
4 | Path tracking simulation with Stanley steering control and PID speed
5 | control.
6 |
7 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/PathTracking/stanley_controller/animation.gif
8 |
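The Stanley law combines the heading error with the cross-track error measured at the front axle. A minimal sketch (the gain value and names are illustrative assumptions):

.. code-block:: python

    import math

    def stanley_steer(heading_error, cross_track_error, speed, k=0.5, eps=1e-6):
        """delta = heading error + atan2(k * e, v): the Stanley steering law."""
        return heading_error + math.atan2(k * cross_track_error, speed + eps)

    delta = stanley_steer(heading_error=0.05, cross_track_error=0.3, speed=5.0)
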
9 | Code Link
10 | ~~~~~~~~~~~~~~~
11 |
12 | .. autofunction:: PathTracking.stanley_control.stanley_control.stanley_control
13 |
14 |
15 | Reference
16 | ~~~~~~~~~~~
17 |
18 | - `Stanley: The robot that won the DARPA grand
19 | challenge `_
20 |
21 | - `Automatic Steering Methods for Autonomous Automobile Path
22 | Tracking `_
23 |
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_12_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_12_0.png
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_5_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_5_0.png
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_7_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_7_0.png
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_9_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/docs/modules/7_arm_navigation/Planar_Two_Link_IK_files/Planar_Two_Link_IK_9_0.png
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/arm_navigation_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Arm Navigation`:
2 |
3 | Arm Navigation
4 | ==============
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :caption: Contents
9 |
10 | planar_two_link_ik
11 | n_joint_arm_to_point_control
12 | obstacle_avoidance_arm_navigation
13 |
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/n_joint_arm_to_point_control_main.rst:
--------------------------------------------------------------------------------
1 | N joint arm to point control
2 | ----------------------------
3 |
4 | N joint arm to a point control simulation.
5 |
6 | This is an interactive simulation.
7 |
8 | You can set the goal position of the end effector with left-click on the
9 | plotting area.
10 |
11 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/ArmNavigation/n_joint_arm_to_point_control/animation.gif
12 |
13 | In this simulation N = 10, but you can change it.
14 |
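For intuition, the forward kinematics of such a planar N-joint arm is just a cumulative sum of joint angles (a sketch with unit link lengths, not the simulation's own classes):

.. code-block:: python

    import numpy as np

    def forward_kinematics(joint_angles, link_lengths):
        """End-effector (x, y) of a planar serial arm."""
        angles = np.cumsum(joint_angles)          # absolute orientation of each link
        x = float(np.sum(link_lengths * np.cos(angles)))
        y = float(np.sum(link_lengths * np.sin(angles)))
        return x, y

    N = 10
    print(forward_kinematics(np.full(N, 0.1), np.ones(N)))
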
15 |
16 | Code Link
17 | ~~~~~~~~~~~~~~~
18 |
19 | .. autofunction:: ArmNavigation.n_joint_arm_to_point_control.n_joint_arm_to_point_control.main
20 |
21 |
--------------------------------------------------------------------------------
/docs/modules/7_arm_navigation/obstacle_avoidance_arm_navigation_main.rst:
--------------------------------------------------------------------------------
1 | Arm navigation with obstacle avoidance
2 | --------------------------------------
3 |
4 | Arm navigation with obstacle avoidance simulation.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/ArmNavigation/arm_obstacle_navigation/animation.gif
7 |
8 | Code Link
9 | ~~~~~~~~~~~~~~~
10 |
11 | .. autofunction:: ArmNavigation.arm_obstacle_navigation.arm_obstacle_navigation.main
12 |
--------------------------------------------------------------------------------
/docs/modules/8_aerial_navigation/aerial_navigation_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Aerial Navigation`:
2 |
3 | Aerial Navigation
4 | =================
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :caption: Contents
9 |
10 | drone_3d_trajectory_following/drone_3d_trajectory_following
11 | rocket_powered_landing/rocket_powered_landing
12 |
13 |
--------------------------------------------------------------------------------
/docs/modules/8_aerial_navigation/drone_3d_trajectory_following/drone_3d_trajectory_following_main.rst:
--------------------------------------------------------------------------------
1 | Drone 3d trajectory following
2 | -----------------------------
3 |
4 | This is a 3d trajectory following simulation for a quadrotor.
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/AerialNavigation/drone_3d_trajectory_following/animation.gif
7 |
8 | Code Link
9 | ~~~~~~~~~~~~~~~
10 |
11 | .. autofunction:: AerialNavigation.drone_3d_trajectory_following.drone_3d_trajectory_following.quad_sim
12 |
--------------------------------------------------------------------------------
/docs/modules/9_bipedal/bipedal_main.rst:
--------------------------------------------------------------------------------
1 | .. _`Bipedal`:
2 |
3 | Bipedal
4 | =================
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :caption: Contents
9 |
10 | bipedal_planner/bipedal_planner
11 |
12 |
--------------------------------------------------------------------------------
/docs/modules/9_bipedal/bipedal_planner/bipedal_planner_main.rst:
--------------------------------------------------------------------------------
1 | Bipedal Planner
2 | -----------------------------
3 |
4 | Bipedal walking with modification of designated footsteps
5 |
6 | .. image:: https://github.com/AtsushiSakai/PythonRoboticsGifs/raw/master/Bipedal/bipedal_planner/animation.gif
7 |
8 | Code Link
9 | ~~~~~~~~~~~~~~~
10 |
11 | .. autofunction:: Bipedal.bipedal_planner.bipedal_planner.BipedalPlanner
12 |
--------------------------------------------------------------------------------
/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/icon.png
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | ignore_missing_imports = True
--------------------------------------------------------------------------------
/requirements/environment.yml:
--------------------------------------------------------------------------------
1 | name: python_robotics
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.13
6 | - pip
7 | - scipy
8 | - numpy
9 | - cvxpy
10 | - matplotlib
11 |
--------------------------------------------------------------------------------
/requirements/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy == 2.2.4
2 | scipy == 1.15.2
3 | matplotlib == 3.10.1
4 | cvxpy == 1.6.5
5 | ecos == 2.0.14
6 | pytest == 8.3.5 # For unit test
7 | pytest-xdist == 3.6.1 # For unit test
8 | mypy == 1.15.0 # For unit test
9 | ruff == 0.11.11 # For unit test
10 |
--------------------------------------------------------------------------------
/ruff.toml:
--------------------------------------------------------------------------------
1 | line-length = 88
2 |
3 | select = ["F", "E", "W", "UP"]
4 | ignore = ["E501", "E741", "E402"]
5 | exclude = [
6 | ]
7 |
8 | # Assume Python 3.13
9 | target-version = "py313"
10 |
11 | [per-file-ignores]
12 |
13 | [mccabe]
14 | # Unlike Flake8, default to a complexity level of 10.
15 | max-complexity = 10
16 |
17 | [pydocstyle]
18 | convention = "numpy"
19 |
--------------------------------------------------------------------------------
/runtests.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | cd "$(dirname "$0")" || exit 1
3 | echo "Run test suites! "
4 |
5 | # === pytest based test runner ===
6 | # -Werror: warning as error
7 | # --durations=0: show ranking of test durations
8 | # -l (--showlocals): show local variables when a test fails
9 | pytest tests -l -Werror --durations=0
10 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """Path hack to make tests work."""
2 | import sys
3 | import os
4 | import pytest
5 |
6 | TEST_DIR = os.path.dirname(os.path.abspath(__file__))
7 | sys.path.append(TEST_DIR) # to import this file from test code.
8 | ROOT_DIR = os.path.dirname(TEST_DIR)
9 | sys.path.append(ROOT_DIR)
10 |
11 |
12 | def run_this_test(file):
13 | pytest.main(args=["-W", "error", "-Werror", "--pythonwarnings=error", os.path.abspath(file)])
14 |
--------------------------------------------------------------------------------
/tests/test_LQR_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.LQRPlanner import lqr_planner as m
3 |
4 |
5 | def test_1():
6 | m.SHOW_ANIMATION = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_a_star.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.AStar import a_star as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_a_star_searching_two_side.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.AStar import a_star_searching_from_two_side as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main(800)
8 |
9 |
10 | def test2():
11 | m.show_animation = False
12 | m.main(5000) # increase obstacle number, block path
13 |
14 |
15 | if __name__ == '__main__':
16 | conftest.run_this_test(__file__)
17 |
--------------------------------------------------------------------------------
/tests/test_a_star_variants.py:
--------------------------------------------------------------------------------
1 | import PathPlanning.AStar.a_star_variants as a_star
2 | import conftest
3 |
4 |
5 | def test_1():
6 | # A* with beam search
7 | a_star.show_animation = False
8 |
9 | a_star.use_beam_search = True
10 | a_star.main()
11 | reset_all()
12 |
13 | # A* with iterative deepening
14 | a_star.use_iterative_deepening = True
15 | a_star.main()
16 | reset_all()
17 |
18 | # A* with dynamic weighting
19 | a_star.use_dynamic_weighting = True
20 | a_star.main()
21 | reset_all()
22 |
23 | # theta*
24 | a_star.use_theta_star = True
25 | a_star.main()
26 | reset_all()
27 |
28 | # A* with jump point
29 | a_star.use_jump_point = True
30 | a_star.main()
31 | reset_all()
32 |
33 |
34 | def reset_all():
35 | a_star.show_animation = False
36 | a_star.use_beam_search = False
37 | a_star.use_iterative_deepening = False
38 | a_star.use_dynamic_weighting = False
39 | a_star.use_theta_star = False
40 | a_star.use_jump_point = False
41 |
42 |
43 | if __name__ == '__main__':
44 | conftest.run_this_test(__file__)
45 |
--------------------------------------------------------------------------------
/tests/test_batch_informed_rrt_star.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 | import conftest
4 | from PathPlanning.BatchInformedRRTStar import batch_informed_rrt_star as m
5 |
6 |
7 | def test_1():
8 | m.show_animation = False
9 | random.seed(12345)
10 | m.main(maxIter=10)
11 |
12 |
13 | if __name__ == '__main__':
14 | conftest.run_this_test(__file__)
15 |
--------------------------------------------------------------------------------
/tests/test_bezier_path.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.BezierPath import bezier_path as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | def test_2():
11 | m.show_animation = False
12 | m.main2()
13 |
14 |
15 | if __name__ == '__main__':
16 | conftest.run_this_test(__file__)
17 |
--------------------------------------------------------------------------------
/tests/test_bipedal_planner.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Bipedal.bipedal_planner import bipedal_planner as m
3 |
4 |
5 | def test_1():
6 | bipedal_planner = m.BipedalPlanner()
7 |
8 | footsteps = [[0.0, 0.2, 0.0],
9 | [0.3, 0.2, 0.0],
10 | [0.3, 0.2, 0.2],
11 | [0.3, 0.2, 0.2],
12 | [0.0, 0.2, 0.2]]
13 | bipedal_planner.set_ref_footsteps(footsteps)
14 | bipedal_planner.walk(plot=False)
15 |
16 |
17 | if __name__ == '__main__':
18 | conftest.run_this_test(__file__)
19 |
--------------------------------------------------------------------------------
/tests/test_breadth_first_search.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.BreadthFirstSearch import breadth_first_search as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_bug.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.BugPlanning import bug as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main(bug_0=True, bug_1=True, bug_2=True)
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_catmull_rom_spline.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.Catmull_RomSplinePath.catmull_rom_spline_path import catmull_rom_spline
3 |
4 | def test_catmull_rom_spline():
5 | way_points = [[0, 0], [1, 2], [2, 0], [3, 3]]
6 | num_points = 100
7 |
8 | spline_x, spline_y = catmull_rom_spline(way_points, num_points)
9 |
10 | assert spline_x.size > 0, "Spline X coordinates should not be empty"
11 | assert spline_y.size > 0, "Spline Y coordinates should not be empty"
12 |
13 | assert spline_x.shape == spline_y.shape, "Spline X and Y coordinates should have the same shape"
14 |
15 | if __name__ == '__main__':
16 | conftest.run_this_test(__file__)
17 |
--------------------------------------------------------------------------------
/tests/test_cgmres_nmpc.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathTracking.cgmres_nmpc import cgmres_nmpc as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_circle_fitting.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Mapping.circle_fitting import circle_fitting as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_closed_loop_rrt_star_car.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.ClosedLoopRRTStar import closed_loop_rrt_star_car as m
3 | import random
4 |
5 |
6 | def test_1():
7 | random.seed(12345)
8 | m.show_animation = False
9 | m.main(gx=1.0, gy=0.0, gyaw=0.0, max_iter=5)
10 |
11 |
12 | if __name__ == '__main__':
13 | conftest.run_this_test(__file__)
14 |
--------------------------------------------------------------------------------
/tests/test_clothoidal_paths.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.ClothoidPath import clothoid_path_planner as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_cubature_kalman_filter.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Localization.cubature_kalman_filter import cubature_kalman_filter as m
3 |
4 |
5 | def test1():
6 | m.show_final = False
7 | m.show_animation = False
8 | m.show_ellipse = False
9 | m.main()
10 |
11 |
12 | if __name__ == '__main__':
13 | conftest.run_this_test(__file__)
14 |
--------------------------------------------------------------------------------
/tests/test_d_star_lite.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.DStarLite import d_star_lite as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_depth_first_search.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.DepthFirstSearch import depth_first_search as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_dijkstra.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.Dijkstra import dijkstra as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_drone_3d_trajectory_following.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from AerialNavigation.drone_3d_trajectory_following \
3 | import drone_3d_trajectory_following as m
4 |
5 |
6 | def test1():
7 | m.show_animation = False
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_dstar.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.DStar import dstar as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_dynamic_movement_primitives.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | import numpy as np
3 | from PathPlanning.DynamicMovementPrimitives import \
4 | dynamic_movement_primitives
5 |
6 |
7 | def test_1():
8 | # test that trajectory can be learned from user-passed data
9 | T = 5
10 | t = np.arange(0, T, 0.01)
11 | sin_t = np.sin(t)
12 | train_data = np.array([t, sin_t]).T
13 |
14 | DMP_controller = dynamic_movement_primitives.DMP(train_data, T)
15 | DMP_controller.recreate_trajectory(train_data[0], train_data[-1], 4)
16 |
17 |
18 | def test_2():
19 | # test that length of trajectory is equal to desired number of timesteps
20 | T = 5
21 | t = np.arange(0, T, 0.01)
22 | sin_t = np.sin(t)
23 | train_data = np.array([t, sin_t]).T
24 |
25 | DMP_controller = dynamic_movement_primitives.DMP(train_data, T)
26 | t, path = DMP_controller.recreate_trajectory(train_data[0],
27 | train_data[-1], 4)
28 |
29 | assert(path.shape[0] == DMP_controller.timesteps)
30 |
31 |
32 | def test_3():
33 | # check that learned trajectory is close to initial
34 | T = 3*np.pi/2
35 | A_noise = 0.02
36 | t = np.arange(0, T, 0.01)
37 | noisy_sin_t = np.sin(t) + A_noise*np.random.rand(len(t))
38 | train_data = np.array([t, noisy_sin_t]).T
39 |
40 | DMP_controller = dynamic_movement_primitives.DMP(train_data, T)
41 | t, pos = DMP_controller.recreate_trajectory(train_data[0],
42 | train_data[-1], T)
43 |
44 | diff = abs(pos[:, 1] - noisy_sin_t)
45 | assert(max(diff) < 5*A_noise)
46 |
47 |
48 | if __name__ == '__main__':
49 | conftest.run_this_test(__file__)
50 |
--------------------------------------------------------------------------------
/tests/test_dynamic_window_approach.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | import numpy as np
3 |
4 | from PathPlanning.DynamicWindowApproach import dynamic_window_approach as m
5 |
6 |
7 | def test_main1():
8 | m.show_animation = False
9 | m.main(gx=1.0, gy=1.0)
10 |
11 |
12 | def test_main2():
13 | m.show_animation = False
14 | m.main(gx=1.0, gy=1.0, robot_type=m.RobotType.rectangle)
15 |
16 |
17 | def test_stuck_main():
18 | m.show_animation = False
19 | # adjust cost
20 | m.config.to_goal_cost_gain = 0.2
21 | m.config.obstacle_cost_gain = 2.0
22 | # obstacles and goals for stuck condition
23 | m.config.ob = -1 * np.array([[-1.0, -1.0],
24 | [0.0, 2.0],
25 | [2.0, 6.0],
26 | [2.0, 8.0],
27 | [3.0, 9.27],
28 | [3.79, 9.39],
29 | [7.25, 8.97],
30 | [7.0, 2.0],
31 | [3.0, 4.0],
32 | [6.0, 5.0],
33 | [3.5, 5.8],
34 | [6.0, 9.0],
35 | [8.8, 9.0],
36 | [5.0, 9.0],
37 | [7.5, 3.0],
38 | [9.0, 8.0],
39 | [5.8, 4.4],
40 | [12.0, 12.0],
41 | [3.0, 2.0],
42 | [13.0, 13.0]
43 | ])
44 | m.main(gx=-5.0, gy=-7.0)
45 |
46 |
47 | if __name__ == '__main__':
48 | conftest.run_this_test(__file__)
49 |
--------------------------------------------------------------------------------
/tests/test_ekf_slam.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from SLAM.EKFSLAM import ekf_slam as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_elastic_bands.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | import numpy as np
3 | from PathPlanning.ElasticBands.elastic_bands import ElasticBands
4 |
5 |
6 | def test_1():
7 | path = np.load("PathPlanning/ElasticBands/path.npy")
8 | obstacles_points = np.load("PathPlanning/ElasticBands/obstacles.npy")
9 | obstacles = np.zeros((500, 500))
10 | for x, y in obstacles_points:
11 | size = 30 # Side length of the square
12 | half_size = size // 2
13 | x_start = max(0, x - half_size)
14 | x_end = min(obstacles.shape[0], x + half_size)
15 | y_start = max(0, y - half_size)
16 | y_end = min(obstacles.shape[1], y + half_size)
17 | obstacles[x_start:x_end, y_start:y_end] = 1
18 | elastic_bands = ElasticBands(path, obstacles)
19 | elastic_bands.update_bubbles()
20 |
21 |
22 | if __name__ == "__main__":
23 | conftest.run_this_test(__file__)
24 |
--------------------------------------------------------------------------------
/tests/test_eta3_spline_path.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.Eta3SplinePath import eta3_spline_path as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_extended_kalman_filter.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Localization.extended_kalman_filter import extended_kalman_filter as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_fast_slam1.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from SLAM.FastSLAM1 import fast_slam1 as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.SIM_TIME = 3.0
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_fast_slam2.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from SLAM.FastSLAM2 import fast_slam2 as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.SIM_TIME = 3.0
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_flow_field.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | import PathPlanning.FlowField.flowfield as flow_field
3 |
4 |
5 | def test():
6 | flow_field.show_animation = False
7 | flow_field.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_frenet_optimal_trajectory.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.FrenetOptimalTrajectory import frenet_optimal_trajectory as m
3 | from PathPlanning.FrenetOptimalTrajectory.frenet_optimal_trajectory import (
4 | LateralMovement,
5 | LongitudinalMovement,
6 | )
7 |
8 |
9 | def default_scenario_test():
10 | m.show_animation = False
11 | m.SIM_LOOP = 5
12 | m.main()
13 |
14 |
15 | def high_speed_and_merging_and_stopping_scenario_test():
16 | m.show_animation = False
17 | m.LATERAL_MOVEMENT = LateralMovement.HIGH_SPEED
18 | m.LONGITUDINAL_MOVEMENT = LongitudinalMovement.MERGING_AND_STOPPING
19 | m.SIM_LOOP = 5
20 | m.main()
21 |
22 |
23 | def high_speed_and_velocity_keeping_scenario_test():
24 | m.show_animation = False
25 | m.LATERAL_MOVEMENT = LateralMovement.HIGH_SPEED
26 | m.LONGITUDINAL_MOVEMENT = LongitudinalMovement.VELOCITY_KEEPING
27 | m.SIM_LOOP = 5
28 | m.main()
29 |
30 |
31 | def low_speed_and_velocity_keeping_scenario_test():
32 | m.show_animation = False
33 | m.LATERAL_MOVEMENT = LateralMovement.LOW_SPEED
34 | m.LONGITUDINAL_MOVEMENT = LongitudinalMovement.VELOCITY_KEEPING
35 | m.SIM_LOOP = 5
36 | m.main()
37 |
38 |
39 | def low_speed_and_merging_and_stopping_scenario_test():
40 | m.show_animation = False
41 | m.LATERAL_MOVEMENT = LateralMovement.LOW_SPEED
42 | m.LONGITUDINAL_MOVEMENT = LongitudinalMovement.MERGING_AND_STOPPING
43 | m.SIM_LOOP = 5
44 | m.main()
45 |
46 |
47 | if __name__ == "__main__":
48 | conftest.run_this_test(__file__)
49 |
--------------------------------------------------------------------------------
/tests/test_gaussian_grid_map.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Mapping.gaussian_grid_map import gaussian_grid_map as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_graph_based_slam.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from SLAM.GraphBasedSLAM import graph_based_slam as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.SIM_TIME = 20.0
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_greedy_best_first_search.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.GreedyBestFirstSearch import greedy_best_first_search as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_grid_map_lib.py:
--------------------------------------------------------------------------------
1 | from Mapping.grid_map_lib.grid_map_lib import GridMap
2 | import conftest
3 | import numpy as np
4 |
5 |
6 | def test_position_set():
7 | grid_map = GridMap(100, 120, 0.5, 10.0, -0.5)
8 |
9 | grid_map.set_value_from_xy_pos(10.1, -1.1, 1.0)
10 | grid_map.set_value_from_xy_pos(10.1, -0.1, 1.0)
11 | grid_map.set_value_from_xy_pos(10.1, 1.1, 1.0)
12 | grid_map.set_value_from_xy_pos(11.1, 0.1, 1.0)
13 | grid_map.set_value_from_xy_pos(10.1, 0.1, 1.0)
14 | grid_map.set_value_from_xy_pos(9.1, 0.1, 1.0)
15 |
16 |
17 | def test_polygon_set():
18 | ox = [0.0, 4.35, 20.0, 50.0, 100.0, 130.0, 40.0]
19 | oy = [0.0, -4.15, -20.0, 0.0, 30.0, 60.0, 80.0]
20 |
21 | grid_map = GridMap(600, 290, 0.7, 60.0, 30.5)
22 |
23 | grid_map.set_value_from_polygon(ox, oy, 1.0, inside=False)
24 | grid_map.set_value_from_polygon(np.array(ox), np.array(oy),
25 | 1.0, inside=False)
26 |
27 |
28 | def test_xy_and_grid_index_conversion():
29 | grid_map = GridMap(100, 120, 0.5, 10.0, -0.5)
30 |
31 | for x_ind in range(grid_map.width):
32 | for y_ind in range(grid_map.height):
33 | grid_ind = grid_map.calc_grid_index_from_xy_index(x_ind, y_ind)
34 | x_ind_2, y_ind_2 = grid_map.calc_xy_index_from_grid_index(grid_ind)
35 | assert x_ind == x_ind_2
36 | assert y_ind == y_ind_2
37 |
38 |
39 | if __name__ == '__main__':
40 | conftest.run_this_test(__file__)
41 |
--------------------------------------------------------------------------------
/tests/test_histogram_filter.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Localization.histogram_filter import histogram_filter as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.SIM_TIME = 1.0
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_hybrid_a_star.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.HybridAStar import hybrid_a_star as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_informed_rrt_star.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from PathPlanning.InformedRRTStar import informed_rrt_star as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_inverted_pendulum_lqr_control.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from InvertedPendulum import inverted_pendulum_lqr_control as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_inverted_pendulum_mpc_control.py:
--------------------------------------------------------------------------------
1 | import conftest
2 |
3 | from InvertedPendulum import inverted_pendulum_mpc_control as m
4 |
5 |
6 | def test1():
7 | m.show_animation = False
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_iterative_closest_point.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from SLAM.ICPMatching import icp_matching as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | def test_2():
11 | m.show_animation = False
12 | m.main_3d_points()
13 |
14 |
15 | if __name__ == '__main__':
16 | conftest.run_this_test(__file__)
17 |
--------------------------------------------------------------------------------
/tests/test_kmeans_clustering.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | from Mapping.kmeans_clustering import kmeans_clustering as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_lqr_rrt_star.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.LQRRRTStar import lqr_rrt_star as m
3 | import random
4 |
5 | random.seed(12345)
6 |
7 |
8 | def test1():
9 | m.show_animation = False
10 | m.main(maxIter=5)
11 |
12 |
13 | if __name__ == '__main__':
14 | conftest.run_this_test(__file__)
15 |
--------------------------------------------------------------------------------
/tests/test_lqr_speed_steer_control.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathTracking.lqr_speed_steer_control import lqr_speed_steer_control as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_lqr_steer_control.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathTracking.lqr_steer_control import lqr_steer_control as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_model_predictive_speed_and_steer_control.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 |
3 | from PathTracking.model_predictive_speed_and_steer_control \
4 | import model_predictive_speed_and_steer_control as m
5 |
6 |
7 | def test_1():
8 | m.show_animation = False
9 | m.main()
10 |
11 |
12 | def test_2():
13 | m.show_animation = False
14 | m.main2()
15 |
16 |
17 | if __name__ == '__main__':
18 | conftest.run_this_test(__file__)
19 |
--------------------------------------------------------------------------------
/tests/test_move_to_pose_robot.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathTracking.move_to_pose import move_to_pose as m
3 |
4 |
5 | def test_1():
6 | """
7 | This unit test tests the move_to_pose_robot.py program
8 | """
9 | m.show_animation = False
10 | m.main()
11 |
12 |
13 | if __name__ == '__main__':
14 | conftest.run_this_test(__file__)
15 |
--------------------------------------------------------------------------------
/tests/test_mypy_type_check.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 |
4 | import conftest
5 |
6 | SUBPACKAGE_LIST = [
7 | "AerialNavigation",
8 | "ArmNavigation",
9 | "Bipedal",
10 | "Localization",
11 | "Mapping",
12 | "PathPlanning",
13 | "PathTracking",
14 | "SLAM",
15 | "InvertedPendulum"
16 | ]
17 |
18 |
19 | def run_mypy(dir_name, project_path, config_path):
20 | res = subprocess.run(
21 | ['mypy',
22 | '--config-file',
23 | config_path,
24 | '-p',
25 | dir_name],
26 | cwd=project_path,
27 | stdout=subprocess.PIPE,
28 | encoding='utf-8')
29 | return res.returncode, res.stdout
30 |
31 |
32 | def test():
33 | project_dir_path = os.path.dirname(
34 | os.path.dirname(os.path.abspath(__file__)))
35 | print(f"{project_dir_path=}")
36 |
37 | config_path = os.path.join(project_dir_path, "mypy.ini")
38 | print(f"{config_path=}")
39 |
40 | for sub_package_name in SUBPACKAGE_LIST:
41 | rc, errors = run_mypy(sub_package_name, project_dir_path, config_path)
42 | if errors:
43 | print(errors)
44 | else:
45 | print("No lint errors found.")
46 | assert rc == 0
47 |
48 |
49 | if __name__ == '__main__':
50 | conftest.run_this_test(__file__)
51 |
--------------------------------------------------------------------------------
/tests/test_n_joint_arm_to_point_control.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from ArmNavigation.n_joint_arm_to_point_control\
3 | import n_joint_arm_to_point_control as m
4 | import random
5 |
6 | random.seed(12345)
7 |
8 |
9 | def test1():
10 | m.show_animation = False
11 | m.animation()
12 |
13 |
14 | if __name__ == '__main__':
15 | conftest.run_this_test(__file__)
16 |
--------------------------------------------------------------------------------
/tests/test_normal_vector_estimation.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from Mapping.normal_vector_estimation import normal_vector_estimation as m
3 | import random
4 |
5 | random.seed(12345)
6 |
7 |
8 | def test_1():
9 | m.show_animation = False
10 | m.main1()
11 |
12 |
13 | def test_2():
14 | m.show_animation = False
15 | m.main2()
16 |
17 |
18 | if __name__ == '__main__':
19 | conftest.run_this_test(__file__)
20 |
--------------------------------------------------------------------------------
/tests/test_particle_filter.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from Localization.particle_filter import particle_filter as m
3 |
4 |
5 | def test_1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_point_cloud_sampling.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from Mapping.point_cloud_sampling import point_cloud_sampling as m
3 |
4 |
5 | def test_1(capsys):
6 | m.do_plot = False
7 | m.main()
8 | captured = capsys.readouterr()
9 | assert "voxel_sampling_points.shape=(27, 3)" in captured.out
10 | assert "farthest_sampling_points.shape=(20, 3)" in captured.out
11 | assert "poisson_disk_points.shape=(20, 3)" in captured.out
12 |
13 |
14 | if __name__ == '__main__':
15 | conftest.run_this_test(__file__)
16 |
--------------------------------------------------------------------------------
/tests/test_potential_field_planning.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.PotentialFieldPlanning import potential_field_planning as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_probabilistic_road_map.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | import numpy as np
3 | from PathPlanning.ProbabilisticRoadMap import probabilistic_road_map
4 |
5 |
6 | def test1():
7 | probabilistic_road_map.show_animation = False
8 | probabilistic_road_map.main(rng=np.random.default_rng(1233))
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_pure_pursuit.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathTracking.pure_pursuit import pure_pursuit as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_quintic_polynomials_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.QuinticPolynomialsPlanner import quintic_polynomials_planner as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_ray_casting_grid_map.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from Mapping.ray_casting_grid_map import ray_casting_grid_map as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_rear_wheel_feedback.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathTracking.rear_wheel_feedback_control import rear_wheel_feedback_control as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_rectangle_fitting.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from Mapping.rectangle_fitting import rectangle_fitting as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_reeds_shepp_path_planning.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | import conftest # Add root path to sys.path
4 | from PathPlanning.ReedsSheppPath import reeds_shepp_path_planning as m
5 |
6 |
7 | def check_edge_condition(px, py, pyaw, start_x, start_y, start_yaw, end_x,
8 | end_y, end_yaw):
9 | assert (abs(px[0] - start_x) <= 0.01)
10 | assert (abs(py[0] - start_y) <= 0.01)
11 | assert (abs(pyaw[0] - start_yaw) <= 0.01)
12 | assert (abs(px[-1] - end_x) <= 0.01)
13 | assert (abs(py[-1] - end_y) <= 0.01)
14 | assert (abs(pyaw[-1] - end_yaw) <= 0.01)
15 |
16 |
17 | def check_path_length(px, py, lengths):
18 | sum_len = sum(abs(length) for length in lengths)
19 | dpx = np.diff(px)
20 | dpy = np.diff(py)
21 | actual_len = sum(
22 | np.hypot(dx, dy) for (dx, dy) in zip(dpx, dpy))
23 | diff_len = sum_len - actual_len
24 | assert (diff_len <= 0.01)
25 |
26 |
27 | def test1():
28 | m.show_animation = False
29 | m.main()
30 |
31 |
32 | def test2():
33 | N_TEST = 10
34 | np.random.seed(1234)
35 |
36 | for i in range(N_TEST):
37 | start_x = (np.random.rand() - 0.5) * 10.0 # [m]
38 | start_y = (np.random.rand() - 0.5) * 10.0 # [m]
39 | start_yaw = np.deg2rad((np.random.rand() - 0.5) * 180.0) # [rad]
40 |
41 | end_x = (np.random.rand() - 0.5) * 10.0 # [m]
42 | end_y = (np.random.rand() - 0.5) * 10.0 # [m]
43 | end_yaw = np.deg2rad((np.random.rand() - 0.5) * 180.0) # [rad]
44 |
45 | curvature = 1.0 / (np.random.rand() * 5.0)
46 |
47 | px, py, pyaw, mode, lengths = m.reeds_shepp_path_planning(
48 | start_x, start_y, start_yaw,
49 | end_x, end_y, end_yaw, curvature)
50 |
51 | check_edge_condition(px, py, pyaw, start_x, start_y, start_yaw, end_x,
52 | end_y, end_yaw)
53 | check_path_length(px, py, lengths)
54 |
55 |
56 | if __name__ == '__main__':
57 | conftest.run_this_test(__file__)
58 |
--------------------------------------------------------------------------------
/tests/test_rocket_powered_landing.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | import numpy as np
3 | from numpy.testing import suppress_warnings
4 |
5 | from AerialNavigation.rocket_powered_landing import rocket_powered_landing as m
6 |
7 |
8 | def test1():
9 | m.show_animation = False
10 | with suppress_warnings() as sup:
11 | sup.filter(UserWarning,
12 | "You are solving a parameterized problem that is not DPP"
13 | )
14 | sup.filter(UserWarning,
15 | "Solution may be inaccurate")
16 | m.main(rng=np.random.default_rng(1234))
17 |
18 |
19 | if __name__ == '__main__':
20 | conftest.run_this_test(__file__)
21 |
--------------------------------------------------------------------------------
/tests/test_rrt.py:
--------------------------------------------------------------------------------
1 | import conftest
2 | import random
3 |
4 | from PathPlanning.RRT import rrt as m
5 | from PathPlanning.RRT import rrt_with_pathsmoothing as m1
6 |
7 | random.seed(12345)
8 |
9 |
10 | def test1():
11 | m.show_animation = False
12 | m.main(gx=1.0, gy=1.0)
13 |
14 |
15 | def test2():
16 | m1.show_animation = False
17 | m1.main()
18 |
19 |
20 | if __name__ == '__main__':
21 | conftest.run_this_test(__file__)
22 |
--------------------------------------------------------------------------------
/tests/test_rrt_dubins.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.RRTDubins import rrt_dubins as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_rrt_star.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.RRTStar import rrt_star as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | def test_no_obstacle():
11 | obstacle_list = []
12 |
13 | # Set Initial parameters
14 | rrt_star = m.RRTStar(start=[0, 0],
15 | goal=[6, 10],
16 | rand_area=[-2, 15],
17 | obstacle_list=obstacle_list)
18 | path = rrt_star.planning(animation=False)
19 | assert path is not None
20 |
21 |
22 | def test_no_obstacle_and_robot_radius():
23 | obstacle_list = []
24 |
25 | # Set Initial parameters
26 | rrt_star = m.RRTStar(start=[0, 0],
27 | goal=[6, 10],
28 | rand_area=[-2, 15],
29 | obstacle_list=obstacle_list,
30 | robot_radius=0.8)
31 | path = rrt_star.planning(animation=False)
32 | assert path is not None
33 |
34 |
35 | if __name__ == '__main__':
36 | conftest.run_this_test(__file__)
37 |
--------------------------------------------------------------------------------
/tests/test_rrt_star_dubins.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.RRTStarDubins import rrt_star_dubins as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_rrt_star_reeds_shepp.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.RRTStarReedsShepp import rrt_star_reeds_shepp as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main(max_iter=5)
8 |
9 | obstacleList = [
10 | (5, 5, 1),
11 | (4, 6, 1),
12 | (4, 8, 1),
13 | (4, 10, 1),
14 | (6, 5, 1),
15 | (7, 5, 1),
16 | (8, 6, 1),
17 | (8, 8, 1),
18 | (8, 10, 1)
19 | ] # [x,y,size(radius)]
20 |
21 | start = [0.0, 0.0, m.np.deg2rad(0.0)]
22 | goal = [6.0, 7.0, m.np.deg2rad(90.0)]
23 |
24 | def test2():
25 | step_size = 0.2
26 | rrt_star_reeds_shepp = m.RRTStarReedsShepp(start, goal,
27 | obstacleList, [-2.0, 15.0],
28 | max_iter=100, step_size=step_size)
29 | rrt_star_reeds_shepp.set_random_seed(seed=8)
30 | path = rrt_star_reeds_shepp.planning(animation=False)
31 | for i in range(len(path)-1):
    32 |         # small tolerance (1e-14) for numerical error from the planning process
    33 |         assert m.math.dist(path[i][0:2], path[i+1][0:2]) < step_size + 1e-14
34 |
35 | def test_too_big_step_size():
36 | step_size = 20
37 | rrt_star_reeds_shepp = m.RRTStarReedsShepp(start, goal,
38 | obstacleList, [-2.0, 15.0],
39 | max_iter=100, step_size=step_size)
40 | rrt_star_reeds_shepp.set_random_seed(seed=8)
41 | path = rrt_star_reeds_shepp.planning(animation=False)
42 | assert path is None
43 |
44 |
45 | if __name__ == '__main__':
46 | conftest.run_this_test(__file__)
47 |
--------------------------------------------------------------------------------
/tests/test_rrt_star_seven_joint_arm.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from ArmNavigation.rrt_star_seven_joint_arm_control \
3 | import rrt_star_seven_joint_arm_control as m
4 |
5 |
6 | def test1():
7 | m.show_animation = False
8 | m.main()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_rrt_with_sobol_sampler.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.RRT import rrt_with_sobol_sampler as m
3 | import random
4 |
5 | random.seed(12345)
6 |
7 |
8 | def test1():
9 | m.show_animation = False
10 | m.main(gx=1.0, gy=1.0)
11 |
12 |
13 | if __name__ == '__main__':
14 | conftest.run_this_test(__file__)
15 |
--------------------------------------------------------------------------------
/tests/test_safe_interval_path_planner.py:
--------------------------------------------------------------------------------
1 | from PathPlanning.TimeBasedPathPlanning.GridWithDynamicObstacles import (
2 | Grid,
3 | ObstacleArrangement,
4 | Position,
5 | )
6 | from PathPlanning.TimeBasedPathPlanning import SafeInterval as m
7 | import numpy as np
8 | import conftest
9 |
10 |
11 | def test_1():
12 | start = Position(1, 11)
13 | goal = Position(19, 19)
14 | grid_side_length = 21
15 | grid = Grid(
16 | np.array([grid_side_length, grid_side_length]),
17 | obstacle_arrangement=ObstacleArrangement.ARRANGEMENT1,
18 | )
19 |
20 | m.show_animation = False
21 | planner = m.SafeIntervalPathPlanner(grid, start, goal)
22 |
23 | path = planner.plan(False)
24 |
25 | # path should have 31 entries
26 | assert len(path.path) == 31
27 |
28 | # path should end at the goal
29 | assert path.path[-1].position == goal
30 |
31 |
32 | if __name__ == "__main__":
33 | conftest.run_this_test(__file__)
34 |
--------------------------------------------------------------------------------
/tests/test_space_time_astar.py:
--------------------------------------------------------------------------------
1 | from PathPlanning.TimeBasedPathPlanning.GridWithDynamicObstacles import (
2 | Grid,
3 | ObstacleArrangement,
4 | Position,
5 | )
6 | from PathPlanning.TimeBasedPathPlanning import SpaceTimeAStar as m
7 | import numpy as np
8 | import conftest
9 |
10 |
11 | def test_1():
12 | start = Position(1, 11)
13 | goal = Position(19, 19)
14 | grid_side_length = 21
15 | grid = Grid(
16 | np.array([grid_side_length, grid_side_length]),
17 | obstacle_arrangement=ObstacleArrangement.ARRANGEMENT1,
18 | )
19 |
20 | m.show_animation = False
21 | planner = m.SpaceTimeAStar(grid, start, goal)
22 |
23 | path = planner.plan(False)
24 |
    25 |     # path should have 31 entries
26 | assert len(path.path) == 31
27 |
28 | # path should end at the goal
29 | assert path.path[-1].position == goal
30 |
31 | assert planner.expanded_node_count < 1000
32 |
33 | if __name__ == "__main__":
34 | conftest.run_this_test(__file__)
35 |
--------------------------------------------------------------------------------
/tests/test_spiral_spanning_tree_coverage_path_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | import os
3 | import matplotlib.pyplot as plt
4 | from PathPlanning.SpiralSpanningTreeCPP \
5 | import spiral_spanning_tree_coverage_path_planner
6 |
7 | spiral_spanning_tree_coverage_path_planner.do_animation = True
8 |
9 |
10 | def spiral_stc_cpp(img, start):
11 | num_free = 0
12 | for i in range(img.shape[0]):
13 | for j in range(img.shape[1]):
14 | num_free += img[i][j]
15 |
16 | STC_planner = spiral_spanning_tree_coverage_path_planner.\
17 | SpiralSpanningTreeCoveragePlanner(img)
18 |
19 | edge, route, path = STC_planner.plan(start)
20 |
21 | covered_nodes = set()
22 | for p, q in edge:
23 | covered_nodes.add(p)
24 | covered_nodes.add(q)
25 |
26 | # assert complete coverage
27 | assert len(covered_nodes) == num_free / 4
28 |
29 |
30 | def test_spiral_stc_cpp_1():
31 | img_dir = os.path.dirname(
32 | os.path.abspath(__file__)) + \
33 | "/../PathPlanning/SpiralSpanningTreeCPP"
34 | img = plt.imread(os.path.join(img_dir, 'map', 'test.png'))
35 | start = (0, 0)
36 | spiral_stc_cpp(img, start)
37 |
38 |
39 | def test_spiral_stc_cpp_2():
40 | img_dir = os.path.dirname(
41 | os.path.abspath(__file__)) + \
42 | "/../PathPlanning/SpiralSpanningTreeCPP"
43 | img = plt.imread(os.path.join(img_dir, 'map', 'test_2.png'))
44 | start = (10, 0)
45 | spiral_stc_cpp(img, start)
46 |
47 |
48 | def test_spiral_stc_cpp_3():
49 | img_dir = os.path.dirname(
50 | os.path.abspath(__file__)) + \
51 | "/../PathPlanning/SpiralSpanningTreeCPP"
52 | img = plt.imread(os.path.join(img_dir, 'map', 'test_3.png'))
53 | start = (0, 0)
54 | spiral_stc_cpp(img, start)
55 |
56 |
57 | if __name__ == '__main__':
58 | conftest.run_this_test(__file__)
59 |
--------------------------------------------------------------------------------
/tests/test_stanley_controller.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathTracking.stanley_control import stanley_control as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_state_lattice_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from PathPlanning.StateLatticePlanner import state_lattice_planner as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_state_machine.py:
--------------------------------------------------------------------------------
1 | import conftest
2 |
3 | from MissionPlanning.StateMachine.state_machine import StateMachine
4 |
5 |
6 | def test_transition():
7 | sm = StateMachine("state_machine")
8 | sm.add_transition(src_state="idle", event="start", dst_state="running")
9 | sm.set_current_state("idle")
10 | sm.process("start")
11 | assert sm.get_current_state().name == "running"
12 |
13 |
14 | def test_guard():
15 | class Model:
16 | def can_start(self):
17 | return False
18 |
19 | sm = StateMachine("state_machine", Model())
20 | sm.add_transition(
21 | src_state="idle", event="start", dst_state="running", guard="can_start"
22 | )
23 | sm.set_current_state("idle")
24 | sm.process("start")
25 | assert sm.get_current_state().name == "idle"
26 |
27 |
28 | def test_action():
29 | class Model:
30 | def on_start(self):
31 | self.start_called = True
32 |
33 | model = Model()
34 | sm = StateMachine("state_machine", model)
35 | sm.add_transition(
36 | src_state="idle", event="start", dst_state="running", action="on_start"
37 | )
38 | sm.set_current_state("idle")
39 | sm.process("start")
40 | assert model.start_called
41 |
42 |
43 | def test_plantuml():
44 | sm = StateMachine("state_machine")
45 | sm.add_transition(src_state="idle", event="start", dst_state="running")
46 | sm.set_current_state("idle")
47 | assert sm.generate_plantuml()
48 |
49 |
50 | if __name__ == "__main__":
51 | conftest.run_this_test(__file__)
52 |
--------------------------------------------------------------------------------
/tests/test_two_joint_arm_to_point_control.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from ArmNavigation.two_joint_arm_to_point_control \
3 | import two_joint_arm_to_point_control as m
4 |
5 |
6 | def test1():
7 | m.show_animation = False
8 | m.animation()
9 |
10 |
11 | if __name__ == '__main__':
12 | conftest.run_this_test(__file__)
13 |
--------------------------------------------------------------------------------
/tests/test_unscented_kalman_filter.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from Localization.unscented_kalman_filter import unscented_kalman_filter as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | from utils import angle
3 | from numpy.testing import assert_allclose
4 | import numpy as np
5 |
6 |
7 | def test_rot_mat_2d():
8 | assert_allclose(angle.rot_mat_2d(0.0),
9 | np.array([[1., 0.],
10 | [0., 1.]]))
11 |
12 |
13 | def test_angle_mod():
14 | assert_allclose(angle.angle_mod(-4.0), 2.28318531)
15 | assert(isinstance(angle.angle_mod(-4.0), float))
16 | assert_allclose(angle.angle_mod([-4.0]), [2.28318531])
17 | assert(isinstance(angle.angle_mod([-4.0]), np.ndarray))
18 |
19 | assert_allclose(angle.angle_mod([-150.0, 190.0, 350], degree=True),
20 | [-150., -170., -10.])
21 |
22 | assert_allclose(angle.angle_mod(-60.0, zero_2_2pi=True, degree=True),
23 | [300.])
24 |
25 |
26 | if __name__ == '__main__':
27 | conftest.run_this_test(__file__)
28 |
--------------------------------------------------------------------------------
/tests/test_visibility_road_map_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
 2 | from PathPlanning.VisibilityRoadMap import visibility_road_map as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_voronoi_road_map_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
 2 | from PathPlanning.VoronoiRoadMap import voronoi_road_map as m
3 |
4 |
5 | def test1():
6 | m.show_animation = False
7 | m.main()
8 |
9 |
10 | if __name__ == '__main__':
11 | conftest.run_this_test(__file__)
12 |
--------------------------------------------------------------------------------
/tests/test_wavefront_coverage_path_planner.py:
--------------------------------------------------------------------------------
1 | import conftest # Add root path to sys.path
2 | import os
3 | import matplotlib.pyplot as plt
4 | from PathPlanning.WavefrontCPP import wavefront_coverage_path_planner
5 |
6 | wavefront_coverage_path_planner.do_animation = False
7 |
8 |
9 | def wavefront_cpp(img, start, goal):
10 | num_free = 0
11 | for i in range(img.shape[0]):
12 | for j in range(img.shape[1]):
13 | num_free += 1 - img[i][j]
14 |
15 | DT = wavefront_coverage_path_planner.transform(
16 | img, goal, transform_type='distance')
17 | DT_path = wavefront_coverage_path_planner.wavefront(DT, start, goal)
18 | assert len(DT_path) == num_free # assert complete coverage
19 |
20 | PT = wavefront_coverage_path_planner.transform(
21 | img, goal, transform_type='path', alpha=0.01)
22 | PT_path = wavefront_coverage_path_planner.wavefront(PT, start, goal)
23 | assert len(PT_path) == num_free # assert complete coverage
24 |
25 |
26 | def test_wavefront_CPP_1():
27 | img_dir = os.path.dirname(
28 | os.path.abspath(__file__)) + "/../PathPlanning/WavefrontCPP"
29 | img = plt.imread(os.path.join(img_dir, 'map', 'test.png'))
30 | img = 1 - img
31 |
32 | start = (43, 0)
33 | goal = (0, 0)
34 |
35 | wavefront_cpp(img, start, goal)
36 |
37 |
38 | def test_wavefront_CPP_2():
39 | img_dir = os.path.dirname(
40 | os.path.abspath(__file__)) + "/../PathPlanning/WavefrontCPP"
41 | img = plt.imread(os.path.join(img_dir, 'map', 'test_2.png'))
42 | img = 1 - img
43 |
44 | start = (10, 0)
45 | goal = (10, 40)
46 |
47 | wavefront_cpp(img, start, goal)
48 |
49 |
50 | def test_wavefront_CPP_3():
51 | img_dir = os.path.dirname(
52 | os.path.abspath(__file__)) + "/../PathPlanning/WavefrontCPP"
53 | img = plt.imread(os.path.join(img_dir, 'map', 'test_3.png'))
54 | img = 1 - img
55 |
56 | start = (0, 0)
57 | goal = (30, 30)
58 |
59 | wavefront_cpp(img, start, goal)
60 |
61 |
62 | if __name__ == '__main__':
63 | conftest.run_this_test(__file__)
64 |
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AtsushiSakai/PythonRobotics/74610fd56bfe7eee02054c48afaa7328bec0d6b7/utils/__init__.py
--------------------------------------------------------------------------------
/utils/angle.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from scipy.spatial.transform import Rotation as Rot
3 |
4 |
5 | def rot_mat_2d(angle):
6 | """
 7 |     Create 2D rotation matrix from an angle
 8 | 
 9 |     Parameters
10 |     ----------
11 |     angle : float
12 |         Rotation angle in radians.
13 | 
14 |     Returns
15 |     -------
16 |     A 2D rotation matrix
17 | 
18 |     Examples
19 |     --------
20 |     >>> rot_mat_2d(0.0)
21 | 
22 | """
23 | return Rot.from_euler('z', angle).as_matrix()[0:2, 0:2]
24 |
25 |
26 | def angle_mod(x, zero_2_2pi=False, degree=False):
27 | """
28 | Angle modulo operation
29 | Default angle modulo range is [-pi, pi)
30 |
31 | Parameters
32 | ----------
33 | x : float or array_like
    34 |         An angle or an array of angles. Array input is flattened for
    35 |         the calculation. When a single angle is given, a float is returned.
36 | zero_2_2pi : bool, optional
37 | Change angle modulo range to [0, 2pi)
38 | Default is False.
39 | degree : bool, optional
40 | If True, then the given angles are assumed to be in degrees.
41 | Default is False.
42 |
43 | Returns
44 | -------
45 | ret : float or ndarray
    46 |         An angle or an array of modulated angles.
47 |
48 | Examples
49 | --------
50 | >>> angle_mod(-4.0)
51 | 2.28318531
52 |
53 | >>> angle_mod([-4.0])
    54 |     array([2.28318531])
55 |
56 | >>> angle_mod([-150.0, 190.0, 350], degree=True)
57 | array([-150., -170., -10.])
58 |
59 | >>> angle_mod(-60.0, zero_2_2pi=True, degree=True)
    60 |     300.0
61 |
62 | """
63 | if isinstance(x, float):
64 | is_float = True
65 | else:
66 | is_float = False
67 |
    68 |     x = np.asarray(x).flatten()  # accept scalar, list, or array input
    69 |     if degree:
    70 |         x = np.deg2rad(x)
    71 | 
    72 |     if zero_2_2pi:
    73 |         mod_angle = x % (2 * np.pi)  # wrap into [0, 2*pi)
    74 |     else:
    75 |         mod_angle = (x + np.pi) % (2 * np.pi) - np.pi  # wrap into [-pi, pi)
76 |
77 | if degree:
78 | mod_angle = np.rad2deg(mod_angle)
79 |
80 | if is_float:
81 | return mod_angle.item()
82 | else:
83 | return mod_angle
84 |
--------------------------------------------------------------------------------