├── .gitignore ├── CODE_OF_CONDUCT.md ├── LICENSE ├── MobileRobot ├── map.png ├── mobile_robot_basic_en.ipynb ├── mobile_robot_basic_jp.ipynb ├── mobile_robot_extended_kalman_filter_en.ipynb ├── mobile_robot_extended_kalman_filter_jp.ipynb ├── mobile_robot_global_path_planning_a_star_en.ipynb ├── mobile_robot_global_path_planning_a_star_jp.ipynb ├── mobile_robot_line_trace_en.ipynb ├── mobile_robot_line_trace_jp.ipynb ├── mobile_robot_local_path_planning_dwa_en.ipynb ├── mobile_robot_local_path_planning_dwa_jp.ipynb ├── mobile_robot_particle_filter_en.ipynb ├── mobile_robot_particle_filter_jp.ipynb ├── mobile_robot_reinforcement_q_learning_en.ipynb ├── mobile_robot_reinforcement_q_learning_jp.ipynb ├── mobile_robot_sensor_en.ipynb ├── mobile_robot_sensor_jp.ipynb ├── mobile_robot_wheel_odometry_en.ipynb └── mobile_robot_wheel_odometry_jp.ipynb ├── PybulletBasic ├── ar_marker_detect_en.ipynb ├── ar_marker_detect_jp.ipynb ├── pybullet_basic_en.ipynb └── pybullet_basic_jp.ipynb ├── README.md ├── README_jp.md ├── RobotArm ├── robot_arm_analytical_inverse_kinematics_en.ipynb ├── robot_arm_analytical_inverse_kinematics_jp.ipynb ├── robot_arm_basic_en.ipynb ├── robot_arm_basic_jp.ipynb ├── robot_arm_collision_check_en.ipynb ├── robot_arm_collision_check_jp.ipynb ├── robot_arm_eye_in_hand_estimate_obj_pose_en.ipynb ├── robot_arm_eye_in_hand_estimate_obj_pose_jp.ipynb ├── robot_arm_eye_to_hand_estimate_obj_pose_en.ipynb ├── robot_arm_eye_to_hand_estimate_obj_pose_jp.ipynb ├── robot_arm_homogeneous_matrix_forward_kinematics_en.ipynb ├── robot_arm_homogeneous_matrix_forward_kinematics_jp.ipynb ├── robot_arm_image_based_visual_servo_en.ipynb ├── robot_arm_image_based_visual_servo_jp.ipynb ├── robot_arm_jacobian_inverse_kinematics_en.ipynb ├── robot_arm_jacobian_inverse_kinematics_jp.ipynb ├── robot_arm_sensor_en.ipynb ├── robot_arm_sensor_jp.ipynb ├── robot_arm_trigonometric_forward_kinematics_en.ipynb └── robot_arm_trigonometric_forward_kinematics_jp.ipynb ├── images ├── Common │ ├── Lidar.gif │ ├── ar_marker_detect │ │ ├── ar_marker_blender.png │ │ └── ar_marker_detect.gif │ ├── bumper_sensor.gif │ ├── camera.gif │ ├── camera_explain.jpeg │ ├── mobile_robot_basic.gif │ ├── mobile_robot_line_trace.gif │ ├── protect_drop_sensor.gif │ ├── raycast_explain.jpeg │ ├── robot_arm_basic_position_control.gif │ ├── robot_arm_basic_torque_control.gif │ ├── robot_arm_basic_velocity_control.gif │ ├── robot_arm_collision_check.gif │ ├── robot_arm_tip_camera.gif │ ├── robot_arm_tip_force_sensor.gif │ └── ultra_sonic_sensor.gif ├── MobileRobot │ ├── mobile_robot1.png │ ├── mobile_robot2.png │ ├── mobile_robot_extended_kalman_filter │ │ └── extended_kalman_filter.gif │ ├── mobile_robot_global_path_planning_a_star │ │ ├── a_star_animation.gif │ │ ├── a_star_pybullet.gif │ │ ├── cost_example.png │ │ ├── next_node.png │ │ ├── next_node_cost.png │ │ ├── nodes_map.png │ │ ├── obstacle_expansion.png │ │ ├── obstacle_real_to_map.png │ │ └── start_node_cost_set.png │ ├── mobile_robot_local_path_planning_dwa │ │ ├── dynamic_window_approach.gif │ │ ├── dynamic_window_approach_overview.png │ │ ├── global_path_planning.png │ │ └── local_path_planning.png │ ├── mobile_robot_particle_filter │ │ └── particle_filter.gif │ ├── mobile_robot_reinforcement_q_learning │ │ ├── q_learn.gif │ │ ├── q_learn_and_sarsa.png │ │ ├── q_table.png │ │ ├── q_table_after_learning.png │ │ ├── r_t1.png │ │ ├── reward.png │ │ ├── sim_environment.png │ │ ├── td_error.png │ │ └── td_target.png │ ├── mobile_robot_sensor.png │ ├── mobile_robot_sensor_en.png 
│ └── mobile_robot_wheel_odometry │ │ └── wheel_odometry.gif └── RobotArm │ ├── 2d_robot_arm.png │ ├── 2d_robot_arm_sensor.png │ ├── 2d_robot_arm_sensor_en.png │ ├── robot_arm_analytical_inverse_kinematics │ └── inverse_kinematics_analytical_overview.png │ ├── robot_arm_eye_in_hand_estimate_obj_pose │ └── eye_in_hand.png │ ├── robot_arm_eye_to_hand_estimate_obj_pose │ └── eye_to_hand.png │ ├── robot_arm_homogeneous_matrix_forward_kinematics │ ├── 2d_robot_arm_link_cordinate.png │ ├── 2d_robot_arm_link_cordinate_en.png │ ├── 2d_robot_arm_link_homogeneous_matrix_forward_kinematics.gif │ ├── 2d_robot_arm_link_homogeneous_matrix_forward_kinematics_en.gif │ ├── chain_rule.png │ ├── coordinate_transformation.png │ ├── direct_transformation.gif │ ├── direct_transformation_en.gif │ ├── homogeneour_matrix_formulation.png │ ├── homogeneour_matrix_forward_kinematics_overview.png │ ├── overview.gif │ ├── overview_en.gif │ ├── rotation_matrix_formulation.png │ ├── rotation_matrix_important_point.png │ ├── step_transformation.gif │ ├── translation_matrix_formulation.png │ └── translation_matrix_important_point.png │ ├── robot_arm_image_based_visual_servo │ └── pybullet_ibvs.gif │ ├── robot_arm_jacobian_inverse_kinemarics │ ├── delta_p.png │ ├── delta_p_en.png │ ├── delta_q_delta_p_jacpbian.png │ ├── delta_q_delta_p_jacpbian_en.png │ ├── inverse_kinematics_jacobian_overview.png │ ├── inverse_kinematics_jacobian_overview_en.png │ └── inverse_kinematics_jacobian_pybullet.gif │ └── robot_arm_trigonometric_forward_kinematics │ ├── 2d_arm_forward_kinematics_overview.png │ ├── 2d_arm_forward_kinematics_overview_en.png │ ├── note_link_root.png │ ├── overview.png │ ├── step1.png │ └── step2.png ├── obj └── ar_marker_box.obj ├── texture ├── ar_marker_box.png ├── line_trace_ground.png └── random_line_trace_ground.png ├── urdf ├── ar_marker_box.urdf ├── horizontal_box.urdf ├── pin_hole_box.urdf ├── plane_box.urdf ├── simple2d_arm.urdf ├── simple6d_arm_with_force_sensor.urdf ├── simple6d_arm_with_gripper.urdf ├── simple_box.urdf ├── simple_camera.urdf └── simple_two_wheel_car.urdf └── utility └── generate_ar_marker.py /.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/.gitignore -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | akinami327@gmail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. 
Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 akinami3 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MobileRobot/map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/MobileRobot/map.png -------------------------------------------------------------------------------- /MobileRobot/mobile_robot_basic_en.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [Mobile Robot](#toc1_) \n", 9 | "- [Starting pybullet](#toc2_) \n", 10 | "- [Initial Setup of pybullet](#toc3_) \n", 11 | "- [Creating the Mobile Robot](#toc4_) \n", 12 | "- [Running the Simulation](#toc5_) \n", 13 | "\n", 14 | "\n", 21 | "" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "# [Mobile Robot](#toc0_)\n", 29 | "\n", 30 | "This notebook explains the steps to create and operate a two-wheeled mobile robot.\n", 31 | "\n", 32 | "(For a manual summarizing the functions available in pybullet, please refer to [this link](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf).)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "# [Starting pybullet](#toc0_)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 1, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stderr", 49 | "output_type": "stream", 50 | "text": [ 51 | "pybullet build time: Nov 28 2023 23:45:17\n" 52 | ] 53 | }, 54 | { 55 | "name": "stdout", 56 | "output_type": "stream", 57 | "text": [ 58 | "startThreads creating 1 threads.\n", 59 | "starting thread 0\n", 60 | "started thread 0 \n", 61 | "argc=2\n", 62 | "argv[0] = --unused\n", 63 | "argv[1] = --start_demo_name=Physics Server\n", 64 | "ExampleBrowserThreadFunc started\n", 65 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "Creating context\n", 68 | "Created GL 3.3 context\n", 69 | "Direct GLX rendering context obtained\n", 70 | "Making context current\n", 71 | "GL_VENDOR=Mesa\n", 72 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 73 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 74 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 75 | "pthread_getconcurrency()=0\n", 76 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 77 | "Vendor = Mesa\n", 78 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 79 | "b3Printf: Selected demo: Physics Server\n", 80 | "startThreads creating 1 threads.\n", 81 | "starting thread 0\n", 82 | "started thread 0 \n", 83 | "MotionThreadFunc thread started\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "import pybullet\n", 89 | "import pybullet_data\n", 90 | "physics_client = pybullet.connect(pybullet.GUI) " 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "# [Initial Setup of pybullet](#toc0_)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 2, 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "name": "stdout", 107 | "output_type": "stream", 108 | "text": [ 109 | "ven = Mesa\n", 110 | "ven = Mesa\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "pybullet.resetSimulation() # Reset the simulation space\n", 116 | 
"pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # Add path to necessary data for pybullet\n", 117 | "pybullet.setGravity(0.0, 0.0, -9.8) # Set gravity as on Earth\n", 118 | "time_step = 1./240.\n", 119 | "pybullet.setTimeStep(time_step) # Set the elapsed time per step\n", 120 | "\n", 121 | "# Load the floor\n", 122 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 123 | "\n", 124 | "# Set the camera position and other parameters in GUI mode\n", 125 | "camera_distance = 2.0\n", 126 | "camera_yaw = 0.0 # deg\n", 127 | "camera_pitch = -20 # deg\n", 128 | "camera_target_position = [0.0, 0.0, 0.0]\n", 129 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "# [Creating the Mobile Robot](#toc0_)\n", 137 | "This time, we will create a two-wheeled robot `simple_two_wheel_car.urdf`. \n", 138 | "The structure of this robot is as shown in the figure below (sensors are explained in `mobile_robot_sensor.ipynb`).\n", 139 | "\n", 140 | "![](../images/MobileRobot/mobile_robot1.png)\n", 141 | "\n", 142 | "![](../images/MobileRobot/mobile_robot2.png)" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 3, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "# Load the robot\n", 152 | "car_start_pos = [0, 0, 0.1] # Set the initial position (x, y, z)\n", 153 | "car_start_orientation = pybullet.getQuaternionFromEuler([0, 0, 0]) # Set the initial orientation (roll, pitch, yaw)\n", 154 | "# It seems that the mesh in the urdf file does not reflect textures, so each link is colored with rgba tags\n", 155 | "car_id = pybullet.loadURDF(\"../urdf/simple_two_wheel_car.urdf\", car_start_pos, car_start_orientation)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "# [Running the Simulation](#toc0_)\n", 163 | "With `setJointMotorControl2`, you can operate the specified joints of the specified robot. \n", 164 | "Here, to operate with a velocity command, `VELOCITY_CONTROL` is specified as the third argument.\n" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 4, 170 | "metadata": {}, 171 | "outputs": [ 172 | { 173 | "ename": "", 174 | "evalue": "", 175 | "output_type": "error", 176 | "traceback": [ 177 | "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n", 178 | "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n", 179 | "\u001b[1;31mClick here for more info. \n", 180 | "\u001b[1;31mView Jupyter log for further details." 
181 | ] 182 | } 183 | ], 184 | "source": [ 185 | "import time\n", 186 | "RIGHT_WHEEL_JOINT_IDX = 0\n", 187 | "LEFT_WHEEL_JOINT_IDX = 1\n", 188 | "for i in range(500):\n", 189 | " # Move forward at speed 10\n", 190 | " pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 191 | " pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 192 | "\n", 193 | " # Move backward at speed 5\n", 194 | " # pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=-5)\n", 195 | " # pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=-5)\n", 196 | "\n", 197 | " # Turn right\n", 198 | " # pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 199 | " # pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=2)\n", 200 | "\n", 201 | " # Turn left\n", 202 | " # pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=2)\n", 203 | " # pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 204 | "\n", 205 | " pybullet.stepSimulation()\n", 206 | " time.sleep(time_step)" 207 | ] 208 | } 209 | ], 210 | "metadata": { 211 | "kernelspec": { 212 | "display_name": "Python 3", 213 | "language": "python", 214 | "name": "python3" 215 | }, 216 | "language_info": { 217 | "codemirror_mode": { 218 | "name": "ipython", 219 | "version": 3 220 | }, 221 | "file_extension": ".py", 222 | "mimetype": "text/x-python", 223 | "name": "python", 224 | "nbconvert_exporter": "python", 225 | "pygments_lexer": "ipython3", 226 | "version": "3.10.12" 227 | } 228 | }, 229 | "nbformat": 4, 230 | "nbformat_minor": 2 231 | } 232 | -------------------------------------------------------------------------------- /MobileRobot/mobile_robot_basic_jp.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [移動ロボット](#toc1_) \n", 9 | "- [pybulletの起動](#toc2_) \n", 10 | "- [pybulletの初期設定](#toc3_) \n", 11 | "- [移動ロボットの生成](#toc4_) \n", 12 | "- [シミュレーションの実行](#toc5_) \n", 13 | "\n", 14 | "\n", 21 | "" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "# [移動ロボット](#toc0_)\n", 29 | "\n", 30 | "本notebookでは2輪の移動ロボットを生成し、走行させる手順について解説します。\n", 31 | "\n", 32 | "(pybulletで使用可能な関数がまとめられたマニュアルについては[こちら](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)を参照してください。)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "# [pybulletの起動](#toc0_)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 1, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stderr", 49 | "output_type": "stream", 50 | "text": [ 51 | "pybullet build time: Nov 28 2023 23:45:17\n" 52 | ] 53 | }, 54 | { 55 | "name": "stdout", 56 | "output_type": "stream", 57 | "text": [ 58 | "startThreads creating 1 threads.\n", 59 | "starting thread 0\n", 60 | "started thread 0 \n", 61 | "argc=2\n", 62 | "argv[0] = --unused\n", 63 | "argv[1] = --start_demo_name=Physics Server\n", 64 | "ExampleBrowserThreadFunc started\n", 65 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 66 | "X11 
functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "Creating context\n", 68 | "Created GL 3.3 context\n", 69 | "Direct GLX rendering context obtained\n", 70 | "Making context current\n", 71 | "GL_VENDOR=Microsoft Corporation\n", 72 | "GL_RENDERER=D3D12 (AMD Radeon Graphics)\n", 73 | "GL_VERSION=4.2 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 74 | "GL_SHADING_LANGUAGE_VERSION=4.20\n", 75 | "pthread_getconcurrency()=0\n", 76 | "Version = 4.2 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 77 | "Vendor = Microsoft Corporation\n", 78 | "Renderer = D3D12 (AMD Radeon Graphics)\n", 79 | "b3Printf: Selected demo: Physics Server\n", 80 | "startThreads creating 1 threads.\n", 81 | "starting thread 0\n", 82 | "started thread 0 \n", 83 | "MotionThreadFunc thread started\n" 84 | ] 85 | }, 86 | { 87 | "name": "stdout", 88 | "output_type": "stream", 89 | "text": [ 90 | "ven = Microsoft Corporation\n", 91 | "ven = Microsoft Corporation\n" 92 | ] 93 | } 94 | ], 95 | "source": [ 96 | "import pybullet\n", 97 | "import pybullet_data\n", 98 | "physics_client = pybullet.connect(pybullet.GUI) " 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "metadata": {}, 104 | "source": [ 105 | "# [pybulletの初期設定](#toc0_)" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": 5, 111 | "metadata": {}, 112 | "outputs": [], 113 | "source": [ 114 | "pybullet.resetSimulation() # シミュレーション空間をリセット\n", 115 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # pybulletに必要なデータへのパスを追加\n", 116 | "pybullet.setGravity(0.0, 0.0, -9.8) # 地球上における重力に設定\n", 117 | "time_step = 1./240.\n", 118 | "pybullet.setTimeStep(time_step) # 1stepあたりに経過する時間の設定\n", 119 | "\n", 120 | "#床の読み込み\n", 121 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 122 | "\n", 123 | "# GUIモードの際のカメラの位置などを設定\n", 124 | "camera_distance = 2.0\n", 125 | "camera_yaw = 0.0 # deg\n", 126 | "camera_pitch = -20 # deg\n", 127 | "camera_target_position = [0.0, 0.0, 0.0]\n", 128 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 129 | ] 130 | }, 131 | { 132 | "cell_type": "markdown", 133 | "metadata": {}, 134 | "source": [ 135 | "# [移動ロボットの生成](#toc0_)\n", 136 | "今回は、2輪ロボット`simple_two_wheel_car.urdf`を生成します。 \n", 137 | "このロボットは下図のような構成になっています(センサーについては、`mobile_robot_sensor.ipynb`にて解説しています。) \n", 138 | "\n", 139 | "![](../images/MobileRobot/mobile_robot1.png)\n", 140 | "\n", 141 | "![](../images/MobileRobot/mobile_robot2.png)" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": 6, 147 | "metadata": {}, 148 | "outputs": [], 149 | "source": [ 150 | "# ロボットの読み込み\n", 151 | "car_start_pos = [0, 0, 0.1] # 初期位置(x,y,z)を設定\n", 152 | "car_start_orientation = pybullet.getQuaternionFromEuler([0,0,0]) # 初期姿勢(roll, pitch, yaw)を設定\n", 153 | "# urdfファイルのmeshはテクスチャが反映されないっぽいので各linkにrgbaタグで色を付けている\n", 154 | "car_id = pybullet.loadURDF(\"../urdf/simple_two_wheel_car.urdf\",car_start_pos, car_start_orientation)" 155 | ] 156 | }, 157 | { 158 | "cell_type": "markdown", 159 | "metadata": {}, 160 | "source": [ 161 | "# [シミュレーションの実行](#toc0_)\n", 162 | "`setJointMotorControl2` では、指定したロボットの指定したジョイントを動作させることができます。 \n", 163 | "ここでは、速度指令で動作させるため、第三引数に`VELOCITY_CONTROL`を指定しています。\n" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": 7, 169 | "metadata": {}, 170 | "outputs": [ 171 | { 172 | "ename": "", 173 | "evalue": "", 174 | "output_type": "error", 175 | "traceback": [ 176 | "\u001b[1;31mThe Kernel crashed while executing code in the current cell or 
a previous cell. \n", 177 | "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n", 178 | "\u001b[1;31mClick here for more info. \n", 179 | "\u001b[1;31mView Jupyter log for further details." 180 | ] 181 | } 182 | ], 183 | "source": [ 184 | "import time\n", 185 | "RIGHT_WHEEL_JOINT_IDX = 0\n", 186 | "LEFT_WHEEL_JOINT_IDX = 1\n", 187 | "for i in range(500):\n", 188 | " # 速度10で前進\n", 189 | " pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 190 | " pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 191 | "\n", 192 | " # 速度5で後進\n", 193 | " # pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=-5)\n", 194 | " # pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=-5)\n", 195 | "\n", 196 | " # 右旋回\n", 197 | " # pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 198 | " # pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=2)\n", 199 | "\n", 200 | " # 左旋回\n", 201 | " # pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=2)\n", 202 | " # pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 203 | "\n", 204 | " pybullet.stepSimulation()\n", 205 | " time.sleep(time_step)" 206 | ] 207 | } 208 | ], 209 | "metadata": { 210 | "kernelspec": { 211 | "display_name": "Python 3", 212 | "language": "python", 213 | "name": "python3" 214 | }, 215 | "language_info": { 216 | "codemirror_mode": { 217 | "name": "ipython", 218 | "version": 3 219 | }, 220 | "file_extension": ".py", 221 | "mimetype": "text/x-python", 222 | "name": "python", 223 | "nbconvert_exporter": "python", 224 | "pygments_lexer": "ipython3", 225 | "version": "3.10.12" 226 | } 227 | }, 228 | "nbformat": 4, 229 | "nbformat_minor": 2 230 | } 231 | -------------------------------------------------------------------------------- /PybulletBasic/pybullet_basic_en.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [What is pybullet](#toc1_) \n", 9 | "- [Importing Necessary Libraries](#toc2_) \n", 10 | "- [Starting pybullet](#toc3_) \n", 11 | "- [Initial Setup for Simulation](#toc4_) \n", 12 | "- [Loading the Floor](#toc5_) \n", 13 | "- [Loading Objects](#toc6_) \n", 14 | "- [Loading the Robot](#toc7_) \n", 15 | "- [Setting the Camera Position](#toc8_) \n", 16 | "- [Displaying Debug Text](#toc9_) \n", 17 | "- [Running the Simulation](#toc10_) \n", 18 | "\n", 19 | "\n", 26 | "" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "# [What is pybullet](#toc0_)\n", 34 | "pybullet is a physics simulation engine and a library for controlling robots and performing physical simulations.\n", 35 | "\n", 36 | "While ROS (Robot Operating System) is often used for robot simulation and control, setting up the ROS environment can be complex. On the other hand, pybullet is a Python library, making it easy to use as long as you have a Python environment.\n", 37 | "\n", 38 | "Here, we will introduce only the basic functions. 
\n", 39 | "For a manual summarizing the functions available in pybullet, refer to [this link](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)." 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "# [Importing Necessary Libraries](#toc0_)\n", 47 | "\n", 48 | "When using pybullet, you need to import `pybullet`. \n", 49 | "Additionally, to load files used by pybullet, you also need to import `pybullet_data`." 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 1, 55 | "metadata": {}, 56 | "outputs": [ 57 | { 58 | "name": "stderr", 59 | "output_type": "stream", 60 | "text": [ 61 | "pybullet build time: Nov 28 2023 23:45:17\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "import pybullet\n", 67 | "import pybullet_data" 68 | ] 69 | }, 70 | { 71 | "cell_type": "markdown", 72 | "metadata": {}, 73 | "source": [ 74 | "# [Starting pybullet](#toc0_)\n", 75 | "When using pybullet, you start the server for physical simulation using `pybullet.connect`.\n", 76 | "\n", 77 | "The types of servers available are:\n", 78 | "- `pybullet.GUI`\n", 79 | "- `pybullet.DIRECT`\n", 80 | "- `pybullet.SHARED_MEMORY`\n", 81 | "- `pybullet.UDP`\n", 82 | "- `pybullet.TCP`\n", 83 | "\n", 84 | "Basically, use `pybullet.DIRECT` for CUI and `pybullet.GUI` for GUI.\n", 85 | "\n", 86 | "
\n", 87 | "\n", 88 | "This time, we will use the GUI, so we will start the server by specifying `pybullet.GUI`.\n", 89 | "Running the cell below will start the pybullet GUI. \n", 90 | "(Note that in environments like Google Colab where the GUI cannot be displayed, you need to start the server by specifying `pybullet.DIRECT`.)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 2, 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "startThreads creating 1 threads.\n", 103 | "starting thread 0\n", 104 | "started thread 0 \n", 105 | "argc=2\n", 106 | "argv[0] = --unused\n", 107 | "argv[1] = --start_demo_name=Physics Server\n", 108 | "ExampleBrowserThreadFunc started\n", 109 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 110 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 111 | "Creating context\n", 112 | "Created GL 3.3 context\n", 113 | "Direct GLX rendering context obtained\n", 114 | "Making context current\n", 115 | "GL_VENDOR=Mesa\n", 116 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 117 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 118 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 119 | "pthread_getconcurrency()=0\n", 120 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 121 | "Vendor = Mesa\n", 122 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 123 | "b3Printf: Selected demo: Physics Server\n", 124 | "startThreads creating 1 threads.\n", 125 | "starting thread 0\n", 126 | "started thread 0 \n", 127 | "MotionThreadFunc thread started\n" 128 | ] 129 | } 130 | ], 131 | "source": [ 132 | "physics_client = pybullet.connect(pybullet.GUI) " 133 | ] 134 | }, 135 | { 136 | "cell_type": "markdown", 137 | "metadata": {}, 138 | "source": [ 139 | "# [Initial Setup for Simulation](#toc0_)\n", 140 | "Before starting the simulation, we perform initial setup.\n", 141 | "- Reset the simulation space\n", 142 | "- Add paths to necessary data for pybullet\n", 143 | "- Set gravity\n", 144 | "- Set the time elapsed per step (in seconds)" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": 3, 150 | "metadata": {}, 151 | "outputs": [ 152 | { 153 | "name": "stdout", 154 | "output_type": "stream", 155 | "text": [ 156 | "ven = Mesa\n", 157 | "ven = Mesa\n" 158 | ] 159 | } 160 | ], 161 | "source": [ 162 | "pybullet.resetSimulation() # Reset the simulation space\n", 163 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # Add paths to necessary data for pybullet\n", 164 | "pybullet.setGravity(0.0, 0.0, -9.8) # Set gravity as on Earth\n", 165 | "time_step = 1./240.\n", 166 | "pybullet.setTimeStep(time_step) # Set the time elapsed per step" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "# [Loading the Floor](#toc0_)\n", 174 | "pybullet provides several standard models. Here, we will load `plane.urdf`, a file that defines the floor.\n", 175 | "\n", 176 | "---\n", 177 | "\n", 178 | "A urdf file is an \"XML format file that defines robot models\" and can define links and joints of a robot. 
\n", 179 | "While it is generally used to define robots, `plane.urdf` defines a floor model instead of a robot.\n", 180 | "\n", 181 | "---" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 4, 187 | "metadata": {}, 188 | "outputs": [], 189 | "source": [ 190 | "# Load the floor\n", 191 | "plane_id = pybullet.loadURDF(\"plane.urdf\")" 192 | ] 193 | }, 194 | { 195 | "cell_type": "markdown", 196 | "metadata": {}, 197 | "source": [ 198 | "# [Loading Objects](#toc0_)\n", 199 | "By using the `createCollisionShape`, `createVisualShape`, and `createMultiBody` functions, you can create objects of arbitrary sizes (such as boxes and spheres)." 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 5, 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "# Load the box\n", 209 | "# Determine the weight, size, position, and orientation of the box\n", 210 | "mass = 5 # kg\n", 211 | "box_size = [0.3, 0.3, 0.3]\n", 212 | "position = [2, 0, 0.3]\n", 213 | "orientation = [1, 0, 0, 0] # Quaternion\n", 214 | "box_collision_id = pybullet.createCollisionShape(pybullet.GEOM_BOX, halfExtents=box_size)\n", 215 | "box_visual_id = pybullet.createVisualShape(pybullet.GEOM_BOX, halfExtents=box_size, rgbaColor=[1,0,0,1]) # Red, semi-transparent\n", 216 | "box_body_id = pybullet.createMultiBody(mass, box_collision_id, box_visual_id, position, orientation)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "markdown", 221 | "metadata": {}, 222 | "source": [ 223 | "# [Loading the Robot](#toc0_)\n", 224 | "We will load a urdf file of a robot that we have defined ourselves.\n", 225 | "By specifying the \"path to the urdf file\" as an argument to the `loadURDF` function, you can generate the robot." 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": 6, 231 | "metadata": {}, 232 | "outputs": [], 233 | "source": [ 234 | "# Load the robot\n", 235 | "car_start_pos = [0, 0, 0.1] # Set the initial position (x, y, z)\n", 236 | "car_start_orientation = pybullet.getQuaternionFromEuler([0, 0, 0]) # Set the initial orientation (roll, pitch, yaw)\n", 237 | "# The mesh in the urdf file does not seem to reflect textures, so each link is colored with the rgba tag\n", 238 | "car_id = pybullet.loadURDF(\"../urdf/simple_two_wheel_car.urdf\", car_start_pos, car_start_orientation)" 239 | ] 240 | }, 241 | { 242 | "cell_type": "markdown", 243 | "metadata": {}, 244 | "source": [ 245 | "# [Setting the Camera Position](#toc0_)\n", 246 | "You can set the camera in GUI mode using the `resetDebugVisualizerCamera` function." 
247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": null, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "# Set the camera position and other parameters in GUI mode\n", 256 | "camera_distance = 4.0\n", 257 | "camera_yaw = 0.0 # deg\n", 258 | "camera_pitch = -20 # deg\n", 259 | "camera_target_position = [0.0, 0.0, 0.0]\n", 260 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "metadata": {}, 266 | "source": [ 267 | "# [Displaying Debug Text](#toc0_)\n", 268 | "By using the `addUserDebugText` function, you can display text at any position in the simulation space.\n", 269 | "\n", 270 | "Additionally, with `addUserDebugLine`, you can draw a line connecting any two points in the simulation space, which is useful for visualizing the movement path of a mobile robot or the trajectory of a robot arm's end-effector.\n", 271 | "\n", 272 | "In this way, pybullet provides convenient features for visualizing simulation results." 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": 8, 278 | "metadata": {}, 279 | "outputs": [ 280 | { 281 | "data": { 282 | "text/plain": [ 283 | "0" 284 | ] 285 | }, 286 | "execution_count": 8, 287 | "metadata": {}, 288 | "output_type": "execute_result" 289 | } 290 | ], 291 | "source": [ 292 | "# Display text on the screen\n", 293 | "text_position = [0.0, 0.0, 2.0]\n", 294 | "life_time = 10.0 # Display duration (seconds)\n", 295 | "pybullet.addUserDebugText(\"test text\", text_position, textSize=2, lifeTime=life_time)" 296 | ] 297 | }, 298 | { 299 | "cell_type": "markdown", 300 | "metadata": {}, 301 | "source": [ 302 | "# [Running the Simulation](#toc0_)\n", 303 | "By using the `stepSimulation` function, time in the simulation space advances by the amount set with `setTimeStep`. \n", 304 | "Here, we are running the simulation for 200 time steps while giving speed commands to both wheels of the mobile robot." 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": 9, 310 | "metadata": {}, 311 | "outputs": [ 312 | { 313 | "ename": "", 314 | "evalue": "", 315 | "output_type": "error", 316 | "traceback": [ 317 | "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n", 318 | "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n", 319 | "\u001b[1;31mClick here for more info. \n", 320 | "\u001b[1;31mView Jupyter log for further details." 
321 | ] 322 | } 323 | ], 324 | "source": [ 325 | "import time\n", 326 | "RIGHT_WHEEL_JOINT_IDX = 0\n", 327 | "LEFT_WHEEL_JOINT_IDX = 1\n", 328 | "for i in range(200):\n", 329 | " pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 330 | " pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 331 | " pybullet.stepSimulation()\n", 332 | " time.sleep(time_step)" 333 | ] 334 | } 335 | ], 336 | "metadata": { 337 | "kernelspec": { 338 | "display_name": "Python 3", 339 | "language": "python", 340 | "name": "python3" 341 | }, 342 | "language_info": { 343 | "codemirror_mode": { 344 | "name": "ipython", 345 | "version": 3 346 | }, 347 | "file_extension": ".py", 348 | "mimetype": "text/x-python", 349 | "name": "python", 350 | "nbconvert_exporter": "python", 351 | "pygments_lexer": "ipython3", 352 | "version": "3.10.12" 353 | } 354 | }, 355 | "nbformat": 4, 356 | "nbformat_minor": 2 357 | } 358 | -------------------------------------------------------------------------------- /PybulletBasic/pybullet_basic_jp.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [pybulletとは](#toc1_) \n", 9 | "- [必要なライブラリのインポート](#toc2_) \n", 10 | "- [pybulletの起動](#toc3_) \n", 11 | "- [シミュレーションの初期設定](#toc4_) \n", 12 | "- [床の読み込み](#toc5_) \n", 13 | "- [物体の読み込み](#toc6_) \n", 14 | "- [ロボットの読み込み](#toc7_) \n", 15 | "- [カメラ位置の設定](#toc8_) \n", 16 | "- [デバッグ用の文字表示](#toc9_) \n", 17 | "- [シミュレーションの実行](#toc10_) \n", 18 | "\n", 19 | "\n", 26 | "" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "# [pybulletとは](#toc0_)\n", 34 | "pybulletは、物理シミュレーションエンジンの一つであり、ロボットの制御や物理シミュレーションを行うためのライブラリです。\n", 35 | "\n", 36 | "ロボットのシミュレーションや制御にはROS(Robot Operating System)が使われることが多いですが、ROSは環境構築が複雑です。一方で、pybulletはpythonのライブラリなので、pythonが使える環境であれば簡単に扱うことができます。\n", 37 | "\n", 38 | "こちらでは、基本的な機能のみを紹介します。 \n", 39 | "pybulletで使用可能な関数がまとめられたマニュアルについては[こちら](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)を参照してください。" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "# [必要なライブラリのインポート](#toc0_)\n", 47 | "\n", 48 | "pybulletを使用する際は、`pybullet`をインポートする必要があります。 \n", 49 | "また、pybulletで使用するファイルを読み込むために`pybullet_data`もインポートします。" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 1, 55 | "metadata": {}, 56 | "outputs": [ 57 | { 58 | "name": "stderr", 59 | "output_type": "stream", 60 | "text": [ 61 | "pybullet build time: Nov 28 2023 23:45:17\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "import pybullet\n", 67 | "import pybullet_data" 68 | ] 69 | }, 70 | { 71 | "cell_type": "markdown", 72 | "metadata": {}, 73 | "source": [ 74 | "# [pybulletの起動](#toc0_)\n", 75 | "pybulletを使用する際は、`pybullet.connect`を使用して、物理シミュレーションを行うためのサーバーを起動します。\n", 76 | "\n", 77 | "サーバーには以下の種類が存在します。\n", 78 | "- `pybullet.GUI`\n", 79 | "- `pybullet.DIRECT`\n", 80 | "- `pybullet.SHARED_MEMORY`\n", 81 | "- `pybullet.UDP`\n", 82 | "- `pybullet.TCP`\n", 83 | "\n", 84 | "基本的には、CUIで使用する場合は`pybullet.DIRECT`、GUIで使用する場合は`pybullet.GUI`を使用します。\n", 85 | "\n", 86 | "
\n", 87 | "\n", 88 | "今回はGUIを使用するため、`pybullet.GUI`を指定してサーバーを起動します。\n", 89 | "以下セルを実行すると、pybulletのGUIが起動されます。 \n", 90 | "(google colabなどではGUIが表示されないため、`pybullet.DIRECT`を指定してサーバーを起動する必要があります。)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 2, 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "startThreads creating 1 threads.\n", 103 | "starting thread 0\n", 104 | "started thread 0 \n", 105 | "argc=2\n", 106 | "argv[0] = --unused\n", 107 | "argv[1] = --start_demo_name=Physics Server\n", 108 | "ExampleBrowserThreadFunc started\n", 109 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 110 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 111 | "Creating context\n", 112 | "Created GL 3.3 context\n", 113 | "Direct GLX rendering context obtained\n", 114 | "Making context current\n", 115 | "GL_VENDOR=Mesa\n", 116 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 117 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 118 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 119 | "pthread_getconcurrency()=0\n", 120 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 121 | "Vendor = Mesa\n", 122 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 123 | "b3Printf: Selected demo: Physics Server\n", 124 | "startThreads creating 1 threads.\n", 125 | "starting thread 0\n", 126 | "started thread 0 \n", 127 | "MotionThreadFunc thread started\n" 128 | ] 129 | } 130 | ], 131 | "source": [ 132 | "physics_client = pybullet.connect(pybullet.GUI) " 133 | ] 134 | }, 135 | { 136 | "cell_type": "markdown", 137 | "metadata": {}, 138 | "source": [ 139 | "# [シミュレーションの初期設定](#toc0_)\n", 140 | "シミュレーションを開始するにあたって、初期設定を行います。\n", 141 | "- シミュレーション空間のリセット\n", 142 | "- pybulletに必要なデータへのパスの追加\n", 143 | "- 重力の設定\n", 144 | "- 1stepあたりに経過する時間(秒単位)" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": 3, 150 | "metadata": {}, 151 | "outputs": [ 152 | { 153 | "name": "stdout", 154 | "output_type": "stream", 155 | "text": [ 156 | "ven = Mesa\n", 157 | "ven = Mesa\n" 158 | ] 159 | } 160 | ], 161 | "source": [ 162 | "pybullet.resetSimulation() # シミュレーション空間をリセット\n", 163 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # pybulletに必要なデータへのパスを追加\n", 164 | "pybullet.setGravity(0.0, 0.0, -9.8) # 地球上における重力に設定\n", 165 | "time_step = 1./240.\n", 166 | "pybullet.setTimeStep(time_step) # 1stepあたりに経過する時間の設定" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "# [床の読み込み](#toc0_)\n", 174 | "pybulletでは、標準で幾つかのモデルが用意されています。ここでは、その中で床が定義されているファイルである `plane.urdf`を読み込みます。 \n", 175 | "\n", 176 | "---\n", 177 | "\n", 178 | "urdfファイルは「ロボットモデルについて定義されるxml形式のファイル」になり、ロボットのリンクやジョイントについて定義できます。 \n", 179 | "基本的には、ロボットに関して定義するものになりますが、`plane.urdf`では、ロボットではなく床のモデルが定義されています。\n", 180 | "\n", 181 | "---" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 4, 187 | "metadata": {}, 188 | "outputs": [], 189 | "source": [ 190 | "#床の読み込み\n", 191 | "plane_id = pybullet.loadURDF(\"plane.urdf\")" 192 | ] 193 | }, 194 | { 195 | "cell_type": "markdown", 196 | "metadata": {}, 197 | "source": [ 198 | "# [物体の読み込み](#toc0_)\n", 199 | "`createCollisionShape` `createVisualShape` `createMultiBody`関数を使用することで、任意のサイズのオブジェクト(ボックスや球など)" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 5, 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "# ボックスの読み込み\n", 209 | "## ボックスの重さ、サイズ、位置·姿勢を決める\n", 
210 | "mass = 5 # kg\n", 211 | "box_size = [0.3, 0.3, 0.3]\n", 212 | "position = [2, 0, 0.3]\n", 213 | "orientation = [1, 0, 0, 0] # 四元数\n", 214 | "box_collision_id = pybullet.createCollisionShape(pybullet.GEOM_BOX, halfExtents=box_size, physicsClientId=physics_client)\n", 215 | "box_visual_id = pybullet.createVisualShape(pybullet.GEOM_BOX, halfExtents=box_size, physicsClientId=physics_client, rgbaColor=[1,0,0,1]) # 赤・半透明\n", 216 | "box_body_id = pybullet.createMultiBody(mass, box_collision_id, box_visual_id, position, orientation, physicsClientId=physics_client)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "markdown", 221 | "metadata": {}, 222 | "source": [ 223 | "# [ロボットの読み込み](#toc0_)\n", 224 | "自身で定義したロボットのurdfファイルを読み込みます。\n", 225 | "`loadURDF`関数の引数に「urdfファイルまでのパス」を指定することでロボットを生成することができます" 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": 6, 231 | "metadata": {}, 232 | "outputs": [], 233 | "source": [ 234 | "# ロボットの読み込み\n", 235 | "car_start_pos = [0, 0, 0.1] # 初期位置(x,y,z)を設定\n", 236 | "car_start_orientation = pybullet.getQuaternionFromEuler([0,0,0]) # 初期姿勢(roll, pitch, yaw)を設定\n", 237 | "# urdfファイルのmeshはテクスチャが反映されないっぽいので各linkにrgbaタグで色を付けている\n", 238 | "car_id = pybullet.loadURDF(\"../urdf/simple_two_wheel_car.urdf\",car_start_pos, car_start_orientation)" 239 | ] 240 | }, 241 | { 242 | "cell_type": "markdown", 243 | "metadata": {}, 244 | "source": [ 245 | "# [カメラ位置の設定](#toc0_)\n", 246 | "`resetDebugVisualizerCamera`関数で、GUIモードの際のカメラを設定できます。" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": 7, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "# GUIモードの際のカメラの位置などを設定\n", 256 | "camera_distance = 4.0\n", 257 | "camera_yaw = 0.0 # deg\n", 258 | "camera_pitch = -20 # deg\n", 259 | "camera_target_position = [0.0, 0.0, 0.0]\n", 260 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "metadata": {}, 266 | "source": [ 267 | "# [デバック用の文字表示](#toc0_)\n", 268 | "`addUserDebugText`関数を使用することで、シミュレーション空間上の任意の位置にテキストを表示させることができます。 \n", 269 | "\n", 270 | "他にも、`addUserDebugLine`では、シミュレーション空間上の任意の2点間を結ぶを結ぶ線を描画することができ、移動ロボットの移動経路やロボットアームの手先の軌跡などを描画する際に便利です。\n", 271 | "\n", 272 | "このように、pybulletではシミュレーション結果を可視化するのに便利な機能が搭載されています。" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": 8, 278 | "metadata": {}, 279 | "outputs": [ 280 | { 281 | "data": { 282 | "text/plain": [ 283 | "0" 284 | ] 285 | }, 286 | "execution_count": 8, 287 | "metadata": {}, 288 | "output_type": "execute_result" 289 | } 290 | ], 291 | "source": [ 292 | "# 画面上に文字を表示\n", 293 | "text_position = [0.0, 0.0, 2.0]\n", 294 | "life_time = 10.0 # 表示期間(秒)\n", 295 | "pybullet.addUserDebugText(f\"test text\", text_position, textSize=2, lifeTime=life_time)" 296 | ] 297 | }, 298 | { 299 | "cell_type": "markdown", 300 | "metadata": {}, 301 | "source": [ 302 | "# [シミュレーションの実行](#toc0_)\n", 303 | "`stepSimulation`関数を使用すると、`setTimeStep`で設定した時間分シミュレーション空間上で時間が経過します。 \n", 304 | "ここでは、移動ロボットの両輪に速度指令を与えて200時刻分シミュレーションを実行しています。\n" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": 9, 310 | "metadata": {}, 311 | "outputs": [ 312 | { 313 | "ename": "", 314 | "evalue": "", 315 | "output_type": "error", 316 | "traceback": [ 317 | "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n", 318 | "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. 
\n", 319 | "\u001b[1;31mClick here for more info. \n", 320 | "\u001b[1;31mView Jupyter log for further details." 321 | ] 322 | } 323 | ], 324 | "source": [ 325 | "import time\n", 326 | "RIGHT_WHEEL_JOINT_IDX = 0\n", 327 | "LEFT_WHEEL_JOINT_IDX = 1\n", 328 | "for i in range(200):\n", 329 | " pybullet.setJointMotorControl2(car_id, LEFT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 330 | " pybullet.setJointMotorControl2(car_id, RIGHT_WHEEL_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=10)\n", 331 | " pybullet.stepSimulation()\n", 332 | " time.sleep(time_step)" 333 | ] 334 | } 335 | ], 336 | "metadata": { 337 | "kernelspec": { 338 | "display_name": "Python 3", 339 | "language": "python", 340 | "name": "python3" 341 | }, 342 | "language_info": { 343 | "codemirror_mode": { 344 | "name": "ipython", 345 | "version": 3 346 | }, 347 | "file_extension": ".py", 348 | "mimetype": "text/x-python", 349 | "name": "python", 350 | "nbconvert_exporter": "python", 351 | "pygments_lexer": "ipython3", 352 | "version": "3.10.12" 353 | } 354 | }, 355 | "nbformat": 4, 356 | "nbformat_minor": 2 357 | } 358 | -------------------------------------------------------------------------------- /README_jp.md: -------------------------------------------------------------------------------- 1 | # PybulletRobotics 2 | 3 | [Click here for English README](https://github.com/akinami3/PybulletRobotics/blob/main/README.md) 4 | 5 | 本リポジトリでは、ロボティクスにおけるpybulletの基本的な使用方法や、ロボティクスに関連する様々なアルゴリズムをpybulletを用いて実装する方法を紹介します。 6 | 7 | # 目次 8 | - [PybulletRobotics](#pybulletrobotics) 9 | - [目次](#目次) 10 | - [インストール方法](#インストール方法) 11 | - [全般](#全般) 12 | - [Pybulletの基本的な使用方法](#pybulletの基本的な使用方法) 13 | - [ARマーカーの位置·姿勢の検出](#arマーカーの位置姿勢の検出) 14 | - [移動ロボット](#移動ロボット) 15 | - [基本](#基本) 16 | - [移動ロボットの基本的な制御](#移動ロボットの基本的な制御) 17 | - [移動ロボットにおけるセンサーの利用](#移動ロボットにおけるセンサーの利用) 18 | - [移動ロボットによるライントレース](#移動ロボットによるライントレース) 19 | - [wheel odometryによる位置推定](#wheel-odometryによる位置推定) 20 | - [particle filterによる位置推定](#particle-filterによる位置推定) 21 | - [拡張カルマンフィルタ(EKF)による位置推定](#拡張カルマンフィルタekfによる位置推定) 22 | - [A\*によるグローバルパスプランニング](#aによるグローバルパスプランニング) 23 | - [Dynamic Window Approach(DWA)によるローカルパスプランニング](#dynamic-window-approachdwaによるローカルパスプランニング) 24 | - [Q学習(強化学習)によるライントレース](#q学習強化学習によるライントレース) 25 | - [そのた追加予定のコンテンツ](#そのた追加予定のコンテンツ) 26 | - [ロボットアーム](#ロボットアーム) 27 | - [基本](#基本-1) 28 | - [ロボットアームの基本的な制御](#ロボットアームの基本的な制御) 29 | - [ロボットアームにおけるセンサーの利用](#ロボットアームにおけるセンサーの利用) 30 | - [衝突判定](#衝突判定) 31 | - [運動学](#運動学) 32 | - [三角関数による順運動学](#三角関数による順運動学) 33 | - [同次変換行列を用いた順運動学](#同次変換行列を用いた順運動学) 34 | - [解析的解法による逆運動学](#解析的解法による逆運動学) 35 | - [数値的解法によるヤコビ行列を用いた逆運動学](#数値的解法によるヤコビ行列を用いた逆運動学) 36 | - [eye to hand による物体位置の推定](#eye-to-hand-による物体位置の推定) 37 | - [eye in hand による物体位置の推定](#eye-in-hand-による物体位置の推定) 38 | - [image based visual servo](#image-based-visual-servo) 39 | - [その他、追加予定のコンテンツ](#その他追加予定のコンテンツ) 40 | - [参考サイトなど](#参考サイトなど) 41 | - [筆者作成](#筆者作成) 42 | 43 | 44 | 52 | 53 | # インストール方法 54 | pybulletでロボティクスのシミュレーションを実施する際に必要なライブラリをインストールする手順は以下になります。 55 | 56 | なお、動作確認した際の各ライブラリのバージョンは、各コマンドの隣にコメントとして記載しています。 57 | 58 | 動作確認環境:Ubuntu (WSLでも可) 59 | 60 | ```bash 61 | sudo apt update 62 | ``` 63 | 64 | ```bash 65 | sudo apt install python3-pip 66 | ``` 67 | 68 | ```bash 69 | pip3 install pybullet # version 3.2.6 70 | ``` 71 | 72 | ```bash 73 | pip3 install numpy # version 1.24.3 74 | ``` 75 | 76 | ```bash 77 | pip3 install matplotlib # version 3.5.1 78 | ``` 79 | 80 | ```bash 81 | pip3 install opencv-contrib-python # version 4.5.4 82 | ``` 83 | 84 | ```bash 85 | pip install scipy # 
version 1.11.3 86 | ``` 87 | 88 | ```bash 89 | pip install ffmpeg-python # version 0.2.0 90 | ``` 91 | 92 | ```bash 93 | sudo apt install ffmpeg 94 | ``` 95 | 96 | # 全般 97 | ## Pybulletの基本的な使用方法 98 | 以下に示すpybulletを用いたロボティクスの基本的な使用方法を紹介します。 99 | - 環境の作成 100 | - urdfファイルからのロボットの読み込み 101 | - シミュレーションの実行 102 | 103 | 詳細は「[pybullet_basic.ipynb](./PybulletBasic/pybullet_basic_jp.ipynb)」を参照。 104 | 105 | ## ARマーカーの位置·姿勢の検出 106 | カメラから取得した画像から、ARマーカーの位置と姿勢を検出する方法を紹介します。 107 | 108 | 109 | 110 |
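(参考)以下は、OpenCV(opencv-contrib-python 4.5系)の aruco モジュールでマーカーの位置・姿勢を推定する流れを示した最小スケッチです。カメラ行列・歪み係数・マーカー辞書・画像ファイル名はすべて説明用の仮の値であり、本リポジトリの実装そのものではありません。実際の手順は下記 notebook を参照してください。

```python
import cv2
import numpy as np

# カメラパラメータ(仮の値。実際はpybulletのカメラ設定から求める)
camera_matrix = np.array([[500.0, 0.0, 320.0],
                          [0.0, 500.0, 240.0],
                          [0.0, 0.0, 1.0]])
dist_coeffs = np.zeros(5)   # レンズ歪みなしと仮定
marker_length = 0.05        # マーカー1辺の長さ [m](仮)

aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)  # 辞書は例
img = cv2.imread("camera_image.png")  # pybulletのカメラ画像を保存したもの(仮)
corners, ids, _ = cv2.aruco.detectMarkers(img, aruco_dict)
if ids is not None:
    # 各マーカーのカメラ座標系における姿勢(rvec)と位置(tvec)を推定
    rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(
        corners, marker_length, camera_matrix, dist_coeffs)
    for marker_id, tvec in zip(ids.flatten(), tvecs):
        print(marker_id, tvec)
```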
111 | 112 | また、ARマーカを貼り付けたボックスを作成する方法も紹介します(blenderを使用)。 113 | 114 | 115 | 116 | 117 | 詳細は「[ar_marker_detect.ipynb](./PybulletBasic/ar_marker_detect_jp.ipynb)」を参照。 118 | 119 | # 移動ロボット 120 | 121 | ## 基本 122 | 123 | 124 | ### 移動ロボットの基本的な制御 125 | 2輪移動ロボットを移動させるシンプルなコードを紹介します。 126 | 127 | 128 | 129 | 130 | 詳細は「[mobile_robot_basic.ipynb](./MobileRobot/mobile_robot_basic_jp.ipynb)」を参照。 131 | 132 | ### 移動ロボットにおけるセンサーの利用 133 | 2輪移動ロボットにおける以下のセンサーの利用方法を紹介します。 134 | - 超音波センサー 135 | - カメラ 136 | - LiDAR 137 | - 落下防止センサー 138 | - バンパー 139 | 140 | 141 | 142 |
143 | 144 | **超音波センサ** 145 | 146 | 147 |
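(参考)超音波センサーは、pybulletでは`rayTest`によるレイキャスト1本で近似できます。以下はその最小スケッチで、センサー位置・向き・測定可能距離は仮の値です(実装の詳細は後述の notebook を参照)。

```python
import pybullet
import pybullet_data

pybullet.connect(pybullet.DIRECT)  # GUIなしで接続(例)
pybullet.setAdditionalSearchPath(pybullet_data.getDataPath())
pybullet.loadURDF("plane.urdf")  # レイの当たり先として床を読み込む

sensor_pos = [0.0, 0.0, 0.5]    # センサーのワールド座標(仮)
sensor_dir = [0.0, 0.0, -1.0]   # センサーの向き(仮:真下)
max_range = 2.0                 # 測定可能距離 [m](仮)
ray_to = [sensor_pos[i] + sensor_dir[i] * max_range for i in range(3)]
hit = pybullet.rayTest(sensor_pos, ray_to)[0]
distance = hit[2] * max_range   # hit[2]は0~1のhitFraction(1なら未検出)
print(f"distance = {distance:.3f} m")
```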
148 | 149 | **カメラ** 150 | 151 | 152 |
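(参考)カメラは`computeViewMatrix`・`computeProjectionMatrixFOV`・`getCameraImage`の組み合わせで実現できます。カメラ位置・注視点・解像度は説明用の仮の値です。

```python
import pybullet

pybullet.connect(pybullet.DIRECT)  # 画像取得だけならGUIなしでも可

view_matrix = pybullet.computeViewMatrix(
    cameraEyePosition=[0.2, 0.0, 0.2],     # カメラ位置(仮)
    cameraTargetPosition=[1.0, 0.0, 0.2],  # 注視点(仮)
    cameraUpVector=[0.0, 0.0, 1.0])
proj_matrix = pybullet.computeProjectionMatrixFOV(
    fov=60, aspect=320 / 240, nearVal=0.01, farVal=10.0)
width, height, rgb, depth, seg = pybullet.getCameraImage(
    320, 240, view_matrix, proj_matrix)
# rgbはRGBA画像、depthは深度バッファ、segはセグメンテーションマスク
```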
153 | 154 | **Lidar** 155 | 156 | 157 |
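(参考)LiDARは`rayTestBatch`で多数のレイを一括照射することで近似できます。レイ本数・原点・最大距離は仮の値です。

```python
import math
import pybullet
import pybullet_data

pybullet.connect(pybullet.DIRECT)
pybullet.setAdditionalSearchPath(pybullet_data.getDataPath())
pybullet.loadURDF("plane.urdf")

num_rays = 36              # 10度刻みの全方位スキャン(仮)
origin = [0.0, 0.0, 0.2]   # センサー原点(仮)
max_range = 5.0            # 最大計測距離 [m](仮)
ray_from = [origin] * num_rays
ray_to = [[origin[0] + max_range * math.cos(2.0 * math.pi * i / num_rays),
           origin[1] + max_range * math.sin(2.0 * math.pi * i / num_rays),
           origin[2]] for i in range(num_rays)]
results = pybullet.rayTestBatch(ray_from, ray_to)
distances = [r[2] * max_range for r in results]  # 方向ごとの距離(未検出はmax_range)
```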
158 | 159 | **落下防止センサー** 160 | 161 | 162 |
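(参考)落下防止センサーは、機体前端から真下に短いレイを飛ばし「一定距離以内に床があるか」で段差を判定するのが簡単です。センサー位置・判定距離は仮の値です。

```python
import pybullet
import pybullet_data

pybullet.connect(pybullet.DIRECT)
pybullet.setAdditionalSearchPath(pybullet_data.getDataPath())
pybullet.loadURDF("plane.urdf")

sensor_pos = [0.15, 0.0, 0.05]  # 機体前端のセンサー位置(仮)
check_depth = 0.1               # この範囲内に床がなければ「段差あり」と判定(仮)
ray_to = [sensor_pos[0], sensor_pos[1], sensor_pos[2] - check_depth]
hit = pybullet.rayTest(sensor_pos, ray_to)[0]
cliff_detected = (hit[0] == -1)  # 何にも当たらなければ落下の危険あり
print("cliff!" if cliff_detected else "ground ok")
```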
163 | 164 | **バンパー** 165 | 166 | 167 | 詳細は「[mobile_robot_sensor.ipynb](./MobileRobot/mobile_robot_sensor_jp.ipynb)」を参照。 168 | ## 移動ロボットによるライントレース 169 | 2輪の移動ロボットのボトムカメラを使用してライントレースを行う方法を紹介します。 170 | 171 | 172 | 173 | 詳細は「[mobile_robot_line_trace.ipynb](./MobileRobot/mobile_robot_line_trace_jp.ipynb)」を参照。 174 | 175 | ## wheel odometryによる位置推定 176 | 177 | 2輪移動ロボットにおけるwheel odometryによる位置推定を行う方法を紹介します。 178 | 179 | 180 | 181 | 詳細は「[mobile_robot_wheel_odometry.ipynb](./MobileRobot/mobile_robot_wheel_odometry_jp.ipynb)」を参照。 182 | 183 | ## particle filterによる位置推定 184 | 185 | 2輪移動ロボットにおけるparticle filterによる位置推定を行う方法を紹介します。 186 | 187 | 188 | 189 | 詳細は、「[mobile_robot_particle_filter.ipynb](./MobileRobot/mobile_robot_particle_filter_jp.ipynb)」を参照。 190 | 191 | ## 拡張カルマンフィルタ(EKF)による位置推定 192 | 193 | 2輪移動ロボットにおける拡張カルマンフィルタによる位置推定を行う方法を紹介します。 194 | 195 | (赤点が動作モデルのみによる推定位置、緑がEKFによる推定位置) 196 | 197 | 198 | 199 | 詳細は「[mobile_robot_extended_kalman_filter.ipynb](./MobileRobot/mobile_robot_extended_kalman_filter_jp.ipynb)」を参照。 200 | 201 | ## A*によるグローバルパスプランニング 202 | 203 | 2輪移動ロボットにおけるA*によるグローバルパスプランニングを行う方法を紹介します。 204 | 205 | 206 | 207 | 208 | 209 | 詳細は、「[mobile_robot_global_path_planning_a_star.ipynb](./MobileRobot/mobile_robot_global_path_planning_a_star_jp.ipynb)」を参照。 210 | 211 | ## Dynamic Window Approach(DWA)によるローカルパスプランニング 212 | 213 | 2輪移動ロボットにおけるDynamic Window Approach(DWA)によるローカルパスプランニングを行う方法を紹介します。 214 | 215 | 216 | 217 | 詳細は「[mobile_robot_local_path_planning_dwa.ipynb](./MobileRobot/mobile_robot_local_path_planning_dwa_jp.ipynb)」を参照。 218 | 219 | ## Q学習(強化学習)によるライントレース 220 | 221 | 2輪移動ロボットにおけるQ学習によるライントレースを行う方法を紹介します。 222 | 223 | 224 | 225 | 詳細は「[mobile_robot_reinforcement_q_learning.ipynb](./MobileRobot/mobile_robot_reinforcement_q_learning_jp.ipynb)」を参照。 226 | 227 | ## その他の追加予定のコンテンツ 228 | - SLAM 229 | - 強化学習(Q学習) 230 | 231 | # ロボットアーム 232 | 233 | ## 基本 234 | 235 | ### ロボットアームの基本的な制御 236 | 2軸ロボットアームを制御(位置制御、速度制御、トルク制御)するシンプルなコードを紹介します。 237 | 238 | **位置制御** 239 | 240 | 241 | 242 |
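(参考)位置制御は`setJointMotorControl2`に`POSITION_CONTROL`を指定し、`targetPosition`で目標関節角度を与えます。以下は最小スケッチで、urdfのパス・関節番号・目標角度は仮の値です(実装の詳細は後述の notebook を参照)。

```python
import pybullet
import pybullet_data

pybullet.connect(pybullet.DIRECT)
pybullet.setAdditionalSearchPath(pybullet_data.getDataPath())
arm_id = pybullet.loadURDF("../urdf/simple2d_arm.urdf")  # パスは実行場所に合わせる(例)

JOINT_INDICES = [0, 1]        # 制御する関節番号(仮)
target_angles = [0.5, -0.3]   # 目標関節角度 [rad](仮)
for idx, angle in zip(JOINT_INDICES, target_angles):
    pybullet.setJointMotorControl2(
        arm_id, idx, pybullet.POSITION_CONTROL, targetPosition=angle)
for _ in range(240):  # シミュレーションを進めると目標角度へ動く
    pybullet.stepSimulation()
```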
243 | 244 | **速度制御** 245 | 246 | 247 | 248 | 249 |
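
速度制御では第三引数に`VELOCITY_CONTROL`を指定し、`targetVelocity`で関節速度(rad/s)を与えます(同じく`arm_id`は読み込み済みを前提とした仮のスケッチです)。

```python
import pybullet

# 速度制御の最小スケッチ(arm_id は読み込み済みを前提)
for _ in range(500):
    pybullet.setJointMotorControl2(arm_id, 0, pybullet.VELOCITY_CONTROL, targetVelocity=2.0)  # joint0 を 2.0 rad/s で回転
    pybullet.stepSimulation()
```
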
250 | 251 | **トルク制御** 252 | 253 | 254 | 255 | 256 | 詳細は「[robot_arm_basic.ipynb](./RobotArm/robot_arm_basic_jp.ipynb)」を参照。 257 | 258 | ### ロボットアームにおけるセンサーの利用 259 | 2軸ロボットアームにおける以下のセンサーの利用方法を紹介します。 260 | - 手先のカメラ 261 | - 手先の力センサー 262 | 263 | 264 | 265 |
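
手先カメラは「手先リンクの位置・姿勢からビュー行列を作り`getCameraImage`で画像を取得」、力センサーは「`enableJointForceTorqueSensor`を有効化して`getJointState`で反力を読む」という流れになります。以下は視点やジョイントのインデックスを仮の値とした最小限のスケッチです(実際の実装は下記のnotebookを参照してください)。

```python
import pybullet

# --- 手先カメラの最小スケッチ(視点・注視点は仮の値) ---
view_matrix = pybullet.computeViewMatrix(
    cameraEyePosition=[0.0, 0.0, 1.0],     # 手先位置を想定した仮の視点
    cameraTargetPosition=[0.5, 0.0, 1.0],  # 手先の向きを想定した仮の注視点
    cameraUpVector=[0.0, 0.0, 1.0])
proj_matrix = pybullet.computeProjectionMatrixFOV(fov=60, aspect=1.0, nearVal=0.01, farVal=2.0)
width, height, rgb, depth, seg = pybullet.getCameraImage(128, 128, view_matrix, proj_matrix)

# --- 手先の力センサーの最小スケッチ(SENSOR_JOINT_IDX は仮のインデックス) ---
SENSOR_JOINT_IDX = 1
pybullet.enableJointForceTorqueSensor(arm_id, SENSOR_JOINT_IDX, enableSensor=True)  # arm_id は読み込み済みを前提
pybullet.stepSimulation()
fx, fy, fz, mx, my, mz = pybullet.getJointState(arm_id, SENSOR_JOINT_IDX)[2]  # 反力[N]とモーメント[N·m]
```
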
266 | 267 | **手先のカメラ** 268 | 269 | 270 | 271 |
272 | 
273 | **手先の力センサー**
274 | 
275 | 
276 | 
277 | 
278 | 詳細は「[robot_arm_sensor.ipynb](./RobotArm/robot_arm_sensor_jp.ipynb)」を参照。
279 | 
280 | ### 衝突判定
281 | ロボットアームと物体の衝突判定を行う方法を紹介します。
282 | 
283 | 
284 | 
285 | 詳細は「[robot_arm_collision_check.ipynb](./RobotArm/robot_arm_collision_check_jp.ipynb)」を参照。
286 | 
287 | ## 運動学
288 | 
289 | ### 三角関数による順運動学
290 | 三角関数を用いたロボットアームの順運動学を紹介します。
291 | 
292 | ![](./images/RobotArm/robot_arm_trigonometric_forward_kinematics/2d_arm_forward_kinematics_overview.png)
293 | 
294 | 詳細は「[robot_arm_trigonometric_forward_kinematics.ipynb](./RobotArm/robot_arm_trigonometric_forward_kinematics_jp.ipynb)」を参照。
295 | 
296 | ### 同次変換行列を用いた順運動学
297 | 同次変換行列を用いたロボットアームの順運動学を紹介します。
298 | 
299 | ![](./images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/overview.gif)
300 | 
301 | 詳細は「[robot_arm_homogeneous_matrix_forward_kinematics.ipynb](./RobotArm/robot_arm_homogeneous_matrix_forward_kinematics_jp.ipynb)」を参照。
302 | 
303 | ### 解析的解法による逆運動学
304 | 解析的解法を用いたロボットアームの逆運動学を紹介します。
305 | 
306 | ![](./images/RobotArm/robot_arm_analytical_inverse_kinematics/inverse_kinematics_analytical_overview.png)
307 | 
308 | 詳細は「[robot_arm_analytical_inverse_kinematics.ipynb](./RobotArm/robot_arm_analytical_inverse_kinematics_jp.ipynb)」を参照。
309 | 
310 | ### 数値的解法によるヤコビ行列を用いた逆運動学
311 | 数値的解法を用いたロボットアームの逆運動学を紹介します。
312 | 
313 | ![](./images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_pybullet.gif)
314 | 
315 | 詳細は「[robot_arm_jacobian_inverse_kinematics.ipynb](./RobotArm/robot_arm_jacobian_inverse_kinematics_jp.ipynb)」を参照。
316 | 
317 | ## eye to hand による物体位置の推定
318 | eye to hand による物体位置の推定を行う方法を紹介します。
319 | 
320 | 
321 | 
322 | 詳細は「[robot_arm_eye_to_hand.ipynb](./RobotArm/robot_arm_eye_to_hand_estimate_obj_pose_jp.ipynb)」を参照。
323 | 
324 | ## eye in hand による物体位置の推定
325 | eye in hand による物体位置の推定を行う方法を紹介します。
326 | 
327 | 
328 | 
329 | 詳細は「[robot_arm_eye_in_hand.ipynb](./RobotArm/robot_arm_eye_in_hand_estimate_obj_pose_jp.ipynb)」を参照。
330 | 
331 | ## image based visual servo
332 | 画像ベースのビジュアルサーボを行う方法を紹介します。
333 | 
334 | 
335 | 
336 | 詳細は「[robot_arm_image_based_visual_servo.ipynb](./RobotArm/robot_arm_image_based_visual_servo_jp.ipynb)」を参照。
337 | 
338 | ## その他、追加予定のコンテンツ
339 | - PID制御
340 | - コンフィギュレーション空間を用いた動作計画
341 | - 力制御
342 | - 位置と力のハイブリッド制御
343 | - コンプライアンス制御
344 | 
345 | # 参考サイトなど
346 | 
347 | - [pybullet_quickstart_guide](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf):Pybulletで使用可能な関数がまとめられたスタートガイドです(公式)
348 | - [PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics):Pythonで実装されたロボティクスのアルゴリズムがまとめられたリポジトリです。様々なアルゴリズムが掲載されており、ロボティクスの勉強に最適な神リポジトリです。
349 | 
350 | 
351 | ## 筆者作成
352 | - [pybulletの公式サンプル129個を全部解説する - Qiita](https://qiita.com/akinami/items/7b433b60aeb5115ba4d7):pybulletの公式サンプル129個を解説(する予定の)記事です
353 | - [【図解】ロボティクスの辞書【python】](https://qiita.com/akinami/items/eb0741b0d9c322e5d5ec):ロボティクスに関する技術を解説した記事の、まとめ記事です
354 | 
--------------------------------------------------------------------------------
/RobotArm/robot_arm_analytical_inverse_kinematics_en.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "**Table of contents** \n",
8 | "- [Robot Arm](#toc1_) \n",
9 | "- [Starting pybullet](#toc2_) \n",
10 | "- [Initial Setup for pybullet](#toc3_) \n",
11 | "- [Generating the Robot Arm](#toc4_) \n",
12 | "- [Defining Functions for Inverse Kinematics Using Analytical Methods](#toc5_) \n",
13 | "- [Running the 
Simulation](#toc6_) \n",
14 | "\n",
15 | "\n",
22 | ""
23 | ]
24 | },
25 | {
26 | "cell_type": "markdown",
27 | "metadata": {},
28 | "source": [
29 | "# [Robot Arm](#toc0_)\n",
30 | "\n",
31 | "In this notebook, we will generate a 2-axis robot arm and explain how to solve inverse kinematics using analytical methods.\n",
32 | "\n",
33 | "(For a manual summarizing the functions available in pybullet, please refer to [this link](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf).)"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "# [Starting pybullet](#toc0_)"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": 1,
46 | "metadata": {},
47 | "outputs": [
48 | {
49 | "name": "stderr",
50 | "output_type": "stream",
51 | "text": [
52 | "pybullet build time: Nov 28 2023 23:45:17\n"
53 | ]
54 | },
55 | {
56 | "name": "stdout",
57 | "output_type": "stream",
58 | "text": [
59 | "startThreads creating 1 threads.\n",
60 | "starting thread 0\n",
61 | "started thread 0 \n",
62 | "argc=2\n",
63 | "argv[0] = --unused\n",
64 | "argv[1] = --start_demo_name=Physics Server\n",
65 | "ExampleBrowserThreadFunc started\n",
66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n",
67 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n",
68 | "Creating context\n",
69 | "Created GL 3.3 context\n",
70 | "Direct GLX rendering context obtained\n",
71 | "Making context current\n",
72 | "GL_VENDOR=Mesa\n",
73 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n",
74 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n",
75 | "GL_SHADING_LANGUAGE_VERSION=4.50\n",
76 | "pthread_getconcurrency()=0\n",
77 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n",
78 | "Vendor = Mesa\n",
79 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n",
80 | "b3Printf: Selected demo: Physics Server\n",
81 | "startThreads creating 1 threads.\n",
82 | "starting thread 0\n",
83 | "started thread 0 \n",
84 | "MotionThreadFunc thread started\n"
85 | ]
86 | },
87 | {
88 | "name": "stdout",
89 | "output_type": "stream",
90 | "text": [
91 | "ven = Mesa\n",
92 | "ven = Mesa\n"
93 | ]
94 | }
95 | ],
96 | "source": [
97 | "import pybullet\n",
98 | "import pybullet_data\n",
99 | "physics_client = pybullet.connect(pybullet.GUI) "
100 | ]
101 | },
102 | {
103 | "cell_type": "markdown",
104 | "metadata": {},
105 | "source": [
106 | "# [Initial Setup for pybullet](#toc0_)"
107 | ]
108 | },
109 | {
110 | "cell_type": "code",
111 | "execution_count": 2,
112 | "metadata": {},
113 | "outputs": [],
114 | "source": [
115 | "pybullet.resetSimulation() # Reset the simulation space\n",
116 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # Add path to necessary data for pybullet\n",
117 | "pybullet.setGravity(0.0, 0.0, -9.8) # Set gravity as on Earth\n",
118 | "time_step = 1./240.\n",
119 | "pybullet.setTimeStep(time_step) # Set the elapsed time per step\n",
120 | "\n",
121 | "# Load the floor\n",
122 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n",
123 | "\n",
124 | "# Set the camera position for GUI mode\n",
125 | "camera_distance = 2.0\n",
126 | "camera_yaw = 0.0 # deg\n",
127 | "camera_pitch = -20 # deg\n",
128 | "camera_target_position = [0.0, 0.0, 0.0]\n",
129 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)"
130 | ]
131 | },
132 | {
133 | "cell_type": "markdown",
134 | "metadata": {},
135 | "source": [
136 | "# [Generating the Robot Arm](#toc0_)\n",
137 
| "This time, we will generate a 2-axis robot arm `simple_2d_arm.urdf`. \n", 138 | "The robot is configured as shown in the diagram below (sensors are explained in `robot_arm_sensor.ipynb`). \n", 139 | "\n", 140 | "![](../images/RobotArm/2d_robot_arm.png)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 3, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 153 | "\n", 154 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 155 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 156 | "\n", 157 | "b3Printf: target_position_vertual_link\n" 158 | ] 159 | } 160 | ], 161 | "source": [ 162 | "# Load the robot\n", 163 | "arm_start_pos = [0, 0, 0.1] # Set initial position (x, y, z)\n", 164 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0, 0, 0]) # Set initial orientation (roll, pitch, yaw)\n", 165 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\", arm_start_pos, arm_start_orientation, useFixedBase=True) # Fix the root link to prevent the robot from falling over by setting useFixedBase=True\n", 166 | "\n", 167 | "# Set the camera position for GUI mode\n", 168 | "camera_distance = 1.5\n", 169 | "camera_yaw = 180.0 # deg\n", 170 | "camera_pitch = -10 # deg\n", 171 | "camera_target_position = [0.0, 0.0, 1.0]\n", 172 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 173 | ] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": {}, 178 | "source": [ 179 | "# [Defining Functions for Inverse Kinematics Using Analytical Methods](#toc0_)\n", 180 | "We will define functions for inverse kinematics using analytical methods.\n", 181 | "\n", 182 | "
\n", 183 | "\n", 184 | "There are two methods for solving inverse kinematics analytically:\n", 185 | "- Algebraic method\n", 186 | "- Geometric method" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": 4, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "import math\n", 196 | "def inverse_kinematics_2link(link1_length, link2_length, xe, ye):\n", 197 | " \"\"\"\n", 198 | " Calculate the inverse kinematics of a 2-link robot arm in a 2D plane\n", 199 | " \n", 200 | " Parameters\n", 201 | " ----------\n", 202 | " link1_length : float\n", 203 | " Length of link 1\n", 204 | " link2_length : float\n", 205 | " Length of link 2\n", 206 | " xe : float\n", 207 | " x-coordinate of the end effector\n", 208 | " ye : float\n", 209 | " y-coordinate of the end effector\n", 210 | "\n", 211 | " Returns\n", 212 | " -------\n", 213 | " theta1 : float\n", 214 | " Joint angle of link 1 (rad)\n", 215 | " theta2 : float\n", 216 | " Joint angle of link 2 (rad)\n", 217 | " \"\"\"\n", 218 | " try:\n", 219 | " # Inverse kinematics equations derived algebraically\n", 220 | " theta1 = -math.acos((xe**2 + ye**2 + link1_length**2 - link2_length**2)/(2 * link1_length * math.sqrt(xe**2 + ye**2))) + math.atan2(ye, xe)\n", 221 | " theta2 = math.acos((xe**2 + ye**2 - link1_length**2 - link2_length**2)/(2 * link1_length * link2_length))\n", 222 | "\n", 223 | " # Inverse kinematics equations derived geometrically (uncomment the following lines if you want to try the \"geometric\" method)\n", 224 | " # theta1 = math.atan2(ye, xe) - math.acos((-l2**2 + l1**2 + xe**2 + ye**2)/(2 * l1 * math.sqrt(xe**2 + ye**2)))\n", 225 | " # theta2 = math.pi - math.acos((-xe**2 - ye**2 + l2**2 + l1**2)/(2 * l2 * l1))\n", 226 | " except: # If the input (xe, ye) does not have a solution, output None\n", 227 | " theta1 = None\n", 228 | " theta2 = None\n", 229 | " return theta1, theta2" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "metadata": {}, 235 | "source": [ 236 | "# [Running the Simulation](#toc0_)\n", 237 | "\n", 238 | "When the simulation is run, the end-effector of the robot arm will be set to the \"specified end-effector position `(xe, ye)`\".\n" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 21, 244 | "metadata": {}, 245 | "outputs": [], 246 | "source": [ 247 | "# Length of the robot links\n", 248 | "LINK1_LENGTH = 0.5 # Length of link1 (z-direction length of link1 in \"simple2d_arm.urdf\")\n", 249 | "LINK2_LENGTH = 0.55 # Length of link2 (z-direction length of link2 + force_sensor_link in \"simple2d_arm.urdf\")\n", 250 | "\n", 251 | "# Index of each joint of the robot\n", 252 | "LINK1_JOINT_IDX = 0\n", 253 | "LINK2_JOINT_IDX = 1\n", 254 | "\n", 255 | "##### Changing this will change the result (please specify a feasible end-effector position) #####\n", 256 | "# Coordinates of the end effector\n", 257 | "xe = 0.0\n", 258 | "ye = 0.8\n", 259 | "############################################\n", 260 | "\n", 261 | "# Calculate inverse kinematics using analytical methods\n", 262 | "theta1, theta2 = inverse_kinematics_2link(LINK1_LENGTH, LINK2_LENGTH, xe, ye)\n", 263 | "\n", 264 | "# Set the joint angles obtained by inverse kinematics to the robot (confirm that the specified end-effector (xe, ye) is set)\n", 265 | "if theta1 is not None and theta2 is not None:\n", 266 | " pybullet.resetJointState(arm_id, LINK1_JOINT_IDX, theta1)\n", 267 | " pybullet.resetJointState(arm_id, LINK2_JOINT_IDX, theta2)\n", 268 | " pybullet.stepSimulation()\n", 269 | "else:\n", 
270 | " print(\"No solution exists for the inverse kinematics\")" 271 | ] 272 | } 273 | ], 274 | "metadata": { 275 | "kernelspec": { 276 | "display_name": "Python 3", 277 | "language": "python", 278 | "name": "python3" 279 | }, 280 | "language_info": { 281 | "codemirror_mode": { 282 | "name": "ipython", 283 | "version": 3 284 | }, 285 | "file_extension": ".py", 286 | "mimetype": "text/x-python", 287 | "name": "python", 288 | "nbconvert_exporter": "python", 289 | "pygments_lexer": "ipython3", 290 | "version": "3.10.12" 291 | } 292 | }, 293 | "nbformat": 4, 294 | "nbformat_minor": 2 295 | } 296 | -------------------------------------------------------------------------------- /RobotArm/robot_arm_analytical_inverse_kinematics_jp.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [ロボットアーム](#toc1_) \n", 9 | "- [pybulletの起動](#toc2_) \n", 10 | "- [pybulletの初期設定](#toc3_) \n", 11 | "- [ロボットアームの生成](#toc4_) \n", 12 | "- [解析的解法による逆運動学の関数を定義](#toc5_) \n", 13 | "- [シミュレーションの実行](#toc6_) \n", 14 | "\n", 15 | "\n", 22 | "" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "# [ロボットアーム](#toc0_)\n", 30 | "\n", 31 | "本notebookでは2軸のロボットアームを生成し、解析的解法により逆運動学を求める方法を解説します。\n", 32 | "\n", 33 | "(pybulletで使用可能な関数がまとめられたマニュアルについては[こちら](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)を参照してください。)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "# [pybulletの起動](#toc0_)" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 1, 46 | "metadata": {}, 47 | "outputs": [ 48 | { 49 | "name": "stderr", 50 | "output_type": "stream", 51 | "text": [ 52 | "pybullet build time: Nov 28 2023 23:45:17\n" 53 | ] 54 | }, 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "startThreads creating 1 threads.\n", 60 | "starting thread 0\n", 61 | "started thread 0 \n", 62 | "argc=2\n", 63 | "argv[0] = --unused\n", 64 | "argv[1] = --start_demo_name=Physics Server\n", 65 | "ExampleBrowserThreadFunc started\n", 66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 68 | "Creating context\n", 69 | "Created GL 3.3 context\n", 70 | "Direct GLX rendering context obtained\n", 71 | "Making context current\n", 72 | "GL_VENDOR=Mesa\n", 73 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 74 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 75 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 76 | "pthread_getconcurrency()=0\n", 77 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 78 | "Vendor = Mesa\n", 79 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 80 | "b3Printf: Selected demo: Physics Server\n", 81 | "startThreads creating 1 threads.\n", 82 | "starting thread 0\n", 83 | "started thread 0 \n", 84 | "MotionThreadFunc thread started\n" 85 | ] 86 | }, 87 | { 88 | "name": "stdout", 89 | "output_type": "stream", 90 | "text": [ 91 | "ven = Mesa\n", 92 | "ven = Mesa\n" 93 | ] 94 | } 95 | ], 96 | "source": [ 97 | "import pybullet\n", 98 | "import pybullet_data\n", 99 | "physics_client = pybullet.connect(pybullet.GUI) " 100 | ] 101 | }, 102 | { 103 | "cell_type": "markdown", 104 | "metadata": {}, 105 | "source": [ 106 | "# [pybulletの初期設定](#toc0_)" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | 
"execution_count": 2, 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "pybullet.resetSimulation() # シミュレーション空間をリセット\n", 116 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # pybulletに必要なデータへのパスを追加\n", 117 | "pybullet.setGravity(0.0, 0.0, -9.8) # 地球上における重力に設定\n", 118 | "time_step = 1./240.\n", 119 | "pybullet.setTimeStep(time_step) # 1stepあたりに経過する時間の設定\n", 120 | "\n", 121 | "#床の読み込み\n", 122 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 123 | "\n", 124 | "# GUIモードの際のカメラの位置などを設定\n", 125 | "camera_distance = 2.0\n", 126 | "camera_yaw = 0.0 # deg\n", 127 | "camera_pitch = -20 # deg\n", 128 | "camera_target_position = [0.0, 0.0, 0.0]\n", 129 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "# [ロボットアームの生成](#toc0_)\n", 137 | "今回は、2軸ロボットアーム`simple_2d_arm.urdf`を生成します。 \n", 138 | "ロボットは下図のような構成になっています(センサーについては、`robot_arm_sensor.ipynb`にて解説しています。) \n", 139 | "\n", 140 | "![](../images/RobotArm/2d_robot_arm.png)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 3, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 153 | "\n", 154 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 155 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 156 | "\n", 157 | "b3Printf: target_position_vertual_link\n" 158 | ] 159 | } 160 | ], 161 | "source": [ 162 | "# ロボットの読み込み\n", 163 | "arm_start_pos = [0, 0, 0.1] # 初期位置(x,y,z)を設定\n", 164 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0,0,0]) # 初期姿勢(roll, pitch, yaw)を設定\n", 165 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\",arm_start_pos, arm_start_orientation, useFixedBase=True) # ロボットが倒れないように、useFixedBase=Trueでルートのリンクを固定\n", 166 | "\n", 167 | "# GUIモードの際のカメラの位置などを設定\n", 168 | "camera_distance = 1.5\n", 169 | "camera_yaw = 180.0 # deg\n", 170 | "camera_pitch = -10 # deg\n", 171 | "camera_target_position = [0.0, 0.0, 1.0]\n", 172 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 173 | ] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": {}, 178 | "source": [ 179 | "# [解析的解法による逆運動学の関数を定義](#toc0_)\n", 180 | "逆運動学の解析的解法による関数を定義します。 \n", 181 | "\n", 182 | "
\n", 183 | "\n", 184 | "逆運動学の解析的解法には、以下の2つの方法があります。\n", 185 | "- 代数的に求める方法\n", 186 | "- 幾何学的に求める方法" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": 4, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "import math\n", 196 | "def inverse_kinematics_2link(link1_length, link2_length, xe, ye):\n", 197 | " \"\"\"\n", 198 | " 2次元平面上の2リンクロボットアームの逆運動学を求める\n", 199 | " \n", 200 | " Parameters\n", 201 | " ----------\n", 202 | " link1_length : float\n", 203 | " リンク1の長さ\n", 204 | " link2_length : float\n", 205 | " リンク2の長さ\n", 206 | " xe : float\n", 207 | " 手先のx座標\n", 208 | " ye : float\n", 209 | " 手先のy座標\n", 210 | "\n", 211 | " Returns\n", 212 | " -------\n", 213 | " theta1 : float\n", 214 | " リンク1の関節角度(rad)\n", 215 | " theta2 : float\n", 216 | " リンク2の関節角度(rad)\n", 217 | " \"\"\"\n", 218 | " try:\n", 219 | " # 代数的に求めた逆運動学の式\n", 220 | " theta1 = -math.acos((xe**2 + ye**2 + link1_length**2 - link2_length**2)/(2 * link1_length * math.sqrt(xe**2 + ye**2))) + math.atan2(ye, xe)\n", 221 | " theta2 = math.acos((xe**2 + ye**2 - link1_length**2 - link2_length**2)/(2 * link1_length * link2_length))\n", 222 | "\n", 223 | " # 幾何学的に求めた逆運動学の式(「幾何学的」な手法を試したい場合は、下記のコメントを外してください)\n", 224 | " # theta1 = math.atan2(ye,xe) - math.acos((-l2**2 + l1**2 + xe**2 + ye**2)/(2 * l1 * math.sqrt(xe**2 + ye**2)))\n", 225 | " # theta2 = math.pi - math.acos((-xe**2 - ye**2 + l2**2 + l1**2)/(2 * l2* l1))\n", 226 | " except: # 解が存在しない(xe, ye)を入力した場合、Noneを出力\n", 227 | " theta1 = None\n", 228 | " theta2 = None\n", 229 | " return theta1, theta2" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "metadata": {}, 235 | "source": [ 236 | "# [シミュレーションの実行](#toc0_)\n", 237 | "\n", 238 | "シミュレーションを実行すると、「指定した手先位置`(xe, ye)`」にロボットアームの手先が設定されます。\n" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 21, 244 | "metadata": {}, 245 | "outputs": [], 246 | "source": [ 247 | "# ロボットのリンクの長さ\n", 248 | "LINK1_LENGTH = 0.5 # link1の長さ(「simple2d_arm.urdf」のlink1のz方向の長さ)\n", 249 | "LINK2_LENGTH = 0.55 # link2の長さ(「simple2d_arm.urdf」のlink2+force_sensor_linkのz方向の長さ)\n", 250 | "\n", 251 | "# ロボットの各関節のインデックス\n", 252 | "LINK1_JOINT_IDX = 0\n", 253 | "LINK2_JOINT_IDX = 1\n", 254 | "\n", 255 | "##### ここを変更すると結果が変わります(実現可能な手先位置を指定してください) #####\n", 256 | "# 手先の座標\n", 257 | "xe = 0.0\n", 258 | "ye = 0.8\n", 259 | "############################################\n", 260 | "\n", 261 | "# 解析的解法による逆運動学の計算\n", 262 | "theta1, theta2 = inverse_kinematics_2link(LINK1_LENGTH, LINK2_LENGTH, xe, ye)\n", 263 | "\n", 264 | "# 逆運動学によって求めた関節角度をロボットに設定(指定した手先(xe, ye)に、設定されていることを確認してください)\n", 265 | "if theta1 is not None and theta2 is not None:\n", 266 | " pybullet.resetJointState(arm_id, LINK1_JOINT_IDX, theta1)\n", 267 | " pybullet.resetJointState(arm_id, LINK2_JOINT_IDX, theta2)\n", 268 | " pybullet.stepSimulation()\n", 269 | "else:\n", 270 | " print(\"逆運動学の解が存在しません\")" 271 | ] 272 | } 273 | ], 274 | "metadata": { 275 | "kernelspec": { 276 | "display_name": "Python 3", 277 | "language": "python", 278 | "name": "python3" 279 | }, 280 | "language_info": { 281 | "codemirror_mode": { 282 | "name": "ipython", 283 | "version": 3 284 | }, 285 | "file_extension": ".py", 286 | "mimetype": "text/x-python", 287 | "name": "python", 288 | "nbconvert_exporter": "python", 289 | "pygments_lexer": "ipython3", 290 | "version": "3.10.12" 291 | } 292 | }, 293 | "nbformat": 4, 294 | "nbformat_minor": 2 295 | } 296 | -------------------------------------------------------------------------------- 
/RobotArm/robot_arm_basic_en.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [Robot Arm](#toc1_) \n", 9 | "- [Starting pybullet](#toc2_) \n", 10 | "- [Initial Setup for pybullet](#toc3_) \n", 11 | "- [Generating the Robot Arm](#toc4_) \n", 12 | "- [Running the Simulation](#toc5_) \n", 13 | "\n", 14 | "\n", 21 | "" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "# [Robot Arm](#toc0_)\n", 29 | "\n", 30 | "In this notebook, we will explain the steps to generate and control a 2-axis robot arm.\n", 31 | "\n", 32 | "(For a manual summarizing the functions available in pybullet, refer to [this link](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf).)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "# [Starting pybullet](#toc0_)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 1, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stderr", 49 | "output_type": "stream", 50 | "text": [ 51 | "pybullet build time: Nov 28 2023 23:45:17\n" 52 | ] 53 | }, 54 | { 55 | "name": "stdout", 56 | "output_type": "stream", 57 | "text": [ 58 | "startThreads creating 1 threads.\n", 59 | "starting thread 0\n", 60 | "started thread 0 \n", 61 | "argc=2\n", 62 | "argv[0] = --unused\n", 63 | "argv[1] = --start_demo_name=Physics Server\n", 64 | "ExampleBrowserThreadFunc started\n", 65 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "Creating context\n", 68 | "Created GL 3.3 context\n", 69 | "Direct GLX rendering context obtained\n", 70 | "Making context current\n", 71 | "GL_VENDOR=Mesa\n", 72 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 73 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 74 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 75 | "pthread_getconcurrency()=0\n", 76 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 77 | "Vendor = Mesa\n", 78 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 79 | "b3Printf: Selected demo: Physics Server\n", 80 | "startThreads creating 1 threads.\n", 81 | "starting thread 0\n", 82 | "started thread 0 \n", 83 | "MotionThreadFunc thread started\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "import pybullet\n", 89 | "import pybullet_data\n", 90 | "physics_client = pybullet.connect(pybullet.GUI) " 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "# [Initial Setup for pybullet](#toc0_)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 2, 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "name": "stdout", 107 | "output_type": "stream", 108 | "text": [ 109 | "ven = Mesa\n", 110 | "ven = Mesa\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "pybullet.resetSimulation() # Reset the simulation space\n", 116 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # Add paths to necessary data for pybullet\n", 117 | "pybullet.setGravity(0.0, 0.0, -9.8) # Set gravity as on Earth\n", 118 | "time_step = 1./240.\n", 119 | "pybullet.setTimeStep(time_step) # Set the time elapsed per step\n", 120 | "\n", 121 | "# Load the floor\n", 122 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 123 | "\n", 124 | "# Set the camera position and other parameters in GUI 
mode\n", 125 | "camera_distance = 2.0\n", 126 | "camera_yaw = 0.0 # deg\n", 127 | "camera_pitch = -20 # deg\n", 128 | "camera_target_position = [0.0, 0.0, 0.0]\n", 129 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "# [Generating the Robot Arm](#toc0_)\n", 137 | "This time, we will generate a 2-axis robot arm `simple_2d_arm.urdf`. \n", 138 | "The robot is structured as shown in the diagram below (sensors are explained in `robot_arm_sensor.ipynb`).\n", 139 | "\n", 140 | "![](../images/RobotArm/2d_robot_arm.png)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 3, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 153 | "\n", 154 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 155 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 156 | "\n", 157 | "b3Printf: target_position_vertual_link\n" 158 | ] 159 | } 160 | ], 161 | "source": [ 162 | "# Load the robot\n", 163 | "arm_start_pos = [0, 0, 0.1] # Set the initial position (x, y, z)\n", 164 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0, 0, 0]) # Set the initial orientation (roll, pitch, yaw)\n", 165 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\", arm_start_pos, arm_start_orientation, useFixedBase=True) # Fix the root link with useFixedBase=True to prevent the robot from falling\n", 166 | "\n", 167 | "# Set the camera position and other parameters in GUI mode\n", 168 | "camera_distance = 1.5\n", 169 | "camera_yaw = 180.0 # deg\n", 170 | "camera_pitch = -10 # deg\n", 171 | "camera_target_position = [0.0, 0.0, 1.0]\n", 172 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 173 | ] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": {}, 178 | "source": [ 179 | "# [Running the Simulation](#toc0_)\n", 180 | "With `setJointMotorControl2`, you can operate the specified joints of the specified robot.\n", 181 | "\n", 182 | "First, to operate with a velocity command, specify `POSITION_CONTROL` as the third argument.\n" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 4, 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "import time\n", 192 | "import math\n", 193 | "\n", 194 | "# Indices of the robot's joints\n", 195 | "LINK1_JOINT_IDX = 0\n", 196 | "LINK2_JOINT_IDX = 1\n", 197 | "\n", 198 | "# Angles for each joint\n", 199 | "joint1_angle = 90.0\n", 200 | "joint2_angle = 90.0\n", 201 | "joint1_rad = math.radians(joint1_angle)\n", 202 | "joint2_rad = math.radians(joint2_angle)\n", 203 | "\n", 204 | "for i in range(200):\n", 205 | " # Move the joints to the specified angles\n", 206 | " pybullet.setJointMotorControl2(arm_id, LINK1_JOINT_IDX, pybullet.POSITION_CONTROL, targetPosition=joint1_rad)\n", 207 | " pybullet.setJointMotorControl2(arm_id, LINK2_JOINT_IDX, pybullet.POSITION_CONTROL, targetPosition=joint2_rad)\n", 208 | " pybullet.stepSimulation()\n", 209 | " time.sleep(time_step)" 210 | ] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "metadata": {}, 215 | "source": [ 216 | "Next, to operate with a velocity command, specify `VELOCITY_CONTROL` as the 
third argument." 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 5, 222 | "metadata": {}, 223 | "outputs": [], 224 | "source": [ 225 | "joint1_velocity = 2.0\n", 226 | "joint2_velocity = 2.0\n", 227 | "\n", 228 | "for i in range(500):\n", 229 | " # Move the joints at the specified speed\n", 230 | " pybullet.setJointMotorControl2(arm_id, LINK1_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=joint1_velocity)\n", 231 | " pybullet.setJointMotorControl2(arm_id, LINK2_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=joint2_velocity)\n", 232 | " pybullet.stepSimulation()\n", 233 | " time.sleep(time_step)" 234 | ] 235 | }, 236 | { 237 | "cell_type": "markdown", 238 | "metadata": {}, 239 | "source": [ 240 | "Finally, to operate with torque control, specify `TORQUE_CONTROL` as the third argument." 241 | ] 242 | }, 243 | { 244 | "cell_type": "code", 245 | "execution_count": 6, 246 | "metadata": {}, 247 | "outputs": [], 248 | "source": [ 249 | "joint1_torque = 50.0\n", 250 | "joint2_torque = 50.0\n", 251 | "\n", 252 | "for i in range(500):\n", 253 | " # Move the joints with the specified torque\n", 254 | " pybullet.setJointMotorControl2(arm_id, LINK1_JOINT_IDX, pybullet.TORQUE_CONTROL, force=joint1_torque)\n", 255 | " pybullet.setJointMotorControl2(arm_id, LINK2_JOINT_IDX, pybullet.TORQUE_CONTROL, force=joint2_torque)\n", 256 | " pybullet.stepSimulation()\n", 257 | " time.sleep(time_step)" 258 | ] 259 | } 260 | ], 261 | "metadata": { 262 | "kernelspec": { 263 | "display_name": "Python 3", 264 | "language": "python", 265 | "name": "python3" 266 | }, 267 | "language_info": { 268 | "codemirror_mode": { 269 | "name": "ipython", 270 | "version": 3 271 | }, 272 | "file_extension": ".py", 273 | "mimetype": "text/x-python", 274 | "name": "python", 275 | "nbconvert_exporter": "python", 276 | "pygments_lexer": "ipython3", 277 | "version": "3.10.12" 278 | } 279 | }, 280 | "nbformat": 4, 281 | "nbformat_minor": 2 282 | } 283 | -------------------------------------------------------------------------------- /RobotArm/robot_arm_basic_jp.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [ロボットアーム](#toc1_) \n", 9 | "- [pybulletの起動](#toc2_) \n", 10 | "- [pybulletの初期設定](#toc3_) \n", 11 | "- [ロボットアームの生成](#toc4_) \n", 12 | "- [シミュレーションの実行](#toc5_) \n", 13 | "\n", 14 | "\n", 21 | "" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "# [ロボットアーム](#toc0_)\n", 29 | "\n", 30 | "本notebookでは2軸のロボットアームを生成し、制御する手順について解説します。\n", 31 | "\n", 32 | "(pybulletで使用可能な関数がまとめられたマニュアルについては[こちら](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)を参照してください。)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "# [pybulletの起動](#toc0_)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 1, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stderr", 49 | "output_type": "stream", 50 | "text": [ 51 | "pybullet build time: Nov 28 2023 23:45:17\n" 52 | ] 53 | }, 54 | { 55 | "name": "stdout", 56 | "output_type": "stream", 57 | "text": [ 58 | "startThreads creating 1 threads.\n", 59 | "starting thread 0\n", 60 | "started thread 0 \n", 61 | "argc=2\n", 62 | "argv[0] = --unused\n", 63 | "argv[1] = --start_demo_name=Physics Server\n", 64 | "ExampleBrowserThreadFunc started\n", 65 | "X11 functions 
dynamically loaded using dlopen/dlsym OK!\n", 66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "Creating context\n", 68 | "Created GL 3.3 context\n", 69 | "Direct GLX rendering context obtained\n", 70 | "Making context current\n", 71 | "GL_VENDOR=Mesa\n", 72 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 73 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 74 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 75 | "pthread_getconcurrency()=0\n", 76 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 77 | "Vendor = Mesa\n", 78 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 79 | "b3Printf: Selected demo: Physics Server\n", 80 | "startThreads creating 1 threads.\n", 81 | "starting thread 0\n", 82 | "started thread 0 \n", 83 | "MotionThreadFunc thread started\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "import pybullet\n", 89 | "import pybullet_data\n", 90 | "physics_client = pybullet.connect(pybullet.GUI) " 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "# [pybulletの初期設定](#toc0_)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 2, 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "name": "stdout", 107 | "output_type": "stream", 108 | "text": [ 109 | "ven = Mesa\n", 110 | "ven = Mesa\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "pybullet.resetSimulation() # シミュレーション空間をリセット\n", 116 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # pybulletに必要なデータへのパスを追加\n", 117 | "pybullet.setGravity(0.0, 0.0, -9.8) # 地球上における重力に設定\n", 118 | "time_step = 1./240.\n", 119 | "pybullet.setTimeStep(time_step) # 1stepあたりに経過する時間の設定\n", 120 | "\n", 121 | "#床の読み込み\n", 122 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 123 | "\n", 124 | "# GUIモードの際のカメラの位置などを設定\n", 125 | "camera_distance = 2.0\n", 126 | "camera_yaw = 0.0 # deg\n", 127 | "camera_pitch = -20 # deg\n", 128 | "camera_target_position = [0.0, 0.0, 0.0]\n", 129 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "# [ロボットアームの生成](#toc0_)\n", 137 | "今回は、2軸ロボットアーム`simple_2d_arm.urdf`を生成します。 \n", 138 | "ロボットは下図のような構成になっています(センサーについては、`robot_arm_sensor.ipynb`にて解説しています。) \n", 139 | "\n", 140 | "![](../images/RobotArm/2d_robot_arm.png)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 3, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 153 | "\n", 154 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 155 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 156 | "\n", 157 | "b3Printf: target_position_vertual_link\n" 158 | ] 159 | } 160 | ], 161 | "source": [ 162 | "# ロボットの読み込み\n", 163 | "arm_start_pos = [0, 0, 0.1] # 初期位置(x,y,z)を設定\n", 164 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0,0,0]) # 初期姿勢(roll, pitch, yaw)を設定\n", 165 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\",arm_start_pos, arm_start_orientation, useFixedBase=True) # ロボットが倒れないように、useFixedBase=Trueでルートのリンクを固定\n", 166 | "\n", 167 | "# GUIモードの際のカメラの位置などを設定\n", 168 | "camera_distance = 1.5\n", 169 | "camera_yaw = 180.0 # deg\n", 170 | "camera_pitch = -10 # deg\n", 171 | 
"camera_target_position = [0.0, 0.0, 1.0]\n", 172 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 173 | ] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": {}, 178 | "source": [ 179 | "# [シミュレーションの実行](#toc0_)\n", 180 | "`setJointMotorControl2` では、指定したロボットの指定したジョイントを動作させることができます。\n", 181 | "\n", 182 | "最初に、速度指令で動作させるため、第三引数に`POSITION_CONTROL`を指定します。\n" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 4, 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "import time\n", 192 | "import math\n", 193 | "\n", 194 | "# ロボットの各関節のインデックス\n", 195 | "LINK1_JOINT_IDX = 0\n", 196 | "LINK2_JOINT_IDX = 1\n", 197 | "\n", 198 | "# 各関節の角度\n", 199 | "joint1_angle = 90.0\n", 200 | "joint2_angle = 90.0\n", 201 | "joint1_rad = math.radians(joint1_angle)\n", 202 | "joint2_rad = math.radians(joint2_angle)\n", 203 | "\n", 204 | "for i in range(200):\n", 205 | " # 指定した角度に関節を動かす\n", 206 | " pybullet.setJointMotorControl2(arm_id, LINK1_JOINT_IDX, pybullet.POSITION_CONTROL, targetPosition=joint1_rad)\n", 207 | " pybullet.setJointMotorControl2(arm_id, LINK2_JOINT_IDX, pybullet.POSITION_CONTROL, targetPosition=joint2_rad)\n", 208 | " pybullet.stepSimulation()\n", 209 | " time.sleep(time_step)" 210 | ] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "metadata": {}, 215 | "source": [ 216 | "次に、速度指令で動作させるため、第三引数に`VELOCITY_CONTROL`を指定します。" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 5, 222 | "metadata": {}, 223 | "outputs": [], 224 | "source": [ 225 | "joint1_velocity = 2.0\n", 226 | "joint2_velocity = 2.0\n", 227 | "\n", 228 | "for i in range(500):\n", 229 | " # 指定した速度で関節を動かす\n", 230 | " pybullet.setJointMotorControl2(arm_id, LINK1_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=joint1_velocity)\n", 231 | " pybullet.setJointMotorControl2(arm_id, LINK2_JOINT_IDX, pybullet.VELOCITY_CONTROL, targetVelocity=joint2_velocity)\n", 232 | " pybullet.stepSimulation()\n", 233 | " time.sleep(time_step)\n" 234 | ] 235 | }, 236 | { 237 | "cell_type": "markdown", 238 | "metadata": {}, 239 | "source": [ 240 | "最後に、トルク制御で動作させるため、第三引数に`TORQUE_CONTROL`を指定します。" 241 | ] 242 | }, 243 | { 244 | "cell_type": "code", 245 | "execution_count": 6, 246 | "metadata": {}, 247 | "outputs": [], 248 | "source": [ 249 | "joint1_torque = 50.0\n", 250 | "joint2_torque = 50.0\n", 251 | "\n", 252 | "for i in range(500):\n", 253 | " # 指定したトルクで関節を動かす\n", 254 | " pybullet.setJointMotorControl2(arm_id, LINK1_JOINT_IDX, pybullet.TORQUE_CONTROL, force=joint1_torque)\n", 255 | " pybullet.setJointMotorControl2(arm_id, LINK2_JOINT_IDX, pybullet.TORQUE_CONTROL, force=joint2_torque)\n", 256 | " pybullet.stepSimulation()\n", 257 | " time.sleep(time_step)" 258 | ] 259 | } 260 | ], 261 | "metadata": { 262 | "kernelspec": { 263 | "display_name": "Python 3", 264 | "language": "python", 265 | "name": "python3" 266 | }, 267 | "language_info": { 268 | "codemirror_mode": { 269 | "name": "ipython", 270 | "version": 3 271 | }, 272 | "file_extension": ".py", 273 | "mimetype": "text/x-python", 274 | "name": "python", 275 | "nbconvert_exporter": "python", 276 | "pygments_lexer": "ipython3", 277 | "version": "3.10.12" 278 | } 279 | }, 280 | "nbformat": 4, 281 | "nbformat_minor": 2 282 | } 283 | -------------------------------------------------------------------------------- /RobotArm/robot_arm_collision_check_en.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 
| "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [Robot Arm collision check](#toc1_) \n", 9 | "- [Starting pybullet](#toc2_) \n", 10 | "- [Initial Setup](#toc3_) \n", 11 | "- [Function Definitions](#toc4_) \n", 12 | "- [Running the Simulation](#toc5_) \n", 13 | "\n", 14 | "\n", 21 | "" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "# [Robot Arm collision check](#toc0_)\n", 29 | "\n", 30 | "In this notebook, we will introduce how to perform collision detection between a \"2-axis robot arm\" and an \"object\".\n", 31 | "\n", 32 | "(For a manual summarizing the functions available in pybullet, refer to [this link](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf).)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "# [Starting pybullet](#toc0_)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 1, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stderr", 49 | "output_type": "stream", 50 | "text": [ 51 | "pybullet build time: Nov 28 2023 23:45:17\n" 52 | ] 53 | }, 54 | { 55 | "name": "stdout", 56 | "output_type": "stream", 57 | "text": [ 58 | "startThreads creating 1 threads.\n", 59 | "starting thread 0\n", 60 | "started thread 0 \n", 61 | "argc=2\n", 62 | "argv[0] = --unused\n", 63 | "argv[1] = --start_demo_name=Physics Server\n", 64 | "ExampleBrowserThreadFunc started\n", 65 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "Creating context\n", 68 | "Created GL 3.3 context\n", 69 | "Direct GLX rendering context obtained\n", 70 | "Making context current\n", 71 | "GL_VENDOR=Mesa\n", 72 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 73 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 74 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 75 | "pthread_getconcurrency()=0\n", 76 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 77 | "Vendor = Mesa\n", 78 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 79 | "b3Printf: Selected demo: Physics Server\n", 80 | "startThreads creating 1 threads.\n", 81 | "starting thread 0\n", 82 | "started thread 0 \n", 83 | "MotionThreadFunc thread started\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "import pybullet\n", 89 | "import pybullet_data\n", 90 | "physics_client = pybullet.connect(pybullet.GUI) " 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "# [Initial Setup](#toc0_)\n", 98 | "We will perform initial setup such as creating the floor, generating box objects, creating the robot, and setting the camera position." 
99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 2, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "ven = Mesa\n", 111 | "ven = Mesa\n", 112 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 113 | "\n", 114 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 115 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 116 | "\n", 117 | "b3Printf: target_position_vertual_link\n" 118 | ] 119 | } 120 | ], 121 | "source": [ 122 | "pybullet.resetSimulation() # Reset the simulation space\n", 123 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # Add paths to necessary data for pybullet\n", 124 | "pybullet.setGravity(0.0, 0.0, -9.8) # Set gravity as on Earth\n", 125 | "time_step = 1./240.\n", 126 | "pybullet.setTimeStep(time_step) # Set the time elapsed per step\n", 127 | "\n", 128 | "# Load the floor\n", 129 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 130 | "\n", 131 | "# Set the camera position and other parameters in GUI mode\n", 132 | "camera_distance = 2.0\n", 133 | "camera_yaw = 0.0 # deg\n", 134 | "camera_pitch = -20 # deg\n", 135 | "camera_target_position = [0.0, 0.0, 0.0]\n", 136 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)\n", 137 | "\n", 138 | "# Load the robot\n", 139 | "arm_start_pos = [0, 0, 0.1] # Set the initial position (x, y, z)\n", 140 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0, 0, 0]) # Set the initial orientation (roll, pitch, yaw)\n", 141 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\", arm_start_pos, arm_start_orientation, useFixedBase=True) # Fix the root link with useFixedBase=True to prevent the robot from falling\n", 142 | "\n", 143 | "# Set the camera position and other parameters in GUI mode\n", 144 | "camera_distance = 1.5\n", 145 | "camera_yaw = 180.0 # deg\n", 146 | "camera_pitch = -10 # deg\n", 147 | "camera_target_position = [0.0, 0.0, 1.0]\n", 148 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)\n", 149 | "\n", 150 | "# Load the box\n", 151 | "## Determine the weight, size, position, and orientation of the box\n", 152 | "mass = 5 # kg\n", 153 | "box_size = [0.3, 0.6, 0.5]\n", 154 | "position = [0.8, 0, 0.5]\n", 155 | "orientation = [1, 0, 0, 0] # Quaternion\n", 156 | "box_collision_id = pybullet.createCollisionShape(pybullet.GEOM_BOX, halfExtents=box_size)\n", 157 | "box_visual_id = pybullet.createVisualShape(pybullet.GEOM_BOX, halfExtents=box_size, rgbaColor=[1,0,0,1]) # Red, semi-transparent\n", 158 | "box_body_id = pybullet.createMultiBody(mass, box_collision_id, box_visual_id, position, orientation)" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "# [Function Definitions](#toc0_)\n", 166 | "We will define the following functions necessary for enabling/disabling collisions between objects.\n", 167 | "- `GetNumLinks`: Get the number of links of the specified object\n", 168 | "- `EnableCollisionBetweenObjects`: Enable/disable collision between two objects\n", 169 | "- `JudgeContact`: Determine whether two objects are in contact" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 3, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "def 
get_num_links(objId):\n", 179 | " \"\"\"\n", 180 | " Get the number of links of the specified object.\n", 181 | " Parameters\n", 182 | " ----------\n", 183 | " objId : int\n", 184 | " Index of the object.\n", 185 | " \n", 186 | " Returns\n", 187 | " -------\n", 188 | " linkNum : int\n", 189 | " Number of links of the object.\n", 190 | " \"\"\"\n", 191 | " current_link_idx = 0\n", 192 | " link_num = 0\n", 193 | " while True:\n", 194 | " result = pybullet.getLinkState(objId, current_link_idx)\n", 195 | " if result is None:\n", 196 | " link_num = current_link_idx + 1\n", 197 | " break\n", 198 | " current_link_idx += 1\n", 199 | " return link_num\n", 200 | "\n", 201 | "def enable_collision_between_objects(obj1_id, obj2_id, enable):\n", 202 | " \"\"\"\n", 203 | " Enable/disable collision between the specified objects.\n", 204 | " Parameters\n", 205 | " ----------\n", 206 | " obj1_id : int\n", 207 | " Index of the first object.\n", 208 | " obj2_id : int\n", 209 | " Index of the second object.\n", 210 | " enable : bool\n", 211 | " True to enable collision detection, False to disable it.\n", 212 | "\n", 213 | " Returns\n", 214 | " -------\n", 215 | " None\n", 216 | " \"\"\"\n", 217 | " # Get the number of links of each object\n", 218 | " obj1_link_num = get_num_links(obj1_id)\n", 219 | " obj2_link_num = get_num_links(obj2_id)\n", 220 | "\n", 221 | " # Set collision detection to enable/disable\n", 222 | " for obj1_link_idx in range(-1, obj1_link_num):\n", 223 | " for obj2_link_idx in range(-1, obj2_link_num):\n", 224 | " pybullet.setCollisionFilterPair(obj1_id, obj2_id, obj1_link_idx, obj2_link_idx, enable)\n", 225 | "\n", 226 | "def judge_collision(obj1_id, obj2_id):\n", 227 | " \"\"\"\n", 228 | " Determine whether two objects are in contact.\n", 229 | " Parameters\n", 230 | " ----------\n", 231 | " obj1_id : int\n", 232 | " Index of the first object.\n", 233 | " obj2_id : int\n", 234 | " Index of the second object.\n", 235 | "\n", 236 | " Returns\n", 237 | " -------\n", 238 | " isCollision : bool\n", 239 | " True if the two objects are in contact, False otherwise.\n", 240 | " \"\"\"\n", 241 | " # Get contact information between the two objects\n", 242 | " pts = pybullet.getClosestPoints(obj1_id, obj2_id, distance=100)\n", 243 | "\n", 244 | " # Determine whether the two objects are in contact\n", 245 | " is_collision = False\n", 246 | " for pt in pts:\n", 247 | " distance = pt[8]\n", 248 | " # Determine whether there are points in contact between the two objects (i.e., points with a distance less than 0)\n", 249 | " if distance < 0.0:\n", 250 | " is_collision = True\n", 251 | " break\n", 252 | " \n", 253 | " return is_collision" 254 | ] 255 | }, 256 | { 257 | "cell_type": "markdown", 258 | "metadata": {}, 259 | "source": [ 260 | "# [Running the Simulation](#toc0_)\n", 261 | "Move the robot arm and perform collision detection with the object.\n", 262 | "\n", 263 | "When you run the code below, you can control the robot arm with the mouse in the GUI screen. If the robot arm touches the box, \"collision\" will be displayed on the screen." 
264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": 4, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "import time\n", 273 | "\n", 274 | "pybullet.setRealTimeSimulation(1) # Enable real-time simulation (allows objects to be manipulated with the mouse in the GUI screen)\n", 275 | "\n", 276 | "# Disable collision detection between the robot arm and the box\n", 277 | "enable_collision_between_objects(arm_id, box_body_id, False)\n", 278 | "\n", 279 | "# Display whether the robot arm and the box are colliding on the screen\n", 280 | "while (pybullet.isConnected()):\n", 281 | "\n", 282 | " # Determine whether the robot arm and the box are colliding\n", 283 | " is_collision = judge_collision(arm_id, box_body_id)\n", 284 | "\n", 285 | " # If they are colliding, display it on the screen\n", 286 | " if is_collision:\n", 287 | " pybullet.addUserDebugText(\"collision\", [0.5, 0.0, 1.5], textSize=5, lifeTime=0.1, textColorRGB=[1,0,0])\n", 288 | " \n", 289 | " time.sleep(time_step)" 290 | ] 291 | } 292 | ], 293 | "metadata": { 294 | "kernelspec": { 295 | "display_name": "Python 3", 296 | "language": "python", 297 | "name": "python3" 298 | }, 299 | "language_info": { 300 | "codemirror_mode": { 301 | "name": "ipython", 302 | "version": 3 303 | }, 304 | "file_extension": ".py", 305 | "mimetype": "text/x-python", 306 | "name": "python", 307 | "nbconvert_exporter": "python", 308 | "pygments_lexer": "ipython3", 309 | "version": "3.10.12" 310 | } 311 | }, 312 | "nbformat": 4, 313 | "nbformat_minor": 2 314 | } 315 | -------------------------------------------------------------------------------- /RobotArm/robot_arm_collision_check_jp.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [ロボットアーム](#toc1_) \n", 9 | "- [pybulletの起動](#toc2_) \n", 10 | "- [初期設定](#toc3_) \n", 11 | "- [関数の定義](#toc4_) \n", 12 | "- [シミュレーションの実行](#toc5_) \n", 13 | "\n", 14 | "\n", 21 | "" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "# [ロボットアーム](#toc0_)\n", 29 | "\n", 30 | "本notebookでは「2軸のロボットアーム」と「オブジェクト」間の衝突判定を行う方法を紹介します。\n", 31 | "\n", 32 | "(pybulletで使用可能な関数がまとめられたマニュアルについては[こちら](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)を参照してください。)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "# [pybulletの起動](#toc0_)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 1, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stderr", 49 | "output_type": "stream", 50 | "text": [ 51 | "pybullet build time: Nov 28 2023 23:45:17\n" 52 | ] 53 | }, 54 | { 55 | "name": "stdout", 56 | "output_type": "stream", 57 | "text": [ 58 | "startThreads creating 1 threads.\n", 59 | "starting thread 0\n", 60 | "started thread 0 \n", 61 | "argc=2\n", 62 | "argv[0] = --unused\n", 63 | "argv[1] = --start_demo_name=Physics Server\n", 64 | "ExampleBrowserThreadFunc started\n", 65 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 66 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 67 | "Creating context\n", 68 | "Created GL 3.3 context\n", 69 | "Direct GLX rendering context obtained\n", 70 | "Making context current\n", 71 | "GL_VENDOR=Mesa\n", 72 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 73 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 74 | 
"GL_SHADING_LANGUAGE_VERSION=4.50\n", 75 | "pthread_getconcurrency()=0\n", 76 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 77 | "Vendor = Mesa\n", 78 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 79 | "b3Printf: Selected demo: Physics Server\n", 80 | "startThreads creating 1 threads.\n", 81 | "starting thread 0\n", 82 | "started thread 0 \n", 83 | "MotionThreadFunc thread started\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "import pybullet\n", 89 | "import pybullet_data\n", 90 | "physics_client = pybullet.connect(pybullet.GUI) " 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "# [初期設定](#toc0_)\n", 98 | "床の生成、ボックスオブジェクトの生成、ロボットの生成、カメラ位置の設定などの初期設定を行います。" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 2, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "ven = Mesa\n", 111 | "ven = Mesa\n", 112 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 113 | "\n", 114 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 115 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 116 | "\n", 117 | "b3Printf: target_position_vertual_link\n" 118 | ] 119 | } 120 | ], 121 | "source": [ 122 | "pybullet.resetSimulation() # シミュレーション空間をリセット\n", 123 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # pybulletに必要なデータへのパスを追加\n", 124 | "pybullet.setGravity(0.0, 0.0, -9.8) # 地球上における重力に設定\n", 125 | "time_step = 1./240.\n", 126 | "pybullet.setTimeStep(time_step) # 1stepあたりに経過する時間の設定\n", 127 | "\n", 128 | "#床の読み込み\n", 129 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 130 | "\n", 131 | "# GUIモードの際のカメラの位置などを設定\n", 132 | "camera_distance = 2.0\n", 133 | "camera_yaw = 0.0 # deg\n", 134 | "camera_pitch = -20 # deg\n", 135 | "camera_target_position = [0.0, 0.0, 0.0]\n", 136 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)\n", 137 | "\n", 138 | "# ロボットの読み込み\n", 139 | "arm_start_pos = [0, 0, 0.1] # 初期位置(x,y,z)を設定\n", 140 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0,0,0]) # 初期姿勢(roll, pitch, yaw)を設定\n", 141 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\",arm_start_pos, arm_start_orientation, useFixedBase=True) # ロボットが倒れないように、useFixedBase=Trueでルートのリンクを固定\n", 142 | "\n", 143 | "# GUIモードの際のカメラの位置などを設定\n", 144 | "camera_distance = 1.5\n", 145 | "camera_yaw = 180.0 # deg\n", 146 | "camera_pitch = -10 # deg\n", 147 | "camera_target_position = [0.0, 0.0, 1.0]\n", 148 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)\n", 149 | "\n", 150 | "# ボックスの読み込み\n", 151 | "## ボックスの重さ、サイズ、位置·姿勢を決める\n", 152 | "mass = 5 # kg\n", 153 | "box_size = [0.3, 0.6, 0.5]\n", 154 | "position = [0.8, 0, 0.5]\n", 155 | "orientation = [1, 0, 0, 0] # 四元数\n", 156 | "box_collision_id = pybullet.createCollisionShape(pybullet.GEOM_BOX, halfExtents=box_size, physicsClientId=physics_client)\n", 157 | "box_visual_id = pybullet.createVisualShape(pybullet.GEOM_BOX, halfExtents=box_size, physicsClientId=physics_client, rgbaColor=[1,0,0,1]) # 赤・半透明\n", 158 | "box_body_id = pybullet.createMultiBody(mass, box_collision_id, box_visual_id, position, orientation, physicsClientId=physics_client)" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "# 
[関数の定義](#toc0_)\n", 166 | "物体間の衝突の有効化/無効化に必要な、以下の関数を定義します。\n", 167 | "- `GetNumLinks`:指定したオブジェクトのリンク数を取得\n", 168 | "- `EnableCollisionBetweenObjects`:2つのオブジェクト間の衝突を有効/無効化する\n", 169 | "- `JudgeContact`:2つのオブジェクトが接触しているかどうかを判定" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 3, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "def get_num_links(objId):\n", 179 | " \"\"\"\n", 180 | " 指定されたオブジェクトのリンク数を取得する。\n", 181 | " Parameters\n", 182 | " ----------\n", 183 | " objId : int\n", 184 | " オブジェクトのインデックス。\n", 185 | " \n", 186 | " Returns\n", 187 | " -------\n", 188 | " linkNum : int\n", 189 | " オブジェクトのリンク数\n", 190 | " \"\"\"\n", 191 | " current_link_idx = 0\n", 192 | " link_num = 0\n", 193 | " while True:\n", 194 | " result = pybullet.getLinkState(objId, current_link_idx)\n", 195 | " if result is None:\n", 196 | " link_num = current_link_idx + 1\n", 197 | " break\n", 198 | " current_link_idx += 1\n", 199 | " return link_num\n", 200 | "\n", 201 | "def enable_collision_between_objects(obj1_id, obj2_id, enable):\n", 202 | " \"\"\"\n", 203 | " 指定されたオブジェクト間の衝突を有効/無効にする。\n", 204 | " Parameters\n", 205 | " ----------\n", 206 | " obj1_id : int\n", 207 | " 1つ目のオブジェクトのインデックス。\n", 208 | " obj2_id : int\n", 209 | " 2つ目のオブジェクトのインデックス。\n", 210 | " enable : bool\n", 211 | " 衝突判定を有効にする場合はTrue、無効にする場合はFalse。\n", 212 | "\n", 213 | " Returns\n", 214 | " -------\n", 215 | " None\n", 216 | " \"\"\"\n", 217 | " # 各オブジェクトのリンク数を取得\n", 218 | " obj1_link_num = get_num_links(obj1_id)\n", 219 | " obj2_link_num = get_num_links(obj2_id)\n", 220 | "\n", 221 | " # 衝突判定の有効/無効を設定\n", 222 | " for obj1_link_idx in range(-1, obj1_link_num):\n", 223 | " for obj2_link_idx in range(-1, obj2_link_num):\n", 224 | " pybullet.setCollisionFilterPair(obj1_id, obj2_id, obj1_link_idx, obj2_link_idx, enable)\n", 225 | "\n", 226 | "def judge_collision(obj1_id, obj2_id):\n", 227 | " \"\"\"\n", 228 | " 2つのオブジェクトが接触しているかどうかを判定する。\n", 229 | " Parameters\n", 230 | " ----------\n", 231 | " obj1_id : int\n", 232 | " 1つ目のオブジェクトのインデックス。\n", 233 | " obj2_id : int\n", 234 | " 2つ目のオブジェクトのインデックス。\n", 235 | "\n", 236 | " Returns\n", 237 | " -------\n", 238 | " isCollision : bool\n", 239 | " 2つのオブジェクトが接触している場合はTrue、接触していない場合はFalse。\n", 240 | " \"\"\"\n", 241 | " # 2つのオブジェクト間の接触情報を取得\n", 242 | " pts = pybullet.getClosestPoints(obj1_id, obj2_id, distance=100)\n", 243 | "\n", 244 | " # 2つのオブジェクトが接触しているかどうかを判定\n", 245 | " is_collision = False\n", 246 | " for pt in pts:\n", 247 | " distance = pt[8]\n", 248 | " # 2つのオブジェクト間で接触している点(=距離が0未満の点)があるかを判定\n", 249 | " if distance < 0.0:\n", 250 | " is_collision = True\n", 251 | " break\n", 252 | "\t\n", 253 | " return is_collision" 254 | ] 255 | }, 256 | { 257 | "cell_type": "markdown", 258 | "metadata": {}, 259 | "source": [ 260 | "# [シミュレーションの実行](#toc0_)\n", 261 | "ロボットアームを動かし、オブジェクトとの衝突判定を行います。\n", 262 | "\n", 263 | "下記コードを実行すると、GUI画面上のロボットアームをマウスで操作することができ、ロボットアームをボックスに触れさせると、画面上に「collision」と表示されます。" 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": 4, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "import time\n", 273 | "\n", 274 | "pybullet.setRealTimeSimulation(1) # リアルタイムシミュレーションを有効化(GUI画面上で、オブジェクトをマウスで操作することが可能になる)\n", 275 | "\n", 276 | "# ロボットアームとボックス間の衝突判定を無効化\n", 277 | "enable_collision_between_objects(arm_id, box_body_id, False)\n", 278 | "\n", 279 | "# ロボットアームとボックスが衝突しているかどうかを画面上に表示\n", 280 | "while (pybullet.isConnected()):\n", 281 | "\n", 282 | "\t# ロボットアームとボックスが衝突しているかどうかを判定\n", 283 | "\tis_collision = 
judge_collision(arm_id, box_body_id)\n", 284 | "\n", 285 | "\t# 衝突している場合は画面上に表示\n", 286 | "\tif is_collision:\n", 287 | "\t\tpybullet.addUserDebugText(\"collision\", [0.5, 0.0, 1.5], textSize=5, lifeTime=0.1, textColorRGB=[1,0,0])\n", 288 | "\t\n", 289 | "\ttime.sleep(time_step)" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": null, 295 | "metadata": {}, 296 | "outputs": [], 297 | "source": [] 298 | } 299 | ], 300 | "metadata": { 301 | "kernelspec": { 302 | "display_name": "Python 3", 303 | "language": "python", 304 | "name": "python3" 305 | }, 306 | "language_info": { 307 | "codemirror_mode": { 308 | "name": "ipython", 309 | "version": 3 310 | }, 311 | "file_extension": ".py", 312 | "mimetype": "text/x-python", 313 | "name": "python", 314 | "nbconvert_exporter": "python", 315 | "pygments_lexer": "ipython3", 316 | "version": "3.10.12" 317 | } 318 | }, 319 | "nbformat": 4, 320 | "nbformat_minor": 2 321 | } 322 | -------------------------------------------------------------------------------- /RobotArm/robot_arm_homogeneous_matrix_forward_kinematics_jp.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [同次変換行列を用いた順運動学](#toc1_) \n", 9 | " - [pybulletの起動](#toc1_1_1_) \n", 10 | " - [pybulletの初期設定](#toc1_1_2_) \n", 11 | " - [ロボットアームの生成](#toc1_1_3_) \n", 12 | " - [2次元平面における同次変換の定義](#toc1_1_4_) \n", 13 | " - [同次変換行列による順運動学の実行](#toc1_1_5_) \n", 14 | "\n", 15 | "\n", 22 | "" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "# [同次変換行列を用いた順運動学](#toc0_)\n", 30 | "\n", 31 | "本notebookでは、2軸ロボットアームを用いて「同次変換行列を用いた順運動学」をPybulletで実装する手順について解説します。\n", 32 | "\n", 33 | "(pybulletで使用可能な関数がまとめられたマニュアルについては[こちら](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf)を参照してください。)\n", 34 | "\n", 35 | "
\n", 36 | "\n", 37 | "同次変換行列による順運動学では、下動画のように\n", 38 | "- 各リンクに座標系を張り付け、\n", 39 | "- 「リンク1座標系$\\Sigma_{1}$」→「リンク2座標系$\\Sigma_{2}$」→ ... →「エンドエフェクタ座標系$\\Sigma_{\\mathrm{e}}$」と順番に座標変換を繰り返すことで\n", 40 | "- 「リンク1座標系$\\Sigma_{1}$」から見た時の「エンドエフェクタ座標系$\\Sigma_{\\mathrm{e}}$」の位置·姿勢を求める\n", 41 | "\n", 42 | "ことができます。\n", 43 | "\n", 44 | "![](../images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/overview.gif)" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "### [pybulletの起動](#toc0_)" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 1, 57 | "metadata": {}, 58 | "outputs": [ 59 | { 60 | "name": "stderr", 61 | "output_type": "stream", 62 | "text": [ 63 | "pybullet build time: Nov 28 2023 23:45:17\n" 64 | ] 65 | }, 66 | { 67 | "name": "stdout", 68 | "output_type": "stream", 69 | "text": [ 70 | "startThreads creating 1 threads.\n", 71 | "starting thread 0\n", 72 | "started thread 0 \n", 73 | "argc=2\n", 74 | "argv[0] = --unused\n", 75 | "argv[1] = --start_demo_name=Physics Server\n", 76 | "ExampleBrowserThreadFunc started\n", 77 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 78 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 79 | "Creating context\n", 80 | "Created GL 3.3 context\n", 81 | "Direct GLX rendering context obtained\n", 82 | "Making context current\n", 83 | "GL_VENDOR=Mesa\n", 84 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 85 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 86 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 87 | "pthread_getconcurrency()=0\n", 88 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 89 | "Vendor = Mesa\n", 90 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 91 | "b3Printf: Selected demo: Physics Server\n", 92 | "startThreads creating 1 threads.\n", 93 | "starting thread 0\n", 94 | "started thread 0 \n", 95 | "MotionThreadFunc thread started\n" 96 | ] 97 | } 98 | ], 99 | "source": [ 100 | "import pybullet\n", 101 | "import pybullet_data\n", 102 | "physics_client = pybullet.connect(pybullet.GUI) " 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": {}, 108 | "source": [ 109 | "### [pybulletの初期設定](#toc0_)" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 2, 115 | "metadata": {}, 116 | "outputs": [ 117 | { 118 | "name": "stdout", 119 | "output_type": "stream", 120 | "text": [ 121 | "ven = Mesa\n", 122 | "ven = Mesa\n" 123 | ] 124 | } 125 | ], 126 | "source": [ 127 | "pybullet.resetSimulation() # シミュレーション空間をリセット\n", 128 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # pybulletに必要なデータへのパスを追加\n", 129 | "pybullet.setGravity(0.0, 0.0, -9.8) # 地球上における重力に設定\n", 130 | "time_step = 1./240.\n", 131 | "pybullet.setTimeStep(time_step) # 1stepあたりに経過する時間の設定\n", 132 | "\n", 133 | "#床の読み込み\n", 134 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 135 | "\n", 136 | "# GUIモードの際のカメラの位置などを設定\n", 137 | "camera_distance = 2.0\n", 138 | "camera_yaw = 0.0 # deg\n", 139 | "camera_pitch = -20 # deg\n", 140 | "camera_target_position = [0.0, 0.0, 0.0]\n", 141 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 142 | ] 143 | }, 144 | { 145 | "cell_type": "markdown", 146 | "metadata": {}, 147 | "source": [ 148 | "### [ロボットアームの生成](#toc0_)\n", 149 | "今回は、2軸ロボットアーム`simple_2d_arm.urdf`を生成します。 \n", 150 | "ロボットは下図のような構成になっています(センサーについては、`robot_arm_sensor.ipynb`にて解説しています。) \n", 151 | "\n", 152 | 
"![](../images/RobotArm/2d_robot_arm.png)" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 3, 158 | "metadata": {}, 159 | "outputs": [ 160 | { 161 | "name": "stdout", 162 | "output_type": "stream", 163 | "text": [ 164 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 165 | "\n", 166 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 167 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 168 | "\n", 169 | "b3Printf: target_position_vertual_link\n" 170 | ] 171 | } 172 | ], 173 | "source": [ 174 | "# ロボットの読み込み\n", 175 | "arm_start_pos = [0, 0, 0.1] # 初期位置(x,y,z)を設定\n", 176 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0,0,0]) # 初期姿勢(roll, pitch, yaw)を設定\n", 177 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\",arm_start_pos, arm_start_orientation, useFixedBase=True) # ロボットが倒れないように、useFixedBase=Trueでルートのリンクを固定\n", 178 | "\n", 179 | "# GUIモードの際のカメラの位置などを設定\n", 180 | "camera_distance = 1.5\n", 181 | "camera_yaw = 180.0 # deg\n", 182 | "camera_pitch = -10 # deg\n", 183 | "camera_target_position = [0.0, 0.0, 1.0]\n", 184 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 185 | ] 186 | }, 187 | { 188 | "cell_type": "markdown", 189 | "metadata": {}, 190 | "source": [ 191 | "### [2次元平面における同次変換行列の定義](#toc0_)\n", 192 | "\n", 193 | "\n", 194 | "2次元平面におけるロボットアームでは、一般に\n", 195 | "- リンク座標系はリンクの長さ方向に$x$軸をとります。\n", 196 | "![](../images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_cordinate.png)\n", 197 | "\n", 198 | "
\n", 199 | "\n", 200 | "そのため、2次元平面におけるロボットアームの場合\n", 201 | "1. 原点周りに $\\theta$ 回転(回転行列)\n", 202 | "2. $x$ 軸方向(リンクの長さ方向)に 「リンク長$l$」 だけ平行移動する(平行移動行列)\n", 203 | "\n", 204 | "といった手順で座標変換することができます。\n", 205 | "![](../images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_homogeneous_matrix_forward_kinematics.gif)" 206 | ] 207 | }, 208 | { 209 | "cell_type": "markdown", 210 | "metadata": {}, 211 | "source": [ 212 | "下記コードでは、2次元平面における「回転行列`Hz`」と「平行移動行列`Hp`」を定義しています。 " 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": 4, 218 | "metadata": {}, 219 | "outputs": [], 220 | "source": [ 221 | "import numpy as np\n", 222 | "\n", 223 | "def Hz(theta):\n", 224 | " \"\"\"\n", 225 | " 2次元平面における回転行列を求める(2次元平面における回転はz軸周り)\n", 226 | "\n", 227 | " Parameters\n", 228 | " ----------\n", 229 | " theta : float\n", 230 | " 回転角度[rad]\n", 231 | "\n", 232 | " Returns\n", 233 | " -------\n", 234 | " numpy.ndarray\n", 235 | " 回転行列\n", 236 | " \"\"\"\n", 237 | " return np.array([[np.cos(theta), -np.sin(theta), 0], \n", 238 | " [np.sin(theta), np.cos(theta), 0], \n", 239 | " [0, 0, 1]])\n", 240 | "\n", 241 | "def Hp(x, y):\n", 242 | " \"\"\"\n", 243 | " 2次元平面における平行移動行列を求める\n", 244 | "\n", 245 | " Parameters\n", 246 | " ----------\n", 247 | " x : float\n", 248 | " x方向の平行移動量\n", 249 | " y : float\n", 250 | " y方向の平行移動量\n", 251 | "\n", 252 | " Returns\n", 253 | " -------\n", 254 | " numpy.ndarray\n", 255 | " 平行移動行列\n", 256 | " \"\"\"\n", 257 | " return np.array([[1, 0, x], \n", 258 | " [0, 1, y], \n", 259 | " [0, 0, 1]])" 260 | ] 261 | }, 262 | { 263 | "cell_type": "markdown", 264 | "metadata": {}, 265 | "source": [ 266 | "また、「回転行列`Hz`」と「平行移動行列`Hp`」を組み合わせることで、上記の「原点周りの回転」「平行移動」を同時に行う行列「同次変換行列`H`」を定義することができます。\n", 267 | "\n", 268 | "\n", 269 | "**【pythonコード例】**\n", 270 | "```python\n", 271 | "H = Hz(theta) @ Hp(LINK_LENGTH, 0) # 回転行列と平行移動行列を組み合わせて、同次変換行列を作成\n", 272 | "```\n" 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "metadata": {}, 278 | "source": [ 279 | "
\n", 280 | "\n", 281 | "なお、同次変換行列は「チェーンルール」により\n", 282 | "- **リンク1座標系** → リンク2座標系 への同次変換行列`H12`\n", 283 | "- リンク2座標系 → **エンドエフェクタ座標系** への同次変換行列`H2e`\n", 284 | " \n", 285 | "を「**リンク1座標系** → **エンドエフェクタ座標系** への同次変換行列`H1e`」として、一つの同次変換行列にまとめることができます。\n", 286 | "\n", 287 | "\n", 288 | "**【pythonコード例】**\n", 289 | "\n", 290 | "```python\n", 291 | "H12 = Hz(theta1) @ Hp(LINK1_LENGTH, 0) # リンク1座標系 → リンク2座標系 への同次変換行列\n", 292 | "H2e = Hz(theta2) @ Hp(LINK2_LENGTH, 0) # リンク2座標系 → エンドエフェクタ座標系 への同次変換行列\n", 293 | "H1e = H12 @ H2e # リンク1座標系 → エンドエフェクタ座標系 への同次変換行列\n", 294 | "```\n", 295 | "\n", 296 | "![](../images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/direct_transformation.gif)\n" 297 | ] 298 | }, 299 | { 300 | "cell_type": "markdown", 301 | "metadata": {}, 302 | "source": [ 303 | "※ 軸数が増えていく場合も同様に\n", 304 | "- 「**リンク1座標系** → リンク2座標系 への同次変換行列`H12`」\n", 305 | "- 「リンク2座標系 → リンク3座標系 への同次変換行列`H23`」\n", 306 | "- ...\n", 307 | "- 「リンクn座標系 → **エンドエフェクタ座標系** への同次変換行列`Hne`」\n", 308 | "\n", 309 | "を\n", 310 | "\n", 311 | "「**リンク1座標系** → **エンドエフェクタ座標系** への同次変換行列`H1e`」として、一つの同次変換行列にまとめることができます。" 312 | ] 313 | }, 314 | { 315 | "cell_type": "markdown", 316 | "metadata": {}, 317 | "source": [ 318 | "
\n", 319 | "\n", 320 | "### [同次変換行列による順運動学の実行](#toc0_)\n", 321 | "\n", 322 | "以下コードを実行すると、「2軸ロボットアームの同次変換行列による順運動学」が計算され、pybullet上に結果(エンドエフェクタの位置)が表示されます。\n", 323 | "\n", 324 | "```python\n", 325 | "※ 画面上に表示される「エンドエフェクタの位置」は「ロボットアームの根本(=link1座標系)を原点とした座標位置」であることに注意してください(pybulletのワールド座標系を原点とした位置ではありません)。\n", 326 | "\n" 327 | ] 328 | }, 329 | { 330 | "cell_type": "code", 331 | "execution_count": 11, 332 | "metadata": {}, 333 | "outputs": [ 334 | { 335 | "name": "stdout", 336 | "output_type": "stream", 337 | "text": [ 338 | "(xe, ye)= 0.15449350228318842 0.9746569660489338\n" 339 | ] 340 | }, 341 | { 342 | "data": { 343 | "text/plain": [ 344 | "()" 345 | ] 346 | }, 347 | "execution_count": 11, 348 | "metadata": {}, 349 | "output_type": "execute_result" 350 | } 351 | ], 352 | "source": [ 353 | "LINK1_LENGTH = 0.5 # link1の長さ(「simple2d_arm.urdf」のlink1のz方向の長さ)\n", 354 | "LINK2_LENGTH = 0.55 # link2の長さ(「simple2d_arm.urdf」のlink2+force_sensor_linkの長さ)\n", 355 | "\n", 356 | "# ここの値を変えると、結果が変わります##########\n", 357 | "# 各リンクの回転角度を定義\n", 358 | "link1_angle_deg = 60\n", 359 | "link2_angle_deg = 40\n", 360 | "############################################\n", 361 | "\n", 362 | "link1_angle_rad = np.deg2rad(link1_angle_deg)\n", 363 | "link2_angle_rad = np.deg2rad(link2_angle_deg)\n", 364 | "\n", 365 | "# リンク1座標系 -> リンク2座標系への変換行列\n", 366 | "H12 = Hz(link1_angle_rad) @ Hp(LINK1_LENGTH, 0) # T12:link「1」座標系 -> link「2」座標系\n", 367 | "# リンク2座標系 -> エンドエフェクタ座標系への変換行列\n", 368 | "H2e = Hz(link2_angle_rad) @ Hp(LINK2_LENGTH, 0) # T2e:link「2」座標系 ->「e」nd effector座標系\n", 369 | "\n", 370 | "# 「link1座標系 -> エンドエフェクタ座標系に変換する同時変換行列H12e」をあらかじめ定義\n", 371 | "H1e = H12@H2e # T1e:link「1」座標系 ->「e」nd effector座標系\n", 372 | "\n", 373 | "x1, y1 = 0, 0 # link1座標系の原点\n", 374 | "# H12eを使って、「link1座標系の原点」から見た時の、end effector座標系の位置を求める\n", 375 | "oe = H1e@np.array([x1, y1, 1])\n", 376 | "xe, ye = oe[0], oe[1]\n", 377 | "print(\"(xe, ye)=\", xe, ye)\n", 378 | "\n", 379 | "# 画面上に結果を表示\n", 380 | "text_position = [0.5, 0.0, 2.0]\n", 381 | "life_time = 10.0 # 表示期間(秒)\n", 382 | "pybullet.addUserDebugText(f\"x, y = ({xe:.2f}, {ye:.2f})\", text_position, textSize=2, lifeTime=life_time)\n", 383 | "\n", 384 | "# 実際に関節を動かして、順運動学の結果と等しいかを確認\n", 385 | "pybullet.resetJointState(arm_id, 0, link1_angle_rad )\n", 386 | "pybullet.resetJointState(arm_id, 1, link2_angle_rad )\n", 387 | "pybullet.stepSimulation()" 388 | ] 389 | } 390 | ], 391 | "metadata": { 392 | "kernelspec": { 393 | "display_name": "Python 3", 394 | "language": "python", 395 | "name": "python3" 396 | }, 397 | "language_info": { 398 | "codemirror_mode": { 399 | "name": "ipython", 400 | "version": 3 401 | }, 402 | "file_extension": ".py", 403 | "mimetype": "text/x-python", 404 | "name": "python", 405 | "nbconvert_exporter": "python", 406 | "pygments_lexer": "ipython3", 407 | "version": "3.10.12" 408 | } 409 | }, 410 | "nbformat": 4, 411 | "nbformat_minor": 2 412 | } 413 | -------------------------------------------------------------------------------- /RobotArm/robot_arm_trigonometric_forward_kinematics_en.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "**Table of contents** \n", 8 | "- [Forward Kinematics Using Trigonometric Functions](#toc1_) \n", 9 | "- [Forward Kinematics Using Trigonometric Functions for a \"2-Axis Rotational Joint Robot Arm\"](#toc2_) \n", 10 | " - [Starting pybullet](#toc2_1_1_) \n", 11 | " - [Initial Setup for pybullet](#toc2_1_2_) \n", 
12 | " - [Generating the Robot Arm](#toc2_1_3_) \n", 13 | " - [Defining the Function for Forward Kinematics Using Trigonometric Functions](#toc2_1_4_) \n", 14 | " - [Executing Forward Kinematics Using Trigonometric Functions](#toc2_1_5_) \n", 15 | "\n", 16 | "\n", 23 | "" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "# [Forward Kinematics Using Trigonometric Functions](#toc0_)\n", 31 | "\n", 32 | "In this notebook, we will explain the procedure to implement \"forward kinematics using trigonometric functions\" with a 2-axis robot arm in Pybullet.\n", 33 | "\n", 34 | "(For a manual summarizing the functions available in pybullet, refer to [this link](https://github.com/bulletphysics/bullet3/blob/master/docs/pybullet_quickstartguide.pdf).)\n" 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "metadata": {}, 40 | "source": [ 41 | "# [Forward Kinematics Using Trigonometric Functions for a \"2-Axis Rotational Joint Robot Arm\"](#toc0_)\n", 42 | "\n", 43 | "From here, we will explain the theory and implementation of forward kinematics using trigonometric functions for a \"2-axis rotational joint robot arm.\"" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "### [Starting pybullet](#toc0_)" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 1, 56 | "metadata": {}, 57 | "outputs": [ 58 | { 59 | "name": "stderr", 60 | "output_type": "stream", 61 | "text": [ 62 | "pybullet build time: Nov 28 2023 23:45:17\n" 63 | ] 64 | }, 65 | { 66 | "name": "stdout", 67 | "output_type": "stream", 68 | "text": [ 69 | "startThreads creating 1 threads.\n", 70 | "starting thread 0\n", 71 | "started thread 0 \n", 72 | "argc=2\n", 73 | "argv[0] = --unused\n", 74 | "argv[1] = --start_demo_name=Physics Server\n", 75 | "ExampleBrowserThreadFunc started\n", 76 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 77 | "X11 functions dynamically loaded using dlopen/dlsym OK!\n", 78 | "Creating context\n", 79 | "Created GL 3.3 context\n", 80 | "Direct GLX rendering context obtained\n", 81 | "Making context current\n", 82 | "GL_VENDOR=Mesa\n", 83 | "GL_RENDERER=llvmpipe (LLVM 15.0.7, 256 bits)\n", 84 | "GL_VERSION=4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 85 | "GL_SHADING_LANGUAGE_VERSION=4.50\n", 86 | "pthread_getconcurrency()=0\n", 87 | "Version = 4.5 (Core Profile) Mesa 23.2.1-1ubuntu3.1~22.04.2\n", 88 | "Vendor = Mesa\n", 89 | "Renderer = llvmpipe (LLVM 15.0.7, 256 bits)\n", 90 | "b3Printf: Selected demo: Physics Server\n", 91 | "startThreads creating 1 threads.\n", 92 | "starting thread 0\n", 93 | "started thread 0 \n", 94 | "MotionThreadFunc thread started\n" 95 | ] 96 | } 97 | ], 98 | "source": [ 99 | "import pybullet\n", 100 | "import pybullet_data\n", 101 | "physics_client = pybullet.connect(pybullet.GUI) " 102 | ] 103 | }, 104 | { 105 | "cell_type": "markdown", 106 | "metadata": {}, 107 | "source": [ 108 | "### [Initial Setup for pybullet](#toc0_)" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 2, 114 | "metadata": {}, 115 | "outputs": [ 116 | { 117 | "name": "stdout", 118 | "output_type": "stream", 119 | "text": [ 120 | "ven = Mesa\n", 121 | "ven = Mesa\n" 122 | ] 123 | } 124 | ], 125 | "source": [ 126 | "pybullet.resetSimulation() # Reset the simulation space\n", 127 | "pybullet.setAdditionalSearchPath(pybullet_data.getDataPath()) # Add paths to necessary data for pybullet\n", 128 | "pybullet.setGravity(0.0, 0.0, -9.8) # Set gravity as on 
Earth\n", 129 | "time_step = 1./240.\n", 130 | "pybullet.setTimeStep(time_step) # Set the elapsed time per step\n", 131 | "\n", 132 | "# Load the floor\n", 133 | "plane_id = pybullet.loadURDF(\"plane.urdf\")\n", 134 | "\n", 135 | "# Set the camera position and other parameters in GUI mode\n", 136 | "camera_distance = 2.0\n", 137 | "camera_yaw = 0.0 # deg\n", 138 | "camera_pitch = -20 # deg\n", 139 | "camera_target_position = [0.0, 0.0, 0.0]\n", 140 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "### [Generating the Robot Arm](#toc0_)\n", 148 | "This time, we will generate a 2-axis robot arm `simple_2d_arm.urdf`. \n", 149 | "The robot is structured as shown in the diagram below (sensors are explained in `robot_arm_sensor.ipynb`). \n", 150 | "\n", 151 | "![](../images/RobotArm/2d_robot_arm.png)" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": 3, 157 | "metadata": {}, 158 | "outputs": [ 159 | { 160 | "name": "stdout", 161 | "output_type": "stream", 162 | "text": [ 163 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 164 | "\n", 165 | "b3Printf: No inertial data for link, using mass=1, localinertiadiagonal = 1,1,1, identity local inertial frame\n", 166 | "b3Printf: b3Warning[examples/Importers/ImportURDFDemo/BulletUrdfImporter.cpp,126]:\n", 167 | "\n", 168 | "b3Printf: target_position_vertual_link\n" 169 | ] 170 | } 171 | ], 172 | "source": [ 173 | "# Load the robot\n", 174 | "arm_start_pos = [0, 0, 0.1] # Set the initial position (x, y, z)\n", 175 | "arm_start_orientation = pybullet.getQuaternionFromEuler([0, 0, 0]) # Set the initial orientation (roll, pitch, yaw)\n", 176 | "arm_id = pybullet.loadURDF(\"../urdf/simple2d_arm.urdf\", arm_start_pos, arm_start_orientation, useFixedBase=True) # Fix the root link with useFixedBase=True to prevent the robot from falling\n", 177 | "\n", 178 | "# Set the camera position and other parameters in GUI mode\n", 179 | "camera_distance = 1.5\n", 180 | "camera_yaw = 180.0 # deg\n", 181 | "camera_pitch = -10 # deg\n", 182 | "camera_target_position = [0.0, 0.0, 1.0]\n", 183 | "pybullet.resetDebugVisualizerCamera(camera_distance, camera_yaw, camera_pitch, camera_target_position)" 184 | ] 185 | }, 186 | { 187 | "cell_type": "markdown", 188 | "metadata": {}, 189 | "source": [ 190 | "### [Defining the Function for Forward Kinematics Using Trigonometric Functions](#toc0_)\n", 191 | "We will define the function for \"forward kinematics using trigonometric functions.\"\n" 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": 4, 197 | "metadata": {}, 198 | "outputs": [], 199 | "source": [ 200 | "import numpy as np\n", 201 | "\n", 202 | "def forward_kinematics2d(link_length, theta):\n", 203 | " \"\"\"\n", 204 | " Calculate forward kinematics (using trigonometric functions) in a 2D plane\n", 205 | " Parameters\n", 206 | " ----------\n", 207 | " link_length : float\n", 208 | " Length of the link\n", 209 | " theta : float\n", 210 | " Rotation angle (rad)\n", 211 | "\n", 212 | " Returns\n", 213 | " -------\n", 214 | " x: x-coordinate of the link's end\n", 215 | " y: y-coordinate of the link's end\n", 216 | " \"\"\"\n", 217 | " x = link_length * np.cos(theta)\n", 218 | " y = link_length * np.sin(theta)\n", 219 | " return x, y" 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": {}, 225 | "source": [ 226 
| "### [Executing Forward Kinematics Using Trigonometric Functions](#toc0_)\n", 227 | "\n", 228 | "Next, by running the code below, forward kinematics will be calculated using the angles set in `link1_angle_deg` and `link2_angle_deg`, and the calculated \"end-effector position\" will be displayed on the screen. (Note that the origin is based on the root of the robot arm (i.e., the link1 coordinate system).)\n", 229 | "\n", 230 | "Additionally, the angles of each joint will be set to `link1_angle_deg` and `link2_angle_deg`, so please verify that the result of the forward kinematics matches the actual position of the end-effector." 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": 5, 236 | "metadata": {}, 237 | "outputs": [ 238 | { 239 | "name": "stdout", 240 | "output_type": "stream", 241 | "text": [ 242 | "x, y = (0.15801270189221944, 0.7263139720814413)\n" 243 | ] 244 | }, 245 | { 246 | "data": { 247 | "text/plain": [ 248 | "()" 249 | ] 250 | }, 251 | "execution_count": 5, 252 | "metadata": {}, 253 | "output_type": "execute_result" 254 | }, 255 | { 256 | "ename": "", 257 | "evalue": "", 258 | "output_type": "error", 259 | "traceback": [ 260 | "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n", 261 | "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n", 262 | "\u001b[1;31mClick here for more info. \n", 263 | "\u001b[1;31mView Jupyter log for further details." 264 | ] 265 | } 266 | ], 267 | "source": [ 268 | "import math\n", 269 | "\n", 270 | "LINK1_LENGTH = 0.5 # Length of link1 (length of link1 in \"simple2d_arm.urdf\")\n", 271 | "LINK2_LENGTH = 0.55 # Length of link2 (length of link2 + force_sensor_link in \"simple2d_arm.urdf\")\n", 272 | "\n", 273 | "# Change these values to see different results ##########\n", 274 | "link1_angle_deg = 30\n", 275 | "link2_angle_deg = 90\n", 276 | "###############################################\n", 277 | "\n", 278 | "link1_angle_rad = math.radians(link1_angle_deg)\n", 279 | "link2_angle_rad = math.radians(link2_angle_deg)\n", 280 | "\n", 281 | "# Calculate the initial end-effector position P_current using forward kinematics with trigonometric functions\n", 282 | "# Position of the end of link1 when viewed from the root of link1\n", 283 | "x1_to_2, y1_to_2 = forward_kinematics2d(LINK1_LENGTH, link1_angle_rad)\n", 284 | "\n", 285 | "# Position of the end of link2 (i.e., the end-effector position) when viewed from the root of link2\n", 286 | "x2_to_e, y2_to_e = forward_kinematics2d(LINK2_LENGTH, link1_angle_rad + link2_angle_rad)\n", 287 | "\n", 288 | "# Position of the end-effector when viewed from the root of link1 (origin coordinates)\n", 289 | "xe = x1_to_2 + x2_to_e\n", 290 | "ye = y1_to_2 + y2_to_e\n", 291 | "\n", 292 | "# Display the result in the terminal\n", 293 | "print(f\"x, y = ({xe}, {ye})\")\n", 294 | "\n", 295 | "# Display the result on the screen\n", 296 | "text_position = [0.5, 0.0, 2.0]\n", 297 | "life_time = 10.0 # Display duration (seconds)\n", 298 | "pybullet.addUserDebugText(f\"x, y = ({xe:.2f}, {ye:.2f})\", text_position, textSize=2, lifeTime=life_time)\n", 299 | "\n", 300 | "# Move the joints to verify that the result of the forward kinematics matches the actual end-effector position\n", 301 | "pybullet.resetJointState(arm_id, 0, link1_angle_rad)\n", 302 | "pybullet.resetJointState(arm_id, 1, link2_angle_rad)\n", 303 | "pybullet.stepSimulation()" 304 | ] 305 | } 306 | ], 307 | "metadata": { 308 | "kernelspec": { 
309 | "display_name": "Python 3", 310 | "language": "python", 311 | "name": "python3" 312 | }, 313 | "language_info": { 314 | "codemirror_mode": { 315 | "name": "ipython", 316 | "version": 3 317 | }, 318 | "file_extension": ".py", 319 | "mimetype": "text/x-python", 320 | "name": "python", 321 | "nbconvert_exporter": "python", 322 | "pygments_lexer": "ipython3", 323 | "version": "3.10.12" 324 | } 325 | }, 326 | "nbformat": 4, 327 | "nbformat_minor": 2 328 | } 329 | -------------------------------------------------------------------------------- /images/Common/Lidar.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/Lidar.gif -------------------------------------------------------------------------------- /images/Common/ar_marker_detect/ar_marker_blender.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/ar_marker_detect/ar_marker_blender.png -------------------------------------------------------------------------------- /images/Common/ar_marker_detect/ar_marker_detect.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/ar_marker_detect/ar_marker_detect.gif -------------------------------------------------------------------------------- /images/Common/bumper_sensor.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/bumper_sensor.gif -------------------------------------------------------------------------------- /images/Common/camera.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/camera.gif -------------------------------------------------------------------------------- /images/Common/camera_explain.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/camera_explain.jpeg -------------------------------------------------------------------------------- /images/Common/mobile_robot_basic.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/mobile_robot_basic.gif -------------------------------------------------------------------------------- /images/Common/mobile_robot_line_trace.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/mobile_robot_line_trace.gif -------------------------------------------------------------------------------- /images/Common/protect_drop_sensor.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/protect_drop_sensor.gif 
-------------------------------------------------------------------------------- /images/Common/raycast_explain.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/raycast_explain.jpeg -------------------------------------------------------------------------------- /images/Common/robot_arm_basic_position_control.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/robot_arm_basic_position_control.gif -------------------------------------------------------------------------------- /images/Common/robot_arm_basic_torque_control.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/robot_arm_basic_torque_control.gif -------------------------------------------------------------------------------- /images/Common/robot_arm_basic_velocity_control.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/robot_arm_basic_velocity_control.gif -------------------------------------------------------------------------------- /images/Common/robot_arm_collision_check.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/robot_arm_collision_check.gif -------------------------------------------------------------------------------- /images/Common/robot_arm_tip_camera.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/robot_arm_tip_camera.gif -------------------------------------------------------------------------------- /images/Common/robot_arm_tip_force_sensor.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/robot_arm_tip_force_sensor.gif -------------------------------------------------------------------------------- /images/Common/ultra_sonic_sensor.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/Common/ultra_sonic_sensor.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot1.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot2.png 
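
The chain rule used in the homogeneous-matrix notebook extends directly to any number of links. A minimal sketch (also not part of the repository; `fk_chain` is a hypothetical helper under the same planar-link assumptions) that folds an arbitrary list of per-link transforms into a single transform from the link1 frame to the end effector:

```python
import numpy as np

def fk_chain(link_lengths, joint_angles):
    """Planar FK for n revolute links: H1e = product of Hz(th_i) @ Hp(l_i, 0)."""
    H = np.eye(3)
    for l, th in zip(link_lengths, joint_angles):
        c, s = np.cos(th), np.sin(th)
        # Hz(th) @ Hp(l, 0) for one link, multiplied out and
        # composed onto the running product
        H = H @ np.array([[c, -s, c * l],
                          [s,  c, s * l],
                          [0.0, 0.0, 1.0]])
    return H @ np.array([0.0, 0.0, 1.0])  # end-effector position (homogeneous)

# Two links reproduce the notebooks' example; a third link needs no new code.
print(fk_chain([0.5, 0.55], np.deg2rad([60, 40])))        # ~ [0.1545, 0.9747, 1.0]
print(fk_chain([0.5, 0.55, 0.3], np.deg2rad([60, 40, -20])))
```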
-------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_extended_kalman_filter/extended_kalman_filter.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_extended_kalman_filter/extended_kalman_filter.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/a_star_animation.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/a_star_animation.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/a_star_pybullet.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/a_star_pybullet.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/cost_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/cost_example.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/next_node.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/next_node.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/next_node_cost.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/next_node_cost.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/nodes_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/nodes_map.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/obstacle_expansion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/obstacle_expansion.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/obstacle_real_to_map.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/obstacle_real_to_map.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_global_path_planning_a_star/start_node_cost_set.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_global_path_planning_a_star/start_node_cost_set.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_local_path_planning_dwa/dynamic_window_approach.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_local_path_planning_dwa/dynamic_window_approach.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_local_path_planning_dwa/dynamic_window_approach_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_local_path_planning_dwa/dynamic_window_approach_overview.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_local_path_planning_dwa/global_path_planning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_local_path_planning_dwa/global_path_planning.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_local_path_planning_dwa/local_path_planning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_local_path_planning_dwa/local_path_planning.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_particle_filter/particle_filter.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_particle_filter/particle_filter.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/q_learn.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/q_learn.gif -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/q_learn_and_sarsa.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/q_learn_and_sarsa.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/q_table.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/q_table.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/q_table_after_learning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/q_table_after_learning.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/r_t1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/r_t1.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/reward.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/reward.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/sim_environment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/sim_environment.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/td_error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/td_error.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_reinforcement_q_learning/td_target.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_reinforcement_q_learning/td_target.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_sensor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_sensor.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_sensor_en.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_sensor_en.png -------------------------------------------------------------------------------- /images/MobileRobot/mobile_robot_wheel_odometry/wheel_odometry.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/MobileRobot/mobile_robot_wheel_odometry/wheel_odometry.gif -------------------------------------------------------------------------------- /images/RobotArm/2d_robot_arm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/2d_robot_arm.png -------------------------------------------------------------------------------- /images/RobotArm/2d_robot_arm_sensor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/2d_robot_arm_sensor.png -------------------------------------------------------------------------------- /images/RobotArm/2d_robot_arm_sensor_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/2d_robot_arm_sensor_en.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_analytical_inverse_kinematics/inverse_kinematics_analytical_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_analytical_inverse_kinematics/inverse_kinematics_analytical_overview.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_eye_in_hand_estimate_obj_pose/eye_in_hand.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_eye_in_hand_estimate_obj_pose/eye_in_hand.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_eye_to_hand_estimate_obj_pose/eye_to_hand.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_eye_to_hand_estimate_obj_pose/eye_to_hand.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_cordinate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_cordinate.png -------------------------------------------------------------------------------- 
/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_cordinate_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_cordinate_en.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_homogeneous_matrix_forward_kinematics.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_homogeneous_matrix_forward_kinematics.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_homogeneous_matrix_forward_kinematics_en.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/2d_robot_arm_link_homogeneous_matrix_forward_kinematics_en.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/chain_rule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/chain_rule.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/coordinate_transformation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/coordinate_transformation.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/direct_transformation.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/direct_transformation.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/direct_transformation_en.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/direct_transformation_en.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/homogeneour_matrix_formulation.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/homogeneour_matrix_formulation.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/homogeneour_matrix_forward_kinematics_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/homogeneour_matrix_forward_kinematics_overview.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/overview.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/overview.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/overview_en.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/overview_en.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/rotation_matrix_formulation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/rotation_matrix_formulation.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/rotation_matrix_important_point.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/rotation_matrix_important_point.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/step_transformation.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/step_transformation.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/translation_matrix_formulation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/translation_matrix_formulation.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/translation_matrix_important_point.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_homogeneous_matrix_forward_kinematics/translation_matrix_important_point.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_image_based_visual_servo/pybullet_ibvs.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_image_based_visual_servo/pybullet_ibvs.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_p.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_p.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_p_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_p_en.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_q_delta_p_jacpbian.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_q_delta_p_jacpbian.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_q_delta_p_jacpbian_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/delta_q_delta_p_jacpbian_en.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_overview.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_overview_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_overview_en.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_pybullet.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_jacobian_inverse_kinemarics/inverse_kinematics_jacobian_pybullet.gif -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_trigonometric_forward_kinematics/2d_arm_forward_kinematics_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_trigonometric_forward_kinematics/2d_arm_forward_kinematics_overview.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_trigonometric_forward_kinematics/2d_arm_forward_kinematics_overview_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_trigonometric_forward_kinematics/2d_arm_forward_kinematics_overview_en.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_trigonometric_forward_kinematics/note_link_root.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_trigonometric_forward_kinematics/note_link_root.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_trigonometric_forward_kinematics/overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_trigonometric_forward_kinematics/overview.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_trigonometric_forward_kinematics/step1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_trigonometric_forward_kinematics/step1.png -------------------------------------------------------------------------------- /images/RobotArm/robot_arm_trigonometric_forward_kinematics/step2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/images/RobotArm/robot_arm_trigonometric_forward_kinematics/step2.png -------------------------------------------------------------------------------- /obj/ar_marker_box.obj: -------------------------------------------------------------------------------- 1 | # Blender 4.0.2 2 | # www.blender.org 3 | mtllib ar_marker_box.mtl 4 | o Cube 5 | v 0.140000 0.140000 -0.140000 6 | v 0.140000 -0.140000 -0.140000 7 | v 0.140000 0.140000 0.140000 8 | v 0.140000 -0.140000 0.140000 9 | v -0.140000 0.140000 -0.140000 10 | v -0.140000 -0.140000 -0.140000 11 | v -0.140000 0.140000 0.140000 12 | v -0.140000 -0.140000 0.140000 13 | vn -0.0000 1.0000 -0.0000 14 | vn -0.0000 -0.0000 1.0000 15 | vn -1.0000 -0.0000 -0.0000 16 | vn -0.0000 -1.0000 -0.0000 17 | vn 1.0000 -0.0000 -0.0000 18 | vn -0.0000 -0.0000 -1.0000 19 | vt 0.666667 0.500000 20 | vt 1.000000 0.500000 21 | 
vt 1.000000 0.750000
vt 0.666667 0.750000
vt 0.333333 0.750000
vt 0.666667 1.000000
vt 0.333333 1.000000
vt 0.333333 0.000000
vt 0.666667 0.000000
vt 0.666667 0.250000
vt 0.333333 0.250000
vt 0.000000 0.500000
vt 0.333333 0.500000
vt 0.000000 0.750000
s 0
usemtl Material
f 1/1/1 5/2/1 7/3/1 3/4/1
f 4/5/2 3/4/2 7/6/2 8/7/2
f 8/8/3 7/9/3 5/10/3 6/11/3
f 6/12/4 2/13/4 4/5/4 8/14/4
f 2/13/5 1/1/5 3/4/5 4/5/5
f 6/11/6 5/10/6 1/1/6 2/13/6
--------------------------------------------------------------------------------
/texture/ar_marker_box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/texture/ar_marker_box.png
--------------------------------------------------------------------------------
/texture/line_trace_ground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/texture/line_trace_ground.png
--------------------------------------------------------------------------------
/texture/random_line_trace_ground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akinami3/PybulletRobotics/671c1b2d3cef759b57412ac30de43d566d96f0f9/texture/random_line_trace_ground.png
--------------------------------------------------------------------------------
/urdf/ar_marker_box.urdf:
--------------------------------------------------------------------------------
(29-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/horizontal_box.urdf:
--------------------------------------------------------------------------------
(45-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/pin_hole_box.urdf:
--------------------------------------------------------------------------------
(118-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/plane_box.urdf:
--------------------------------------------------------------------------------
(46-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/simple2d_arm.urdf:
--------------------------------------------------------------------------------
(225-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/simple6d_arm_with_force_sensor.urdf:
--------------------------------------------------------------------------------
(443-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/simple_box.urdf:
--------------------------------------------------------------------------------
(45-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/simple_camera.urdf:
--------------------------------------------------------------------------------
(52-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/urdf/simple_two_wheel_car.urdf:
--------------------------------------------------------------------------------
(374-line URDF; the XML markup was stripped during extraction and is not recoverable here)
--------------------------------------------------------------------------------
/utility/generate_ar_marker.py:
--------------------------------------------------------------------------------
import cv2
import numpy as np

# Define the aruco dictionary (legacy API; opencv-contrib-python < 4.7)
aruco_dict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_4X4_50)

# Generate a marker with ID 0
marker_id = 0
marker_size = 1000  # marker size in pixels
marker_img = np.zeros((marker_size, marker_size), dtype=np.uint8)
marker_img = cv2.aruco.drawMarker(aruco_dict, marker_id, marker_size, marker_img, 1)

# Pixel count that makes the marker 10 cm wide (assuming 100 DPI)
dpi = 100
cm_to_inch = 2.54
marker_size_cm = 10
marker_size_inch = marker_size_cm / cm_to_inch
marker_size_pixel = int(dpi * marker_size_inch)

# Margin size: 20% of the marker size (= 2 cm)
margin_cm = marker_size_cm * 0.2
margin_inch = margin_cm / cm_to_inch
margin_pixel = int(dpi * margin_inch)

# Side length of one cube face (marker plus margin)
box_size_cm = margin_cm + marker_size_cm
box_size_inch = box_size_cm / cm_to_inch
box_size_pixel = int(dpi * box_size_inch)

# Create a white canvas for the unfolded cube: 3 faces wide, 4 faces tall,
# matching the UV layout of ar_marker_box.obj (NumPy shape is (rows, cols))
canvas_height = 4 * box_size_pixel
canvas_width = 3 * box_size_pixel
canvas = 255 * np.ones((canvas_height, canvas_width), dtype=np.uint8)

# Resize the marker and surround it with a white margin
resized_marker = cv2.resize(marker_img, (marker_size_pixel, marker_size_pixel))
marker_with_margin = 255 * np.ones((box_size_pixel, box_size_pixel), dtype=np.uint8)
start_point = (box_size_pixel - marker_size_pixel) // 2
marker_with_margin[start_point:start_point + marker_size_pixel,
                   start_point:start_point + marker_size_pixel] = resized_marker

# Place the marker on the central face of the unfolded cube
canvas[box_size_pixel:2 * box_size_pixel, box_size_pixel:2 * box_size_pixel] = marker_with_margin

# Save the image
output_file = '../texture/ar_marker_box.png'
cv2.imwrite(output_file, canvas)

print(f"Saved the unfolded cube texture to {output_file}.")
--------------------------------------------------------------------------------
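The texture generated above pairs with obj/ar_marker_box.obj and is exercised by the ar_marker_detect notebooks under PybulletBasic. As a quick sanity check that the saved PNG contains a readable marker, it can be run back through the detector. This is a minimal sketch, not part of the repository, using the same legacy cv2.aruco API (opencv-contrib-python < 4.7) as generate_ar_marker.py; the input path simply mirrors that script's output path:

import cv2

# Load the unfolded-cube texture produced by generate_ar_marker.py
img = cv2.imread('../texture/ar_marker_box.png', cv2.IMREAD_GRAYSCALE)

# Use the same dictionary the marker was drawn from
aruco_dict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_4X4_50)
params = cv2.aruco.DetectorParameters_create()

# Detect markers; ids is None when nothing is found
corners, ids, _ = cv2.aruco.detectMarkers(img, aruco_dict, parameters=params)
if ids is not None:
    print(f"Detected marker IDs: {ids.flatten().tolist()}")  # expected: [0]
else:
    print("No markers detected")

On OpenCV 4.7 and later the equivalents are cv2.aruco.getPredefinedDictionary, cv2.aruco.DetectorParameters(), and cv2.aruco.ArucoDetector(aruco_dict, params).detectMarkers(img), since the module-level Dictionary_get and detectMarkers were removed.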