├── .gitignore ├── LICENSE.txt ├── README.md ├── examples ├── 7_axis.py ├── analytical_inv.py ├── delta_trajectory.py ├── delta_workspace.py ├── forward.py ├── inverse.py └── trajectory.py ├── pics ├── 7_axis.gif ├── analytical_inv.gif ├── delta_params.png ├── delta_trajectory.gif ├── delta_workspace.gif ├── forward.png ├── inverse.png └── trajectory.gif ├── setup.cft ├── setup.py └── visual_kinematics ├── Frame.py ├── Robot.py ├── RobotDelta.py ├── RobotSerial.py ├── RobotTrajectory.py ├── __init__.py └── utility.py /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | e_* 3 | e_*/ 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | pip-wheel-metadata/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 99 | __pypackages__/ 100 | 101 | # Celery stuff 102 | celerybeat-schedule 103 | celerybeat.pid 104 | 105 | # SageMath parsed files 106 | *.sage.py 107 | 108 | # Environments 109 | .env 110 | .venv 111 | env/ 112 | venv/ 113 | ENV/ 114 | env.bak/ 115 | venv.bak/ 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mkdocs documentation 125 | /site 126 | 127 | # mypy 128 | .mypy_cache/ 129 | .dmypy.json 130 | dmypy.json 131 | 132 | # Pyre type checker 133 | .pyre/ 134 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Yue 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Visual-Kinematics 2 | 3 | This is a super easy-to-use and helpful Python package for calculating robot kinematics and visualizing trajectories in just a few lines of code. 4 | 5 | You don't have to deal with vector and matrix algebra or inverse kinematics. As long as you have the robot's D-H parameters, you are good to go. 6 | 7 | If you are unfamiliar with D-H parameters, please refer to [this article](https://en.wikipedia.org/wiki/Denavit%E2%80%93Hartenberg_parameters). 8 | 9 | # Install via pip 10 | 11 | ``` 12 | pip3 install visual-kinematics 13 | ``` 14 | 15 | # Explanation of example codes 16 | 17 | ## forward.py 18 | 19 | ```python 20 | dh_params = np.array([[0.163, 0., 0.5 * pi, 0.], 21 | [0., 0.632, pi, 0.5 * pi], 22 | [0., 0.6005, pi, 0.], 23 | [0.2013, 0., -0.5 * pi, -0.5 * pi], 24 | [0.1025, 0., 0.5 * pi, 0.], 25 | [0.094, 0., 0., 0.]]) 26 | robot = RobotSerial(dh_params) 27 | ``` 28 | 29 | To initialize an instance of RobotSerial, the DH parameters need to be provided. They should be given as an **n\*4** matrix, where **n** is the number of axes the robot has, most commonly six. 30 | 31 | The matrix should be in the following format: 32 | 33 | | d | a | alpha | theta | 34 | | :---: | :---: | :---: | :---: | 35 | | x | x | x | x | 36 | | x | x | x | x | 37 | | ... | ... | ... | ... | 38 | 39 | In this case, we use the DH parameters of [Aubo-i10](https://aubo-robotics.com/products/aubo-i10/).
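By default, the matrix is interpreted using the standard DH convention. The RobotSerial constructor (see visual_kinematics/RobotSerial.py) also accepts modified (Craig) DH parameters through its *dh_type* argument; a minimal sketch reusing the matrix above:

```python
# same d | a | alpha | theta layout as above, interpreted as modified DH parameters
robot_modified = RobotSerial(dh_params, dh_type="modified")
```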
40 | 41 | ```python 42 | theta = np.array([0., 0., -0.25 * pi, 0., 0., 0.]) 43 | f = robot.forward(theta) 44 | ``` 45 | 46 | To calculate the forward kinematics, we need to specify the 6 axis angles. The function returns the end frame of the robot. 47 | 48 | You can also get the end frame by calling the Robot's property *end_frame*: 49 | ```python 50 | robot.end_frame 51 | ``` 52 | 53 | From the Frame object we can easily get the translational part and the rotational part in different formats (rotation matrix, Euler angles, angle-axis and quaternion). 54 | 55 | ```python 56 | print("-------forward-------") 57 | print("end frame t_4_4:") 58 | print(f.t_4_4) 59 | print("end frame xyz:") 60 | print(f.t_3_1.reshape([3, ])) 61 | print("end frame abc:") 62 | print(f.euler_3) 63 | print("end frame rotational matrix:") 64 | print(f.r_3_3) 65 | print("end frame quaternion:") 66 | print(f.q_4) 67 | print("end frame angle-axis:") 68 | print(f.r_3) 69 | ``` 70 | Result: 71 | > -------forward------- 72 | end frame t_4_4: 73 | [[ 0.707 -0.707 -0. -0.497] 74 | [-0. 0. -1. -0.295] 75 | [ 0.707 0.707 -0. 1.292] 76 | [ 0. 0. 0. 1. ]] 77 | end frame xyz: 78 | [-0.497 -0.295 1.292] 79 | end frame abc: 80 | [-0. -0.785 1.571] 81 | end frame rotational matrix: 82 | [[ 0.707 -0.707 -0. ] 83 | [-0. 0. -1. ] 84 | [ 0.707 0.707 -0. ]] 85 | end frame quaternion: 86 | [ 0.653 -0.271 0.271 0.653] 87 | end frame angle-axis: 88 | [ 1.482 -0.614 0.614] 89 | 90 | And we can visualize the robot with just: 91 | 92 | ```python 93 | robot.show() 94 | ``` 95 | 96 | And the result: 97 | 98 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/forward.png?raw=true) 99 | 100 | ## inverse.py 101 | 102 | Visual-Kinematics uses a numerical method to solve inverse kinematics, so you don't have to derive the analytical solution by hand. However, if you have solved it for your robot and want to plug it in, a later example will show how to do that. After all, an analytical solution runs much faster and can be more reliable. 103 | 104 | To calculate the axis angles, an end frame needs to be provided. It can be constructed from various representations (translation vector + rotation matrix, Euler angles, angle-axis or quaternion). Here we use ZYX Euler angles (intrinsic rotations). 105 | 106 | ```python 107 | xyz = np.array([[0.28127], [0.], [1.13182]]) 108 | abc = np.array([0.5 * pi, 0., pi]) 109 | end = Frame.from_euler_3(abc, xyz) 110 | robot.inverse(end) 111 | ``` 112 | 113 | The robot is then already configured to the desired pose. To access the axis values, we use the property *axis_values*. 114 | 115 | ```python 116 | print("inverse is successful: {0}".format(robot.is_reachable_inverse)) 117 | print("axis values: \n{0}".format(robot.axis_values)) 118 | robot.show() 119 | ``` 120 | 121 | And the result: 122 | 123 | >inverse is successful: True 124 | axis values: 125 | [ 2.344 -0.422 -1.049 0.943 -1.571 -0.798] 126 | 127 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/inverse.png?raw=true) 128 | 129 | ## trajectory.py 130 | 131 | Apart from solving kinematics for a single frame, Visual-Kinematics can also be used for trajectory visualization. 132 | 133 | To do that, we need to specify a list of frames along the trajectory.
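Each waypoint is an ordinary Frame, so it can be built from any of the representations mentioned above. For instance, a minimal sketch of a single, purely hypothetical waypoint constructed from a quaternion instead of Euler angles (the position values here are arbitrary):

```python
import numpy as np
from visual_kinematics.Frame import Frame

# identity orientation given as a quaternion [x, y, z, w], arbitrary xyz position
waypoint = Frame.from_q_4(np.array([0., 0., 0., 1.]), np.array([[0.4], [0.], [0.8]]))
```

The example itself sticks to ZYX Euler angles: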
134 | 135 | ```python 136 | frames = [Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.28127], [0.], [1.13182]])), 137 | Frame.from_euler_3(np.array([0.25 * pi, 0., 0.75 * pi]), np.array([[0.48127], [0.], [1.13182]])), 138 | Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.48127], [0.], [0.63182]]))] 139 | ``` 140 | 141 | In this case, we define the trajectory using 3 frames. A RobotTrajectory object is then constructed from these frames. 142 | 143 | ```python 144 | trajectory = RobotTrajectory(robot, frames) 145 | ``` 146 | 147 | To visualize the trajectory, just: 148 | 149 | ```python 150 | trajectory.show(motion="p2p") 151 | ``` 152 | 153 | The motion type can be either "p2p" or "lin", which stand for point-to-point movement and linear movement respectively. The first interpolates the trajectory in joint space, while the second interpolates in Cartesian space. 154 | 155 | (Note: Currently it doesn't support specifying the motion type for each segment. Future development will focus on that.) 156 | 157 | The result: 158 | 159 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/trajectory.gif?raw=true) 160 | 161 | ## analytical_inv.py 162 | 163 | When defining the robot, we can supply an analytical solution for solving its inverse kinematics. 164 | 165 | ```python 166 | def aubo10_inv(dh_params, f): 167 | # the analytical inverse solution 168 | # ... 169 | return is_reachable, theta 170 | 171 | robot = RobotSerial(dh_params, analytical_inv=aubo10_inv) 172 | ``` 173 | 174 | If you look at the code, the function ***aubo10_inv*** in this case is quite complicated, and we won't go into details about how it is derived. Just make sure that it takes in the ***n\*4*** matrix containing all the DH parameters as well as an end frame, and returns a reachability flag together with a 1d-array representing the n axis values. 175 | 176 | This time let's try the linear movement: 177 | 178 | ```python 179 | trajectory.show(motion="lin") 180 | ``` 181 | 182 | Result: 183 | 184 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/analytical_inv.gif?raw=true) 185 | 186 | ## 7_axis.py 187 | 188 | Working with seven-axis robots is pretty much the same. The only difference is that the DH parameter matrix becomes a ***7\*4*** matrix instead of a ***6\*4*** one. 189 | 190 | Here we use the DH parameters of [KUKA LBR iiwa 7 R800](https://www.kuka.com/en-au/products/robotics-systems/industrial-robots/lbr-iiwa). 191 | 192 | ```python 193 | dh_params = np.array([[0.34, 0., -pi / 2, 0.], 194 | [0., 0., pi / 2, 0.], 195 | [0.4, 0., -pi / 2, 0.], 196 | [0., 0., pi / 2, 0.], 197 | [0.4, 0., -pi / 2, 0.], 198 | [0., 0., pi / 2, 0.], 199 | [0.126, 0., 0., 0.]]) 200 | ``` 201 | 202 | The result: 203 | 204 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/7_axis.gif?raw=true) 205 | 206 | (Note: You see only 4 red dots, because the frames of the 1st and 2nd axes share the same origin, as do the 3rd and 4th, and the 5th and 6th.) 207 | 208 | ## delta_trajectory.py 209 | 210 | Since version 0.7.0, the package supports not only serial robots like the 6R and 7R arms shown above, but also one very popular type of parallel robot: the delta robot. 211 | 212 | It has to be defined using 4 essential parameters: r1, r2, l1 and l2. 213 | 214 | ```python 215 | robot = RobotDelta(np.array([0.16, 0.06, 0.30, 0.50])) # r1 r2 l1 l2 216 | ``` 217 | 218 | The following figure shows how they are defined.
219 | 220 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/delta_params.png?raw=true) 221 | 222 | Visualization of the trajectory is quite similar to that of serial robots. 223 | 224 | ```python 225 | frames = [Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[0.], [0.], [-0.6]])), 226 | Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[0.0], [0.], [-0.45]])), 227 | Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[-0.2], [-0.2], [-0.45]])), 228 | Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[-0.2], [-0.2], [-0.6]]))] 229 | 230 | trajectory = RobotTrajectory(robot, frames) 231 | trajectory.show(motion="p2p") 232 | ``` 233 | 234 | Result: 235 | 236 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/delta_trajectory.gif?raw=true) 237 | 238 | ## delta_workspace.py 239 | 240 | Since version 0.7.0, the package also supports visualization of the workspace of delta robots. 241 | 242 | ```python 243 | robot = RobotDelta(np.array([0.16, 0.06, 0.30, 0.50])) 244 | robot.ws_lim = np.array([[-pi/12, pi/2]]*3) 245 | robot.ws_division = 10 246 | robot.show(ws=True) 247 | ``` 248 | 249 | RobotDelta.ws_lim is a 3*2 matrix indicating the lower and upper bounds of the three axis values. RobotDelta.ws_division indicates how many points will be sampled per axis. (For instance, if it is set to 10, then in total 10 ^ 3 = 1000 points will be drawn.) 250 | 251 | Result: 252 | 253 | ![](https://github.com/dbddqy/visual_kinematics/blob/master/pics/delta_workspace.gif?raw=true) 254 | 255 | -------------------------------------------------------------------------------- /examples/7_axis.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotSerial import * 4 | from visual_kinematics.RobotTrajectory import * 5 | import numpy as np 6 | from math import pi 7 | 8 | 9 | def main(): 10 | np.set_printoptions(precision=3, suppress=True) 11 | 12 | dh_params = np.array([[0.34, 0., -pi / 2, 0.], 13 | [0., 0., pi / 2, 0.], 14 | [0.4, 0., -pi / 2, 0.], 15 | [0., 0., pi / 2, 0.], 16 | [0.4, 0., -pi / 2, 0.], 17 | [0., 0., pi / 2, 0.], 18 | [0.126, 0., 0., 0.]]) 19 | 20 | robot = RobotSerial(dh_params) 21 | 22 | # ===================================== 23 | # inverse 24 | # ===================================== 25 | 26 | frames = [Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.28127], [0.], [0.63182]])), 27 | Frame.from_euler_3(np.array([0.25 * pi, 0., 0.75 * pi]), np.array([[0.48127], [0.], [0.63182]])), 28 | Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.48127], [0.], [0.63182]])), 29 | Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.48127], [0.], [0.23182]]))] 30 | trajectory = RobotTrajectory(robot, frames) 31 | trajectory.show(motion="p2p") 32 | 33 | 34 | if __name__ == "__main__": 35 | main() 36 | -------------------------------------------------------------------------------- /examples/analytical_inv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotSerial import * 4 | from visual_kinematics.RobotTrajectory import * 5 | import numpy as np 6 | from math import pi, sqrt, sin, cos, atan2 7 | 8 | 9 | def main(): 10 | np.set_printoptions(precision=3, suppress=True) 11 | 12 | dh_params = np.array([[0.163, 0., 0.5 * pi, 0.], 13 | [0., 0.632, pi, 0.5 * pi], 14 | [0., 0.6005, pi, 0.], 15 | [0.2013, 0., -0.5 * pi, -0.5 * pi], 16 | [0.1025, 0., 0.5 * pi, 0.], 17 | [0.094,
0., 0., 0.]]) 18 | 19 | # return (is_reachable, theta[n, ]) 20 | def aubo10_inv(dh_params, f): 21 | # save all 8 sets of solutions 22 | theta_all = np.zeros([8, 6]) 23 | for i in range(8): 24 | A1 = dh_params[5, 0] * f[1, 2] - f[1, 3] 25 | B1 = f[0, 3] - dh_params[5, 0] * f[0, 2] 26 | C1 = dh_params[3, 0] 27 | # theta_0 : 2 sets of solutions 28 | if i < 4: 29 | theta_all[i, 0] = atan2(C1, sqrt(A1 * A1 + B1 * B1 - C1 * C1)) - atan2(A1, B1) 30 | else: 31 | theta_all[i, 0] = atan2(C1, -sqrt(A1 * A1 + B1 * B1 - C1 * C1)) - atan2(A1, B1) 32 | 33 | # theta_4 : 2 sets of solutions 34 | b = f[0, 2] * sin(theta_all[i, 0]) - f[1, 2] * cos(theta_all[i, 0]) 35 | if i % 4 == 0 or i % 4 == 1: 36 | theta_all[i, 4] = atan2(sqrt(1 - b * b), b) 37 | else: 38 | theta_all[i, 4] = atan2(-sqrt(1 - b * b), b) 39 | # check singularity 40 | if abs(sin(theta_all[i, 4])) < 1e-6: 41 | print("Pose can not be reached!!") 42 | return False, np.zeros([6, ]) 43 | 44 | # theta_5 45 | A6 = (f[0, 1] * sin(theta_all[i, 0]) - f[1, 1] * cos(theta_all[i, 0])) / sin(theta_all[i, 4]) 46 | B6 = (f[1, 0] * cos(theta_all[i, 0]) - f[0, 0] * sin(theta_all[i, 0])) / sin(theta_all[i, 4]) 47 | theta_all[i, 5] = atan2(A6, B6) 48 | 49 | # theta_1 : 2 sets of solutions 50 | A234 = f[2, 2] / sin(theta_all[i, 4]) 51 | B234 = (f[0, 2] * cos(theta_all[i, 0]) + f[1, 2] * sin(theta_all[i, 0])) / sin(theta_all[i, 4]) 52 | M = dh_params[4, 0] * A234 - dh_params[5, 0] * sin(theta_all[i, 4]) * B234 + f[0, 3] * cos( 53 | theta_all[i, 0]) + f[1, 3] * sin(theta_all[i, 0]) 54 | N = -dh_params[4, 0] * B234 - dh_params[5, 0] * sin(theta_all[i, 4]) * A234 - dh_params[0, 0] + f[2, 3] 55 | L = (M * M + N * N + dh_params[1, 1] * dh_params[1, 1] - dh_params[2, 1] * dh_params[2, 1]) / ( 56 | 2 * dh_params[1, 1]) 57 | if i % 2 == 0: 58 | theta_all[i, 1] = atan2(N, M) - atan2(L, sqrt(M * M + N * N - L * L)) 59 | else: 60 | theta_all[i, 1] = atan2(N, M) - atan2(L, -sqrt(M * M + N * N - L * L)) 61 | 62 | # theta_2 and theta_3 63 | A23 = (-M - dh_params[1, 1] * sin(theta_all[i, 1])) / dh_params[2, 1] 64 | B23 = (N - dh_params[1, 1] * cos(theta_all[i, 1])) / dh_params[2, 1] 65 | theta_all[i, 2] = theta_all[i, 1] - atan2(A23, B23) 66 | theta_all[i, 3] = atan2(A234, B234) - atan2(A23, B23) 67 | 68 | # select the best solution 69 | diff_sum_min = 1e+5 70 | index = 0 71 | for i in range(8): 72 | diff_sum = 0 73 | for j in range(6): 74 | diff = theta_all[i, j] - dh_params[j, 3] 75 | while diff < -pi: 76 | diff += 2. * pi 77 | while diff > pi: 78 | diff -= 2. 
* pi 79 | diff_sum += abs(diff) 80 | if diff_sum < diff_sum_min: 81 | diff_sum_min = diff_sum 82 | index = i 83 | return True, theta_all[index] 84 | 85 | robot = RobotSerial(dh_params, analytical_inv=aubo10_inv) 86 | 87 | # ===================================== 88 | # trajectory 89 | # ===================================== 90 | 91 | frames = [Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.28127], [0.], [1.13182]])), 92 | Frame.from_euler_3(np.array([0.25 * pi, 0., 0.75 * pi]), np.array([[0.48127], [0.], [1.13182]])), 93 | Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.48127], [0.], [0.63182]]))] 94 | trajectory = RobotTrajectory(robot, frames) 95 | trajectory.show(motion="lin") 96 | 97 | 98 | if __name__ == "__main__": 99 | main() 100 | -------------------------------------------------------------------------------- /examples/delta_trajectory.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotDelta import * 4 | from visual_kinematics.RobotTrajectory import * 5 | import numpy as np 6 | 7 | 8 | def main(): 9 | np.set_printoptions(precision=3, suppress=True) 10 | 11 | robot = RobotDelta(np.array([0.16, 0.06, 0.30, 0.50])) # r1 r2 l1 l2 12 | 13 | # ===================================== 14 | # trajectory 15 | # ===================================== 16 | 17 | frames = [Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[0.], [0.], [-0.6]])), 18 | Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[0.0], [0.], [-0.45]])), 19 | Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[-0.2], [-0.2], [-0.45]])), 20 | Frame.from_euler_3(np.array([0., 0., 0.]), np.array([[-0.2], [-0.2], [-0.6]]))] 21 | 22 | trajectory = RobotTrajectory(robot, frames) 23 | trajectory.show(motion="p2p") 24 | 25 | 26 | if __name__ == "__main__": 27 | main() 28 | -------------------------------------------------------------------------------- /examples/delta_workspace.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotDelta import * 4 | from math import pi 5 | 6 | 7 | def main(): 8 | np.set_printoptions(precision=3, suppress=True) 9 | 10 | robot = RobotDelta(np.array([0.16, 0.06, 0.30, 0.50])) 11 | robot.ws_lim = np.array([[-pi/12, pi/2]]*3) 12 | robot.ws_division = 10 13 | robot.show(ws=True) 14 | 15 | 16 | if __name__ == "__main__": 17 | main() 18 | -------------------------------------------------------------------------------- /examples/forward.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotSerial import * 4 | import numpy as np 5 | from math import pi 6 | 7 | 8 | def main(): 9 | np.set_printoptions(precision=3, suppress=True) 10 | 11 | dh_params = np.array([[0.163, 0., 0.5 * pi, 0.], 12 | [0., 0.632, pi, 0.5 * pi], 13 | [0., 0.6005, pi, 0.], 14 | [0.2013, 0., -0.5 * pi, -0.5 * pi], 15 | [0.1025, 0., 0.5 * pi, 0.], 16 | [0.094, 0., 0., 0.]]) 17 | robot = RobotSerial(dh_params) 18 | 19 | # ===================================== 20 | # forward 21 | # ===================================== 22 | 23 | theta = np.array([0., 0., -0.25 * pi, 0., 0., 0.]) 24 | f = robot.forward(theta) 25 | 26 | print("-------forward-------") 27 | print("end frame t_4_4:") 28 | print(f.t_4_4) 29 | print("end frame xyz:") 30 | print(f.t_3_1.reshape([3, ])) 31 | print("end frame abc:") 32 | print(f.euler_3) 33 | print("end frame 
rotational matrix:") 34 | print(f.r_3_3) 35 | print("end frame quaternion:") 36 | print(f.q_4) 37 | print("end frame angle-axis:") 38 | print(f.r_3) 39 | 40 | robot.show() 41 | 42 | 43 | if __name__ == "__main__": 44 | main() 45 | -------------------------------------------------------------------------------- /examples/inverse.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotSerial import * 4 | import numpy as np 5 | from math import pi 6 | 7 | 8 | def main(): 9 | np.set_printoptions(precision=3, suppress=True) 10 | 11 | dh_params = np.array([[0.163, 0., 0.5 * pi, 0.], 12 | [0., 0.632, pi, 0.5 * pi], 13 | [0., 0.6005, pi, 0.], 14 | [0.2013, 0., -0.5 * pi, -0.5 * pi], 15 | [0.1025, 0., 0.5 * pi, 0.], 16 | [0.094, 0., 0., 0.]]) 17 | 18 | robot = RobotSerial(dh_params) 19 | 20 | # ===================================== 21 | # inverse 22 | # ===================================== 23 | 24 | xyz = np.array([[0.28127], [0.], [1.13182]]) 25 | abc = np.array([0.5 * pi, 0., pi]) 26 | end = Frame.from_euler_3(abc, xyz) 27 | robot.inverse(end) 28 | 29 | print("inverse is successful: {0}".format(robot.is_reachable_inverse)) 30 | print("axis values: \n{0}".format(robot.axis_values)) 31 | robot.show() 32 | 33 | # example of unsuccessful inverse kinematics 34 | xyz = np.array([[2.2], [0.], [1.9]]) 35 | end = Frame.from_euler_3(abc, xyz) 36 | robot.inverse(end) 37 | 38 | print("inverse is successful: {0}".format(robot.is_reachable_inverse)) 39 | 40 | 41 | if __name__ == "__main__": 42 | main() 43 | -------------------------------------------------------------------------------- /examples/trajectory.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from visual_kinematics.RobotSerial import * 4 | from visual_kinematics.RobotTrajectory import * 5 | import numpy as np 6 | from math import pi 7 | 8 | 9 | def main(): 10 | np.set_printoptions(precision=3, suppress=True) 11 | 12 | dh_params = np.array([[0.163, 0., 0.5 * pi, 0.], 13 | [0., 0.632, pi, 0.5 * pi], 14 | [0., 0.6005, pi, 0.], 15 | [0.2013, 0., -0.5 * pi, -0.5 * pi], 16 | [0.1025, 0., 0.5 * pi, 0.], 17 | [0.094, 0., 0., 0.]]) 18 | 19 | robot = RobotSerial(dh_params) 20 | 21 | # ===================================== 22 | # trajectory 23 | # ===================================== 24 | 25 | frames = [Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.28127], [0.], [1.13182]])), 26 | Frame.from_euler_3(np.array([0.25 * pi, 0., 0.75 * pi]), np.array([[0.48127], [0.], [1.13182]])), 27 | Frame.from_euler_3(np.array([0.5 * pi, 0., pi]), np.array([[0.48127], [0.], [0.63182]]))] 28 | time_points = np.array([0., 6., 10.]) 29 | trajectory = RobotTrajectory(robot, frames, time_points) 30 | trajectory.show(motion="p2p") 31 | 32 | 33 | if __name__ == "__main__": 34 | main() 35 | -------------------------------------------------------------------------------- /pics/7_axis.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/7_axis.gif -------------------------------------------------------------------------------- /pics/analytical_inv.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/analytical_inv.gif 
-------------------------------------------------------------------------------- /pics/delta_params.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/delta_params.png -------------------------------------------------------------------------------- /pics/delta_trajectory.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/delta_trajectory.gif -------------------------------------------------------------------------------- /pics/delta_workspace.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/delta_workspace.gif -------------------------------------------------------------------------------- /pics/forward.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/forward.png -------------------------------------------------------------------------------- /pics/inverse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/inverse.png -------------------------------------------------------------------------------- /pics/trajectory.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbddqy/visual_kinematics/55cb956869ac805b0caf629c40216e3149b3fd1a/pics/trajectory.gif -------------------------------------------------------------------------------- /setup.cft: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="visual_kinematics", 8 | version="0.2.1", 9 | author="Yue QI", 10 | author_email="dbddqy@outlook.com", 11 | description="A package for calculating robot kinematics and visualizing trajectory", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/dbddqy/visual_kinematics", 15 | packages=setuptools.find_packages(), 16 | keywords = ['robotics', ' kinematics', 'inverse kinematics', 'trajectory planning', 'visualization'], 17 | install_requires=[ 18 | 'numpy', 19 | 'scipy', 20 | 'matplotlib', 21 | ], 22 | classifiers=[ 23 | "Programming Language :: Python :: 3", 24 | "License :: OSI Approved :: MIT License", 25 | "Operating System :: OS Independent", 26 | ], 27 | python_requires='>=3.5', 28 | ) 29 | -------------------------------------------------------------------------------- /visual_kinematics/Frame.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy.spatial.transform import Rotation 3 | from math import sin as s, cos as c 4 | 5 | 6 | class Frame: 7 | def __init__(self, t_4_4): 8 | self.t_4_4 = t_4_4 9 | 10 | def __mul__(self, other): 
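# compose this frame with another Frame (or a raw 4x4 ndarray) by multiplying the homogeneous transforms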
11 | if isinstance(other, Frame): 12 | return Frame(np.dot(self.t_4_4, other.t_4_4)) 13 | return Frame(np.dot(self.t_4_4, other)) 14 | 15 | def __getitem__(self, key): 16 | return self.t_4_4[key] 17 | 18 | def __str__(self): 19 | return self.t_4_4.__str__() 20 | 21 | # inverse of the frame 22 | @property 23 | def inv(self): 24 | t_4_4_new = self.t_4_4.copy() 25 | t_4_4_new[0:3, 3:4] = -self.t_4_4[0:3, 0:3].T.dot(self.t_4_4[0:3, 3:4]) 26 | t_4_4_new[0:3, 0:3] = self.t_4_4[0:3, 0:3].T 27 | return Frame(t_4_4_new) 28 | 29 | @property 30 | def copy(self): 31 | return Frame(self.t_4_4) 32 | 33 | # z axis vector of the frame 34 | @property 35 | def z_3_1(self): 36 | return self.t_4_4[0:3, 2:3] 37 | 38 | # translation vector of the frame 39 | @property 40 | def t_3_1(self): 41 | return self.t_4_4[0:3, 3:4] 42 | 43 | # rotation matrix of the frame 44 | @property 45 | def r_3_3(self): 46 | return self.t_4_4[0:3, 0:3] 47 | 48 | # rotation in quaternion format 49 | @property 50 | def q_4(self): 51 | return Rotation.from_matrix(self.r_3_3).as_quat() 52 | 53 | # rotation in angle-axis format 54 | @property 55 | def r_3(self): 56 | return Rotation.from_matrix(self.r_3_3).as_rotvec() 57 | 58 | # rotation in ZYX euler angel format 59 | @property 60 | def euler_3(self): 61 | return Rotation.from_matrix(self.r_3_3).as_euler("ZYX", degrees=False) 62 | 63 | # construct a frame using rotation matrix and translation vector 64 | @staticmethod 65 | def from_r_3_3(r_3_3, t_3_1): 66 | t_4_4 = np.eye(4) 67 | t_4_4[0:3, 0:3] = r_3_3 68 | t_4_4[0:3, 3:4] = t_3_1 69 | return Frame(t_4_4) 70 | 71 | # construct a frame using quaternion and translation vector 72 | @staticmethod 73 | def from_q_4(q_4, t_3_1): 74 | r_3_3 = Rotation.from_quat(q_4).as_matrix() 75 | return Frame.from_r_3_3(r_3_3, t_3_1) 76 | 77 | # construct a frame using angle-axis and translation vector 78 | @staticmethod 79 | def from_r_3(r_3, t_3_1): 80 | r_3_3 = Rotation.from_rotvec(r_3).as_matrix() 81 | return Frame.from_r_3_3(r_3_3, t_3_1) 82 | 83 | # construct a frame using ZYX euler angle and translation vector 84 | @staticmethod 85 | def from_euler_3(euler_3, t_3_1): 86 | r_3_3 = Rotation.from_euler("ZYX", euler_3, degrees=False).as_matrix() 87 | return Frame.from_r_3_3(r_3_3, t_3_1) 88 | 89 | # construct a frame using dh parameters 90 | @staticmethod 91 | def from_dh(dh_params): 92 | d, a, alpha, theta = dh_params 93 | return Frame(np.array([[c(theta), -s(theta) * c(alpha), s(theta) * s(alpha), a * c(theta)], 94 | [s(theta), c(theta) * c(alpha), -c(theta) * s(alpha), a * s(theta)], 95 | [0., s(alpha), c(alpha), d], 96 | [0., 0., 0., 1.]])) 97 | 98 | # construct a frame using modified dh parameters 99 | # for the difference between two DH parameter definitions 100 | # https://en.wikipedia.org/wiki/Denavit%E2%80%93Hartenberg_parameters 101 | @staticmethod 102 | def from_dh_modified(dh_params): 103 | d, a, alpha, theta = dh_params 104 | return Frame(np.array([[c(theta), -s(theta), 0, a], 105 | [s(theta) * c(alpha), c(theta) * c(alpha), -s(alpha), -s(alpha) * d], 106 | [s(theta) * s(alpha), c(theta) * s(alpha), c(alpha), c(alpha) * d], 107 | [0., 0., 0., 1.]])) 108 | 109 | # construct an identity frame 110 | @staticmethod 111 | def i_4_4(): 112 | return Frame(np.eye(4)) 113 | 114 | # calculate the center distance to the other frame 115 | def distance_to(self, other): 116 | return np.linalg.norm(self.t_3_1 - other.t_3_1, ord=2) 117 | 118 | # calculate the rotated angle to the other frame 119 | def angle_to(self, other): 120 | return 
np.linalg.norm(Rotation.from_matrix(self.r_3_3.T.dot(other.r_3_3)).as_rotvec(), ord=2) 121 | -------------------------------------------------------------------------------- /visual_kinematics/Robot.py: -------------------------------------------------------------------------------- 1 | from visual_kinematics.Frame import * 2 | from numpy import pi 3 | from abc import abstractmethod 4 | import matplotlib.pyplot as plt 5 | 6 | 7 | class Robot(object): 8 | # ================== Definition and Kinematics 9 | # params: according to definition of specific robots 10 | # initial_offset: always [dim, ] 11 | # ws_lim: lower and upper bound of all axes [num_axis, 2] 12 | # ws_division: number of sample points of all axes 13 | # ================== 14 | def __init__(self, params, initial_offset, plot_xlim=[-0.5, 0.5], plot_ylim=[-0.5, 0.5], plot_zlim=[0.0, 1.0], 15 | ws_lim=None, ws_division=5): 16 | self.params = params 17 | self.initial_offset = initial_offset 18 | self.axis_values = np.zeros(initial_offset.shape, dtype=np.float64) 19 | # is_reachable_inverse must be set everytime when inverse kinematics is performed 20 | self.is_reachable_inverse = True 21 | # plot related 22 | self.plot_xlim = plot_xlim 23 | self.plot_ylim = plot_ylim 24 | self.plot_zlim = plot_zlim 25 | self.figure = plt.figure() 26 | self.ax = self.figure.add_subplot(111, projection="3d") 27 | # workspace related 28 | if ws_lim is None: 29 | self.ws_lim = np.array([[-pi, pi]]*self.num_axis) 30 | else: 31 | self.ws_lim = ws_lim 32 | self.ws_division = ws_division 33 | 34 | @property 35 | def num_axis(self): 36 | return self.initial_offset.shape[0] 37 | 38 | @property 39 | @abstractmethod 40 | def end_frame(self): 41 | pass 42 | 43 | # ================== Jacobian [num_axis, 6] 44 | @property 45 | @abstractmethod 46 | def jacobian(self): 47 | pass 48 | 49 | def forward(self, theta_x): 50 | self.axis_values = theta_x 51 | return self.end_frame 52 | 53 | @abstractmethod 54 | def inverse(self, end_frame): 55 | pass 56 | 57 | # ================== Workspace analysis 58 | # sample through all possible points 59 | # ================== 60 | def workspace(self): 61 | num_points = self.ws_division ** self.num_axis 62 | lower = self.ws_lim[:, 0] 63 | intervals = (self.ws_lim[:, 1] - self.ws_lim[:, 0]) / (self.ws_division - 1) 64 | points = np.zeros([num_points, 3]) 65 | axes_indices = np.zeros([self.num_axis, ], dtype=np.int32) 66 | for i in range(num_points): 67 | points[i] = self.forward(lower + axes_indices*intervals).t_3_1.flatten() 68 | axes_indices[0] += 1 69 | for check_index in range(self.num_axis): 70 | if axes_indices[check_index] >= self.ws_division: 71 | if check_index >= self.num_axis-1: 72 | break 73 | axes_indices[check_index] = 0 74 | axes_indices[check_index+1] += 1 75 | else: 76 | break 77 | return points 78 | 79 | # ================== plot related 80 | # ================== 81 | def plot_settings(self): 82 | self.ax.set_xlim(self.plot_xlim) 83 | self.ax.set_ylim(self.plot_ylim) 84 | self.ax.set_zlim(self.plot_zlim) 85 | self.ax.set_xlabel("x") 86 | self.ax.set_ylabel("y") 87 | self.ax.set_zlabel("z") 88 | 89 | @abstractmethod 90 | def draw(self): 91 | pass 92 | 93 | def draw_ws(self): 94 | self.plot_settings() 95 | points = self.workspace() 96 | self.ax.scatter(points[:, 0], points[:, 1], points[:, 2], c="red", marker="o") 97 | 98 | def show(self, body=True, ws=False): 99 | if body: 100 | self.draw() 101 | if ws: 102 | self.draw_ws() 103 | plt.show() 104 | 
-------------------------------------------------------------------------------- /visual_kinematics/RobotDelta.py: -------------------------------------------------------------------------------- 1 | from visual_kinematics.Robot import * 2 | from visual_kinematics.utility import simplify_angle 3 | from math import pi, atan2, sqrt 4 | 5 | import logging 6 | 7 | 8 | class RobotDelta(Robot): 9 | # params [4, ] [r1, r2, l1, l2] 10 | def __init__(self, params, plot_xlim=[-0.5, 0.5], plot_ylim=[-0.5, 0.5], plot_zlim=[-1.0, 0.0], 11 | ws_lim=None, ws_division=5): 12 | Robot.__init__(self, params, np.zeros([3, ]), plot_xlim, plot_ylim, plot_zlim, ws_lim, ws_division) 13 | self.is_reachable_forward = True 14 | 15 | # ================== Definition of r1, r2, l1, l2 16 | # r1 17 | # C ========== ------- 18 | # l1 // O \\ / theta 19 | # // \\ 20 | # B \\---D // 21 | # l2 \\ // 22 | # \\ P // 23 | # A ====== 24 | # r2 25 | # ================== 26 | @property 27 | def r1(self): 28 | return self.params[0] 29 | 30 | @property 31 | def r2(self): 32 | return self.params[1] 33 | 34 | @property 35 | def l1(self): 36 | return self.params[2] 37 | 38 | @property 39 | def l2(self): 40 | return self.params[3] 41 | 42 | # ================== Definition of phi [3, ] 43 | # \ 44 | # \ phi[1] = 2/3*pi 45 | # \ 46 | # ----------- phi[0] = 0 47 | # / 48 | # phi[2] = 4/3*pi / 49 | # / 50 | # ================== 51 | @property 52 | def phi(self): 53 | return np.array([0., 2.*pi/3., 4.*pi/3.]) 54 | 55 | # ================== Definition of oc [3, 3] 56 | # | oc1_x | oc2_x | oc3_x | 57 | # | oc1_y | oc2_y | oc3_y | 58 | # | oc1_z | oc2_z | oc3_z | 59 | # ================== 60 | @property 61 | def oc(self): 62 | oc = np.zeros([3, 3], dtype=np.float64) 63 | oc[0] = np.cos(self.phi) * self.r1 64 | oc[1] = np.sin(self.phi) * self.r1 65 | return oc 66 | 67 | # ================== Definition of cb [3, 3] 68 | # | cb1_x | cb2_x | cb3_x | 69 | # | cb1_y | cb2_y | cb3_y | 70 | # | cb1_z | cb2_z | cb3_z | 71 | # ================== 72 | @property 73 | def cb(self): 74 | cb = np.zeros([3, 3], dtype=np.float64) 75 | cb[0] = np.cos(self.phi) * np.cos(self.axis_values) * self.l1 76 | cb[1] = np.sin(self.phi) * np.cos(self.axis_values) * self.l1 77 | cb[2] = - np.sin(self.axis_values) * self.l1 78 | return cb 79 | 80 | # ================== Definition of ap [3, 3] 81 | # | ap1_x | ap2_x | ap3_x | 82 | # | ap1_y | ap2_y | ap3_y | 83 | # | ap1_z | ap2_z | ap3_z | 84 | # ================== 85 | @property 86 | def ap(self): 87 | ap = np.zeros([3, 3], dtype=np.float64) 88 | ap[0] = - np.cos(self.phi) * self.r2 89 | ap[1] = - np.sin(self.phi) * self.r2 90 | return ap 91 | 92 | # ================== bd = ap [3 ,3] 93 | @property 94 | def bd(self): 95 | return self.ap 96 | 97 | # ================== od [3 ,3] 98 | @property 99 | def od(self): 100 | return self.oc + self.cb + self.bd 101 | 102 | # ================== op (is_reachable, [3, 1]) 103 | @property 104 | def op(self): 105 | # solve for circle centroid 106 | od = self.od 107 | temp_p = np.ones([4, 4], dtype=np.float64) 108 | temp_p[1:4, 0:3] = od.T 109 | temp_a = np.zeros([3, 3]) 110 | temp_y = np.zeros([3, 1]) 111 | 112 | temp_a[0, 0] = np.linalg.det(np.delete(np.delete(temp_p, 0, axis=0), 0, axis=1)) 113 | temp_a[0, 1] = - np.linalg.det(np.delete(np.delete(temp_p, 0, axis=0), 1, axis=1)) 114 | temp_a[0, 2] = np.linalg.det(np.delete(np.delete(temp_p, 0, axis=0), 2, axis=1)) 115 | temp_y[0, 0] = - np.linalg.det(od) 116 | 117 | temp_a[1, 0] = 2 * (od[0, 1] - od[0, 0]) 118 | temp_a[1, 1] = 2 * (od[1, 
1] - od[1, 0]) 119 | temp_a[1, 2] = 2 * (od[2, 1] - od[2, 0]) 120 | temp_y[1, 0] = np.linalg.norm(od[:, 0]) ** 2 - np.linalg.norm(od[:, 1]) ** 2 121 | 122 | temp_a[2, 0] = 2 * (od[0, 2] - od[0, 0]) 123 | temp_a[2, 1] = 2 * (od[1, 2] - od[1, 0]) 124 | temp_a[2, 2] = 2 * (od[2, 2] - od[2, 0]) 125 | temp_y[2, 0] = np.linalg.norm(od[:, 0]) ** 2 - np.linalg.norm(od[:, 2]) ** 2 126 | 127 | oe = - np.linalg.inv(temp_a).dot(temp_y) 128 | r = np.linalg.norm(oe - od[:, 0:1]) 129 | if r > self.l2: 130 | logging.error("Pose cannot be reached!") 131 | self.is_reachable_inverse = False 132 | return oe # False : not reachable 133 | else: 134 | vec = np.cross(od[:, 2]-od[:, 1], od[:, 2]-od[:, 0]) 135 | vec = vec / np.linalg.norm(vec) * np.sqrt(self.l2*self.l2 - r*r) 136 | op = oe + vec.reshape([3, 1]) 137 | self.is_reachable_inverse = True 138 | return op # True : reachable 139 | 140 | @property 141 | def end_frame(self): 142 | return Frame.from_r_3_3(np.eye(3, dtype=np.float64), self.op) 143 | 144 | def inverse(self, end): 145 | op = end.t_3_1.flatten() 146 | # solve a*sinx + b*cosx = c 147 | a = 2 * self.l1 * op[2] 148 | theta = np.zeros([3, ], dtype=np.float64) 149 | self.is_reachable_inverse = True 150 | for i in range(3): 151 | oa = op - self.ap[:, i] 152 | b = 2 * self.l1 * (np.cos(self.phi[i]) * (self.r1 * np.cos(self.phi[i]) - oa[0]) 153 | + np.sin(self.phi[i]) * (self.r1 * np.sin(self.phi[i]) - oa[1])) 154 | c = self.l2 * self.l2 - self.l1 * self.l1 - np.linalg.norm(oa) ** 2 \ 155 | - self.r1 * self.r1 + 2 * self.r1 * (np.cos(self.phi[i]) * oa[0] + np.sin(self.phi[i]) * oa[1]) 156 | if a*a + b*b > c*c: 157 | theta[i] = simplify_angle(atan2(c, -sqrt(a*a+b*b-c*c)) - atan2(b, a)) 158 | else: 159 | self.is_reachable_inverse = False 160 | break 161 | if not self.is_reachable_inverse: 162 | logging.error("Pose cannot be reached!") 163 | self.forward(theta) 164 | return theta 165 | 166 | def draw(self): 167 | self.ax.clear() 168 | self.plot_settings() 169 | # get point coordinates 170 | oc = self.oc # [3, 3] 171 | ob = oc + self.cb # [3, 3] 172 | op = self.op # [3, 1] 173 | oa = op - self.ap # [3, 3] bd = ap 174 | # plot top frame 175 | x, y, z = oc[0].tolist(), oc[1].tolist(), oc[2].tolist() 176 | x.append(x[0]) 177 | y.append(y[0]) 178 | z.append(z[0]) 179 | self.ax.plot_wireframe(x, y, np.array([z])) 180 | # plot bottom frame 181 | x, y, z = oa[0].tolist(), oa[1].tolist(), oa[2].tolist() 182 | x.append(x[0]) 183 | y.append(y[0]) 184 | z.append(z[0]) 185 | self.ax.plot_wireframe(x, y, np.array([z])) 186 | # plot three arms 187 | for i in range(3): 188 | x = [oc[0, i], ob[0, i], oa[0, i]] 189 | y = [oc[1, i], ob[1, i], oa[1, i]] 190 | z = [oc[2, i], ob[2, i], oa[2, i]] 191 | self.ax.plot_wireframe(x, y, np.array([z])) 192 | # plot two dots 193 | self.ax.scatter([0.], [0.], [0.], c="red", marker="o") 194 | self.ax.scatter(op[0], op[1], op[2], c="red", marker="o") 195 | -------------------------------------------------------------------------------- /visual_kinematics/RobotSerial.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from visual_kinematics.Robot import * 4 | from visual_kinematics.utility import simplify_angles 5 | import logging 6 | 7 | 8 | class RobotSerial(Robot): 9 | # ================== 10 | # dh_params [n, 4] 11 | # | d | a | alpha | theta | 12 | # | x | x | x | x | 13 | # | x | x | x | x | 14 | # | ... | ... | ... | ... 
| 15 | # params [n, 3] (without theta) initial_offset [n, ] 16 | # | d | a | alpha | | theta | 17 | # | x | x | x | | x | 18 | # | x | x | x | | x | 19 | # | ... | ... | ... | | ... | 20 | # ================== 21 | def __init__(self, dh_params, dh_type="normal", analytical_inv=None 22 | , plot_xlim=[-0.5, 0.5], plot_ylim=[-0.5, 0.5], plot_zlim=[0.0, 1.0] 23 | , ws_lim=None, ws_division=5 24 | , inv_m="jac_pinv", step_size=5e-1, max_iter=300, final_loss=1e-4): 25 | Robot.__init__(self, dh_params[:, 0:3], dh_params[:, 3], plot_xlim, plot_ylim, plot_zlim, ws_lim, ws_division) 26 | self.dh_type = dh_type # can be only normal or modified 27 | if dh_type != "normal" and dh_type != "modified": 28 | raise Exception("dh_type can only be \"normal\" or \"modified\"") 29 | self.analytical_inv = analytical_inv 30 | # inverse settings 31 | self.inv_m = inv_m 32 | if inv_m != "jac_t" and inv_m != "jac_pinv": 33 | raise Exception("Motion type can only be \"jac_t\" or \"jac_inv\"!") 34 | self.step_size = step_size 35 | self.max_iter = max_iter 36 | self.final_loss = final_loss 37 | 38 | @property 39 | def dh_params(self): 40 | return np.hstack((self.params, (self.axis_values + self.initial_offset).reshape([self.num_axis, 1]))) 41 | 42 | # transformation between axes 43 | @property 44 | def ts(self): 45 | dh = self.dh_params 46 | ts = [] 47 | for i in range(self.num_axis): 48 | if self.dh_type == "normal": 49 | ts.append(Frame.from_dh(dh[i])) 50 | else: 51 | ts.append(Frame.from_dh_modified(dh[i])) 52 | return ts 53 | 54 | # base to end transformation 55 | @property 56 | def axis_frames(self): 57 | ts = self.ts 58 | fs = [] 59 | f = Frame.i_4_4() 60 | for i in range(self.num_axis): 61 | f = f * ts[i] 62 | fs.append(f) 63 | return fs 64 | 65 | @property 66 | def end_frame(self): 67 | return self.axis_frames[-1] 68 | 69 | @property 70 | def jacobian(self): 71 | axis_fs = self.axis_frames 72 | jac = np.zeros([6, self.num_axis]) 73 | if self.dh_type == "normal": 74 | jac[0:3, 0] = np.cross(np.array([0., 0., 1.]), axis_fs[-1].t_3_1.reshape([3, ])) 75 | jac[3:6, 0] = np.array([0., 0., 1.]) 76 | for i in range(1, self.num_axis): 77 | jac[0:3, i] = np.cross(axis_fs[i-1].z_3_1.reshape([3, ]), (axis_fs[-1].t_3_1 - axis_fs[i-1].t_3_1).reshape([3, ])) 78 | jac[3:6, i] = axis_fs[i-1].z_3_1.reshape([3, ]) 79 | if self.dh_type == "modified": 80 | for i in range(0, self.num_axis): 81 | jac[0:3, i] = np.cross(axis_fs[i].z_3_1.reshape([3, ]), (axis_fs[-1].t_3_1 - axis_fs[i].t_3_1).reshape([3, ])) 82 | jac[3:6, i] = axis_fs[i].z_3_1.reshape([3, ]) 83 | return jac 84 | 85 | def inverse(self, end_frame): 86 | if self.analytical_inv is not None: 87 | return self.inverse_analytical(end_frame, self.analytical_inv) 88 | else: 89 | return self.inverse_numerical(end_frame) 90 | 91 | def inverse_analytical(self, end_frame, method): 92 | self.is_reachable_inverse, theta_x = method(self.dh_params, end_frame) 93 | self.forward(theta_x) 94 | return theta_x 95 | 96 | def inverse_numerical(self, end_frame): 97 | last_dx = np.zeros([6, 1]) 98 | for _ in range(self.max_iter): 99 | if self.inv_m == "jac_t": 100 | jac = self.jacobian.T 101 | else: 102 | jac = np.linalg.pinv(self.jacobian) 103 | end = self.end_frame 104 | dx = np.zeros([6, 1]) 105 | dx[0:3, 0] = (end_frame.t_3_1 - end.t_3_1).reshape([3, ]) 106 | diff = end.inv * end_frame 107 | dx[3:6, 0] = end.r_3_3.dot(diff.r_3.reshape([3, 1])).reshape([3, ]) 108 | if np.linalg.norm(dx, ord=2) < self.final_loss or np.linalg.norm(dx - last_dx, ord=2) < 0.1*self.final_loss: 109 | 
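# converged: the residual twist is small enough, or it has effectively stopped changing between iterations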
self.axis_values = simplify_angles(self.axis_values) 110 | self.is_reachable_inverse = True 111 | return self.axis_values 112 | dq = self.step_size * jac.dot(dx) 113 | self.forward(self.axis_values + dq.reshape([self.num_axis, ])) 114 | last_dx = dx 115 | logging.error("Pose cannot be reached!") 116 | self.is_reachable_inverse = False 117 | 118 | def draw(self): 119 | self.ax.clear() 120 | self.plot_settings() 121 | # plot the arm 122 | x, y, z = [0.], [0.], [0.] 123 | axis_frames = self.axis_frames 124 | for i in range(self.num_axis): 125 | x.append(axis_frames[i].t_3_1[0, 0]) 126 | y.append(axis_frames[i].t_3_1[1, 0]) 127 | z.append(axis_frames[i].t_3_1[2, 0]) 128 | self.ax.plot_wireframe(x, y, np.array([z])) 129 | self.ax.scatter(x[1:], y[1:], z[1:], c="red", marker="o") 130 | # plot axes using cylinders 131 | cy_radius = np.amax(self.params[:, 0:2]) * 0.05 132 | cy_len = cy_radius * 4. 133 | cy_div = 4 + 1 134 | theta = np.linspace(0, 2 * np.pi, cy_div) 135 | cx = np.array([cy_radius * np.cos(theta)]) 136 | cz = np.array([-0.5 * cy_len, 0.5 * cy_len]) 137 | cx, cz = np.meshgrid(cx, cz) 138 | cy = np.array([cy_radius * np.sin(theta)] * 2) 139 | points = np.zeros([3, cy_div * 2]) 140 | points[0] = cx.flatten() 141 | points[1] = cy.flatten() 142 | points[2] = cz.flatten() 143 | self.ax.plot_surface(points[0].reshape(2, cy_div), points[1].reshape(2, cy_div), points[2].reshape(2, cy_div), 144 | color="pink", rstride=1, cstride=1, linewidth=0, alpha=0.4) 145 | for i in range(self.num_axis-1): 146 | f = axis_frames[i] 147 | points_f = f.r_3_3.dot(points) + f.t_3_1 148 | self.ax.plot_surface(points_f[0].reshape(2, cy_div), points_f[1].reshape(2, cy_div), points_f[2].reshape(2, cy_div) 149 | , color="pink", rstride=1, cstride=1, linewidth=0, alpha=0.4) 150 | # plot the end frame 151 | f = axis_frames[-1].t_4_4 152 | self.ax.plot_wireframe(np.array([f[0, 3], f[0, 3] + 0.2 * f[0, 0]]), 153 | np.array([f[1, 3], f[1, 3] + 0.2 * f[1, 0]]), 154 | np.array([[f[2, 3], f[2, 3] + 0.2 * f[2, 0]]]), color="red") 155 | self.ax.plot_wireframe(np.array([f[0, 3], f[0, 3] + 0.2 * f[0, 1]]), 156 | np.array([f[1, 3], f[1, 3] + 0.2 * f[1, 1]]), 157 | np.array([[f[2, 3], f[2, 3] + 0.2 * f[2, 1]]]), color="green") 158 | self.ax.plot_wireframe(np.array([f[0, 3], f[0, 3] + 0.2 * f[0, 2]]), 159 | np.array([f[1, 3], f[1, 3] + 0.2 * f[1, 2]]), 160 | np.array([[f[2, 3], f[2, 3] + 0.2 * f[2, 2]]]), color="blue") 161 | -------------------------------------------------------------------------------- /visual_kinematics/RobotTrajectory.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from visual_kinematics.Robot import * 4 | from mpl_toolkits.mplot3d import Axes3D 5 | from matplotlib.widgets import Slider 6 | 7 | 8 | class RobotTrajectory(object): 9 | def __init__(self, robot, frames, time_points=None, rot_tran_ratio=2.0): 10 | self.robot = robot 11 | if len(frames) < 2: 12 | raise Exception("trajectory must include at least 2 frames") 13 | if time_points is not None and len(frames) != len(time_points): 14 | raise Exception("time_points should have same length as frames") 15 | self.frames = frames 16 | self.time_points = time_points 17 | # rot_tran_ratio 1.0 means 2*Pi rotation is treated as 1.0 meter in translation 18 | self.rot_tran_ratio = rot_tran_ratio 19 | 20 | def __len__(self): 21 | return len(self.frames) 22 | 23 | # length of each segments considering only translation 24 | @property 25 | def len_segs_tran(self): 26 | lengths = np.zeros([len(self) - 1, 
], dtype=np.float64) 27 | for i in range(len(self) - 1): 28 | lengths[i] = self.frames[i].distance_to(self.frames[i + 1]) 29 | return lengths 30 | 31 | # length of each segments considering only rotation 32 | @property 33 | def len_segs_rot(self): 34 | lengths = np.zeros([len(self) - 1, ], dtype=np.float64) 35 | for i in range(len(self) - 1): 36 | lengths[i] = self.frames[i].angle_to(self.frames[i + 1]) 37 | return lengths 38 | 39 | # length of each segments considering both rotation and translation 40 | @property 41 | def len_segs(self): 42 | return self.len_segs_rot * self.rot_tran_ratio / 2. / np.pi + self.len_segs_tran 43 | 44 | def interpolate(self, num_segs, motion="p2p", method="linear"): 45 | # !!! equal division, linear interpolation 46 | if self.time_points is None: 47 | lengths = self.len_segs 48 | else: 49 | lengths = self.time_points[1:] - self.time_points[:len(self)-1] 50 | length_total = np.sum(lengths) 51 | 52 | # axis angles for p2p, xyzabc for lin 53 | tra_array = np.zeros([len(self), max(self.robot.num_axis, 6)]) 54 | for i in range(len(self)): 55 | if motion == "p2p": 56 | self.robot.inverse(self.frames[i]) 57 | tra_array[i, 0:self.robot.num_axis] = self.robot.axis_values 58 | if motion == "lin": 59 | tra_array[i, 0:3] = np.array(self.frames[i].t_3_1.reshape([3, ])) 60 | tra_array[i, 3:6] = np.array(self.frames[i].euler_3) 61 | 62 | # interpolation values 63 | inter_values = np.zeros([num_segs + 1, self.robot.num_axis]) 64 | inter_time_points = np.zeros([num_segs + 1]) 65 | for progress in range(num_segs + 1): 66 | index = 0 67 | p_temp = progress * length_total / num_segs 68 | for i in range(lengths.shape[0]): 69 | if p_temp - lengths[i] > 1e-5: # prevent numerical error 70 | p_temp -= lengths[i] 71 | index += 1 72 | else: 73 | break 74 | p_temp /= lengths[index] # the percentage of the segment, in range [0., 1.] 
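# blend linearly between the two bounding waypoints: joint values for "p2p", xyz + ZYX Euler angles for "lin"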
75 | if motion == "p2p": 76 | inter_values[progress] = tra_array[index, 0:self.robot.num_axis] * (1 - p_temp) + tra_array[index + 1, 77 | 0:self.robot.num_axis] * p_temp 78 | if motion == "lin": 79 | xyzabc = tra_array[index, 0:6] * (1 - p_temp) + tra_array[index + 1, 0:6] * p_temp 80 | self.robot.inverse(Frame.from_euler_3(xyzabc[3:6], xyzabc[0:3].reshape([3, 1]))) 81 | inter_values[progress] = self.robot.axis_values 82 | inter_time_points[progress] = np.sum(lengths[0:index]) + lengths[index] * p_temp 83 | return inter_values, inter_time_points 84 | 85 | def show(self, num_segs=100, motion="p2p", method="linear"): 86 | # setup slider 87 | axamp = plt.axes([0.15, .06, 0.75, 0.02]) 88 | samp = Slider(axamp, "progress", 0., 1., valinit=0) 89 | # interpolation values 90 | inter_values, inter_time_points = self.interpolate(num_segs, motion, method) 91 | # save point for drawing trajectory 92 | x, y, z = [], [], [] 93 | for i in range(num_segs + 1): 94 | self.robot.forward(inter_values[i]) 95 | x.append(self.robot.end_frame.t_3_1[0, 0]) 96 | y.append(self.robot.end_frame.t_3_1[1, 0]) 97 | z.append(self.robot.end_frame.t_3_1[2, 0]) 98 | 99 | def update(val): 100 | self.robot.forward(inter_values[int(np.floor(samp.val * num_segs))]) 101 | self.robot.draw() 102 | # plot trajectory 103 | self.robot.ax.plot_wireframe(x, y, np.array([z]), color="lightblue") 104 | self.robot.figure.canvas.draw_idle() 105 | 106 | samp.on_changed(update) 107 | # plot initial 108 | self.robot.forward(inter_values[0]) 109 | self.robot.draw() 110 | self.robot.ax.plot_wireframe(x, y, np.array([z]), color="lightblue") 111 | plt.show() 112 | return inter_values, inter_time_points 113 | -------------------------------------------------------------------------------- /visual_kinematics/__init__.py: -------------------------------------------------------------------------------- 1 | from visual_kinematics.Frame import Frame 2 | from visual_kinematics.Robot import Robot 3 | -------------------------------------------------------------------------------- /visual_kinematics/utility.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from math import pi 3 | 4 | 5 | # ================== constrain angle between -pi and pi 6 | def simplify_angle(angle): 7 | while angle > pi: 8 | angle -= 2 * pi 9 | while angle < -pi: 10 | angle += 2 * pi 11 | return angle 12 | 13 | 14 | # ================== constrain angles[n, ] between -pi and pi 15 | def simplify_angles(angles): 16 | for i in range(angles.shape[0]): 17 | angles[i] = simplify_angle(angles[i]) 18 | return angles 19 | --------------------------------------------------------------------------------