├── .gitignore
├── README.md
├── environment.py
├── images
│   ├── screenshot_1.png
│   └── screenshot_2.png
├── lab_env.cpp
└── xmls
    ├── lab_env.xml
    ├── light_env.xml
    └── meshes
        ├── base.stl
        ├── forearm.stl
        ├── kinova_robotiq_coupler.stl
        ├── robotiq_85_base_link.stl
        ├── robotiq_85_finger_link.stl
        ├── robotiq_85_finger_tip_link.stl
        ├── robotiq_85_inner_knuckle_link.stl
        ├── robotiq_85_knuckle_link.stl
        ├── shoulder.stl
        ├── upperarm.stl
        ├── wrist1.stl
        ├── wrist2.stl
        └── wrist3.stl
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Robotiq-UR5
2 | Simulator of a UR5 robotic arm with a Robotiq gripper, built with MuJoCo
3 |
4 | ## Requirements
5 | - [MuJoCo 1.5.0](http://www.mujoco.org/)
6 | - [mujoco-py](https://github.com/openai/mujoco-py)
7 |
8 | ## Usage
9 | - With Python 3, `from environment import lab_env` (see the sketch below)
10 | - With C++, copy [lab_env.cpp](./lab_env.cpp) into `mjpro150/sample/` and build it with
11 | `g++ -O2 -I../include -L../bin -std=c++11 -mavx lab_env.cpp -lmujoco150 -lGL -lglew ../bin/libglfw.so.3 -o ../bin/lab_env`
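
A minimal usage sketch, mirroring the `__main__` block of [environment.py](./environment.py); run it from the repository root so the relative `xmls/` paths resolve:

```python
from environment import create_env  # create_env('Goal_LfD', args) wraps the lab_env class

env = create_env('Goal_LfD', None)               # loads xmls/lab_env.xml and xmls/light_env.xml
angles, state, images = env.reset(0)             # task_id in 0..3 selects the target cube
# Each action is [joint_0, joint_1, joint_2, gripper], all in [-1, 1];
# the gripper command is remapped to the number of closing steps.
state, images = env.step([0.2, -0.2, 0.4, -1])   # move with the gripper open
state, images = env.step([0.2, -0.2, 0.4, 1])    # close the gripper at the same pose
print(state)                                     # [x1, y1, x2, y2, grasp_success, danger]
```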
12 |
13 | ## Demos
14 |
15 | ![screenshot_1](images/screenshot_1.png)
16 |
17 | ![screenshot_2](images/screenshot_2.png)
18 |
--------------------------------------------------------------------------------
/environment.py:
--------------------------------------------------------------------------------
1 | from __future__ import division
2 | from mujoco_py import load_model_from_path, MjSim
3 | import random
4 | import math
5 | import time
6 | import os
7 | import numpy as np
8 | from scipy.misc import imsave
9 | import matplotlib as mpl
10 | from scipy.misc.pilutil import imshow
11 | import logging
12 | import copy
13 | import gym
14 | import cv2
15 |
16 | initial_pos = [0.1, -50 / 180 * math.pi, 61.03 / 180 * math.pi,
17 | 0, 0, 0, # robotic arm
18 | 0, 0, 0, 0,
19 | 0, 0, 0, 0, # two fingers
20 | -0.62, 0.32, 0, math.cos(math.pi * 0.16), 0, 0, math.sin(math.pi * 0.16),
21 | -0.72, 0.38, 0, math.cos(math.pi * 0.18), 0, 0, math.sin(math.pi * 0.18),
22 | -0.835, 0.425, 0, math.cos(math.pi * 0.195), 0, 0, math.sin(math.pi * 0.195),
23 | -0.935, 0.46, 0, math.cos(math.pi * 0.23), 0, 0, math.sin(math.pi * 0.23)] # 4 cubes
24 | joint_pos = [[29.70 / 180 * math.pi, -85 / 180 * math.pi, 115 / 180 * math.pi], # yellow
25 | [31.00 / 180 * math.pi, -78 / 180 * math.pi, 105 / 180 * math.pi],
26 | [30.55 / 180 * math.pi, -70 / 180 * math.pi, 99 / 180 * math.pi],
27 | [20 / 180 * math.pi, -45 / 180 * math.pi, 60 / 180 * math.pi]]
28 | closed_pos = [1.12810781, -0.59798289, -0.53003607]
29 | fig_size_1 = (214, 214) # For the workbench camera
30 | fig_size_2 = (214, 214) # For the upper camera
31 | gaussian_noise_parameters = (20, 20)
32 | gaussian_blur_prarmeters = ((5, 5), 1.5)
33 | safety_threshold = 0.01 # Used to determine the stability of the current joint positions
34 | grasp_steps = 120 # The gripper needs at least 120 simulator steps to close fully
35 | drop_steps = 60 # Roughly 12 steps are needed to reach the ground from a 0.3 m height; the extra iterations let the final position converge
36 | sensor_threshold = 2.0 # Used to judge whether a cube is firmly grasped
37 | action_scale = 30
38 |
39 | class vector():
40 | # 3D vector class
41 | def __init__(self, x=0, y=0, z=1):
42 | self.x, self.y, self.z = x, y, z
43 |
44 | def add(self, v):
45 | return vector(self.x + v.x, self.y + v.y, self.z + v.z)
46 |
47 | def dot(self, v):
48 | return self.x * v.x + self.y * v.y + self.z * v.z
49 |
50 | def mul_vec(self, v):
51 | return vector(self.y*v.z-self.z*v.y, self.z*v.x-self.x*v.z, self.x*v.y-self.y*v.x)
52 |
53 | def mul_num(self, s):
54 | return vector(self.x * s, self.y * s, self.z * s)
55 |
56 | class quaternion():
57 | # Quaternion class used for 3D rotation
58 | def __init__(self, w=0, v=vector(0,0,1)):
59 | self.v = v
60 | self.w = w
61 |
62 | def rev(self):
63 | N = math.sqrt(self.w*self.w + self.v.x*self.v.x + self.v.y*self.v.y + self.v.z*self.v.z)
64 | return quaternion(self.w/N, vector(-self.v.x/N, -self.v.y/N, -self.v.z/N))
65 |
66 | def mul(self, q):
67 | res_v = self.v.mul_vec(q.v).add(q.v.mul_num(self.w)).add(self.v.mul_num(q.w))
68 | res_w = self.w*q.w - self.v.dot(q.v)
69 | return quaternion(res_w, res_v)
70 |
71 | class lab_env():
72 | def __init__(self, env, args):
73 | #super(lab_env, self).__init__(env)
74 | # The real-world simulator
75 | self.model = load_model_from_path('xmls/lab_env.xml')
76 | self.sim = MjSim(self.model)
77 | # Used to locate the gripper
78 | self.model2 = load_model_from_path('xmls/light_env.xml')
79 | self.sim2 = MjSim(self.model2)
80 |
81 | def reset(self, task_id):
82 | self.task = task_id
83 | self.grasping = -1
84 | self.last_grasp = -1
85 | # Configure gravity
86 | for i in range(4):
87 | self.sim.data.ctrl[i] = -1
88 | # Configure joint positions
89 | for i in range(42):
90 | self.sim.data.qpos[i] = initial_pos[i]
91 | for i in range(3):
92 | self.sim.data.qpos[i] = joint_pos[task_id][i]
93 | self.pos_forward()
94 | self.sim.forward()
95 |
96 | remapped_pos = [remap(self.sim.data.qpos[0], -30 / 180 * math.pi, 45 / 180 * math.pi, -1, 1),
97 | remap(self.sim.data.qpos[1], -105 / 180 * math.pi, -50 / 180 * math.pi, -1, 1),
98 | remap(self.sim.data.qpos[2], 0 / 180 * math.pi, 180 / 180 * math.pi, -1, 1), 0]
99 |
100 | return (remapped_pos,) + self.get_state()
101 |
102 | def step(self, action):
103 | self.sim.data.qpos[0] = remap(action[0], -1, 1, -30 / 180 * math.pi, 45 / 180 * math.pi)
104 | self.sim.data.qpos[1] = remap(action[1], -1, 1, -105 / 180 * math.pi, -50 / 180 * math.pi)
105 | self.sim.data.qpos[2] = remap(action[2], -1, 1, 0 / 180 * math.pi, 180 / 180 * math.pi)
106 |
107 | self.pos_forward()
108 | self.sim.forward()
109 |
110 | if action[3] < self.last_grasp or self.grasping == -1:
111 | t = int(remap(action[3], -1, 1, 0, grasp_steps))
112 | for i in range(6, 14):
113 | self.sim.data.qpos[i] = 0
114 | self.sim.forward()
115 | self.grasping = -1
116 | self.sim.data.ctrl[4] = 1
117 | self.sim.data.ctrl[5] = 1
118 | backup = [self.sim.data.qpos[i] for i in [15, 16, 22, 23, 29, 30, 36, 37]]
119 |
120 | for i in range(t):
121 | self.sim.step()
122 | stop = False
123 | for j in range(4):
124 | if self.sim.data.sensordata[j] > sensor_threshold:
125 | self.grasping = j
126 | self.pickuppos = [self.sim.data.qpos[i] for i in (list(range(6)) + list(range(14 + 7 * self.grasping, 21 + 7 * self.grasping)))]
127 | stop = True
128 | break
129 | for i in range(4):
130 | for j in range(2):
131 | self.sim.data.qpos[15 + 7 * i + j] = backup[i * 2 + j]
132 | if stop:
133 | break
134 | self.gripper_sync()
135 | self.sim.forward()
136 |
137 | self.sim.data.ctrl[4] = 0
138 | self.sim.data.ctrl[5] = 0
139 |
140 | self.last_grasp = action[3]
141 |
142 | return self.get_state()
143 |
144 | # Ensure that the gripper always points down and stays parallel to the desk edge
145 | def pos_forward(self):
146 | self.sim.data.qpos[3] = math.pi * 1.5 - self.sim.data.qpos[1] - self.sim.data.qpos[2]
147 | self.sim.data.qpos[4] = math.pi * 1.5
148 | self.sim.data.qpos[5] = math.pi * 1.25 + self.sim.data.qpos[0]
149 | self.gripper_sync()
150 |
151 | if self.grasping != -1:
152 | current_xyz = pos_to_xyz(self.sim.data.qpos[0: 6])
153 | old_xyz = pos_to_xyz(self.pickuppos[0: 6])
154 | for i in range(3):
155 | self.sim.data.qpos[14 + 7 * self.grasping + i] = self.pickuppos[6 + i] + current_xyz[i] - old_xyz[i]
156 | '''
157 | old_quat = quaternion(self.pickuppos[9], vector(self.pickuppos[10], self.pickuppos[11], self.pickuppos[12]))
158 | rotate_quat = quaternion(math.cos(self.sim.data.qpos[0] - self.pickuppos[0]), vector(0, 0, math.sin(self.sim.data.qpos[0] - self.pickuppos[0])))
159 | new_quat = rotate_quat.mul(old_quat)
160 | self.sim.data.qpos[17 + 7 * self.grasping] = new_quat.w
161 | self.sim.data.qpos[18 + 7 * self.grasping] = new_quat.v.x
162 | self.sim.data.qpos[19 + 7 * self.grasping] = new_quat.v.y
163 | self.sim.data.qpos[20 + 7 * self.grasping] = new_quat.v.z
164 | '''
165 |
166 | def gripper_sync(self):
167 | self.sim.data.qpos[9] = gripper_consistent(self.sim.data.qpos[6: 9])
168 | self.sim.data.qpos[13] = gripper_consistent(self.sim.data.qpos[10: 13])
169 |
170 | def get_state(self):
171 | sync(self.sim, self.sim2, 6)
172 | # Locate the gripper, render twice to overcome bugs in mujoco
173 | image_1 = copy.deepcopy(self.sim.render(width = 960, height = 720, camera_name = 'workbench_camera'))
174 | image_1 = copy.deepcopy(self.sim.render(width = 960, height = 720, camera_name = 'workbench_camera'))
175 | image_2 = copy.deepcopy(self.sim.render(width = 960, height = 720, camera_name = 'upper_camera'))
176 | image_2 = copy.deepcopy(self.sim.render(width = 960, height = 720, camera_name = 'upper_camera'))
177 | image_3 = copy.deepcopy(self.sim2.render(width = 960, height = 720, camera_name = 'workbench_camera'))
178 | image_3 = copy.deepcopy(self.sim2.render(width = 960, height = 720, camera_name = 'workbench_camera'))
179 | x1, y1 = get_x_y(image_3)
180 | image_4 = copy.deepcopy(self.sim2.render(width = 960, height = 720, camera_name = 'upper_camera'))
181 | image_4 = copy.deepcopy(self.sim2.render(width = 960, height = 720, camera_name = 'upper_camera'))
182 | x2, y2 = get_x_y(image_4)
183 | # Crop gripper images and add noise
184 | image_1 = cv2.GaussianBlur(gaussian_noise(crop(image_1, x1 + fig_size_1[0] // 2, y1, *fig_size_1), *gaussian_noise_parameters), *gaussian_blur_prarmeters).transpose((2, 0, 1))
185 | image_2 = cv2.GaussianBlur(gaussian_noise(crop(image_2, x2 + fig_size_2[0] // 2, y2, *fig_size_2), *gaussian_noise_parameters), *gaussian_blur_prarmeters).transpose((2, 0, 1))
186 |
187 | danger = int(self.safety_check() or math.isnan(x1) or math.isnan(y1) or math.isnan(x2) or math.isnan(y2))
188 | return [x1, y1, x2, y2, int(self.grasping == self.task), danger], (image_1, image_2)
189 |
190 | def safety_check(self):
191 | # return 0 if safe, otherwise 1
192 | backup = [self.sim.data.qpos[i] for i in range(14)]
193 | self.sim.step()
194 | s = 0
195 | for i in range(6):
196 | s += abs(backup[i] - self.sim.data.qpos[i])
197 | self.sim.data.qpos[i] = backup[i]
198 | return s > safety_threshold
199 |
200 | def force_close(self):
201 | for i in range(2):
202 | for j in range(3):
203 | self.sim.data.qpos[6 + i * 4 + j] = closed_pos[j]
204 | self.gripper_sync()
205 | self.sim.forward()
206 |
207 | def get_x_y(fig):
208 | gray_fig = fig.sum(axis = 2)
209 | x, y = np.where(gray_fig > 0)
210 | x_mean = x.mean() if x.shape[0] > 0 else math.nan
211 | y_mean = y.mean() if y.shape[0] > 0 else math.nan
212 | return x_mean, y_mean
213 |
214 | def pos_to_xyz(pos):
215 | x = 0.425 * math.cos(pos[1]) + 0.39225 * math.cos(pos[1] + pos[2]) - 0.09465 * math.sin(pos[1] + pos[2] + pos[3])\
216 | + 0.0823 * math.cos(pos[1] + pos[2] + pos[3]) * math.sin(pos[4])
217 | y = 0.10915 + 0.0823 * math.cos(pos[4])
218 | c, s = math.cos(pos[0] + 0.75 * math.pi), math.sin(pos[0] + 0.75 * math.pi)
219 | z = 0.089159 - 0.425 * math.sin(pos[1]) - 0.39225 * math.sin(pos[1] + pos[2]) - 0.09465 * math.cos(pos[1] + pos[2] + pos[3])\
220 | - 0.0823 * math.sin(pos[1] + pos[2] + pos[3]) * math.sin(pos[4])
221 | x, y = x * c - y * s, x * s + y * c
222 | return x, y, z
223 |
224 | def crop(fig, x, y, height, width):
225 | Height, Width, _ = fig.shape
226 | x = constrain(x, height // 2, Height - height // 2)
227 | y = constrain(y, width // 2, Width - width // 2)
228 | return fig[x - height // 2: x + height // 2, y - width // 2: y + width // 2, :]
229 |
230 | def constrain(x, lower_bound, upper_bound):
231 | x = (upper_bound + lower_bound) / 2 if math.isnan(x) else x
232 | x = upper_bound if x > upper_bound else x
233 | x = lower_bound if x < lower_bound else x
234 | return int(round(x))
235 |
236 | def gaussian_noise(fig, mu, sigma):
237 | noise = mu + np.random.randn(*fig.shape) * sigma
238 | target = fig + noise
239 | target[target < 0] = 0
240 | target[target > 255] = 255
241 | return target
242 |
243 | # Synchronize sim2 with sim1
244 | def sync(sim1, sim2, length):
245 | for i in range(length):
246 | sim2.data.qpos[i] = sim1.data.qpos[i]
247 | sim2.forward()
248 |
249 | def gripper_consistent(angle):
250 | x = -0.006496 + 0.0315 * math.sin(angle[0]) + 0.04787744772 * math.cos(angle[0] + angle[1] - 0.1256503306) - 0.02114828598 * math.sin(angle[0] + angle[1] + angle[2] - 0.1184899592)
251 | y = -0.0186011 - 0.0315 * math.cos(angle[0]) + 0.04787744772 * math.sin(angle[0] + angle[1] - 0.1256503306) + 0.02114828598 * math.cos(angle[0] + angle[1] + angle[2] - 0.1184899592)
252 | return math.atan2(y, x) + 0.6789024115
253 |
254 | def create_env(env_id, args):
255 | env = env_id #gym.make(env_id)
256 | if env == 'Goal_LfD':
257 | env = lab_env(env, args)
258 | return env
259 |
260 | def remap(x, lb, ub, LB, UB):
261 | return (x - lb) / (ub - lb) * (UB - LB) + LB
262 |
263 | if __name__ == '__main__':
264 | env = create_env('Goal_LfD', None)
265 | angles, state, images = env.reset(0)
266 | print(state)
267 | state, images = env.step([0.2, -0.2, 0.4, -1])
268 | #imshow(images[0])
269 | #imshow(images[1])
270 | fig1 = env.sim.render(width = 960, height = 720, camera_name = 'workbench_camera')
271 | fig1 = env.sim.render(width = 960, height = 720, camera_name = 'workbench_camera')
272 | #imshow(fig1)
273 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
274 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
275 | imshow(fig2)
276 | state, images = env.step([0.2, -0.2, 0.4, 1])
277 | print(state)
278 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
279 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
280 | imshow(fig2)
281 | state, images = env.step([0.2, 0, -0.2, 1])
282 | print(state)
283 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
284 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
285 | imshow(fig2)
286 | state, images = env.step([0.2, 0, -0.2, 0.9])
287 | print(state)
288 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
289 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
290 | imshow(fig2)
291 |
292 | '''
293 | video = cv2.VideoWriter("sample.avi", cv2.VideoWriter_fourcc('M','J','P','G'), 10.0, (1920, 720), True)
294 | for i in range(20):
295 | env.step([0, 0, 0, 0])
296 | fig1 = env.sim.render(width = 960, height = 720, camera_name = 'workbench_camera')
297 | fig1 = env.sim.render(width = 960, height = 720, camera_name = 'workbench_camera')
298 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
299 | fig2 = env.sim.render(width = 960, height = 720, camera_name = 'upper_camera')
300 | video.write(cv2.cvtColor(np.concatenate((fig1, fig2), axis=1), cv2.COLOR_BGR2RGB))
301 | video.release()
302 | '''
--------------------------------------------------------------------------------
/images/screenshot_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/images/screenshot_1.png
--------------------------------------------------------------------------------
/images/screenshot_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/images/screenshot_2.png
--------------------------------------------------------------------------------
/lab_env.cpp:
--------------------------------------------------------------------------------
1 | #include "mujoco.h"
2 | #include "glfw3.h"
3 | #include "stdio.h"
4 | #include "string.h"
5 | #include "math.h"
6 |
7 | //-------------------------------- global variables -------------------------------------
8 |
9 | double posY = 0.49;
10 | double posZ = 0.38729897226005433;
11 | int leftRight = 0;
12 | int upDown = 0;
13 | double pi = 3.141592653;
14 |
15 | // model
16 | mjModel* m = 0;
17 | mjData* d = 0;
18 | char lastfile[1000] = "";
19 |
20 | // user state
21 | bool paused = false;
22 | bool showoption = false;
23 | bool showinfo = true;
24 | bool showfullscreen = false;
25 | bool slowmotion = true;
26 | bool showdepth = false;
27 | bool showsensor = false;
28 | bool showprofiler = false;
29 | int showhelp = 2; // 0: none; 1: brief; 2: full
30 | int fontscale = mjFONTSCALE_150; // can be 100, 150, 200
31 | int keyreset = -1; // non-negative: reset to keyframe
32 |
33 | // abstract visualization
34 | mjvScene scn;
35 | mjvCamera cam;
36 | mjvOption vopt;
37 | mjvPerturb pert;
38 | mjvFigure figconstraint;
39 | mjvFigure figcost;
40 | mjvFigure figtimer;
41 | mjvFigure figsize;
42 | mjvFigure figsensor;
43 | char status[1000] = "";
44 |
45 | // OpenGL rendering
46 | int refreshrate;
47 | mjrContext con;
48 | float depth_buffer[5120*2880]; // big enough for 5K screen
49 | unsigned char depth_rgb[1280*720*3]; // 1/4th of screen
50 |
51 | // selection and perturbation
52 | bool button_left = false;
53 | bool button_middle = false;
54 | bool button_right = false;
55 | double lastx = 0;
56 | double lasty = 0;
57 | double window2buffer = 1; // framebuffersize / windowsize (for scaled video modes)
58 |
59 | // help strings
60 | const char help_title[] =
61 | "Joint 1\n"
62 | "Joint 2\n"
63 | "Joint 3\n"
64 | "Joint 4\n"
65 | "Joint 5\n"
66 | "Joint 6\n"
67 | "Gripper\n"
68 | "Camera";
69 |
70 |
71 | const char help_content[] =
72 | "Z, A\n"
73 | "X, S\n"
74 | "C, D\n"
75 | "V, F\n"
76 | "B, G\n"
77 | "N, H\n"
78 | "M, J\n"
79 | "[, ]";
80 |
81 | char opt_title[1000] = "";
82 | char opt_content[1000];
83 |
84 | void init(void);
85 | //-------------------------------- profiler and sensor ----------------------------------
86 |
87 | // init profiler
88 | void profilerinit(void)
89 | {
90 | int i, n;
91 |
92 | // set figures to default
93 | mjv_defaultFigure(&figconstraint);
94 | mjv_defaultFigure(&figcost);
95 | mjv_defaultFigure(&figtimer);
96 | mjv_defaultFigure(&figsize);
97 |
98 | // titles
99 | strcpy(figconstraint.title, "Counts");
100 | strcpy(figcost.title, "Convergence (log 10)");
101 | strcpy(figsize.title, "Dimensions");
102 | strcpy(figtimer.title, "CPU time (msec)");
103 |
104 | // x-labels
105 | strcpy(figconstraint.xlabel, "Solver iteration");
106 | strcpy(figcost.xlabel, "Solver iteration");
107 | strcpy(figsize.xlabel, "Video frame");
108 | strcpy(figtimer.xlabel, "Video frame");
109 |
110 | // y-tick number formats
111 | strcpy(figconstraint.yformat, "%.0f");
112 | strcpy(figcost.yformat, "%.1f");
113 | strcpy(figsize.yformat, "%.0f");
114 | strcpy(figtimer.yformat, "%.2f");
115 |
116 | // colors
117 | figconstraint.figurergba[0] = 0.1f;
118 | figcost.figurergba[2] = 0.2f;
119 | figsize.figurergba[0] = 0.1f;
120 | figtimer.figurergba[2] = 0.2f;
121 |
122 | // legends
123 | strcpy(figconstraint.linename[0], "total");
124 | strcpy(figconstraint.linename[1], "active");
125 | strcpy(figconstraint.linename[2], "changed");
126 | strcpy(figconstraint.linename[3], "evals");
127 | strcpy(figconstraint.linename[4], "updates");
128 | strcpy(figcost.linename[0], "improvement");
129 | strcpy(figcost.linename[1], "gradient");
130 | strcpy(figcost.linename[2], "lineslope");
131 | strcpy(figsize.linename[0], "dof");
132 | strcpy(figsize.linename[1], "body");
133 | strcpy(figsize.linename[2], "constraint");
134 | strcpy(figsize.linename[3], "sqrt(nnz)");
135 | strcpy(figsize.linename[4], "contact");
136 | strcpy(figsize.linename[5], "iteration");
137 | strcpy(figtimer.linename[0], "total");
138 | strcpy(figtimer.linename[1], "collision");
139 | strcpy(figtimer.linename[2], "prepare");
140 | strcpy(figtimer.linename[3], "solve");
141 | strcpy(figtimer.linename[4], "other");
142 |
143 | // grid sizes
144 | figconstraint.gridsize[0] = 5;
145 | figconstraint.gridsize[1] = 5;
146 | figcost.gridsize[0] = 5;
147 | figcost.gridsize[1] = 5;
148 | figsize.gridsize[0] = 3;
149 | figsize.gridsize[1] = 5;
150 | figtimer.gridsize[0] = 3;
151 | figtimer.gridsize[1] = 5;
152 |
153 | // minimum ranges
154 | figconstraint.range[0][0] = 0;
155 | figconstraint.range[0][1] = 20;
156 | figconstraint.range[1][0] = 0;
157 | figconstraint.range[1][1] = 80;
158 | figcost.range[0][0] = 0;
159 | figcost.range[0][1] = 20;
160 | figcost.range[1][0] = -15;
161 | figcost.range[1][1] = 5;
162 | figsize.range[0][0] = -200;
163 | figsize.range[0][1] = 0;
164 | figsize.range[1][0] = 0;
165 | figsize.range[1][1] = 100;
166 | figtimer.range[0][0] = -200;
167 | figtimer.range[0][1] = 0;
168 | figtimer.range[1][0] = 0;
169 | figtimer.range[1][1] = 0.4f;
170 |
171 | // init x axis on history figures (do not show yet)
172 | for( n=0; n<6; n++ )
173 | for( i=0; i<mjMAXLINEPNT; i++ )
174 | {
175 | figtimer.linedata[n][2*i] = (float)-i;
176 | figsize.linedata[n][2*i] = (float)-i;
177 | }
178 | }
179 |
180 |
181 |
182 | // update profiler figures
183 | void profilerupdate(void)
184 | {
185 | int i, n;
186 |
187 | // update constraint figure
188 | figconstraint.linepnt[0] = mjMIN(mjMIN(d->solver_iter, mjNSOLVER), mjMAXLINEPNT);
188 | for( i=1; i<5; i++ )
189 | figconstraint.linepnt[i] = figconstraint.linepnt[0];
190 | if( m->opt.solver==mjSOL_PGS )
191 | {
192 | figconstraint.linepnt[3] = 0;
193 | figconstraint.linepnt[4] = 0;
194 | }
195 | if( m->opt.solver==mjSOL_CG )
196 | figconstraint.linepnt[4] = 0;
197 | for( i=0; i<figconstraint.linepnt[0]; i++ )
198 | {
199 | // x
200 | figconstraint.linedata[0][2*i] = (float)i;
201 | figconstraint.linedata[1][2*i] = (float)i;
202 | figconstraint.linedata[2][2*i] = (float)i;
203 | figconstraint.linedata[3][2*i] = (float)i;
204 | figconstraint.linedata[4][2*i] = (float)i;
205 |
206 | // y
207 | figconstraint.linedata[0][2*i+1] = (float)d->nefc;
208 | figconstraint.linedata[1][2*i+1] = (float)d->solver[i].nactive;
209 | figconstraint.linedata[2][2*i+1] = (float)d->solver[i].nchange;
210 | figconstraint.linedata[3][2*i+1] = (float)d->solver[i].neval;
211 | figconstraint.linedata[4][2*i+1] = (float)d->solver[i].nupdate;
212 | }
213 |
214 | // update cost figure
215 | figcost.linepnt[0] = mjMIN(mjMIN(d->solver_iter, mjNSOLVER), mjMAXLINEPNT);
216 | for( i=1; i<3; i++ )
217 | figcost.linepnt[i] = figcost.linepnt[0];
218 | if( m->opt.solver==mjSOL_PGS )
219 | {
220 | figcost.linepnt[1] = 0;
221 | figcost.linepnt[2] = 0;
222 | }
223 |
224 | for( i=0; i<figcost.linepnt[0]; i++ )
225 | {
226 | // x
227 | figcost.linedata[0][2*i] = (float)i;
228 | figcost.linedata[1][2*i] = (float)i;
229 | figcost.linedata[2][2*i] = (float)i;
230 |
231 | // y
232 | figcost.linedata[0][2*i+1] = (float)mju_log10(mju_max(mjMINVAL, d->solver[i].improvement));
233 | figcost.linedata[1][2*i+1] = (float)mju_log10(mju_max(mjMINVAL, d->solver[i].gradient));
234 | figcost.linedata[2][2*i+1] = (float)mju_log10(mju_max(mjMINVAL, d->solver[i].lineslope));
235 | }
236 |
237 | // get timers: total, collision, prepare, solve, other
238 | int itotal = (d->timer[mjTIMER_STEP].duration > d->timer[mjTIMER_FORWARD].duration ?
239 | mjTIMER_STEP : mjTIMER_FORWARD);
240 | float tdata[5] = {
241 | (float)(d->timer[itotal].duration/mjMAX(1,d->timer[itotal].number)),
242 | (float)(d->timer[mjTIMER_POS_COLLISION].duration/mjMAX(1,d->timer[mjTIMER_POS_COLLISION].number)),
243 | (float)(d->timer[mjTIMER_POS_MAKE].duration/mjMAX(1,d->timer[mjTIMER_POS_MAKE].number)) +
244 | (float)(d->timer[mjTIMER_POS_PROJECT].duration/mjMAX(1,d->timer[mjTIMER_POS_PROJECT].number)),
245 | (float)(d->timer[mjTIMER_CONSTRAINT].duration/mjMAX(1,d->timer[mjTIMER_CONSTRAINT].number)),
246 | 0
247 | };
248 | tdata[4] = tdata[0] - tdata[1] - tdata[2] - tdata[3];
249 |
250 | // update figtimer
251 | int pnt = mjMIN(201, figtimer.linepnt[0]+1);
252 | for( n=0; n<5; n++ )
253 | {
254 | // shift data
255 | for( i=pnt-1; i>0; i-- )
256 | figtimer.linedata[n][2*i+1] = figtimer.linedata[n][2*i-1];
257 |
258 | // assign new
259 | figtimer.linepnt[n] = pnt;
260 | figtimer.linedata[n][1] = tdata[n];
261 | }
262 |
263 | // get sizes: nv, nbody, nefc, sqrt(nnz), ncont, iter
264 | float sdata[6] = {
265 | (float)m->nv,
266 | (float)m->nbody,
267 | (float)d->nefc,
268 | (float)mju_sqrt((mjtNum)d->solver_nnz),
269 | (float)d->ncon,
270 | (float)d->solver_iter
271 | };
272 |
273 | // update figsize
274 | pnt = mjMIN(201, figsize.linepnt[0]+1);
275 | for( n=0; n<6; n++ )
276 | {
277 | // shift data
278 | for( i=pnt-1; i>0; i-- )
279 | figsize.linedata[n][2*i+1] = figsize.linedata[n][2*i-1];
280 |
281 | // assign new
282 | figsize.linepnt[n] = pnt;
283 | figsize.linedata[n][1] = sdata[n];
284 | }
285 | }
286 |
287 |
288 |
289 | // show profiler
290 | void profilershow(mjrRect rect)
291 | {
292 | mjrRect viewport = {rect.width - rect.width/5, rect.bottom, rect.width/5, rect.height/4};
293 | mjr_figure(viewport, &figtimer, &con);
294 | viewport.bottom += rect.height/4;
295 | mjr_figure(viewport, &figsize, &con);
296 | viewport.bottom += rect.height/4;
297 | mjr_figure(viewport, &figcost, &con);
298 | viewport.bottom += rect.height/4;
299 | mjr_figure(viewport, &figconstraint, &con);
300 | }
301 |
302 |
303 |
304 | // init sensor figure
305 | void sensorinit(void)
306 | {
307 | // set figure to default
308 | mjv_defaultFigure(&figsensor);
309 |
310 | // set flags
311 | figsensor.flg_extend = 1;
312 | figsensor.flg_barplot = 1;
313 |
314 | // title
315 | strcpy(figsensor.title, "Sensor data");
316 |
317 | // y-tick number format
318 | strcpy(figsensor.yformat, "%.0f");
319 |
320 | // grid size
321 | figsensor.gridsize[0] = 2;
322 | figsensor.gridsize[1] = 3;
323 |
324 | // minimum range
325 | figsensor.range[0][0] = 0;
326 | figsensor.range[0][1] = 0;
327 | figsensor.range[1][0] = -1;
328 | figsensor.range[1][1] = 1;
329 | }
330 |
331 |
332 | // update sensor figure
333 | void sensorupdate(void)
334 | {
335 | static const int maxline = 10;
336 |
337 | // clear linepnt
338 | for( int i=0; i<maxline; i++ )
339 | figsensor.linepnt[i] = 0;
340 |
341 | // start with line 0
342 | int lineid = 0;
343 |
344 | // loop over sensors
345 | for( int n=0; n<m->nsensor; n++ )
346 | {
347 | // go to next line if type is different
348 | if( n>0 && m->sensor_type[n]!=m->sensor_type[n-1] )
349 | lineid = mjMIN(lineid+1, maxline-1);
350 |
351 | // get info about this sensor
352 | mjtNum cutoff = (m->sensor_cutoff[n]>0 ? m->sensor_cutoff[n] : 1);
353 | int adr = m->sensor_adr[n];
354 | int dim = m->sensor_dim[n];
355 |
356 | // data pointer in line
357 | int p = figsensor.linepnt[lineid];
358 |
359 | // fill in data for this sensor
360 | for( int i=0; i<dim; i++ )
361 | {
362 | // check size, skip if too big
363 | if( p+2*i>=mjMAXLINEPNT/2 )
364 | break;
365 |
366 | // x
367 | figsensor.linedata[lineid][2*p+4*i] = (float)(adr+i);
368 | figsensor.linedata[lineid][2*p+4*i+2] = (float)(adr+i);
369 |
370 | // y
371 | figsensor.linedata[lineid][2*p+4*i+1] = 0;
372 | figsensor.linedata[lineid][2*p+4*i+3] = (float)(d->sensordata[adr+i]/cutoff);
373 | }
374 |
375 | // update linepnt
376 | figsensor.linepnt[lineid] = mjMIN(mjMAXLINEPNT-1,
377 | figsensor.linepnt[lineid]+2*dim);
378 | }
379 | }
380 |
381 |
382 |
383 | // show sensor figure
384 | void sensorshow(mjrRect rect)
385 | {
386 | // render figure on the right
387 | mjrRect viewport = {rect.width - rect.width/4, rect.bottom, rect.width/4, rect.height/3};
388 | mjr_figure(viewport, &figsensor, &con);
389 | }
390 |
391 |
392 |
393 | //-------------------------------- utility functions ------------------------------------
394 |
395 | // center and scale view
396 | void autoscale(GLFWwindow* window)
397 | {
398 | // autoscale
399 | cam.lookat[0] = m->stat.center[0];
400 | cam.lookat[1] = m->stat.center[1];
401 | cam.lookat[2] = m->stat.center[2];
402 | cam.distance = 1.5 * m->stat.extent;
403 |
404 | // set to free camera
405 | cam.type = mjCAMERA_FREE;
406 | }
407 |
408 |
409 | // load mjb or xml model
410 | void loadmodel(GLFWwindow* window, const char* filename)
411 | {
412 | // make sure filename is given
413 | if( !filename )
414 | return;
415 |
416 | // load and compile
417 | char error[1000] = "could not load binary model";
418 | mjModel* mnew = 0;
419 | if( strlen(filename)>4 && !strcmp(filename+strlen(filename)-4, ".mjb") )
420 | mnew = mj_loadModel(filename, 0);
421 | else
422 | mnew = mj_loadXML(filename, 0, error, 1000);
423 | if( !mnew )
424 | {
425 | printf("%s\n", error);
426 | return;
427 | }
428 |
429 | // delete old model, assign new
430 | mj_deleteData(d);
431 | mj_deleteModel(m);
432 | m = mnew;
433 | d = mj_makeData(m);
434 | mj_forward(m, d);
435 |
436 | // save filename for reload
437 | strcpy(lastfile, filename);
438 |
439 | // re-create custom context
440 | mjr_makeContext(m, &con, fontscale);
441 |
442 | // clear perturbation state and keyreset
443 | pert.active = 0;
444 | pert.select = 0;
445 | keyreset = -1;
446 |
447 | // center and scale view, update scene
448 | autoscale(window);
449 | mjv_updateScene(m, d, &vopt, &pert, &cam, mjCAT_ALL, &scn);
450 |
451 | // set window title to mode name
452 | //if( window && m->names )
453 | //glfwSetWindowTitle(window, m->names);
454 | }
455 |
456 |
457 | // timer in milliseconds
458 | mjtNum timer(void)
459 | {
460 | // save start time
461 | static double starttm = 0;
462 | if( starttm==0 )
463 | starttm = glfwGetTime();
464 |
465 | // return time since start
466 | return (mjtNum)(1000 * (glfwGetTime() - starttm));
467 | }
468 |
469 |
470 | // clear all times
471 | void cleartimers(mjData* d)
472 | {
473 | for( int i=0; i<mjNTIMER; i++ )
474 | {
475 | d->timer[i].duration = 0;
476 | d->timer[i].number = 0;
477 | }
478 | }
479 |
480 | void pos_to_yz(double p1, double p2, double* y, double* z)
481 | {
482 | *y = 0.425 * cos(p1) + 0.39225 * cos(p1 + p2);
483 | *z = -0.425 * sin(p1) - 0.39225 * sin(p1 + p2);
484 | }
485 | void yz_to_pos(double y, double z, double* p1, double* p2)
486 | {
487 | double r = sqrt(y*y + z*z), r2, angle_1, angle_2, angle_3;
488 | if (r < 0.001) r = 0.001;
489 | if (r > 0.817)
490 | {
491 | y = y / r * 0.817;
492 | z = z / r * 0.817;
493 | r = 0.817;
494 | }
495 | if (r < 0.033)
496 | {
497 | y = y / r * 0.033;
498 | z = z / r * 0.033;
499 | r = 0.033;
500 | }
501 | r2 = r*r;
502 | if (y < 0.0001 && y > -0.0001)
503 | angle_1 = 1.570796327;
504 | else
505 | angle_1 = atan(z / y);
506 |
507 | angle_2 = acos((r2 + 0.0267649375) / 0.85 / r);
508 | angle_3 = acos((0.3344850625 - r2) / 0.3334125);
509 |
510 | *p1 = -angle_1 - angle_2;
511 | *p2 = 3.1415926535 - angle_3;
512 | }
513 |
514 | void proceed()
515 | {
516 | double p1, p2, y, z;
517 | p1 = d->qpos[1];
518 | p2 = d->qpos[2];
519 | pos_to_yz(p1, p2, &y, &z);
520 | y = y + leftRight * 0.01;
521 | z = z + upDown * 0.01;
522 | yz_to_pos(y, z, &p1, &p2);
523 | d->qpos[1] = p1;
524 | d->qpos[2] = p2;
525 | d->qpos[3] = 1.570796327 - p1 - p2;
526 | mj_forward(m, d);
527 | }
528 |
529 | //--------------------------------- GLFW callbacks --------------------------------------
530 |
531 | // keyboard
532 | void keyboard(GLFWwindow* window, int key, int scancode, int act, int mods)
533 | {
534 | int n;
535 |
536 | // require model
537 | if( !m )
538 | return;
539 |
540 | // do not act on release
541 | if( act==GLFW_RELEASE )
542 | {
543 | for (int i = 6; i < 12; ++i)
544 | d->ctrl[i] = 0;
545 | return;
546 | }
547 |
548 | switch( key )
549 | {
550 | case GLFW_KEY_F1: // help
551 | showhelp++;
552 | if( showhelp>2 )
553 | showhelp = 0;
554 | break;
555 |
556 | case GLFW_KEY_F2: // option
557 | showoption = !showoption;
558 | break;
559 |
560 | case GLFW_KEY_F3: // info
561 | showinfo = !showinfo;
562 | break;
563 |
564 | case GLFW_KEY_F4: // depth
565 | showdepth = !showdepth;
566 | break;
567 |
568 | case GLFW_KEY_F5: // toggle full screen
569 | showfullscreen = !showfullscreen;
570 | if( showfullscreen )
571 | glfwMaximizeWindow(window);
572 | else
573 | glfwRestoreWindow(window);
574 | break;
575 |
576 | case GLFW_KEY_F6: // stereo
577 | scn.stereo = (scn.stereo==mjSTEREO_NONE ? mjSTEREO_QUADBUFFERED : mjSTEREO_NONE);
578 | break;
579 |
580 | case GLFW_KEY_F7: // sensor figure
581 | showsensor = !showsensor;
582 | break;
583 |
584 | case GLFW_KEY_F8: // profiler
585 | showprofiler = !showprofiler;
586 | break;
587 |
588 | case GLFW_KEY_ENTER: // slow motion
589 | slowmotion = !slowmotion;
590 | break;
591 |
592 | case GLFW_KEY_SPACE: // pause
593 | paused = !paused;
594 | break;
595 |
596 | case GLFW_KEY_PAGE_UP: // previous keyreset
597 | case GLFW_KEY_PAGE_DOWN: // next keyreset
598 | if( key==GLFW_KEY_PAGE_UP )
599 | keyreset = mjMAX(-1, keyreset-1);
600 | else
601 | keyreset = mjMIN(m->nkey-1, keyreset+1);
602 |
603 | // continue with reset
604 |
605 | case GLFW_KEY_BACKSPACE: // reset
606 | mj_resetData(m, d);
607 | if( keyreset>=0 && keyreset<m->nkey )
608 | {
609 | d->time = m->key_time[keyreset];
610 | mju_copy(d->qpos, m->key_qpos+keyreset*m->nq, m->nq);
611 | mju_copy(d->qvel, m->key_qvel+keyreset*m->nv, m->nv);
612 | mju_copy(d->act, m->key_act+keyreset*m->na, m->na);
613 | }
614 | init();
615 | mj_forward(m, d);
616 | profilerupdate();
617 | sensorupdate();
618 | break;
619 |
620 | case GLFW_KEY_RIGHT: // step forward
621 | if( paused )
622 | {
623 | mj_step(m, d);
624 | profilerupdate();
625 | sensorupdate();
626 | }
627 | break;
628 |
629 | case GLFW_KEY_LEFT: // step back
630 | if( paused )
631 | {
632 | m->opt.timestep = -m->opt.timestep;
633 | cleartimers(d);
634 | mj_step(m, d);
635 | m->opt.timestep = -m->opt.timestep;
636 | profilerupdate();
637 | sensorupdate();
638 | }
639 | break;
640 |
641 | case GLFW_KEY_DOWN: // step forward 100
642 | if( paused )
643 | {
644 | cleartimers(d);
645 | for( n=0; n<100; n++ )
646 | mj_step(m,d);
647 | profilerupdate();
648 | sensorupdate();
649 | }
650 | break;
651 |
652 | case GLFW_KEY_UP: // step back 100
653 | if( paused )
654 | {
655 | m->opt.timestep = -m->opt.timestep;
656 | cleartimers(d);
657 | for( n=0; n<100; n++ )
658 | mj_step(m,d);
659 | m->opt.timestep = -m->opt.timestep;
660 | profilerupdate();
661 | sensorupdate();
662 | }
663 | break;
664 |
665 | case GLFW_KEY_ESCAPE: // free camera
666 | cam.type = mjCAMERA_FREE;
667 | break;
668 |
669 | case '=': // bigger font
670 | if( fontscale<200 )
671 | {
672 | fontscale += 50;
673 | mjr_makeContext(m, &con, fontscale);
674 | }
675 | break;
676 |
677 | case '-': // smaller font
678 | if( fontscale>100 )
679 | {
680 | fontscale -= 50;
681 | mjr_makeContext(m, &con, fontscale);
682 | }
683 | break;
684 |
685 | case '[': // previous fixed camera or free
686 | if( m->ncam && cam.type==mjCAMERA_FIXED )
687 | {
688 | if( cam.fixedcamid>0 )
689 | cam.fixedcamid--;
690 | else
691 | cam.type = mjCAMERA_FREE;
692 | }
693 | break;
694 |
695 | case ']': // next fixed camera
696 | if( m->ncam )
697 | {
698 | if( cam.type!=mjCAMERA_FIXED )
699 | {
700 | cam.type = mjCAMERA_FIXED;
701 | cam.fixedcamid = 0;
702 | }
703 | else if( cam.fixedcamid<m->ncam-1 )
704 | cam.fixedcamid++;
705 | }
706 | break;
707 |
708 | case ';': // cycle over frame rendering modes
709 | vopt.frame = mjMAX(0, vopt.frame-1);
710 | break;
711 |
712 | case '\'': // cycle over frame rendering modes
713 | vopt.frame = mjMIN(mjNFRAME-1, vopt.frame+1);
714 | break;
715 |
716 | case '.': // cycle over label rendering modes
717 | vopt.label = mjMAX(0, vopt.label-1);
718 | break;
719 |
720 | case '/': // cycle over label rendering modes
721 | vopt.label = mjMIN(mjNLABEL-1, vopt.label+1);
722 | break;
723 |
724 | case GLFW_KEY_Z:
725 | d->ctrl[4] = 1;
726 | d->ctrl[5] = 1;
727 | mj_forward(m, d);
728 | break;
729 |
730 | case GLFW_KEY_A:
731 | d->ctrl[4] = 0;
732 | d->ctrl[5] = 0;
733 | for (int i = 6; i < 14; ++i)
734 | d->qpos[i] = 0;
735 | mj_forward(m, d);
736 | break;
737 |
738 | case GLFW_KEY_X:
739 | d->ctrl[6] = 1;
740 | mj_forward(m, d);
741 | break;
742 |
743 | case GLFW_KEY_S:
744 | d->ctrl[6] = -1;
745 | mj_forward(m, d);
746 | break;
747 |
748 | case GLFW_KEY_C:
749 | d->ctrl[7] = 1;
750 | mj_forward(m, d);
751 | break;
752 |
753 | case GLFW_KEY_D:
754 | d->ctrl[7] = -1;
755 | mj_forward(m, d);
756 | break;
757 |
758 | case GLFW_KEY_V:
759 | d->ctrl[8] = 1;
760 | mj_forward(m, d);
761 | break;
762 |
763 | case GLFW_KEY_F:
764 | d->ctrl[8] = -1;
765 | mj_forward(m, d);
766 | break;
767 |
768 | case GLFW_KEY_B:
769 | d->ctrl[9] = 1;
770 | mj_forward(m, d);
771 | break;
772 |
773 | case GLFW_KEY_G:
774 | d->ctrl[9] = -1;
775 | mj_forward(m, d);
776 | break;
777 |
778 | case GLFW_KEY_N:
779 | d->ctrl[10] = 1;
780 | mj_forward(m, d);
781 | break;
782 |
783 | case GLFW_KEY_H:
784 | d->ctrl[10] = -1;
785 | mj_forward(m, d);
786 | break;
787 |
788 | case GLFW_KEY_M:
789 | d->ctrl[11] = 1;
790 | mj_forward(m, d);
791 | break;
792 |
793 | case GLFW_KEY_J:
794 | d->ctrl[11] = -1;
795 | mj_forward(m, d);
796 | break;
797 |
798 | case GLFW_KEY_K:
799 | d->qpos[0] = 0;
800 | d->qpos[1] = -1.11905318;
801 | d->qpos[2] = 1.72568888;
802 | d->qpos[3] = 0.95673764;
803 | d->qpos[4] = 1.570796327;
804 | d->qpos[5] = 0;
805 | d->qpos[6] = -0.02550968;
806 | d->qpos[7] = -0.02550968;
807 | mj_forward(m, d);
808 | break;
809 |
810 | case GLFW_KEY_Y:
811 | d->qpos[0] = 3.89 / 180 * 3.1415926;
812 | d->qpos[1] = -58.54 / 180 * 3.1415926;
813 | d->qpos[2] = 80.70 / 180 * 3.1415926;
814 | d->qpos[3] = -134.68 / 180 * 3.1415926;
815 | d->qpos[4] = -99.40 / 180 * 3.1415926;
816 | d->qpos[5] = 15.30 / 180 * 3.1415926;
817 | mj_forward(m, d);
818 | break;
819 |
820 | case GLFW_KEY_U:
821 | d->qpos[0] = -4.79 / 180 * 3.1415926;
822 | d->qpos[1] = -36.74 / 180 * 3.1415926;
823 | d->qpos[2] = 61.03 / 180 * 3.1415926;
824 | d->qpos[3] = -175.48 / 180 * 3.1415926;
825 | d->qpos[4] = -98.52 / 180 * 3.1415926;
826 | d->qpos[5] = 20.34 / 180 * 3.1415926;
827 | mj_forward(m, d);
828 | break;
829 |
830 | case GLFW_KEY_I:
831 | d->qpos[0] = 4.14 / 180 * 3.1415926;
832 | d->qpos[1] = -64.22 / 180 * 3.1415926;
833 | d->qpos[2] = 105.83 / 180 * 3.1415926;
834 | d->qpos[3] = -140.31 / 180 * 3.1415926;
835 | d->qpos[4] = -38.61 / 180 * 3.1415926;
836 | d->qpos[5] = 20.32 / 180 * 3.1415926;
837 | mj_forward(m, d);
838 | break;
839 |
840 | case GLFW_KEY_O:
841 | d->qpos[0] = 22.90 / 180 * 3.1415926;
842 | d->qpos[1] = -37.06 / 180 * 3.1415926;
843 | d->qpos[2] = 73.24 / 180 * 3.1415926;
844 | d->qpos[3] = -192.74 / 180 * 3.1415926;
845 | d->qpos[4] = -160.40 / 180 * 3.1415926;
846 | d->qpos[5] = -13.33 / 180 * 3.1415926;
847 | mj_forward(m, d);
848 | break;
849 |
850 | case GLFW_KEY_P:
851 | d->qpos[0] = -5.71 / 180 * 3.1415926;
852 | d->qpos[1] = -77.41 / 180 * 3.1415926;
853 | d->qpos[2] = 144.98 / 180 * 3.1415926;
854 | d->qpos[3] = -197.17 / 180 * 3.1415926;
855 | d->qpos[4] = -50.28 / 180 * 3.1415926;
856 | d->qpos[5] = -13.33 / 180 * 3.1415926;
857 | mj_forward(m, d);
858 | break;
859 |
860 | case GLFW_KEY_L:
861 | printf("Sensor 0: %0.4f\n", d->sensordata[0]);
862 | printf("Sensor 1: %0.4f\n", d->sensordata[1]);
863 | printf("Sensor 2: %0.4f\n", d->sensordata[2]);
864 | printf("Sensor 3: %0.4f\n", d->sensordata[3]);
865 | break;
866 |
867 | case GLFW_KEY_TAB:
868 | printf("qpos = [ %0.8f,\n", d->qpos[0]);
869 | printf(" %0.8f,\n", d->qpos[1]);
870 | printf(" %0.8f,\n", d->qpos[2]);
871 | printf(" %0.8f,\n", d->qpos[3]);
872 | printf(" %0.8f,\n", d->qpos[4]);
873 | printf(" %0.8f,\n", d->qpos[5]);
874 | printf(" %0.8f,\n", d->qpos[6]);
875 | printf(" %0.8f,\n", d->qpos[7]);
876 | printf(" %0.8f,\n", d->qpos[8]);
877 | printf(" %0.8f,\n", d->qpos[9]);
878 | printf(" %0.8f,\n", d->qpos[10]);
879 | printf(" %0.8f,\n", d->qpos[11]);
880 | printf(" %0.8f,\n", d->qpos[12]);
881 | printf(" %0.8f,\n", d->qpos[13]);
882 | break;
883 |
884 | default: // toggle flag
885 | // control keys
886 | printf("default");
887 | if( mods & GLFW_MOD_CONTROL )
888 | {
889 | if( key==GLFW_KEY_A )
890 | autoscale(window);
891 | else if( key==GLFW_KEY_L && lastfile[0] )
892 | loadmodel(window, lastfile);
893 |
894 | break;
895 | }
896 |
897 | // toggle visualization flag
898 | for( int i=0; i0 )
955 | {
956 | // right: translate; left: rotate
957 | if( button_right )
958 | newperturb = mjPERT_TRANSLATE;
959 | else if( button_left )
960 | newperturb = mjPERT_ROTATE;
961 |
962 | // perturbation onset: reset reference
963 | if( newperturb && !pert.active )
964 | mjv_initPerturb(m, d, &scn, &pert);
965 | }
966 | pert.active = newperturb;
967 |
968 | // detect double-click (250 msec)
969 | if( act==GLFW_PRESS && glfwGetTime()-lastclicktm<0.25 && button==lastbutton )
970 | {
971 | // determine selection mode
972 | int selmode;
973 | if( button==GLFW_MOUSE_BUTTON_LEFT )
974 | selmode = 1;
975 | else if( mods & GLFW_MOD_CONTROL )
976 | selmode = 3;
977 | else
978 | selmode = 2;
979 |
980 | // get current window size
981 | int width, height;
982 | glfwGetWindowSize(window, &width, &height);
983 |
984 | // find geom and 3D click point, get corresponding body
985 | mjtNum selpnt[3];
986 | int selgeom = mjv_select(m, d, &vopt,
987 | (mjtNum)width/(mjtNum)height,
988 | (mjtNum)lastx/(mjtNum)width,
989 | (mjtNum)(height-lasty)/(mjtNum)height,
990 | &scn, selpnt);
991 | int selbody = (selgeom>=0 ? m->geom_bodyid[selgeom] : 0);
992 |
993 | // set lookat point, start tracking if requested
994 | if( selmode==2 || selmode==3 )
995 | {
996 | // copy selpnt if geom clicked
997 | if( selgeom>=0 )
998 | mju_copy3(cam.lookat, selpnt);
999 |
1000 | // switch to tracking camera
1001 | if( selmode==3 && selbody )
1002 | {
1003 | cam.type = mjCAMERA_TRACKING;
1004 | cam.trackbodyid = selbody;
1005 | cam.fixedcamid = -1;
1006 | }
1007 | }
1008 |
1009 | // set body selection
1010 | else
1011 | {
1012 | if( selbody )
1013 | {
1014 | // record selection
1015 | pert.select = selbody;
1016 |
1017 | // compute localpos
1018 | mjtNum tmp[3];
1019 | mju_sub3(tmp, selpnt, d->xpos+3*pert.select);
1020 | mju_mulMatTVec(pert.localpos, d->xmat+9*pert.select, tmp, 3, 3);
1021 | }
1022 | else
1023 | pert.select = 0;
1024 | }
1025 |
1026 | // stop perturbation on select
1027 | pert.active = 0;
1028 | }
1029 |
1030 | // save info
1031 | if( act==GLFW_PRESS )
1032 | {
1033 | lastbutton = button;
1034 | lastclicktm = glfwGetTime();
1035 | }
1036 | }
1037 |
1038 |
1039 | // mouse move
1040 | void mouse_move(GLFWwindow* window, double xpos, double ypos)
1041 | {
1042 | // no buttons down: nothing to do
1043 | if( !button_left && !button_middle && !button_right )
1044 | return;
1045 |
1046 | // compute mouse displacement, save
1047 | double dx = xpos - lastx;
1048 | double dy = ypos - lasty;
1049 | lastx = xpos;
1050 | lasty = ypos;
1051 |
1052 | // require model
1053 | if( !m )
1054 | return;
1055 |
1056 | // get current window size
1057 | int width, height;
1058 | glfwGetWindowSize(window, &width, &height);
1059 |
1060 | // get shift key state
1061 | bool mod_shift = (glfwGetKey(window, GLFW_KEY_LEFT_SHIFT)==GLFW_PRESS ||
1062 | glfwGetKey(window, GLFW_KEY_RIGHT_SHIFT)==GLFW_PRESS);
1063 |
1064 | // determine action based on mouse button
1065 | mjtMouse action;
1066 | if( button_right )
1067 | action = mod_shift ? mjMOUSE_MOVE_H : mjMOUSE_MOVE_V;
1068 | else if( button_left )
1069 | action = mod_shift ? mjMOUSE_ROTATE_H : mjMOUSE_ROTATE_V;
1070 | else
1071 | action = mjMOUSE_ZOOM;
1072 |
1073 | // move perturb or camera
1074 | if( pert.active )
1075 | mjv_movePerturb(m, d, action, dx/height, dy/height, &scn, &pert);
1076 | else
1077 | mjv_moveCamera(m, action, dx/height, dy/height, &scn, &cam);
1078 | }
1079 |
1080 |
1081 | // scroll
1082 | void scroll(GLFWwindow* window, double xoffset, double yoffset)
1083 | {
1084 | // require model
1085 | if( !m )
1086 | return;
1087 |
1088 | // scroll: emulate vertical mouse motion = 5% of window height
1089 | mjv_moveCamera(m, mjMOUSE_ZOOM, 0, -0.05*yoffset, &scn, &cam);
1090 | }
1091 |
1092 |
1093 | // drop
1094 | void drop(GLFWwindow* window, int count, const char** paths)
1095 | {
1096 | // make sure list is non-empty
1097 | return;
1098 | }
1099 |
1100 |
1101 | //-------------------------------- simulation and rendering -----------------------------
1102 |
1103 | // make option string
1104 | void makeoptionstring(const char* name, char key, char* buf)
1105 | {
1106 | int i=0, cnt=0;
1107 |
1108 | // copy non-& characters
1109 | while( name[i] && i<50 )
1110 | {
1111 | if( name[i]!='&' )
1112 | buf[cnt++] = name[i];
1113 |
1114 | i++;
1115 | }
1116 |
1117 | // finish
1118 | buf[cnt] = ' ';
1119 | buf[cnt+1] = '(';
1120 | buf[cnt+2] = key;
1121 | buf[cnt+3] = ')';
1122 | buf[cnt+4] = 0;
1123 | }
1124 |
1125 |
1126 | // advance simulation
1127 | void simulation(void)
1128 | {
1129 | // no model
1130 | if( !m )
1131 | return;
1132 |
1133 | // clear timers
1134 | cleartimers(d);
1135 |
1136 | // paused
1137 | if( paused )
1138 | {
1139 | // apply pose perturbations, run mj_forward
1140 | if( pert.active )
1141 | {
1142 | mjv_applyPerturbPose(m, d, &pert, 1); // move mocap and dynamic bodies
1143 | mj_forward(m, d);
1144 | }
1145 | }
1146 |
1147 | // running
1148 | else
1149 | {
1150 | // slow motion factor: 10x
1151 | mjtNum factor = (slowmotion ? 10 : 1);
1152 |
1153 | // advance effective simulation time by 1/refreshrate
1154 | mjtNum startsimtm = d->time;
1155 | while( (d->time-startsimtm)*factor<1.0/refreshrate )
1156 | {
1157 | // clear old perturbations, apply new
1158 | mju_zero(d->xfrc_applied, 6*m->nbody);
1159 | if( pert.select>0 )
1160 | {
1161 | mjv_applyPerturbPose(m, d, &pert, 0); // move mocap bodies only
1162 | mjv_applyPerturbForce(m, d, &pert);
1163 | }
1164 |
1165 | // run mj_step and count
1166 | mj_step(m, d);
1167 |
1168 | // break on reset
1169 | if( d->timesolver_iter )
1227 | {
1228 | int ind = mjMIN(d->solver_iter-1,mjNSOLVER-1);
1229 | solerr = mju_min(d->solver[ind].improvement, d->solver[ind].gradient);
1230 | if( solerr==0 )
1231 | solerr = mju_max(d->solver[ind].improvement, d->solver[ind].gradient);
1232 | }
1233 | solerr = mju_log10(mju_max(mjMINVAL, solerr));
1234 |
1235 | // status
1236 | sprintf(status, "%-20.1f\n%d (%d con)\n%.3f\n%.0f\n%.2f\n%.1f (%d it)\n%.1f %.1f\n%s\n%s\n%s\n%s",
1237 | d->time,
1238 | d->nefc,
1239 | d->ncon,
1240 | d->timer[mjTIMER_STEP].duration / mjMAX(1, d->timer[mjTIMER_STEP].number),
1241 | 1.0/(glfwGetTime()-lastrendertm),
1242 | d->energy[0]+d->energy[1],
1243 | solerr,
1244 | d->solver_iter,
1245 | mju_log10(mju_max(mjMINVAL,d->solver_fwdinv[0])),
1246 | mju_log10(mju_max(mjMINVAL,d->solver_fwdinv[1])),
1247 | camstr,
1248 | mjFRAMESTRING[vopt.frame],
1249 | mjLABELSTRING[vopt.label],
1250 | keyresetstr
1251 | );
1252 | }
1253 |
1254 | // FPS timing statistics
1255 | lastrendertm = glfwGetTime();
1256 |
1257 | // update scene
1258 | mjv_updateScene(m, d, &vopt, &pert, &cam, mjCAT_ALL, &scn);
1259 |
1260 | // render
1261 | mjr_render(rect, &scn, &con);
1262 |
1263 | // show depth map
1264 | if( showdepth )
1265 | {
1266 | // get the depth buffer
1267 | mjr_readPixels(NULL, depth_buffer, rect, &con);
1268 |
1269 | // convert to RGB, subsample by 4
1270 | for( int r=0; rctrl[0] = 1;
1374 | d->ctrl[1] = 1;
1375 | d->ctrl[2] = 1;
1376 | d->ctrl[3] = 1;
1377 |
1378 | d->qpos[0] = 0;
1379 | d->qpos[1] = -1.11905318;
1380 | d->qpos[2] = 1.72568888;
1381 | d->qpos[3] = 0.95673764;
1382 | d->qpos[4] = 1.570796327;
1383 | d->qpos[5] = 0;
1384 |
1385 | d->qpos[14] = -0.62;
1386 | d->qpos[15] = 0.32;
1387 | d->qpos[17] = cos(0.16 * pi);
1388 | d->qpos[20] = sin(0.16 * pi);
1389 |
1390 | d->qpos[21] = -0.72;
1391 | d->qpos[22] = 0.38;
1392 | d->qpos[24] = cos(0.18 * pi);
1393 | d->qpos[27] = sin(0.18 * pi);
1394 |
1395 | d->qpos[28] = -0.835;
1396 | d->qpos[29] = 0.425;
1397 | d->qpos[31] = cos(0.195 * pi);
1398 | d->qpos[34] = sin(0.195 * pi);
1399 |
1400 | d->qpos[35] = -0.935;
1401 | d->qpos[36] = 0.46;
1402 | d->qpos[38] = cos(0.23 * pi);
1403 | d->qpos[41] = sin(0.23 * pi);
1404 | }
1405 |
1406 |
1407 | //-------------------------------- main function ----------------------------------------
1408 |
1409 | int main(int argc, const char** argv)
1410 | {
1411 | // print version, check compatibility
1412 | printf("MuJoCo Pro library version %.2lf\n", 0.01*mj_version());
1413 | if( mjVERSION_HEADER!=mj_version() )
1414 | mju_error("Headers and library have different versions");
1415 |
1416 | // activate MuJoCo license
1417 | mj_activate("mjkey.txt");
1418 |
1419 | // init GLFW
1420 | if (!glfwInit())
1421 | return 1;
1422 |
1423 | // get refreshrate
1424 | refreshrate = glfwGetVideoMode(glfwGetPrimaryMonitor())->refreshRate;
1425 |
1426 | // multisampling
1427 | glfwWindowHint(GLFW_SAMPLES, 4);
1428 |
1429 | // create window
1430 | GLFWwindow* window = glfwCreateWindow(1200, 900, "UR-5 Simulation", NULL, NULL);
1431 | if( !window )
1432 | {
1433 | glfwTerminate();
1434 | return 1;
1435 | }
1436 |
1437 | // make context current, disable v-sync
1438 | glfwMakeContextCurrent(window);
1439 | glfwSwapInterval(1);
1440 |
1441 | // save window-to-framebuffer pixel scaling (needed for OSX scaling)
1442 | int width, width1, height;
1443 | glfwGetWindowSize(window, &width, &height);
1444 | glfwGetFramebufferSize(window, &width1, &height);
1445 | window2buffer = (double)width1 / (double)width;
1446 |
1447 | // init MuJoCo rendering, get OpenGL info
1448 | mjv_makeScene(&scn, 1000);
1449 | mjv_defaultCamera(&cam);
1450 | mjv_defaultOption(&vopt);
1451 | mjr_defaultContext(&con);
1452 | mjr_makeContext(m, &con, fontscale);
1453 | profilerinit();
1454 | sensorinit();
1455 |
1456 | // set GLFW callbacks
1457 | glfwSetKeyCallback(window, keyboard);
1458 | glfwSetCursorPosCallback(window, mouse_move);
1459 | glfwSetMouseButtonCallback(window, mouse_button);
1460 | glfwSetScrollCallback(window, scroll);
1461 | glfwSetDropCallback(window, drop);
1462 | glfwSetWindowRefreshCallback(window, render);
1463 |
1464 | // set MuJoCo time callback for profiling
1465 | mjcb_time = timer;
1466 |
1467 | // load model if filename given as argument
1468 | loadmodel(window, "lab_env.xml");
1469 | init();
1470 |
1471 | // main loop
1472 | while( !glfwWindowShouldClose(window) )
1473 | {
1474 | // simulate and render
1475 | render(window);
1476 |
1477 | // handle events (this calls all callbacks)
1478 | glfwPollEvents();
1479 | }
1480 |
1481 | // delete everything we allocated
1482 | mj_deleteData(d);
1483 | mj_deleteModel(m);
1484 | mjr_freeContext(&con);
1485 | mjv_freeScene(&scn);
1486 |
1487 | // terminate
1488 | glfwTerminate();
1489 | mj_deactivate();
1490 | return 0;
1491 | }
1492 |
--------------------------------------------------------------------------------
/xmls/lab_env.xml:
--------------------------------------------------------------------------------
(MJCF scene description: a UR5 arm with a Robotiq 85 gripper assembled from the meshes in xmls/meshes/, four free cubes, per-cube touch sensors, and the workbench_camera and upper_camera cameras; the XML markup itself is not recoverable.)
--------------------------------------------------------------------------------
/xmls/light_env.xml:
--------------------------------------------------------------------------------
(Stripped-down MJCF scene used by environment.py only to locate the gripper in rendered images, with the same arm/gripper kinematics and cameras; the XML markup itself is not recoverable.)
--------------------------------------------------------------------------------
/xmls/meshes/base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/base.stl
--------------------------------------------------------------------------------
/xmls/meshes/forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/forearm.stl
--------------------------------------------------------------------------------
/xmls/meshes/kinova_robotiq_coupler.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/kinova_robotiq_coupler.stl
--------------------------------------------------------------------------------
/xmls/meshes/robotiq_85_base_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/robotiq_85_base_link.stl
--------------------------------------------------------------------------------
/xmls/meshes/robotiq_85_finger_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/robotiq_85_finger_link.stl
--------------------------------------------------------------------------------
/xmls/meshes/robotiq_85_finger_tip_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/robotiq_85_finger_tip_link.stl
--------------------------------------------------------------------------------
/xmls/meshes/robotiq_85_inner_knuckle_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/robotiq_85_inner_knuckle_link.stl
--------------------------------------------------------------------------------
/xmls/meshes/robotiq_85_knuckle_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/robotiq_85_knuckle_link.stl
--------------------------------------------------------------------------------
/xmls/meshes/shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/shoulder.stl
--------------------------------------------------------------------------------
/xmls/meshes/upperarm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/upperarm.stl
--------------------------------------------------------------------------------
/xmls/meshes/wrist1.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/wrist1.stl
--------------------------------------------------------------------------------
/xmls/meshes/wrist2.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/wrist2.stl
--------------------------------------------------------------------------------
/xmls/meshes/wrist3.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cxy1997/Robotiq-UR5/6c6d53f6a7d525f83e103ce594f166520d9390ee/xmls/meshes/wrist3.stl
--------------------------------------------------------------------------------