├── .gitignore
├── DARTset.py
├── DARTset_utils.py
├── README.md
├── assets
├── hand_mesh.obj
└── teaser.png
└── postprocess
├── LICENSE
├── README.md
├── convert.py
├── data
├── hand_01.mtl
├── hand_01.obj
├── hand_02.mtl
├── hand_02.obj
├── hand_texture.png
└── test_hand.obj
├── extra_data
├── background.png
├── hand.obj
├── hand_mesh
│ ├── hand.mtl
│ ├── hand.obj
│ ├── hand_texture.png
│ └── hand_texture2.png
├── hand_wrist
│ ├── hand_01.mtl
│ ├── hand_01.obj
│ ├── hand_02.mtl
│ ├── hand_02.obj
│ └── hand_texture.png
├── mean_mano_params.pkl
├── park.png
└── test.obj
├── reprojection.py
├── retargeting
├── basis.py
└── mesh_utils.py
├── test
└── from_unity.obj
├── tmp.jpg
└── utils
├── ico_sphere.py
├── libs.py
├── losses
├── geodesic.py
├── loss.py
├── percep_loss.py
└── ssim.py
├── mano_wrist.py
└── utils.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # misc
132 | data/DARTset
133 | assets/mano_v1_2
134 | .idea
--------------------------------------------------------------------------------
/DARTset.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pickle
3 |
4 | import cv2
5 | import imageio
6 | import numpy as np
7 | import torch
8 | from manotorch.manolayer import ManoLayer
9 | from pytorch3d.io import load_obj
10 |
11 | from DARTset_utils import (aa_to_rotmat, fit_ortho_param, ortho_project,
12 | plot_hand, rotmat_to_aa)
13 |
RAW_IMAGE_SIZE = 512  # resolution of the raw RGBA renders
BG_IMAGE_SIZE = 384  # resolution of the background-composited ("_wbg") images
DATA_ROOT = "./data"  # root folder holding the DARTset data (kept relative; see get_image)
17 |
18 |
class DARTset():
    """DART hand dataset: rendered hand images with MANO ground truth.

    Indexes every image found under ``data/DARTset/<data_split>`` and
    exposes per-sample accessors for the image, silhouette mask, 2D/3D
    joints, vertex UVDs, a fitted orthographic camera and the MANO pose.
    """

    def __init__(self, data_split="train", use_full_wrist=True, load_wo_background=False):
        """
        Args:
            data_split (str): split sub-folder name, "train" or "test".
            use_full_wrist (bool): keep the extended wrist vertices; when
                False, meshes are truncated to the first 778 vertices
                (plain MANO topology).
            load_wo_background (bool): load the raw 512px RGBA renders
                instead of the 384px background-composited images.
        """

        self.name = "DARTset"
        self.data_split = data_split
        self.root = os.path.join(DATA_ROOT, self.name, self.data_split)
        self.load_wo_background = load_wo_background
        self.raw_img_size = RAW_IMAGE_SIZE
        # Raw renders are 512px; background-composited images are 384px.
        self.img_size = RAW_IMAGE_SIZE if load_wo_background else BG_IMAGE_SIZE

        self.use_full_wrist = use_full_wrist

        # MANO flat-hand mean pose (45,); added back onto raw finger poses
        # in get_mano_pose (the raw data stores mean-relative poses).
        self.MANO_pose_mean = ManoLayer(joint_rot_mode="axisang",
                                        use_pca=False,
                                        mano_assets_root="assets/mano_v1_2",
                                        center_idx=0,
                                        flat_hand_mean=False).th_hands_mean.numpy().reshape(-1)

        obj_filename = os.path.join('./assets/hand_mesh.obj')
        _, faces, _ = load_obj(
            obj_filename,
            device="cpu",
            load_textures=False,
        )
        # Permutation from the raw (Unity export) joint order to the joint
        # order used by every accessor below.
        self.reorder_idx = [0, 13, 14, 15, 20, 1, 2, 3, 16, 4, 5, 6, 17, 10, 11, 12, 19, 7, 8, 9, 18]
        self.hand_faces = faces[0].numpy()

        self.load_dataset()

    def load_dataset(self):
        """Scan part folders and load all per-part annotation pickles."""

        self.image_paths = []
        self.raw_mano_param = []
        self.joints_3d = []
        self.verts_3d_paths = []
        self.joints_2d = []

        # Image part folders are the ones that are neither "*_verts"
        # (per-sample vertices) nor "*_wbg" (with-background) companions.
        image_parts = [
            r for r in os.listdir(self.root)
            if os.path.isdir(os.path.join(self.root, r)) and "verts" not in r and "wbg" not in r
        ]
        image_parts = sorted(image_parts)

        for imgs_dir in image_parts:
            imgs_path = os.path.join(self.root, imgs_dir)
            data_record = pickle.load(open(os.path.join(self.root, f"part_{imgs_dir}.pkl"), "rb"))
            for k in range(len(data_record["pose"])):
                self.image_paths.append(os.path.join(imgs_path, data_record["img"][k]))
                self.raw_mano_param.append(data_record["pose"][k].astype(np.float32))
                self.joints_3d.append(data_record["joint3d"][k].astype(np.float32))
                self.joints_2d.append(data_record["joint2d"][k].astype(np.float32))
                # Vertices for image "<part>/x.png" live in "<part>_verts/x.pkl".
                verts_3d_path = os.path.join(imgs_path + "_verts", data_record["img"][k].replace(".png", ".pkl"))
                self.verts_3d_paths.append(verts_3d_path)

        self.sample_idxs = list(range(len(self.image_paths)))

    def __len__(self):
        return len(self.image_paths)

    def __getitem__(self, idx):
        """Return one sample as a dict of numpy arrays (plus the side label)."""
        return {
            "image": self.get_image(idx),
            "joints_3d": self.get_joints_3d(idx),
            "joints_2d": self.get_joints_2d(idx),
            "joints_uvd": self.get_joints_uvd(idx),
            "verts_uvd": self.get_verts_uvd(idx),
            "ortho_intr": self.get_ortho_intr(idx),
            "sides": self.get_sides(idx),
            "mano_pose": self.get_mano_pose(idx),
            "image_mask": self.get_image_mask(idx),
        }

    def get_joints_3d(self, idx):
        """Return (21, 3) joints, reordered, centered on joint 9, shifted to z=0.5."""
        joints = self.joints_3d[idx].copy()
        # * Transfer from UNITY coordinate system (negate y and z axes)
        joints[:, 1:] = -joints[:, 1:]
        joints = joints[self.reorder_idx]
        joints = joints - joints[9] + np.array(
            [0, 0, 0.5])  # * We use ortho projection, so we need to shift the center of the hand to the origin
        return joints

    def get_verts_3d(self, idx):
        """Return hand mesh vertices (float32), aligned with get_joints_3d."""
        verts = pickle.load(open(self.verts_3d_paths[idx], "rb"))
        # * Transfer from UNITY coordinate system (negate y and z axes)
        verts[:, 1:] = -verts[:, 1:]
        # Stored vertices appear to be relative to joint 5; re-anchor them
        # onto the recentered joints from get_joints_3d.
        verts = verts + self.get_joints_3d(idx)[5]
        if not self.use_full_wrist:
            verts = verts[:778]
        verts = verts.astype(np.float32)
        return verts

    def get_joints_2d(self, idx):
        """Return (21, 2) pixel joints, rescaled to the loaded image size."""
        joints_2d = self.joints_2d[idx].copy()[self.reorder_idx]
        joints_2d = joints_2d / self.raw_img_size * self.img_size
        return joints_2d

    def get_image_path(self, idx):
        return self.image_paths[idx]

    def get_ortho_intr(self, idx):
        """Fit and return orthographic camera params [f, tx, ty] for this sample."""
        ortho_cam = fit_ortho_param(self.get_joints_3d(idx), self.get_joints_2d(idx))
        return ortho_cam

    def get_image(self, idx):
        """Return the RGB image (raw RGBA render or background-composited version)."""
        path = self.image_paths[idx]
        if self.load_wo_background:
            img = np.array(imageio.imread(path, pilmode="RGBA"), dtype=np.uint8)
            img = img[:, :, :3]
        else:
            # Composited images live next door in the "<part>_wbg" folder.
            # NOTE(review): os.path.join(*path.split("/")) drops a leading
            # "/", so this assumes DATA_ROOT stays a relative path.
            path = os.path.join(*path.split("/")[:-2], path.split("/")[-2] + "_wbg", path.split("/")[-1])
            img = cv2.imread(path)[..., ::-1]  # BGR -> RGB

        return img

    def get_image_mask(self, idx):
        """Return a {0, 255} float32 silhouette mask from the render's alpha channel."""
        path = self.image_paths[idx]
        image = np.array(imageio.imread(path, pilmode="RGBA"), dtype=np.uint8)
        image = cv2.resize(image, dsize=(self.img_size, self.img_size))
        return (image[:, :, 3] >= 128).astype(np.float32) * 255.0

    def get_joints_uvd(self, idx):
        """Return (21, 3) joints as pixel u, v plus camera-space depth."""
        uv = self.get_joints_2d(idx)
        d = self.get_joints_3d(idx)[:, 2:]  # (21, 1)
        uvd = np.concatenate((uv, d), axis=1)
        return uvd

    def get_verts_uvd(self, idx):
        """Return (V, 3) vertices as ortho-projected u, v plus depth."""
        v3d = self.get_verts_3d(idx)
        ortho_cam = self.get_ortho_intr(idx)
        ortho_proj_verts = ortho_project(v3d, ortho_cam)
        d = v3d[:, 2:]
        uvd = np.concatenate((ortho_proj_verts, d), axis=1)
        return uvd

    def get_raw_mano_param(self, idx):
        """Return the raw (16, 3) axis-angle pose exactly as stored on disk."""
        return self.raw_mano_param[idx].copy()

    def get_mano_pose(self, idx):
        """Return the (48,) MANO axis-angle pose in camera coordinates."""
        pose = self.get_raw_mano_param(idx)  # [16, 3]

        # * Transfer from UNITY coordinate system: rotate the global (root)
        # rotation by 180 degrees about the x axis (diag(1, -1, -1)).
        unity2cam = np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]]).astype(np.float32)
        root = rotmat_to_aa(unity2cam @ aa_to_rotmat(pose[0]))[None]
        # Finger poses are stored mean-relative; add the MANO mean back.
        new_pose = np.concatenate([root.reshape(-1), pose[1:].reshape(-1) + self.MANO_pose_mean], axis=0)  # [48]
        return new_pose.astype(np.float32)

    def get_mano_shape(self, idx):
        """Return the (10,) MANO shape; DART uses the mean (all-zero) shape."""
        return np.zeros((10), dtype=np.float32)

    def get_sides(self, idx):
        """All DART samples are right hands."""
        return "right"
171 |
172 |
if __name__ == "__main__":

    # Visual sanity check: draw GT / UVD / ortho-projected / MANO-derived
    # joints over each sample and show the composited result.
    dart_set = DARTset(data_split="test")

    # ManoLayer loads the MANO assets from disk; it is loop-invariant, so
    # build it once instead of once per sample.
    mano = ManoLayer(joint_rot_mode="axisang",
                     use_pca=False,
                     mano_assets_root="assets/mano_v1_2",
                     center_idx=9,
                     flat_hand_mean=True)

    for i in range(len(dart_set)):
        output = dart_set[i]

        image = output["image"]
        mask = (output["image_mask"]).astype(np.uint8)

        joints_2d = output["joints_2d"]
        joints_3d = output["joints_3d"]
        joints_uvd = output["joints_uvd"]
        verts_uvd = output["verts_uvd"]
        ortho_intr = output["ortho_intr"]
        mano_pose = output["mano_pose"]

        # Re-pose MANO from the stored parameters and project with the
        # per-sample orthographic camera; should align with joints_2d.
        mano_joints = mano(torch.tensor(mano_pose).unsqueeze(0)).joints.numpy()[0]
        mano_2d = ortho_project(mano_joints, ortho_intr)

        proj_2d = ortho_project(joints_3d, ortho_intr)

        frame_1 = image.copy()
        # Put the binary mask into one channel for the red overlay.
        mask = mask[:, :, None]
        mask = np.concatenate([mask, mask * 0, mask * 0], axis=2)
        frame_2 = cv2.addWeighted(frame_1, 0.5, mask, 0.5, 0)

        all_2d_opt = {"ortho_proj": proj_2d, "gt": joints_2d, "uv": joints_uvd[:, :2], "mano_2d": mano_2d}
        plot_hand(frame_1, all_2d_opt["uv"], linewidth=1)
        plot_hand(frame_1, all_2d_opt["gt"], linewidth=2)
        plot_hand(frame_1, all_2d_opt["ortho_proj"], linewidth=1)
        plot_hand(frame_1, all_2d_opt["mano_2d"], linewidth=1)

        img_list = [image, frame_1, frame_2]
        comb_image = np.hstack(img_list)

        # Images are RGB in memory; swap channels so cv2.imshow (which
        # expects BGR) displays them correctly.
        comb_image = cv2.cvtColor(comb_image, cv2.COLOR_BGR2RGB)
        cv2.imshow("comb_image", comb_image)
        cv2.waitKey(0)
218 |
--------------------------------------------------------------------------------
/DARTset_utils.py:
--------------------------------------------------------------------------------
1 | from typing import Union
2 |
3 | import cv2
4 | import numpy as np
5 | import torch
6 | from pytorch3d.transforms import (axis_angle_to_matrix, matrix_to_quaternion, quaternion_to_axis_angle)
7 |
8 |
class Compose:

    def __init__(self, transforms: list):
        """Compose several rotation-transform functions into one callable.
        This transform does not support torchscript.

        Args:
            transforms (list): transform functions applied in order.
        """
        self.transforms = transforms

    def __call__(self, rotation: Union[torch.Tensor, np.ndarray], convention: str = 'xyz', **kwargs):
        """Run every transform over *rotation*; numpy in -> numpy out."""
        convention = convention.lower()
        if len(convention) != 3 or set(convention) != set('xyz'):
            raise ValueError(f'Invalid convention {convention}.')

        is_numpy = isinstance(rotation, np.ndarray)
        if is_numpy:
            rotation = torch.FloatTensor(rotation)
        elif not isinstance(rotation, torch.Tensor):
            raise TypeError('Type of rotation should be torch.Tensor or numpy.ndarray')

        for fn in self.transforms:
            # Pass the convention only to transforms that declare it.
            if 'convention' in fn.__code__.co_varnames:
                rotation = fn(rotation, convention.upper(), **kwargs)
            else:
                rotation = fn(rotation, **kwargs)

        if is_numpy:
            rotation = rotation.detach().cpu().numpy()
        return rotation
39 |
40 |
def aa_to_rotmat(axis_angle: Union[torch.Tensor, np.ndarray]) -> Union[torch.Tensor, np.ndarray]:
    """
    Convert axis-angle vectors to rotation matrices.
    Args:
        axis_angle (Union[torch.Tensor, numpy.ndarray]): input shape
            should be (..., 3). ndim of input is unlimited.

    Returns:
        Union[torch.Tensor, numpy.ndarray]: shape would be (..., 3, 3).

    Raises:
        ValueError: if the trailing dimension is not 3.
    """
    if axis_angle.shape[-1] != 3:
        # Fixed message: a stray "f" before the interpolated shape was a typo
        # (the f-string prefix had been duplicated into the text).
        raise ValueError(f'Invalid input axis angles shape {axis_angle.shape}.')
    t = Compose([axis_angle_to_matrix])
    return t(axis_angle)
55 |
56 |
def rotmat_to_aa(matrix: Union[torch.Tensor, np.ndarray]) -> Union[torch.Tensor, np.ndarray]:
    """Convert rotation matrices to axis angles.

    Args:
        matrix (Union[torch.Tensor, numpy.ndarray]): input shape
            should be (..., 3, 3). ndim of input is unlimited.

    Returns:
        Union[torch.Tensor, numpy.ndarray]: shape would be (..., 3).

    Raises:
        ValueError: if the trailing two dimensions are not (3, 3).
    """
    if matrix.shape[-1] != 3 or matrix.shape[-2] != 3:
        # Fixed message: a stray "f" before the interpolated shape was a typo
        # (the f-string prefix had been duplicated into the text).
        raise ValueError(f'Invalid rotation matrix shape {matrix.shape}.')
    t = Compose([matrix_to_quaternion, quaternion_to_axis_angle])
    return t(matrix)
73 |
74 |
def fit_ortho_param(joints3d: np.ndarray, joints2d: np.ndarray) -> np.ndarray:
    """Fit orthographic camera parameters mapping 3D joints onto 2D joints.

    Solves the least-squares system u = f*x + tx, v = f*y + ty over all
    joints via the normal equations.

    Args:
        joints3d: (N, 3) 3D joint positions; only x, y are used.
        joints2d: (N, 2) 2D joint positions in pixels.

    Returns:
        np.ndarray of shape (3,): [f, tx, ty].
    """
    joints3d_xy = joints3d[:, :2].reshape(-1)[:, np.newaxis]  # (2N, 1), interleaved x, y
    targets = joints2d.reshape(-1)[:, np.newaxis]  # (2N, 1), interleaved u, v
    # Indicator columns: even rows (x -> u) pick tx, odd rows (y -> v) pick ty.
    pad2 = (np.arange(targets.shape[0]) % 2)[:, np.newaxis]
    pad1 = 1 - pad2
    jM = np.concatenate([joints3d_xy, pad1, pad2], axis=1)  # (2N, 3)
    # Solve the normal equations directly; np.linalg.solve is numerically
    # more stable (and cheaper) than forming the explicit inverse.
    ortho_param = np.linalg.solve(jM.T @ jM, jM.T @ targets)
    return ortho_param.reshape(-1)  # [f, tx, ty]
89 |
90 |
def ortho_project(points3d, ortho_cam):
    """Project (N, 3) points with orthographic camera params [f, tx, ty].

    Returns an (N, 2) array of pixel coordinates u = f*x + tx, v = f*y + ty.
    """
    scale, trans_u, trans_v = ortho_cam[0], ortho_cam[1], ortho_cam[2]
    u = scale * points3d[:, 0] + trans_u
    v = scale * points3d[:, 1] + trans_v
    return np.stack([u, v], axis=1)
97 |
98 |
class COLOR_CONST():
    """Color palette constants (RGB, 0-1 range) for hand visualization."""

    colors = {
        "colors": [228 / 255, 178 / 255, 148 / 255],
        "light_pink": [0.9, 0.7, 0.7],  # This is used to do no-3d
        "light_blue": [102 / 255, 209 / 255, 243 / 255],
    }

    # One color per joint, 21 entries: wrist, then 4 joints per finger in
    # the order thumb, index, middle, ring, little.
    color_hand_joints = [
        [1.0, 0.0, 0.0],  # wrist
        [0.0, 0.4, 0.0],
        [0.0, 0.6, 0.0],
        [0.0, 0.8, 0.0],
        [0.0, 1.0, 0.0],  # thumb
        [0.0, 0.0, 0.6],
        [0.0, 0.0, 1.0],
        [0.2, 0.2, 1.0],
        [0.4, 0.4, 1.0],  # index
        [0.0, 0.4, 0.4],
        [0.0, 0.6, 0.6],
        [0.0, 0.8, 0.8],
        [0.0, 1.0, 1.0],  # middle
        [0.4, 0.4, 0.0],
        [0.6, 0.6, 0.0],
        [0.8, 0.8, 0.0],
        [1.0, 1.0, 0.0],  # ring
        [0.4, 0.0, 0.4],
        [0.6, 0.0, 0.6],
        [0.8, 0.0, 0.8],
        [1.0, 0.0, 1.0],
    ]  # little
129 |
130 |
def plot_hand(image, coords_hw, vis=None, linewidth=3):
    """Draw a 21-joint hand skeleton onto *image* in place.

    Args:
        image: HxWx3 uint8 image; modified in place by cv2 drawing calls.
        coords_hw: (21, 2) joint coordinates in pixels. NOTE(review): the
            name suggests (row, col) but callers pass (u, v) joints_2d and
            column 0 is used as x — confirm against callers.
        vis: optional (21,) visibility flags; bones with an invisible
            endpoint are skipped. Defaults to all visible.
        linewidth: bone thickness; joint radius is 2 * linewidth.

    Returns:
        The same image array, for chaining.
    """
    # Flip RGB -> BGR so the palette matches OpenCV's channel order.
    colors = np.array(COLOR_CONST.color_hand_joints)[:, ::-1]

    # Each finger is a chain wrist(0) -> base -> ... -> tip; every bone
    # takes the color of its distal joint. This replaces the previously
    # hand-unrolled 20-entry table.
    bones = []
    for base in range(1, 21, 4):  # thumb, index, middle, ring, little
        chain = [0, base, base + 1, base + 2, base + 3]
        for a, b in zip(chain[:-1], chain[1:]):
            bones.append(((a, b), colors[b, :]))

    if vis is None:
        vis = np.ones(coords_hw.shape[0], dtype=bool)

    for (a, b), color in bones:
        if not (vis[a] and vis[b]):
            continue
        pt_a = (int(coords_hw[a, 0]), int(coords_hw[a, 1]))
        pt_b = (int(coords_hw[b, 0]), int(coords_hw[b, 1]))
        cv2.line(image, pt_a, pt_b, color=color * 255, thickness=linewidth)

    for i in range(coords_hw.shape[0]):
        center = (int(coords_hw[i, 0]), int(coords_hw[i, 1]))
        cv2.circle(image, center, radius=2 * linewidth, thickness=-1, color=colors[i, :] * 255)

    return image
182 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
DART: Articulated Hand Model with Diverse Accessories and Rich Textures
6 |
7 | Daiheng Gao*
8 | ·
9 | Yuliang Xiu*
10 | ·
11 | Kailin Li*
12 | ·
13 | Lixin Yang*
14 |
15 | Feng Wang
16 | ·
17 | Peng Zhang
18 | ·
19 | Bang Zhang
20 | ·
21 | Cewu Lu
22 | ·
23 | Ping Tan
24 |
25 | NeurIPS 2022 (Datasets and Benchmarks Track)
26 |
27 |

28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | ## Update
46 |
47 | - [2024.04.01] **DARTset could be easily cloned from Huggingface/Dataset** at [DARTset](https://huggingface.co/datasets/Yuliang/DART)
48 | - [2022.10.07] **DART's raw textures+accessories are released** at [RAW](https://drive.google.com/file/d/1_KPzMFjXLHagPhhos7NXvzdzMMN-b1bd/view)
49 | - [2022.09.29] **DART Unity GUI's source code is publicly available** at [GUI](https://drive.google.com/file/d/1xtfc-fMHR5ax-e5S5Drx53Rm2ddL5mHs/view?usp=sharing)
50 |
51 | ## Environment
52 |
53 | * numpy
54 | * cv2
55 | * imageio
56 | * PyTorch
57 | * PyTorch3D (>= 0.6)
58 | * [manotorch](https://github.com/lixiny/manotorch.git)
59 |
60 |
61 |
62 | ## Data
63 |
64 | Please download the data from [HuggingFace](https://huggingface.co/datasets/Yuliang/DART) or [Baidu Pan (4w3r)](https://pan.baidu.com/share/init?surl=xOV3FkNFxNS-mYrHTXd8Iw) and put them in the `data/DARTset` folder.
65 |
66 | ```bash
67 | git clone https://huggingface.co/datasets/Yuliang/DART data/DARTset
68 | ```
69 |
70 | Then download [MANO](https://mano.is.tue.mpg.de) from the official website and put it in the `assets` folder.
71 |
72 | Your directory should look like this:
73 |
74 | ```
75 | .
76 | ├── DARTset.py
77 | ├── DARTset_utils.py
78 | ├── assets
79 | │ └── mano_v1_2
80 | ├── data
81 | │ └── DARTset
82 | │ ├── train
83 | │ │ ├── 0
84 | │ │ ├── 0_wbg
85 | │ │ ├── part_0.pkl
86 | │ │ |-- ...
87 | │ └── test
88 | ```
89 |
90 | ## Visualization
91 |
92 | ```python
93 | python DARTset.py
94 | ```
95 |
96 | You can modify this [line](https://github.com/DART2022/DARTset/blob/f619f609b1902b344fc5bbba57d080763a5496eb/DARTset.py#L175) in DARTset.py to change the `train/test` data split.
97 |
98 | ## Post Processing with Unity GUI
99 |
100 | Please check [postprocess folder](postprocess/README.md) to learn how to generate intermediate output using DART's Unity GUI.
101 |
102 | ## Citation
103 |
104 | If you find our work useful in your research, please cite:
105 |
106 | ```
107 | @inproceedings{gao2022dart,
108 | title={{DART: Articulated Hand Model with Diverse Accessories and Rich Textures}},
109 | author={Daiheng Gao and Yuliang Xiu and Kailin Li and Lixin Yang and Feng Wang and Peng Zhang and Bang Zhang and Cewu Lu and Ping Tan},
110 | booktitle={Thirty-sixth Conference on Neural Information Processing Systems Datasets and Benchmarks Track},
111 | year={2022},
112 | }
113 |
114 | ```
115 |
--------------------------------------------------------------------------------
/assets/teaser.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/assets/teaser.png
--------------------------------------------------------------------------------
/postprocess/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Alibaba Group, Daiheng Gao.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/postprocess/README.md:
--------------------------------------------------------------------------------
1 | # DART GUI post processing
2 |
3 | Please refer to the [step3 of DART GUI](https://dart2022.github.io/)
4 | for the exporting process.
5 |
6 |
7 | ## Requirements
8 |
9 | 1. Download `mano_v1_2.zip` from the [MANO website](https://mano.is.tue.mpg.de/),
10 | unzip the file and copy `MANO_RIGHT.pkl` to `extra_data/hand_wrist/` folder:
11 |
12 | 2. Install `pytorch3d` according to official [INSTALL.md](https://github.com/facebookresearch/pytorch3d/blob/main/INSTALL.md)
13 |
14 | ## Usage
15 |
1. Set the data directory exported by the Unity GUI (e.g. `xxx\Build_Hand\ExportData\2022-08-02_17-07-21`) as the input path in `convert.py`.
17 |
2. Run `python convert.py`; all the ground truths (MANO pose, 2D/3D joints) are exported to `output.pkl`.
--------------------------------------------------------------------------------
/postprocess/convert.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import pickle
3 |
4 | from reprojection import generate_2d
5 | from retargeting.basis import *
6 | from retargeting.mesh_utils import *
7 | from utils.mano_wrist import rot_pose_beta_to_mesh, hands_mean
8 | from pytorch3d.transforms import quaternion_to_axis_angle
9 | import xlrd
10 |
11 | from pytorch3d.io import load_obj, save_obj
# Load the template hand mesh whose face topology is reused for every
# exported MANO mesh below.
# NOTE(review): device="cuda" is hard-coded here even though the fallback
# below supports CPU — confirm this script is only run on GPU machines.
obj_filename = os.path.join('./data/hand_01.obj')
# obj_filename = os.path.join('extra_data/hand_mesh/hand.obj')
verts, faces, aux = load_obj(
    obj_filename,
    device="cuda",
    load_textures=True,
    create_texture_atlas=True,
    texture_atlas_size=8,
    texture_wrap=None,
)
# Face indices of the template, shared by all generated meshes.
FACES = faces[0]

# Pick the device used by every module-level tensor and all conversions.
if torch.cuda.is_available():
    device = torch.device("cuda:0")
    torch.cuda.set_device(device)
else:
    device = torch.device("cpu")

# Per-joint reference ("basis") quaternions of the 15 finger joints —
# presumably the rest-pose rotations of the Unity rig; used in
# get_unity_root_based_rotation to make joint rotations basis-relative.
UNITY_BASIS = torch.tensor([[0.98252,0.1861579, 8.797169E-08, -1.925538E-07],[0.9867496,0.1622507, 8.623259E-07, -5.569176E-06],[0.998917,0.04652741, -3.814051E-07, -8.645397E-07],[0.9845469,0.1751214, -6.773092E-08, -1.296573E-07],[0.9895474,0.1442084, 2.347626E-07, 4.719131E-08],[0.9987367,0.0502522, 3.299475E-07, 2.839107E-07],[0.9802657,0.1976843, -4.922589E-09, -2.051222E-07],[0.9878246,0.1555727, -1.964481E-07, 5.758296E-08],[0.9954529,0.09525843, 4.293424E-07, 1.511594E-06],[0.9996653,0.02587354, -1.435634E-07, 1.650499E-07],[0.9994696,0.03256664, 2.490357E-07, -3.985346E-08],[0.9999212,0.01256056, -1.017585E-07, -2.349624E-09],[-0.9291456,-0.3697141, 2.073295E-07, -2.429503E-07],[-0.9564937,-0.2917534, 3.610364E-06, -1.518497E-05],[-0.9723085,-0.233702, 2.076902E-08, -6.052665E-07]], dtype=torch.float, device=device)
# Matching reference quaternions on the MANO side (15 finger joints);
# applied in get_mano_parameter.
MANO_BASIS = torch.tensor([[ 0.9973, -0.0215, -0.0324, 0.0616],
                           [ 0.9993, 0.0230, 0.0032, -0.0292],
                           [ 0.9992, -0.0185, 0.0262, -0.0252],
                           [ 0.9953, -0.0267, 0.0893, 0.0271],
                           [ 0.9955, -0.0322, 0.0842, -0.0297],
                           [ 0.9950, -0.0078, 0.0995, 0.0057],
                           [ 0.9486, 0.0659, 0.3088, 0.0216],
                           [ 0.9500, 0.1272, 0.2845, 0.0189],
                           [ 0.9690, 0.0084, 0.2448, -0.0313],
                           [ 0.9954, -0.0097, 0.0891, 0.0351],
                           [ 0.9843, -0.0753, 0.1529, -0.0463],
                           [ 0.9850, -0.0863, 0.1479, -0.0226],
                           [ 0.5461, -0.7217, -0.2437, 0.3487],
                           [ 0.3954, -0.8682, -0.2121, 0.2121],
                           [ 0.5029, -0.8037, -0.2171, 0.2324]], dtype=torch.float, device=device)
46 |
47 |
def coordinate_change(data: torch.Tensor):
    """
    Convert quaternions between the left-handed and right-handed coordinate
    systems: swap the last two components and negate the first.
    Args:
        data: quaternion(s), shape (4,) or (N, 4); the input is not modified.

    Returns:
        The converted quaternion(s), same shape as the input.
    """
    if data.dim() == 2:
        swapped = data[:, [0, 1, 3, 2]]  # advanced indexing -> fresh copy
        swapped[:, 0] = -swapped[:, 0]
    else:
        assert data.dim() == 1
        swapped = data[[0, 1, 3, 2]]
        swapped[0] = -swapped[0]
    return swapped
66 |
67 |
def get_unity_root_based_rotation(data: torch.Tensor):
    """Select the 16 hand joints from the Unity export and express every
    non-root rotation relative to the root and to UNITY_BASIS.

    Returns:
        Tuple of (root rotation quaternion, root-relative joint quaternions).
    """
    joint_order = [0, 17, 18, 19, 12, 13, 14, 2, 3, 4, 7, 8, 9, 22, 23, 24]
    selected = data[joint_order]
    global_rotation = selected[0]
    # Remove the root rotation from every joint, then remove the rest-pose
    # basis from the 15 finger joints.
    rel = quaternion_multiply(quaternion_invert(global_rotation), selected)
    rel[1:] = quaternion_multiply(quaternion_invert(UNITY_BASIS), rel[1:])
    return global_rotation, rel
75 |
76 |
def get_root_aa(root_rot):
    """
    Get the root-based axis-angle of MANO from the data exported from Unity.
    Args:
        root_rot: root joint rotation quaternion (Unity frame).

    Returns:
        Axis-angle rotation of the MANO root.
    """
    # Fixed offset between MANO and Unity when the root has no rotation.
    mano_to_unity = coordinate_change(
        torch.tensor([0.9928, 0.0000, 0.1160, -0.0300], dtype=torch.float, device=device))

    root_quat = coordinate_change(root_rot)
    root_quat = quaternion_multiply(root_quat, mano_to_unity)

    # Extra fixed rotation to align the frames (presumably +/-90 degrees
    # about x, given the 0.7071 components).
    frame_fix = torch.tensor([0.7071, -0.7071, 0, 0], dtype=torch.float, device=device)
    root_quat = quaternion_multiply(frame_fix, root_quat)

    return quaternion_to_axis_angle(root_quat)
100 |
101 |
def get_mano_parameter(global_rotation: torch.Tensor, unity_rot_tensor: torch.Tensor):
    """
    Convert root-relative Unity joint rotations into MANO parameters.
    Args:
        global_rotation: root rotation quaternion (Unity frame).
        unity_rot_tensor: root-relative joint rotation quaternions.

    Returns:
        Tuple of (root axis-angle, per-joint pose axis-angle).
    """
    rot = coordinate_change(unity_rot_tensor)

    # Re-express every non-root joint in the MANO reference basis.
    rot[1:] = quaternion_multiply(rot[1:], MANO_BASIS)
    local_rot = convert_global_to_local(rot)
    pose_aa = quaternion_to_axis_angle(local_rot)
    return get_root_aa(global_rotation), pose_aa
121 |
122 |
def generate_single_mesh(root_rot, pose, name):
    """
    Build the MANO mesh for the given rotations and write it to disk.
    Args:
        root_rot: axis-angle rotation of the root joint.
        pose: axis-angle rotations of the 15 finger joints.
        name: target .obj path; pass "" to skip the named export.

    Returns:
        The 21 3D joint positions.
    """
    packed = rot_pose_beta_to_mesh(root_rot.view(1, 3), pose.contiguous().view(1, 45))
    joint_position = packed[0, :21]  # first 21 rows are the joints
    mesh_data = packed[0, 21:]  # remaining rows are the mesh vertices

    # The next two lines are only needed for rendering a preview image and
    # can be skipped otherwise.
    modify_vertices(mesh_data)  # generate obj with vt and vn
    render_mesh_image()  # render image

    if name != "":
        save_obj(name, mesh_data, FACES)
    save_obj('test/from_unity.obj', mesh_data, FACES)
    return joint_position
148 |
149 |
def generate_obj(dir_path):
    """Convert one Unity export directory into a MANO mesh plus ground truth.

    Reads the joint rotations from ``<dir_path>/ExcelData.xls``, converts
    them to MANO root/pose axis-angle, writes ``mano_mesh.obj``, and dumps
    a dict with root / pose / joint_3d / joint_2d into ``output.pkl``.

    Args:
        dir_path: directory exported by the DART Unity GUI (contains
            ExcelData.xls and the rendered images).
    """
    excel_path = os.path.join(dir_path, "ExcelData.xls")
    # Column holding the comma-separated quaternion string.
    # rotation_data_index = 5
    rotation_data_index = 4
    dir_name = os.path.dirname(excel_path)
    unity_rot = []
    with xlrd.open_workbook(excel_path) as wb:
        sheet = wb.sheet_by_index(0)
        # Rows 1..26 carry the 26 joint rotations; row 0 is the header.
        for idx, row in enumerate(sheet.get_rows()):
            if 1 <= idx <= 26:
                unity_rot.append(
                    torch.tensor([float(x) for x in row[rotation_data_index].value.split(",")],
                                 dtype=torch.float, device=device))
    unity_origin_tensor = torch.stack(unity_rot)
    root_rot, pose_rot = get_unity_root_based_rotation(unity_origin_tensor)
    root_aa, pose_aa = get_mano_parameter(root_rot, pose_rot)
    # Store the finger pose relative to the MANO mean hand pose.
    pose_aa -= hands_mean.view(pose_aa.shape)
    obj_name = os.path.join(dir_name, "mano_mesh.obj")
    joint_3d = generate_single_mesh(root_aa, pose_aa, obj_name)
    joint_2d = generate_2d(dir_path)
    output_data = {
        "root": root_aa,
        "pose": pose_aa,
        "joint_3d": joint_3d,
        "joint_2d": joint_2d
    }
    # Use a context manager so the file handle is closed (the previous
    # pickle.dump(..., open(...)) leaked it).
    with open(os.path.join(dir_name, "output.pkl"), 'wb') as f:
        pickle.dump(output_data, f)
187 |
188 |
def test_load_data():
    """Sanity check: reload output.pkl and rebuild the mesh from it."""
    # Context manager closes the handle (the previous pickle.load(open(...))
    # leaked it).
    with open("output.pkl", 'rb') as f:
        data = pickle.load(f)
    generate_single_mesh(data["root"], data["pose"], "")
192 |
193 |
if __name__ == '__main__':
    # Directory produced by the Unity GUI "ExportData" step (renamed from
    # excel_path: it is a directory, not the .xls file itself).
    export_dir = r"C:\Users\FengWang\Downloads\newhand\Hand\Build_Hand\ExportData\2022-08-02_17-22-21"
    generate_obj(export_dir)
197 |
--------------------------------------------------------------------------------
/postprocess/data/hand_01.mtl:
--------------------------------------------------------------------------------
1 | newmtl initialShadingGroup
2 | illum 4
3 | Kd 0.50 0.50 0.50
4 | Ka 0.00 0.00 0.00
5 | Tf 1.00 1.00 1.00
6 | Ni 1.00
7 |
--------------------------------------------------------------------------------
/postprocess/data/hand_02.mtl:
--------------------------------------------------------------------------------
1 | newmtl hand:initialShadingGroup
2 | illum 4
3 | Kd 0.00 0.00 0.00
4 | Ka 0.00 0.00 0.00
5 | Tf 1.00 1.00 1.00
6 | map_Kd hand_texture.png
7 | Ni 1.00
8 |
--------------------------------------------------------------------------------
/postprocess/data/hand_texture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/data/hand_texture.png
--------------------------------------------------------------------------------
/postprocess/extra_data/background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/extra_data/background.png
--------------------------------------------------------------------------------
/postprocess/extra_data/hand_mesh/hand.mtl:
--------------------------------------------------------------------------------
1 | newmtl initialShadingGroup
2 | illum 4
3 | Kd 0.50 0.50 0.50
4 | Ka 0.00 0.00 0.00
5 | Tf 1.00 1.00 1.00
6 | Ni 1.00
7 | map_Kd hand_texture.png
--------------------------------------------------------------------------------
/postprocess/extra_data/hand_mesh/hand_texture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/extra_data/hand_mesh/hand_texture.png
--------------------------------------------------------------------------------
/postprocess/extra_data/hand_mesh/hand_texture2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/extra_data/hand_mesh/hand_texture2.png
--------------------------------------------------------------------------------
/postprocess/extra_data/hand_wrist/hand_01.mtl:
--------------------------------------------------------------------------------
1 | newmtl initialShadingGroup
2 | illum 4
3 | Kd 0.50 0.50 0.50
4 | Ka 0.00 0.00 0.00
5 | Tf 1.00 1.00 1.00
6 | Ni 1.00
7 |
--------------------------------------------------------------------------------
/postprocess/extra_data/hand_wrist/hand_02.mtl:
--------------------------------------------------------------------------------
1 | newmtl hand:initialShadingGroup
2 | illum 4
3 | Kd 0.00 0.00 0.00
4 | Ka 0.00 0.00 0.00
5 | Tf 1.00 1.00 1.00
6 | map_Kd hand_texture.png
7 | Ni 1.00
8 |
--------------------------------------------------------------------------------
/postprocess/extra_data/hand_wrist/hand_texture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/extra_data/hand_wrist/hand_texture.png
--------------------------------------------------------------------------------
/postprocess/extra_data/mean_mano_params.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/extra_data/mean_mano_params.pkl
--------------------------------------------------------------------------------
/postprocess/extra_data/park.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/extra_data/park.png
--------------------------------------------------------------------------------
/postprocess/extra_data/test.obj:
--------------------------------------------------------------------------------
1 | # https://github.com/mikedh/trimesh
2 | v 0.03822542 -0.01197649 -0.00489810
3 | v 0.04869004 -0.01397623 -0.00600988
4 | v 0.04110593 -0.01886322 -0.00031975
5 | v 0.03221133 -0.01554197 0.00158990
6 | v 0.05944462 -0.02993708 0.00106818
7 | v 0.06691592 -0.03062680 0.00983216
8 | v 0.05135971 -0.03518341 0.01560791
9 | v 0.04651914 -0.03538266 0.00752729
10 | v 0.02655460 -0.01672363 0.00728861
11 | v 0.03425227 -0.02274315 0.00450261
12 | v 0.02553013 -0.01385319 0.01113716
13 | v 0.01879224 -0.00307877 0.01369831
14 | v 0.01952651 -0.00934658 0.00966356
15 | v 0.01189306 0.00144161 0.01300542
16 | v 0.02544848 0.00600663 0.01509902
17 | v 0.01236970 0.01055679 0.00992577
18 | v 0.03319137 0.02085717 -0.00856588
19 | v 0.03678656 0.02161052 -0.02168021
20 | v 0.01981364 0.02108669 -0.02375128
21 | v 0.01786937 0.01940400 -0.01020863
22 | v 0.03045319 0.01739692 -0.04139141
23 | v 0.02369187 0.01953558 -0.03189073
24 | v 0.04066097 0.02127986 -0.02982226
25 | v 0.04588738 0.01910268 -0.03838746
26 | v 0.04690463 -0.00906348 -0.01801667
27 | v 0.05767483 -0.01010478 -0.01870928
28 | v 0.05488656 -0.01009954 -0.01371895
29 | v 0.04469711 -0.00943703 -0.01296236
30 | v 0.02900827 -0.01404159 0.01948252
31 | v 0.03593181 -0.00590063 0.02085643
32 | v 0.02930164 -0.00736817 0.01624655
33 | v 0.02651383 -0.01764249 0.01574079
34 | v 0.08428244 -0.01778150 -0.02737291
35 | v 0.09616715 -0.01177275 -0.02501057
36 | v 0.09546647 -0.01122308 -0.01919322
37 | v 0.08466067 -0.01633331 -0.02267127
38 | v 0.08688338 -0.01752945 0.00093456
39 | v 0.09463917 -0.00994975 -0.00110067
40 | v 0.09896105 -0.00122388 0.00843620
41 | v 0.09009293 -0.00580257 0.01199059
42 | v 0.07709933 -0.02489574 0.00544046
43 | v 0.08106742 -0.01615107 0.01553833
44 | v 0.07001479 -0.02384321 0.01862844
45 | v 0.05406026 -0.02877641 0.02225120
46 | v 0.09765600 -0.00986084 -0.03761122
47 | v 0.08930776 -0.01453576 -0.04115371
48 | v -0.02913415 -0.01917859 0.00113169
49 | v -0.02813096 -0.02033693 -0.00457833
50 | v -0.02484391 -0.01824435 -0.00405739
51 | v -0.02646882 -0.01739977 0.00153247
52 | v 0.09094898 0.00441983 -0.05024795
53 | v 0.08948693 0.01437025 -0.04929923
54 | v 0.09669225 0.01329139 -0.04947487
55 | v 0.09827435 0.00406015 -0.04845207
56 | v 0.08123748 0.00481959 -0.05250603
57 | v 0.07942409 0.01525721 -0.04855721
58 | v -0.03833911 -0.01042289 0.00246889
59 | v -0.03556779 -0.01409126 0.00471130
60 | v -0.03304141 -0.01028565 0.00543678
61 | v -0.03527021 -0.00623725 0.00276734
62 | v 0.02334608 -0.01225270 0.00371477
63 | v 0.02853483 -0.01188606 -0.00412697
64 | v 0.00585406 -0.01053964 0.00118717
65 | v 0.01497017 -0.01010938 -0.00171283
66 | v 0.01488231 -0.00927368 0.00535333
67 | v 0.00724785 -0.00838536 0.00716025
68 | v 0.02476636 -0.00822473 -0.02324387
69 | v 0.03640084 -0.00809871 -0.02198940
70 | v 0.03332726 -0.00820372 -0.01613119
71 | v 0.02038114 -0.00916142 -0.01583567
72 | v 0.04083113 -0.01081292 -0.03516178
73 | v 0.02902230 -0.01022262 -0.03534362
74 | v 0.03293675 -0.01182537 -0.04277982
75 | v 0.04065273 -0.01295527 -0.04131600
76 | v 0.00862871 -0.00828540 -0.03263872
77 | v 0.00169045 -0.00727801 -0.03295682
78 | v 0.00242604 -0.00827014 -0.03582955
79 | v 0.01021241 -0.00844135 -0.03588209
80 | v 0.10378349 0.01332210 -0.04919291
81 | v 0.10509623 0.00415151 -0.04588173
82 | v 0.04048559 0.00988246 -0.05913058
83 | v 0.04718482 0.00216031 -0.06349501
84 | v 0.03514840 0.00024279 -0.06772922
85 | v 0.03086583 0.00953792 -0.06210293
86 | v 0.08630127 0.02197361 -0.04296119
87 | v 0.09382086 0.02196176 -0.04521529
88 | v -0.03974142 -0.00806460 -0.00304406
89 | v -0.03657144 -0.00410366 -0.00292979
90 | v 0.07082067 -0.00996779 0.02306927
91 | v 0.05407175 -0.02010998 0.02631961
92 | v 0.08268844 -0.00048603 0.01818371
93 | v 0.09029803 0.00650047 0.01333517
94 | v 0.09800181 0.01058410 0.01114661
95 | v 0.01193632 -0.01027364 -0.00779307
96 | v 0.03497358 -0.00980234 -0.01064428
97 | v 0.04868668 -0.00944152 -0.02274376
98 | v 0.05375966 -0.01713396 -0.05210718
99 | v 0.06413913 -0.01761283 -0.05041791
100 | v 0.05846959 -0.01784118 -0.04138499
101 | v 0.05051744 -0.01672321 -0.04477901
102 | v 0.04817800 -0.00774483 -0.06432234
103 | v 0.04568383 -0.01384753 -0.05982341
104 | v 0.03715873 -0.01351723 -0.06148675
105 | v 0.03803228 -0.00819485 -0.06673238
106 | v 0.03235221 -0.01409439 0.02606438
107 | v 0.04198126 -0.00637708 0.02519108
108 | v 0.09170797 -0.00576657 -0.04743581
109 | v 0.09840233 -0.00484440 -0.04356664
110 | v 0.10610171 -0.00205470 -0.04129932
111 | v 0.07104824 -0.01461538 -0.02178523
112 | v 0.08495920 -0.01550932 -0.01936873
113 | v 0.08364907 -0.01600362 -0.01522633
114 | v 0.06998997 -0.01336564 -0.01899845
115 | v 0.05923054 -0.01886828 -0.00780173
116 | v 0.07164311 -0.02323736 -0.00546549
117 | v 0.04928941 -0.02490374 -0.00279111
118 | v 0.09624248 -0.01036400 -0.01112770
119 | v 0.10531051 -0.00900770 -0.01717054
120 | v 0.10479597 -0.00738306 -0.00843521
121 | v 0.10624949 -0.00825840 -0.02690301
122 | v 0.10657155 -0.00613572 -0.03556277
123 | v 0.10168465 0.02485695 -0.04679208
124 | v 0.10326538 -0.00453978 -0.00071069
125 | v 0.03824379 -0.03588668 0.01293921
126 | v 0.03153956 -0.03561535 0.01778230
127 | v 0.02803984 -0.03148037 0.01267094
128 | v 0.03350358 -0.03125540 0.00851285
129 | v 0.00289701 0.00363762 0.01169175
130 | v 0.00479247 -0.00404945 0.01097458
131 | v 0.01409571 -0.00541364 0.01159533
132 | v 0.07465937 -0.01887139 -0.03376309
133 | v 0.07940854 -0.01630621 -0.04517660
134 | v 0.00259719 -0.01012586 -0.00477202
135 | v -0.03099987 -0.00280149 0.00316795
136 | v -0.02883238 -0.00739513 0.00621078
137 | v -0.01193587 -0.00082046 0.00914848
138 | v -0.01467277 0.00459816 0.00590507
139 | v -0.00619315 -0.00545253 -0.00991573
140 | v -0.00259794 -0.00944366 -0.00577482
141 | v -0.00539638 -0.00874385 -0.00670513
142 | v -0.00907876 -0.00629814 -0.01150547
143 | v 0.00822547 -0.00965152 -0.04307654
144 | v 0.01505411 -0.00979260 -0.04299431
145 | v 0.00290580 0.01457587 -0.01264464
146 | v 0.00386554 0.01740762 0.00082191
147 | v 0.01581337 0.01757885 0.00164060
148 | v 0.02794535 -0.00893818 -0.02997124
149 | v 0.01767039 -0.00913281 -0.03189273
150 | v 0.01906764 -0.00941212 -0.03578883
151 | v 0.00858693 -0.00959918 -0.01128897
152 | v 0.00086709 -0.00867047 -0.01032831
153 | v 0.00409779 -0.00804936 -0.01455059
154 | v 0.01128546 -0.00898274 -0.01605831
155 | v 0.05974743 0.00419332 -0.06023533
156 | v 0.06079587 -0.00738952 -0.06161384
157 | v -0.03203037 -0.01711150 0.00433625
158 | v -0.02951026 -0.01442553 0.00500140
159 | v 0.03928953 -0.00883455 -0.02858153
160 | v 0.03034446 0.01714818 0.00214289
161 | v 0.05055984 -0.01028670 -0.02783545
162 | v 0.01416175 0.00730998 -0.05259267
163 | v 0.01382612 -0.00064679 -0.05515841
164 | v 0.00824773 -0.00260712 -0.05417485
165 | v 0.00611512 0.00311994 -0.05315319
166 | v -0.02319382 -0.01051617 -0.01364664
167 | v -0.02182350 -0.01505578 -0.01040837
168 | v -0.02526457 -0.01657886 -0.01009485
169 | v -0.02766188 -0.01250151 -0.01362281
170 | v -0.00102152 -0.01044653 -0.00018522
171 | v -0.00020325 -0.00898149 0.00613046
172 | v -0.00620743 -0.01027686 0.00412235
173 | v -0.00582921 -0.01032448 -0.00182631
174 | v -0.00284646 -0.00429206 0.00973493
175 | v -0.00857167 -0.00617292 0.00841401
176 | v -0.02636996 -0.00481068 -0.01353891
177 | v -0.03019153 -0.00031821 -0.00879346
178 | v -0.01494992 0.00639288 -0.00771945
179 | v -0.01377589 -0.00023264 -0.01190111
180 | v 0.06052357 0.02208504 -0.03574854
181 | v 0.06433281 0.01826175 -0.04549602
182 | v 0.04983262 0.01533591 -0.04846717
183 | v 0.03557957 0.01343930 -0.05214367
184 | v 0.02213107 0.01072214 -0.05438347
185 | v 0.01751879 0.01685147 -0.04468557
186 | v 0.05399324 0.01215869 -0.05476698
187 | v -0.00459427 0.00232847 -0.01357071
188 | v -0.00446549 0.01015477 -0.00777779
189 | v -0.00278996 0.00887882 -0.01354565
190 | v 0.05220143 -0.01303466 -0.03457558
191 | v -0.02577926 -0.01203685 0.00593952
192 | v 0.08662373 0.02571586 -0.00023481
193 | v 0.08673926 0.02981365 -0.01309633
194 | v 0.07834501 0.02777987 -0.01141007
195 | v 0.07819752 0.02230616 0.00302244
196 | v -0.02151766 -0.01734296 -0.00396359
197 | v -0.02271252 -0.01642870 0.00234410
198 | v 0.01582934 -0.01109085 -0.04933669
199 | v 0.00938480 -0.01031574 -0.04870822
200 | v 0.00830687 -0.00898595 -0.05212746
201 | v 0.01406452 -0.01188463 -0.05462619
202 | v 0.06763618 0.01570564 -0.04975905
203 | v 0.02793210 -0.00370802 -0.06977420
204 | v 0.02410804 0.00549202 -0.06495120
205 | v 0.07683548 0.02093578 -0.04252698
206 | v 0.07362148 0.02491481 -0.03418369
207 | v 0.08317211 0.02685901 -0.03364930
208 | v 0.00723538 0.01537117 -0.03329546
209 | v 0.05581039 0.02367586 -0.02860672
210 | v 0.07012907 0.02592639 -0.02741379
211 | v 0.08138988 0.02828276 -0.02645685
212 | v 0.08945125 0.03049468 -0.02655956
213 | v 0.09061284 0.02854545 -0.03440943
214 | v -0.03165478 -0.00059437 -0.00287182
215 | v -0.03568478 -0.00401197 -0.00977814
216 | v 0.09892526 0.03198403 -0.03585148
217 | v 0.09738514 0.03338803 -0.02679011
218 | v 0.06695423 0.02626722 -0.01873224
219 | v 0.06357070 0.02516340 -0.00779568
220 | v 0.05324527 0.02385226 -0.02023257
221 | v 0.04867603 0.02283377 -0.00962995
222 | v 0.00692345 0.02092373 -0.02398185
223 | v -0.04356629 -0.01881880 -0.01394266
224 | v -0.03581684 -0.01143893 -0.01385859
225 | v -0.03161012 -0.01542779 -0.01382760
226 | v -0.04064135 -0.02163200 -0.01523918
227 | v -0.03140707 -0.00779890 -0.01383276
228 | v -0.03863011 -0.00823935 -0.00984828
229 | v 0.04597988 0.01755624 0.00413389
230 | v -0.00254060 -0.00653567 -0.01448644
231 | v 0.06272796 0.01671331 0.00839297
232 | v 0.03886940 0.00862677 0.01710817
233 | v 0.05148824 0.00549282 0.02023629
234 | v 0.07963062 0.01370900 0.01383093
235 | v 0.06772830 0.00411479 0.02194910
236 | v 0.09539649 0.02054821 0.00715542
237 | v 0.08710455 0.01806455 0.00917281
238 | v 0.05592906 -0.00684166 0.02627384
239 | v -0.03568434 -0.02673787 -0.00289230
240 | v -0.03461003 -0.02669765 -0.00845713
241 | v 0.09452774 0.02827053 -0.00131389
242 | v 0.04005263 -0.02899914 0.00326040
243 | v 0.06243207 -0.01493824 -0.03126873
244 | v 0.06070933 -0.01261179 -0.02636209
245 | v 0.07025839 -0.01735000 -0.04862677
246 | v 0.06558260 -0.01822075 -0.03789164
247 | v -0.03847637 -0.02469123 0.00115697
248 | v -0.00263645 -0.00419150 -0.03457982
249 | v -0.00033918 -0.00408180 -0.03488654
250 | v 0.04241435 -0.03508342 0.01972304
251 | v 0.03613420 -0.03498603 0.02373176
252 | v 0.03606371 -0.02449346 0.03129513
253 | v 0.03699037 -0.03118275 0.02885989
254 | v 0.04498934 -0.02959672 0.02602721
255 | v 0.04458743 -0.02279731 0.02906321
256 | v 0.07148039 -0.01579197 -0.02426495
257 | v 0.07199879 -0.01698764 -0.02805257
258 | v 0.07126181 0.00436030 -0.05646994
259 | v 0.07144821 -0.00717838 -0.05758606
260 | v -0.00482161 0.00202243 0.00981485
261 | v 0.00144081 0.01099186 0.00806103
262 | v -0.00656105 0.00784025 0.00629546
263 | v -0.01414156 0.00819683 -0.00018791
264 | v -0.00609740 -0.00303436 -0.01427126
265 | v -0.00724993 0.00082455 -0.01091683
266 | v 0.07815102 -0.01923589 -0.01070973
267 | v 0.06615566 -0.01435031 -0.01383636
268 | v 0.03013127 -0.02452430 0.00871026
269 | v 0.02605781 -0.02474315 0.01173702
270 | v 0.01567860 -0.00861677 -0.02294425
271 | v -0.00306008 0.01011016 -0.03198256
272 | v -0.00327968 0.01444923 -0.02417613
273 | v 0.00664006 -0.00776671 -0.02303520
274 | v -0.04496979 -0.01949129 0.00027859
275 | v -0.04153933 -0.02260269 0.00198493
276 | v -0.00596235 0.01192760 0.00046703
277 | v 0.02263304 -0.01157218 -0.04312135
278 | v 0.00271970 0.00912162 -0.03654693
279 | v -0.00405137 0.00331445 -0.03527682
280 | v 0.03257217 -0.00996464 -0.06739005
281 | v 0.09541129 0.03185308 -0.01350814
282 | v -0.02886736 -0.01937541 -0.01087445
283 | v -0.03553071 -0.02504393 -0.01302315
284 | v -0.04539860 -0.01648227 -0.01079521
285 | v -0.04612582 -0.01600651 -0.00477441
286 | v 0.08087283 -0.00734348 -0.05208185
287 | v 0.05913857 -0.01118151 -0.02314178
288 | v 0.04537824 -0.01354330 0.02763520
289 | v 0.03510516 -0.01773392 0.02976619
290 | v 0.00133274 -0.00701715 -0.02339934
291 | v 0.01833010 0.00427862 -0.05855584
292 | v 0.00884983 0.01230990 -0.04634412
293 | v 0.00043219 0.00753998 -0.04765537
294 | v -0.00327202 0.00541695 -0.04128340
295 | v -0.00420947 -0.00037102 -0.03695922
296 | v -0.04843076 -0.02078304 -0.01170237
297 | v -0.04923391 -0.02025913 -0.00583817
298 | v -0.04823639 -0.02362393 -0.00069511
299 | v -0.04495933 -0.02636392 0.00098830
300 | v -0.04085040 -0.02783725 -0.00013955
301 | v -0.04681983 -0.02326007 -0.01498355
302 | v -0.04414915 -0.02582849 -0.01592191
303 | v -0.03840050 -0.02764862 -0.01381430
304 | v -0.04898028 -0.02770809 -0.01579539
305 | v -0.05323670 -0.02785943 -0.01264866
306 | v -0.05413118 -0.03661250 -0.00221870
307 | v -0.05314381 -0.03083377 -0.00221306
308 | v -0.05489426 -0.03207263 -0.00392198
309 | v -0.05649933 -0.03674196 -0.00455758
310 | v -0.05893908 -0.04096179 -0.00634803
311 | v -0.05652895 -0.04151260 -0.00370545
312 | v -0.05445034 -0.02890357 -0.00724199
313 | v -0.05688362 -0.03494646 -0.00877035
314 | v -0.05604194 -0.03447448 -0.01292108
315 | v -0.05881809 -0.03915699 -0.01387283
316 | v -0.05984906 -0.03964711 -0.01020408
317 | v -0.05471911 -0.03026316 -0.01154715
318 | v -0.05362759 -0.02677962 -0.00692542
319 | v -0.05850654 -0.04452441 -0.00670826
320 | v -0.06032767 -0.04307563 -0.00784027
321 | v -0.06076255 -0.04246903 -0.01112477
322 | v -0.05994077 -0.04545211 -0.01155013
323 | v -0.04755997 -0.03060246 0.00030852
324 | v -0.05180559 -0.03773821 -0.00149499
325 | v -0.05360090 -0.04276367 -0.00443581
326 | v -0.05588423 -0.04560299 -0.00739116
327 | v -0.04889647 -0.03982662 -0.00368273
328 | v -0.05126240 -0.04334889 -0.00621525
329 | v -0.05298344 -0.04569853 -0.00913054
330 | v -0.04671191 -0.04097518 -0.00723141
331 | v -0.04958703 -0.04372554 -0.00894056
332 | v -0.04014207 -0.03143555 -0.00405839
333 | v -0.04256859 -0.03043808 -0.00058127
334 | v -0.05107675 -0.04562484 -0.01015487
335 | v -0.05797858 -0.04638768 -0.01238233
336 | v -0.05364869 -0.03333066 -0.01552095
337 | v -0.05641311 -0.03977135 -0.01603035
338 | v -0.05166372 -0.03399313 -0.01673821
339 | v -0.05456815 -0.04025846 -0.01755214
340 | v -0.05902594 -0.04339610 -0.01590934
341 | v -0.05989740 -0.04187745 -0.01393104
342 | v -0.03748550 -0.02844902 -0.00397287
343 | v -0.03664593 -0.02840287 -0.00981550
344 | v -0.03884099 -0.03107150 -0.01013437
345 | v -0.04557223 -0.04059788 -0.01189068
346 | v -0.04051114 -0.03061395 -0.01423370
347 | v -0.04615812 -0.03010686 -0.01660396
348 | v -0.04967996 -0.03615902 -0.01729587
349 | v -0.04717950 -0.03829346 -0.01620038
350 | v -0.05204334 -0.04102779 -0.01734364
351 | v -0.05014272 -0.04196337 -0.01584610
352 | v -0.04893379 -0.04362290 -0.01256457
353 | v -0.05385267 -0.04451117 -0.01608512
354 | v -0.05683838 -0.04405935 -0.01647934
355 | v -0.05452045 -0.04627945 -0.01279014
356 | v -0.05192346 -0.04424632 -0.01547230
357 | v -0.05198241 -0.04524093 -0.01262175
358 | v -0.02962376 -0.02202431 -0.02574714
359 | v -0.02712559 -0.02274412 -0.03158327
360 | v -0.02486464 -0.01919128 -0.03042426
361 | v -0.02673652 -0.01886455 -0.02536949
362 | v -0.04039721 -0.01291510 -0.02640000
363 | v -0.03660883 -0.01717485 -0.02341400
364 | v -0.03423193 -0.01273576 -0.02191889
365 | v -0.03781787 -0.00750038 -0.02565368
366 | v -0.04030103 -0.00969664 -0.03317352
367 | v -0.03745249 -0.00447881 -0.03249227
368 | v -0.03376081 -0.00344244 -0.02422476
369 | v -0.03022203 -0.00876332 -0.02076687
370 | v -0.01157041 0.00037579 -0.01663999
371 | v -0.01427898 0.00623131 -0.01943871
372 | v -0.00454355 -0.00772350 -0.03110808
373 | v -0.00643352 -0.00536016 -0.03365828
374 | v -0.03250846 -0.02024741 -0.02381570
375 | v -0.03003692 -0.01630154 -0.02269056
376 | v -0.02243537 -0.01363867 -0.03863374
377 | v -0.02115048 -0.01685695 -0.03432291
378 | v -0.02407229 -0.01879036 -0.03518463
379 | v -0.02575653 -0.01570057 -0.03950410
380 | v -0.00666459 -0.00630382 -0.01894231
381 | v -0.00485922 -0.00709313 -0.02500859
382 | v -0.00907386 -0.00384517 -0.01706255
383 | v -0.02609026 -0.00837643 -0.04066107
384 | v -0.03037122 -0.00317574 -0.03890260
385 | v -0.01343885 0.00696104 -0.03395700
386 | v -0.01105504 0.00150794 -0.03657377
387 | v -0.02684810 -0.01320879 -0.02099942
388 | v -0.02212905 -0.01791771 -0.02881309
389 | v -0.02362321 -0.01685185 -0.02385014
390 | v -0.03286362 -0.00100629 -0.03131649
391 | v -0.03443357 -0.00738471 -0.03995158
392 | v -0.03933769 -0.02598368 -0.04380296
393 | v -0.03276110 -0.01652993 -0.04228148
394 | v -0.02894583 -0.01974448 -0.04057415
395 | v -0.03572414 -0.02951496 -0.04345416
396 | v -0.02955349 -0.01210945 -0.04155356
397 | v -0.03706615 -0.01262480 -0.04020472
398 | v -0.03563103 -0.03470094 -0.03072472
399 | v -0.03315484 -0.03492258 -0.03646011
400 | v -0.03899786 -0.03315814 -0.02807624
401 | v -0.01458215 0.00920836 -0.02640433
402 | v -0.04540824 -0.02500574 -0.02972314
403 | v -0.04188821 -0.03005811 -0.02731317
404 | v -0.02637911 -0.02208115 -0.03674012
405 | v -0.03345088 -0.03315423 -0.04104241
406 | v -0.04264634 -0.02343285 -0.04188843
407 | v -0.04540067 -0.02128418 -0.03559852
408 | v -0.04525599 -0.02848011 -0.04224690
409 | v -0.04790791 -0.02735358 -0.03668151
410 | v -0.04777795 -0.03063264 -0.03075767
411 | v -0.04394444 -0.03440234 -0.02863196
412 | v -0.04054055 -0.03642739 -0.02937409
413 | v -0.04232996 -0.03078983 -0.04440409
414 | v -0.03868558 -0.03393412 -0.04452059
415 | v -0.03525210 -0.03582298 -0.04131100
416 | v -0.04404840 -0.03779722 -0.04569765
417 | v -0.04806603 -0.03698836 -0.04385223
418 | v -0.05236495 -0.05130147 -0.03379146
419 | v -0.05118189 -0.04555093 -0.03270145
420 | v -0.05251789 -0.04512632 -0.03493665
421 | v -0.05375081 -0.05035333 -0.03609177
422 | v -0.05137362 -0.03929436 -0.03289120
423 | v -0.05206306 -0.04058790 -0.03501965
424 | v -0.05127383 -0.03850120 -0.03902981
425 | v -0.05200380 -0.04445143 -0.03968705
426 | v -0.04969282 -0.04365354 -0.04387970
427 | v -0.05166502 -0.04886642 -0.04519498
428 | v -0.05426653 -0.04976287 -0.04115391
429 | v -0.04997903 -0.03952825 -0.04251680
430 | v -0.05441412 -0.05347060 -0.03779978
431 | v -0.05313378 -0.05568545 -0.03725802
432 | v -0.05107170 -0.03555545 -0.03835533
433 | v -0.04638299 -0.03807866 -0.02942629
434 | v -0.04866653 -0.04679908 -0.03139127
435 | v -0.04959075 -0.05303878 -0.03348261
436 | v -0.05052342 -0.05706499 -0.03812987
437 | v -0.04502128 -0.04865135 -0.03235361
438 | v -0.04680575 -0.05408520 -0.03476260
439 | v -0.04786903 -0.05689003 -0.03887729
440 | v -0.04138079 -0.04984596 -0.03511539
441 | v -0.04378217 -0.05471527 -0.03765253
442 | v -0.03780355 -0.03959466 -0.03244723
443 | v -0.04161046 -0.03914196 -0.02953921
444 | v -0.04556727 -0.05616172 -0.03915204
445 | v -0.05394686 -0.05581609 -0.04208659
446 | v -0.05095118 -0.05744809 -0.04231019
447 | v -0.05452351 -0.05404558 -0.04178046
448 | v -0.04820218 -0.04367536 -0.04538756
449 | v -0.04985476 -0.04977589 -0.04626758
450 | v -0.04578085 -0.04452254 -0.04654917
451 | v -0.04800152 -0.05117082 -0.04705262
452 | v -0.05179511 -0.05403563 -0.04597791
453 | v -0.05307032 -0.05323573 -0.04505288
454 | v -0.03721688 -0.03694672 -0.03217375
455 | v -0.03487701 -0.03675139 -0.03724176
456 | v -0.03537691 -0.03912591 -0.03748589
457 | v -0.03950933 -0.04922554 -0.03994365
458 | v -0.03613024 -0.03822168 -0.04207787
459 | v -0.04044056 -0.03857800 -0.04539041
460 | v -0.04305660 -0.04535749 -0.04640772
461 | v -0.04048305 -0.04756027 -0.04428244
462 | v -0.04588223 -0.05174376 -0.04744264
463 | v -0.04320811 -0.05168727 -0.04547176
464 | v -0.04220842 -0.05381293 -0.04127500
465 | v -0.04802797 -0.05573393 -0.04637747
466 | v -0.05035673 -0.05506290 -0.04636211
467 | v -0.04866461 -0.05735765 -0.04227208
468 | v -0.04529944 -0.05497922 -0.04525968
469 | v -0.04556386 -0.05637567 -0.04188267
470 | v -0.01702141 -0.02402138 -0.04269703
471 | v -0.01294058 -0.02530538 -0.04691151
472 | v -0.01168120 -0.02296567 -0.04649514
473 | v -0.01530544 -0.02156685 -0.04217563
474 | v -0.02638380 -0.01707041 -0.04805255
475 | v -0.02498656 -0.01993167 -0.04416004
476 | v -0.02329397 -0.01602333 -0.04292445
477 | v -0.02439972 -0.01184557 -0.04643920
478 | v -0.02567389 -0.01755910 -0.05378056
479 | v -0.02315573 -0.01038805 -0.05280775
480 | v -0.02108339 -0.00701626 -0.04498265
481 | v -0.02035284 -0.01223067 -0.04163733
482 | v -0.00759684 -0.00311573 -0.03817810
483 | v -0.00934544 0.00184140 -0.04222923
484 | v -0.02125978 -0.02209692 -0.04251608
485 | v -0.01950124 -0.01933075 -0.04155768
486 | v -0.00842429 -0.01923701 -0.05504203
487 | v -0.00806582 -0.02112386 -0.05095000
488 | v -0.01028094 -0.02305816 -0.05174502
489 | v -0.01083424 -0.02142242 -0.05548187
490 | v 0.00063960 -0.00988953 -0.03830206
491 | v 0.00390706 -0.01005512 -0.04348851
492 | v -0.00391721 -0.00774366 -0.03717111
493 | v -0.01167950 -0.01469448 -0.05822201
494 | v -0.01597844 -0.00849833 -0.05740478
495 | v -0.00324199 -0.00057261 -0.05482713
496 | v 0.00076543 -0.00523996 -0.05489206
497 | v -0.01715175 -0.01640593 -0.04051338
498 | v -0.00915352 -0.02131307 -0.04547222
499 | v -0.01284828 -0.01982889 -0.04131125
500 | v -0.01913088 -0.00574901 -0.05098040
501 | v -0.01923269 -0.01375585 -0.05829541
502 | v -0.02336018 -0.03495968 -0.06082060
503 | v -0.01612049 -0.02097478 -0.05955724
504 | v -0.01211095 -0.02379345 -0.05605472
505 | v -0.01974150 -0.03713638 -0.05927831
506 | v -0.01424084 -0.01823090 -0.05909455
507 | v -0.02129923 -0.01815278 -0.05879292
508 | v -0.02182904 -0.03788116 -0.04578561
509 | v -0.01827010 -0.03844403 -0.05003782
510 | v -0.02531288 -0.03506149 -0.04504953
511 | v -0.00773822 0.00350344 -0.04903048
512 | v 0.00433233 -0.00933029 -0.05115675
513 | v -0.03088825 -0.03196967 -0.04930431
514 | v -0.02828265 -0.03352311 -0.04606174
515 | v -0.01135682 -0.02502682 -0.05200127
516 | v -0.01750813 -0.03836855 -0.05556555
517 | v -0.02714557 -0.03210323 -0.05971893
518 | v -0.03056177 -0.03011097 -0.05495781
519 | v -0.02848454 -0.03760174 -0.05983566
520 | v -0.03192382 -0.03553394 -0.05514762
521 | v -0.03275908 -0.03668297 -0.04957813
522 | v -0.02991184 -0.03774225 -0.04626124
523 | v -0.02673754 -0.03899522 -0.04515382
524 | v -0.02448686 -0.04038693 -0.06043072
525 | v -0.02126160 -0.04195277 -0.05863706
526 | v -0.01873480 -0.04034644 -0.05473172
527 | v -0.02459579 -0.04505471 -0.05992746
528 | v -0.03006644 -0.04484599 -0.05933189
529 | v -0.03484394 -0.05583902 -0.04770136
530 | v -0.03401380 -0.05016492 -0.04816050
531 | v -0.03424881 -0.05029133 -0.04939283
532 | v -0.03551703 -0.05598670 -0.04927361
533 | v -0.03429167 -0.04389082 -0.05018004
534 | v -0.03443071 -0.04558714 -0.05115910
535 | v -0.03357767 -0.04508578 -0.05486646
536 | v -0.03414840 -0.05144013 -0.05449653
537 | v -0.02953302 -0.05175864 -0.05826433
538 | v -0.03081497 -0.05676898 -0.05720633
539 | v -0.03531330 -0.05682712 -0.05399223
540 | v -0.03085873 -0.04644574 -0.05818101
541 | v -0.03563948 -0.05914319 -0.04998943
542 | v -0.03612485 -0.05989204 -0.04816994
543 | v -0.03352414 -0.04268197 -0.05512915
544 | v -0.03193066 -0.04272288 -0.04609700
545 | v -0.03253495 -0.05017368 -0.04560177
546 | v -0.03322196 -0.05600512 -0.04481773
547 | v -0.03292352 -0.06017824 -0.04632204
548 | v -0.02891500 -0.05067840 -0.04451125
549 | v -0.03002651 -0.05599530 -0.04455760
550 | v -0.02989038 -0.05978222 -0.04592660
551 | v -0.02568306 -0.05127221 -0.04519268
552 | v -0.02693593 -0.05600937 -0.04561403
553 | v -0.02375734 -0.04187246 -0.04582137
554 | v -0.02797929 -0.04208613 -0.04487044
555 | v -0.02780115 -0.05895454 -0.04650674
556 | v -0.03470643 -0.06181690 -0.05234952
557 | v -0.03088657 -0.06288994 -0.05081363
558 | v -0.03476139 -0.06010363 -0.05280946
559 | v -0.02786083 -0.05142019 -0.05905887
560 | v -0.02893318 -0.05799245 -0.05803854
561 | v -0.02499902 -0.05163180 -0.05866171
562 | v -0.02619033 -0.05815430 -0.05755434
563 | v -0.03128615 -0.06119778 -0.05621764
564 | v -0.03205325 -0.06027273 -0.05594835
565 | v -0.02278277 -0.03963918 -0.04638904
566 | v -0.01941186 -0.04012598 -0.05009510
567 | v -0.02029694 -0.04224070 -0.04912924
568 | v -0.02234500 -0.05235178 -0.04828184
569 | v -0.01914461 -0.04274538 -0.05369071
570 | v -0.02169417 -0.04619599 -0.05777094
571 | v -0.02269365 -0.05211464 -0.05635773
572 | v -0.02151320 -0.05277179 -0.05280975
573 | v -0.02411883 -0.05789614 -0.05561243
574 | v -0.02331514 -0.05676245 -0.05307416
575 | v -0.02389958 -0.05687676 -0.04821858
576 | v -0.02645798 -0.06076223 -0.05356481
577 | v -0.02864028 -0.06165391 -0.05541530
578 | v -0.02816349 -0.06184630 -0.04948997
579 | v -0.02495181 -0.05959117 -0.05189328
580 | v -0.02624936 -0.06027399 -0.04877953
581 | v 0.00514470 -0.01022217 -0.04780182
582 | v 0.00090244 -0.02804905 -0.06322453
583 | v 0.00431517 -0.02912846 -0.06541327
584 | v 0.00514307 -0.02717229 -0.06601465
585 | v 0.00254766 -0.02553482 -0.06295215
586 | v -0.00591258 -0.02129029 -0.06945923
587 | v -0.00512428 -0.02385564 -0.06558283
588 | v -0.00383007 -0.01928932 -0.06456964
589 | v -0.00460754 -0.01664525 -0.06824826
590 | v -0.00392185 -0.02083708 -0.07413832
591 | v -0.00236825 -0.01666675 -0.07325613
592 | v -0.00202553 -0.01342113 -0.06715404
593 | v -0.00118194 -0.01607052 -0.06323234
594 | v 0.01075685 -0.00525354 -0.05798306
595 | v 0.01223209 -0.00149252 -0.06193370
596 | v 0.02707534 -0.01271538 -0.06786635
597 | v 0.02765649 -0.01510686 -0.06260063
598 | v 0.02005652 -0.01846358 -0.06575742
599 | v 0.02085147 -0.01583815 -0.07002448
600 | v -0.00247244 -0.02616442 -0.06357834
601 | v -0.00083905 -0.02260249 -0.06221624
602 | v 0.00950191 -0.02403631 -0.07455653
603 | v 0.00918629 -0.02591479 -0.06999396
604 | v 0.00759736 -0.02754132 -0.06939162
605 | v 0.00822819 -0.02683568 -0.07398033
606 | v 0.02413268 -0.01547013 -0.05816202
607 | v 0.02024562 -0.01361485 -0.05557833
608 | v 0.01658232 -0.01604434 -0.05681784
609 | v 0.01733220 -0.01880045 -0.06144710
610 | v 0.01219206 -0.01219789 -0.05563754
611 | v 0.00773310 -0.02085662 -0.07698948
612 | v 0.00380832 -0.01631212 -0.07698545
613 | v 0.01916786 -0.00642411 -0.07326982
614 | v 0.02110894 -0.01096694 -0.07240948
615 | v 0.00030042 -0.02023422 -0.06175949
616 | v 0.00716104 -0.02550485 -0.06575982
617 | v 0.00367159 -0.02421288 -0.06300379
618 | v -0.00010465 -0.01312702 -0.07271647
619 | v 0.00190217 -0.01982060 -0.07709001
620 | v 0.00244546 -0.03533642 -0.07757482
621 | v 0.00576969 -0.02666658 -0.07700258
622 | v 0.00794271 -0.02926297 -0.07326871
623 | v 0.00388567 -0.03662888 -0.07523812
624 | v 0.00599699 -0.02406361 -0.07698728
625 | v 0.00081984 -0.02376975 -0.07764217
626 | v -0.00219364 -0.03619047 -0.06466959
627 | v 0.00212410 -0.03684288 -0.06773266
628 | v -0.00508709 -0.03476500 -0.06493587
629 | v 0.01514234 -0.00130891 -0.06811129
630 | v -0.00848477 -0.03161607 -0.07080863
631 | v -0.00771552 -0.03361543 -0.06692155
632 | v 0.00673543 -0.02978927 -0.06882606
633 | v 0.00400968 -0.03704193 -0.07162199
634 | v -0.00120861 -0.03321334 -0.07847125
635 | v -0.00623549 -0.03070938 -0.07587218
636 | v -0.00282617 -0.03818616 -0.07856049
637 | v -0.00778642 -0.03526951 -0.07585029
638 | v -0.00989514 -0.03593643 -0.07041710
639 | v -0.00843486 -0.03637864 -0.06692425
640 | v -0.00610420 -0.03728541 -0.06493893
641 | v 0.00107273 -0.03994561 -0.07747339
642 | v 0.00267077 -0.04058383 -0.07489894
643 | v 0.00313448 -0.04001295 -0.07122466
644 | v -0.00050164 -0.04419944 -0.07693882
645 | v -0.00527101 -0.04390645 -0.07774317
646 | v -0.01187540 -0.05034290 -0.06773105
647 | v -0.01128876 -0.04624597 -0.06810907
648 | v -0.01139458 -0.04636259 -0.06936973
649 | v -0.01258069 -0.05035408 -0.06865046
650 | v -0.01087619 -0.04130058 -0.07028132
651 | v -0.01085211 -0.04260185 -0.07134287
652 | v -0.00922933 -0.04202295 -0.07450419
653 | v -0.01053011 -0.04716084 -0.07349511
654 | v -0.00651448 -0.04934910 -0.07631394
655 | v -0.00804110 -0.05316712 -0.07519923
656 | v -0.01165014 -0.05091128 -0.07308026
657 | v -0.00702654 -0.04475006 -0.07697801
658 | v -0.01319389 -0.05297990 -0.06933968
659 | v -0.01328905 -0.05395055 -0.06833789
660 | v -0.00912068 -0.04040906 -0.07500967
661 | v -0.00941832 -0.04007572 -0.06648580
662 | v -0.01062248 -0.04680241 -0.06576636
663 | v -0.01155384 -0.05182504 -0.06591501
664 | v -0.01164988 -0.05530283 -0.06745014
665 | v -0.00840506 -0.04640622 -0.06439348
666 | v -0.00965795 -0.05228997 -0.06456346
667 | v -0.00960917 -0.05559350 -0.06622498
668 | v -0.00508279 -0.04674036 -0.06394377
669 | v -0.00697824 -0.05217350 -0.06465639
670 | v -0.00291218 -0.04037115 -0.06442038
671 | v -0.00657005 -0.04010239 -0.06465267
672 | v -0.00795279 -0.05480494 -0.06613634
673 | v -0.01217975 -0.05574092 -0.07158782
674 | v -0.01014232 -0.05699803 -0.06985508
675 | v -0.01232259 -0.05451583 -0.07249244
676 | v -0.00510276 -0.04958706 -0.07655533
677 | v -0.00680338 -0.05422381 -0.07517540
678 | v -0.00261794 -0.05000107 -0.07575655
679 | v -0.00514032 -0.05490757 -0.07390419
680 | v -0.00923224 -0.05640193 -0.07365315
681 | v -0.01006388 -0.05476269 -0.07405435
682 | v -0.00241285 -0.03869380 -0.06468042
683 | v 0.00120649 -0.03920136 -0.06740755
684 | v 0.00061851 -0.04136775 -0.06709068
685 | v -0.00169493 -0.04766207 -0.06599101
686 | v 0.00242596 -0.04251100 -0.07089428
687 | v 0.00165573 -0.04363599 -0.07456102
688 | v -0.00094580 -0.04989171 -0.07343112
689 | v -0.00025137 -0.04928905 -0.07002171
690 | v -0.00407074 -0.05508924 -0.07192408
691 | v -0.00324542 -0.05409515 -0.06908905
692 | v -0.00421347 -0.05293291 -0.06617545
693 | v -0.00693231 -0.05688176 -0.07026649
694 | v -0.00797068 -0.05721888 -0.07184587
695 | v -0.00844354 -0.05678305 -0.06858826
696 | v -0.00591261 -0.05610076 -0.06872409
697 | v -0.00670528 -0.05560624 -0.06740730
698 | v 0.02484713 -0.01001658 -0.07023428
699 | v 0.02029588 -0.01989603 0.02753445
700 | v 0.01820248 -0.01996084 0.02241921
701 | v 0.01948993 -0.03593831 0.01847116
702 | v 0.01672782 -0.02945107 0.01650218
703 | v 0.02228392 -0.03923366 0.02392757
704 | v 0.02605978 -0.03455415 0.03314517
705 | v 0.02532784 -0.03800507 0.02884653
706 | v 0.01640096 -0.02382717 0.01831316
707 | v 0.02299653 -0.02200723 0.03208896
708 | v 0.02580886 -0.02806273 0.03507788
709 | v 0.01551623 -0.02569265 0.03530603
710 | v 0.01882841 -0.03209422 0.03733692
711 | v 0.01963429 -0.03890360 0.03498935
712 | v 0.01965357 -0.04168135 0.02941643
713 | v 0.01843141 -0.04228590 0.02477230
714 | v 0.01380177 -0.02282103 0.02911825
715 | v 0.01208168 -0.02296512 0.02376989
716 | v 0.01223966 -0.02676979 0.01871083
717 | v 0.00558874 -0.02604619 0.02995818
718 | v -0.00034931 -0.03353708 0.03436171
719 | v -0.00468972 -0.05215699 0.03240579
720 | v 0.00140462 -0.04987411 0.03167503
721 | v 0.00008299 -0.04801467 0.03366845
722 | v -0.00515244 -0.05054412 0.03467843
723 | v 0.00521308 -0.04664064 0.03344792
724 | v 0.00320056 -0.04539160 0.03418195
725 | v 0.00229481 -0.04003376 0.03536817
726 | v -0.00263247 -0.04258082 0.03610206
727 | v -0.00099700 -0.03616896 0.03449714
728 | v -0.00497587 -0.03706825 0.03394520
729 | v -0.00757549 -0.04485071 0.03666928
730 | v -0.00878668 -0.05048388 0.03608937
731 | v -0.01166830 -0.04695383 0.03666107
732 | v -0.00976030 -0.05194191 0.03341063
733 | v 0.00294663 -0.03966042 0.03752223
734 | v 0.01117194 -0.04686830 0.02957615
735 | v 0.00334181 -0.05038883 0.02827436
736 | v -0.00422426 -0.05220084 0.02860827
737 | v -0.01016302 -0.05168413 0.03018579
738 | v 0.00352462 -0.04918782 0.02384272
739 | v -0.00374956 -0.04996579 0.02462225
740 | v -0.00928005 -0.04870558 0.02611125
741 | v 0.00235394 -0.04560581 0.02052148
742 | v -0.00424187 -0.04656751 0.02244901
743 | v 0.01369050 -0.04018529 0.01968350
744 | v 0.01405811 -0.04505594 0.02415254
745 | v -0.00811401 -0.04659672 0.02451107
746 | v -0.01392138 -0.04774692 0.03524218
747 | v -0.01432784 -0.04729886 0.03156267
748 | v -0.00535799 -0.03404571 0.03228586
749 | v -0.00980321 -0.03905300 0.03514803
750 | v -0.01062427 -0.03727277 0.03272273
751 | v -0.00453536 -0.03158674 0.02905444
752 | v -0.01090598 -0.03618855 0.02988381
753 | v -0.01401793 -0.04164622 0.03411813
754 | v -0.01208144 -0.04239557 0.03605170
755 | v 0.01684727 -0.03884074 0.01948030
756 | v 0.01472150 -0.03250360 0.01727677
757 | v 0.01148370 -0.03372109 0.01794593
758 | v 0.00075530 -0.03918055 0.01920031
759 | v 0.00791987 -0.02843305 0.01953839
760 | v 0.00588327 -0.02565650 0.02491301
761 | v -0.00386067 -0.03141003 0.02511944
762 | v -0.00175771 -0.03384848 0.02120220
763 | v -0.00991795 -0.03678711 0.02624057
764 | v -0.00831408 -0.03832533 0.02400942
765 | v -0.00626257 -0.04253293 0.02204472
766 | v -0.01270689 -0.04115849 0.02754242
767 | v -0.01418196 -0.04089284 0.03086344
768 | v -0.01250779 -0.04557296 0.02747437
769 | v -0.01059594 -0.04109908 0.02551351
770 | v -0.00999090 -0.04412035 0.02506732
771 | v 0.03235395 -0.01401884 -0.06167506
772 | v 0.02915814 -0.01476650 -0.05586194
773 | v 0.03527755 -0.01482797 -0.05606727
774 | v 0.04400471 -0.01612922 -0.05437626
775 | v 0.05392948 -0.01446863 -0.05765468
776 | v 0.04195918 -0.01526954 -0.04780618
777 | v 0.03285040 -0.01401798 -0.05008093
778 | v 0.02376820 -0.01311634 -0.05032041
779 | v 0.04803233 -0.01384978 -0.03972691
780 | f 2 3 1
781 | f 1 3 4
782 | f 5 6 8
783 | f 8 6 7
784 | f 9 4 10
785 | f 10 4 3
786 | f 12 13 11
787 | f 11 13 9
788 | f 14 12 16
789 | f 16 12 15
790 | f 18 19 17
791 | f 17 19 20
792 | f 22 23 21
793 | f 21 23 24
794 | f 26 27 25
795 | f 25 27 28
796 | f 30 31 29
797 | f 29 31 32
798 | f 33 34 36
799 | f 36 34 35
800 | f 37 38 40
801 | f 40 38 39
802 | f 41 42 6
803 | f 6 42 43
804 | f 6 43 7
805 | f 7 43 44
806 | f 45 34 46
807 | f 46 34 33
808 | f 48 49 47
809 | f 47 49 50
810 | f 52 53 51
811 | f 51 53 54
812 | f 56 52 55
813 | f 55 52 51
814 | f 58 59 57
815 | f 57 59 60
816 | f 61 62 4
817 | f 4 62 1
818 | f 64 65 63
819 | f 63 65 66
820 | f 68 69 67
821 | f 67 69 70
822 | f 72 73 71
823 | f 71 73 74
824 | f 76 77 75
825 | f 75 77 78
826 | f 53 79 54
827 | f 54 79 80
828 | f 82 83 81
829 | f 81 83 84
830 | f 85 86 52
831 | f 52 86 53
832 | f 57 60 87
833 | f 87 60 88
834 | f 89 90 43
835 | f 43 90 44
836 | f 43 42 89
837 | f 89 42 91
838 | f 40 39 92
839 | f 92 39 93
840 | f 94 70 64
841 | f 64 70 69
842 | f 13 61 9
843 | f 9 61 4
844 | f 25 28 69
845 | f 69 28 95
846 | f 96 25 68
847 | f 68 25 69
848 | f 98 99 97
849 | f 97 99 100
850 | f 101 102 104
851 | f 104 102 103
852 | f 106 30 105
853 | f 105 30 29
854 | f 51 54 107
855 | f 107 54 108
856 | f 54 80 108
857 | f 108 80 109
858 | f 111 112 110
859 | f 110 112 113
860 | f 115 5 114
861 | f 114 5 116
862 | f 115 41 5
863 | f 5 41 6
864 | f 36 35 111
865 | f 111 35 117
866 | f 35 118 117
867 | f 117 118 119
868 | f 34 120 35
869 | f 35 120 118
870 | f 34 45 120
871 | f 120 45 121
872 | f 125 126 124
873 | f 124 126 127
874 | f 128 129 14
875 | f 14 129 130
876 | f 33 131 46
877 | f 46 131 132
878 | f 133 94 63
879 | f 63 94 64
880 | f 134 135 137
881 | f 137 135 136
882 | f 139 140 138
883 | f 138 140 141
884 | f 77 142 78
885 | f 78 142 143
886 | f 144 145 20
887 | f 20 145 146
888 | f 148 149 147
889 | f 147 149 72
890 | f 151 152 150
891 | f 150 152 153
892 | f 82 101 83
893 | f 83 101 104
894 | f 101 82 155
895 | f 155 82 154
896 | f 156 157 58
897 | f 58 157 59
898 | f 158 68 147
899 | f 147 68 67
900 | f 159 146 15
901 | f 15 146 16
902 | f 68 158 96
903 | f 96 158 160
904 | f 129 66 130
905 | f 130 66 65
906 | f 162 163 161
907 | f 161 163 164
908 | f 166 167 165
909 | f 165 167 168
910 | f 170 171 169
911 | f 169 171 172
912 | f 174 171 173
913 | f 173 171 170
914 | f 176 177 175
915 | f 175 177 178
916 | f 180 181 179
917 | f 179 181 24
918 | f 182 183 21
919 | f 21 183 184
920 | f 81 185 82
921 | f 82 185 154
922 | f 186 187 188
923 | f 188 187 144
924 | f 158 71 160
925 | f 160 71 189
926 | f 190 135 157
927 | f 157 135 59
928 | f 169 172 139
929 | f 139 172 140
930 | f 147 72 158
931 | f 158 72 71
932 | f 192 193 191
933 | f 191 193 194
934 | f 167 166 49
935 | f 49 166 195
936 | f 196 190 50
937 | f 50 190 157
938 | f 197 198 200
939 | f 200 198 199
940 | f 21 24 182
941 | f 182 24 181
942 | f 146 159 20
943 | f 20 159 17
944 | f 182 181 81
945 | f 81 181 185
946 | f 201 185 180
947 | f 180 185 181
948 | f 202 203 83
949 | f 83 203 84
950 | f 205 206 204
951 | f 204 206 85
952 | f 207 22 184
953 | f 184 22 21
954 | f 179 24 208
955 | f 208 24 23
956 | f 208 209 179
957 | f 179 209 205
958 | f 209 210 205
959 | f 205 210 206
960 | f 212 206 211
961 | f 211 206 210
962 | f 179 205 180
963 | f 180 205 204
964 | f 213 176 88
965 | f 88 176 214
966 | f 86 85 212
967 | f 212 85 206
968 | f 193 217 218
969 | f 217 219 218
970 | f 218 219 220
971 | f 219 18 220
972 | f 220 18 17
973 | f 19 221 20
974 | f 20 221 144
975 | f 222 223 225
976 | f 225 223 224
977 | f 175 226 176
978 | f 176 226 214
979 | f 227 87 214
980 | f 214 87 88
981 | f 220 17 228
982 | f 228 17 159
983 | f 134 213 60
984 | f 60 213 88
985 | f 138 229 151
986 | f 151 229 152
987 | f 193 192 210
988 | f 210 192 211
989 | f 230 194 218
990 | f 218 194 193
991 | f 231 232 228
992 | f 228 232 230
993 | f 233 234 91
994 | f 91 234 89
995 | f 235 236 93
996 | f 93 236 92
997 | f 230 232 234
998 | f 234 232 237
999 | f 47 238 48
1000 | f 48 238 239
1001 | f 240 191 235
1002 | f 235 191 236
1003 | f 5 8 116
1004 | f 116 8 241
1005 | f 242 243 189
1006 | f 189 243 160
1007 | f 244 245 98
1008 | f 98 245 99
1009 | f 156 246 47
1010 | f 47 246 238
1011 | f 76 247 77
1012 | f 77 247 248
1013 | f 250 125 249
1014 | f 249 125 124
1015 | f 252 253 251
1016 | f 251 253 254
1017 | f 111 110 36
1018 | f 36 110 255
1019 | f 256 33 255
1020 | f 255 33 36
1021 | f 257 258 154
1022 | f 154 258 155
1023 | f 63 169 133
1024 | f 133 169 139
1025 | f 66 170 63
1026 | f 63 170 169
1027 | f 173 170 129
1028 | f 129 170 66
1029 | f 129 128 173
1030 | f 173 128 259
1031 | f 128 260 259
1032 | f 259 260 261
1033 | f 213 134 262
1034 | f 262 134 137
1035 | f 263 264 186
1036 | f 186 264 187
1037 | f 112 265 113
1038 | f 113 265 266
1039 | f 11 267 268
1040 | f 234 233 230
1041 | f 230 233 194
1042 | f 153 269 70
1043 | f 70 269 67
1044 | f 175 178 165
1045 | f 165 178 141
1046 | f 150 153 94
1047 | f 94 153 70
1048 | f 271 221 270
1049 | f 270 221 207
1050 | f 269 153 272
1051 | f 272 153 152
1052 | f 121 45 109
1053 | f 109 45 108
1054 | f 47 50 156
1055 | f 156 50 157
1056 | f 57 273 58
1057 | f 58 273 274
1058 | f 145 275 260
1059 | f 260 275 261
1060 | f 276 149 143
1061 | f 143 149 78
1062 | f 148 269 75
1063 | f 75 269 272
1064 | f 207 277 270
1065 | f 270 277 278
1066 | f 195 196 49
1067 | f 49 196 50
1068 | f 59 135 60
1069 | f 60 135 134
1070 | f 83 104 202
1071 | f 202 104 279
1072 | f 149 148 78
1073 | f 78 148 75
1074 | f 247 278 248
1075 | f 248 278 277
1076 | f 183 182 84
1077 | f 84 182 81
1078 | f 48 239 281
1079 | f 281 239 282
1080 | f 227 223 283
1081 | f 283 223 222
1082 | f 223 226 224
1083 | f 224 226 168
1084 | f 281 282 224
1085 | f 224 282 225
1086 | f 165 168 175
1087 | f 175 168 226
1088 | f 264 263 138
1089 | f 138 263 229
1090 | f 14 16 128
1091 | f 128 16 260
1092 | f 220 228 218
1093 | f 218 228 230
1094 | f 87 227 284
1095 | f 284 227 283
1096 | f 260 16 145
1097 | f 145 16 146
1098 | f 244 98 258
1099 | f 258 98 155
1100 | f 58 274 156
1101 | f 156 274 246
1102 | f 107 285 51
1103 | f 51 285 55
1104 | f 243 286 160
1105 | f 160 286 96
1106 | f 234 237 89
1107 | f 89 237 90
1108 | f 46 132 107
1109 | f 107 132 285
1110 | f 253 252 249
1111 | f 249 252 250
1112 | f 286 26 96
1113 | f 96 26 25
1114 | f 106 105 287
1115 | f 287 105 288
1116 | f 272 152 289
1117 | f 289 152 229
1118 | f 138 151 139
1119 | f 139 151 133
1120 | f 183 290 161
1121 | f 161 290 162
1122 | f 291 292 277
1123 | f 277 292 293
1124 | f 151 150 133
1125 | f 133 150 94
1126 | f 262 275 177
1127 | f 177 275 187
1128 | f 144 187 145
1129 | f 145 187 275
1130 | f 273 57 284
1131 | f 284 57 87
1132 | f 174 136 190
1133 | f 190 136 135
1134 | f 111 117 112
1135 | f 112 117 38
1136 | f 117 119 38
1137 | f 38 119 123
1138 | f 112 38 265
1139 | f 265 38 37
1140 | f 287 288 254
1141 | f 254 288 251
1142 | f 228 159 231
1143 | f 231 159 15
1144 | f 223 227 226
1145 | f 226 227 214
1146 | f 147 67 148
1147 | f 148 67 269
1148 | f 224 168 281
1149 | f 281 168 167
1150 | f 199 163 200
1151 | f 200 163 162
1152 | f 45 46 108
1153 | f 108 46 107
1154 | f 281 167 48
1155 | f 48 167 49
1156 | f 161 164 291
1157 | f 291 164 292
1158 | f 38 123 39
1159 | f 293 294 277
1160 | f 277 294 248
1161 | f 284 283 296
1162 | f 296 283 295
1163 | f 284 296 273
1164 | f 273 296 297
1165 | f 274 273 298
1166 | f 298 273 297
1167 | f 246 274 299
1168 | f 299 274 298
1169 | f 283 222 295
1170 | f 295 222 300
1171 | f 222 225 300
1172 | f 300 225 301
1173 | f 301 225 302
1174 | f 302 225 282
1175 | f 303 304 300
1176 | f 300 304 295
1177 | f 306 307 305
1178 | f 305 307 308
1179 | f 309 310 308
1180 | f 308 310 305
1181 | f 308 307 312
1182 | f 312 307 311
1183 | f 313 314 312
1184 | f 312 314 315
1185 | f 311 316 312
1186 | f 312 316 313
1187 | f 317 304 311
1188 | f 311 304 316
1189 | f 315 309 312
1190 | f 312 309 308
1191 | f 318 319 321
1192 | f 321 319 320
1193 | f 319 318 309
1194 | f 309 318 310
1195 | f 311 307 317
1196 | f 317 307 306
1197 | f 306 297 317
1198 | f 317 297 296
1199 | f 306 322 297
1200 | f 297 322 298
1201 | f 305 323 306
1202 | f 306 323 322
1203 | f 323 305 324
1204 | f 324 305 310
1205 | f 304 317 295
1206 | f 295 317 296
1207 | f 318 325 310
1208 | f 310 325 324
1209 | f 323 324 326
1210 | f 326 324 327
1211 | f 324 325 327
1212 | f 327 325 328
1213 | f 326 327 329
1214 | f 329 327 330
1215 | f 329 331 326
1216 | f 326 331 332
1217 | f 333 330 328
1218 | f 328 330 327
1219 | f 323 326 322
1220 | f 322 326 332
1221 | f 322 332 298
1222 | f 298 332 299
1223 | f 325 318 334
1224 | f 334 318 321
1225 | f 313 316 335
1226 | f 335 316 304
1227 | f 335 336 313
1228 | f 313 336 314
1229 | f 304 303 335
1230 | f 335 303 337
1231 | f 336 335 338
1232 | f 338 335 337
1233 | f 336 339 314
1234 | f 314 339 340
1235 | f 246 299 238
1236 | f 238 299 341
1237 | f 239 342 282
1238 | f 282 342 302
1239 | f 342 239 341
1240 | f 341 239 238
1241 | f 341 331 342
1242 | f 342 331 343
1243 | f 331 329 343
1244 | f 343 329 344
1245 | f 342 343 302
1246 | f 302 343 345
1247 | f 341 299 331
1248 | f 331 299 332
1249 | f 346 301 345
1250 | f 345 301 302
1251 | f 346 303 301
1252 | f 301 303 300
1253 | f 347 337 346
1254 | f 346 337 303
1255 | f 347 346 348
1256 | f 348 346 345
1257 | f 349 347 350
1258 | f 350 347 348
1259 | f 343 344 345
1260 | f 345 344 348
1261 | f 344 351 348
1262 | f 348 351 350
1263 | f 337 347 338
1264 | f 338 347 349
1265 | f 353 338 352
1266 | f 352 338 349
1267 | f 352 355 354
1268 | f 354 355 356
1269 | f 352 349 355
1270 | f 355 349 350
1271 | f 352 354 353
1272 | f 353 354 334
1273 | f 353 334 339
1274 | f 339 334 321
1275 | f 339 336 353
1276 | f 353 336 338
1277 | f 329 330 344
1278 | f 344 330 351
1279 | f 356 351 333
1280 | f 333 351 330
1281 | f 356 355 351
1282 | f 351 355 350
1283 | f 354 356 328
1284 | f 328 356 333
1285 | f 328 325 354
1286 | f 354 325 334
1287 | f 309 315 319
1288 | f 319 315 320
1289 | f 320 340 321
1290 | f 321 340 339
1291 | f 320 315 340
1292 | f 340 315 314
1293 | f 137 136 261
1294 | f 261 136 259
1295 | f 140 166 141
1296 | f 141 166 165
1297 | f 172 171 195
1298 | f 195 171 196
1299 | f 171 174 196
1300 | f 196 174 190
1301 | f 178 177 264
1302 | f 264 177 187
1303 | f 172 195 140
1304 | f 140 195 166
1305 | f 137 261 262
1306 | f 262 261 275
1307 | f 178 264 141
1308 | f 141 264 138
1309 | f 262 177 213
1310 | f 213 177 176
1311 | f 136 174 259
1312 | f 259 174 173
1313 | f 358 359 357
1314 | f 357 359 360
1315 | f 362 363 361
1316 | f 361 363 364
1317 | f 361 364 365
1318 | f 365 364 366
1319 | f 367 368 370
1320 | f 370 368 369
1321 | f 76 371 247
1322 | f 247 371 372
1323 | f 373 374 362
1324 | f 362 374 363
1325 | f 376 377 375
1326 | f 375 377 378
1327 | f 289 229 380
1328 | f 380 229 379
1329 | f 381 379 263
1330 | f 263 379 229
1331 | f 383 384 382
1332 | f 382 384 385
1333 | f 386 368 374
1334 | f 374 368 363
1335 | f 76 289 371
1336 | f 371 289 380
1337 | f 377 376 359
1338 | f 359 376 387
1339 | f 388 386 360
1340 | f 360 386 374
1341 | f 389 383 366
1342 | f 366 383 390
1343 | f 392 393 391
1344 | f 391 393 394
1345 | f 383 382 390
1346 | f 390 382 395
1347 | f 396 365 390
1348 | f 390 365 366
1349 | f 389 366 367
1350 | f 367 366 364
1351 | f 357 397 358
1352 | f 358 397 398
1353 | f 373 399 357
1354 | f 357 399 397
1355 | f 389 367 400
1356 | f 400 367 370
1357 | f 382 385 375
1358 | f 375 385 372
1359 | f 357 360 373
1360 | f 373 360 374
1361 | f 361 401 362
1362 | f 362 401 402
1363 | f 387 388 359
1364 | f 359 388 360
1365 | f 363 368 364
1366 | f 364 368 367
1367 | f 358 398 403
1368 | f 403 398 404
1369 | f 396 392 405
1370 | f 405 392 391
1371 | f 392 395 393
1372 | f 393 395 378
1373 | f 393 403 394
1374 | f 394 403 404
1375 | f 375 378 382
1376 | f 382 378 395
1377 | f 365 396 406
1378 | f 406 396 405
1379 | f 362 402 373
1380 | f 373 402 399
1381 | f 271 270 400
1382 | f 400 270 384
1383 | f 401 361 406
1384 | f 406 361 365
1385 | f 381 369 386
1386 | f 386 369 368
1387 | f 392 396 395
1388 | f 395 396 390
1389 | f 393 378 403
1390 | f 403 378 377
1391 | f 403 377 358
1392 | f 358 377 359
1393 | f 406 405 408
1394 | f 408 405 407
1395 | f 406 408 401
1396 | f 401 408 409
1397 | f 401 409 402
1398 | f 402 409 410
1399 | f 402 410 399
1400 | f 399 410 411
1401 | f 405 391 407
1402 | f 407 391 412
1403 | f 391 394 412
1404 | f 412 394 413
1405 | f 394 404 413
1406 | f 413 404 414
1407 | f 415 416 412
1408 | f 412 416 407
1409 | f 418 419 417
1410 | f 417 419 420
1411 | f 421 422 418
1412 | f 418 422 419
1413 | f 419 422 424
1414 | f 424 422 423
1415 | f 425 426 424
1416 | f 424 426 427
1417 | f 423 428 424
1418 | f 424 428 425
1419 | f 427 420 424
1420 | f 424 420 419
1421 | f 420 429 417
1422 | f 417 429 430
1423 | f 423 422 431
1424 | f 431 422 421
1425 | f 421 409 431
1426 | f 431 409 408
1427 | f 421 432 409
1428 | f 409 432 410
1429 | f 418 433 421
1430 | f 421 433 432
1431 | f 434 433 417
1432 | f 417 433 418
1433 | f 431 416 423
1434 | f 423 416 428
1435 | f 416 431 407
1436 | f 407 431 408
1437 | f 435 434 430
1438 | f 430 434 417
1439 | f 433 434 436
1440 | f 436 434 437
1441 | f 434 435 437
1442 | f 437 435 438
1443 | f 436 437 439
1444 | f 439 437 440
1445 | f 439 441 436
1446 | f 436 441 442
1447 | f 438 443 437
1448 | f 437 443 440
1449 | f 436 442 433
1450 | f 433 442 432
1451 | f 432 442 410
1452 | f 410 442 411
1453 | f 435 430 445
1454 | f 445 430 444
1455 | f 430 429 444
1456 | f 444 429 446
1457 | f 425 428 447
1458 | f 447 428 416
1459 | f 426 425 448
1460 | f 448 425 447
1461 | f 416 415 447
1462 | f 447 415 449
1463 | f 447 449 448
1464 | f 448 449 450
1465 | f 451 452 448
1466 | f 448 452 426
1467 | f 446 452 444
1468 | f 444 452 451
1469 | f 399 411 397
1470 | f 397 411 453
1471 | f 398 454 404
1472 | f 404 454 414
1473 | f 454 398 453
1474 | f 453 398 397
1475 | f 453 441 454
1476 | f 454 441 455
1477 | f 441 439 455
1478 | f 455 439 456
1479 | f 454 455 414
1480 | f 414 455 457
1481 | f 441 453 442
1482 | f 442 453 411
1483 | f 458 413 457
1484 | f 457 413 414
1485 | f 458 415 413
1486 | f 413 415 412
1487 | f 458 459 415
1488 | f 415 459 449
1489 | f 459 458 460
1490 | f 460 458 457
1491 | f 461 459 462
1492 | f 462 459 460
1493 | f 455 456 457
1494 | f 457 456 460
1495 | f 456 463 460
1496 | f 460 463 462
1497 | f 459 461 449
1498 | f 449 461 450
1499 | f 465 450 464
1500 | f 464 450 461
1501 | f 464 467 466
1502 | f 466 467 468
1503 | f 464 461 467
1504 | f 467 461 462
1505 | f 464 466 465
1506 | f 465 466 445
1507 | f 465 445 451
1508 | f 451 445 444
1509 | f 451 448 465
1510 | f 465 448 450
1511 | f 439 440 456
1512 | f 456 440 463
1513 | f 468 463 443
1514 | f 443 463 440
1515 | f 468 467 463
1516 | f 463 467 462
1517 | f 466 468 438
1518 | f 438 468 443
1519 | f 438 435 466
1520 | f 466 435 445
1521 | f 420 427 429
1522 | f 429 427 446
1523 | f 446 427 452
1524 | f 452 427 426
1525 | f 370 369 188
1526 | f 188 369 186
1527 | f 371 376 372
1528 | f 372 376 375
1529 | f 380 379 387
1530 | f 387 379 388
1531 | f 379 381 388
1532 | f 388 381 386
1533 | f 385 384 278
1534 | f 278 384 270
1535 | f 380 387 371
1536 | f 371 387 376
1537 | f 400 370 271
1538 | f 271 370 188
1539 | f 385 278 372
1540 | f 372 278 247
1541 | f 400 384 389
1542 | f 389 384 383
1543 | f 369 381 186
1544 | f 186 381 263
1545 | f 221 271 144
1546 | f 144 271 188
1547 | f 221 19 207
1548 | f 207 19 22
1549 | f 19 18 22
1550 | f 22 18 23
1551 | f 18 219 23
1552 | f 23 219 208
1553 | f 469 470 472
1554 | f 472 470 471
1555 | f 474 475 473
1556 | f 473 475 476
1557 | f 473 476 477
1558 | f 477 476 478
1559 | f 480 481 479
1560 | f 479 481 482
1561 | f 483 484 474
1562 | f 474 484 475
1563 | f 486 487 485
1564 | f 485 487 488
1565 | f 489 490 77
1566 | f 77 490 142
1567 | f 491 489 248
1568 | f 248 489 77
1569 | f 493 494 492
1570 | f 492 494 495
1571 | f 496 480 484
1572 | f 484 480 475
1573 | f 486 497 487
1574 | f 487 497 471
1575 | f 498 496 472
1576 | f 472 496 484
1577 | f 499 493 478
1578 | f 478 493 500
1579 | f 501 502 504
1580 | f 504 502 503
1581 | f 493 492 500
1582 | f 500 492 505
1583 | f 506 477 500
1584 | f 500 477 478
1585 | f 499 478 479
1586 | f 479 478 476
1587 | f 469 507 470
1588 | f 470 507 508
1589 | f 483 509 469
1590 | f 469 509 507
1591 | f 479 482 499
1592 | f 499 482 510
1593 | f 492 495 485
1594 | f 485 495 511
1595 | f 483 469 484
1596 | f 484 469 472
1597 | f 473 512 474
1598 | f 474 512 513
1599 | f 497 498 471
1600 | f 471 498 472
1601 | f 475 480 476
1602 | f 476 480 479
1603 | f 470 508 514
1604 | f 514 508 515
1605 | f 506 502 516
1606 | f 516 502 501
1607 | f 503 502 488
1608 | f 488 502 505
1609 | f 514 515 503
1610 | f 503 515 504
1611 | f 485 488 492
1612 | f 492 488 505
1613 | f 477 506 517
1614 | f 517 506 516
1615 | f 474 513 483
1616 | f 483 513 509
1617 | f 510 292 494
1618 | f 494 292 164
1619 | f 512 473 517
1620 | f 517 473 477
1621 | f 491 481 496
1622 | f 496 481 480
1623 | f 502 506 505
1624 | f 505 506 500
1625 | f 503 488 514
1626 | f 514 488 487
1627 | f 514 487 470
1628 | f 470 487 471
1629 | f 517 516 519
1630 | f 519 516 518
1631 | f 517 519 512
1632 | f 512 519 520
1633 | f 512 520 513
1634 | f 513 520 521
1635 | f 513 521 509
1636 | f 509 521 522
1637 | f 516 501 518
1638 | f 518 501 523
1639 | f 501 504 523
1640 | f 523 504 524
1641 | f 524 504 525
1642 | f 525 504 515
1643 | f 526 527 523
1644 | f 523 527 518
1645 | f 529 530 528
1646 | f 528 530 531
1647 | f 532 533 529
1648 | f 529 533 530
1649 | f 530 533 535
1650 | f 535 533 534
1651 | f 536 537 535
1652 | f 535 537 538
1653 | f 534 539 535
1654 | f 535 539 536
1655 | f 538 531 535
1656 | f 535 531 530
1657 | f 540 541 531
1658 | f 531 541 528
1659 | f 533 532 534
1660 | f 534 532 542
1661 | f 532 520 542
1662 | f 542 520 519
1663 | f 532 543 520
1664 | f 520 543 521
1665 | f 529 544 532
1666 | f 532 544 543
1667 | f 544 529 545
1668 | f 545 529 528
1669 | f 534 542 539
1670 | f 539 542 527
1671 | f 527 542 518
1672 | f 518 542 519
1673 | f 541 546 528
1674 | f 528 546 545
1675 | f 547 544 548
1676 | f 548 544 545
1677 | f 546 549 545
1678 | f 545 549 548
1679 | f 550 547 551
1680 | f 551 547 548
1681 | f 547 550 553
1682 | f 553 550 552
1683 | f 549 554 548
1684 | f 548 554 551
1685 | f 544 547 543
1686 | f 543 547 553
1687 | f 543 553 521
1688 | f 521 553 522
1689 | f 546 541 556
1690 | f 556 541 555
1691 | f 541 540 555
1692 | f 555 540 557
1693 | f 536 539 558
1694 | f 558 539 527
1695 | f 537 536 559
1696 | f 559 536 558
1697 | f 527 526 558
1698 | f 558 526 560
1699 | f 559 558 561
1700 | f 561 558 560
1701 | f 559 562 537
1702 | f 537 562 563
1703 | f 557 563 555
1704 | f 555 563 562
1705 | f 509 522 507
1706 | f 507 522 564
1707 | f 508 565 515
1708 | f 515 565 525
1709 | f 565 508 564
1710 | f 564 508 507
1711 | f 564 552 565
1712 | f 565 552 566
1713 | f 552 550 566
1714 | f 566 550 567
1715 | f 565 566 525
1716 | f 525 566 568
1717 | f 564 522 552
1718 | f 552 522 553
1719 | f 569 524 568
1720 | f 568 524 525
1721 | f 569 526 524
1722 | f 524 526 523
1723 | f 570 560 569
1724 | f 569 560 526
1725 | f 570 569 571
1726 | f 571 569 568
1727 | f 572 570 573
1728 | f 573 570 571
1729 | f 566 567 568
1730 | f 568 567 571
1731 | f 567 574 571
1732 | f 571 574 573
1733 | f 570 572 560
1734 | f 560 572 561
1735 | f 576 561 575
1736 | f 575 561 572
1737 | f 575 578 577
1738 | f 577 578 579
1739 | f 575 572 578
1740 | f 578 572 573
1741 | f 575 577 576
1742 | f 576 577 556
1743 | f 576 556 562
1744 | f 562 556 555
1745 | f 562 559 576
1746 | f 576 559 561
1747 | f 550 551 567
1748 | f 567 551 574
1749 | f 579 574 554
1750 | f 554 574 551
1751 | f 579 578 574
1752 | f 574 578 573
1753 | f 579 554 577
1754 | f 577 554 549
1755 | f 549 546 577
1756 | f 577 546 556
1757 | f 531 538 540
1758 | f 540 538 557
1759 | f 557 538 563
1760 | f 563 538 537
1761 | f 481 294 482
1762 | f 482 294 293
1763 | f 580 486 511
1764 | f 511 486 485
1765 | f 490 489 497
1766 | f 497 489 498
1767 | f 489 491 498
1768 | f 498 491 496
1769 | f 163 495 164
1770 | f 164 495 494
1771 | f 580 490 486
1772 | f 486 490 497
1773 | f 482 293 510
1774 | f 510 293 292
1775 | f 495 163 511
1776 | f 511 163 199
1777 | f 494 493 510
1778 | f 510 493 499
1779 | f 481 491 294
1780 | f 294 491 248
1781 | f 582 583 581
1782 | f 581 583 584
1783 | f 586 587 585
1784 | f 585 587 588
1785 | f 585 588 589
1786 | f 589 588 590
1787 | f 592 593 591
1788 | f 591 593 594
1789 | f 595 596 598
1790 | f 598 596 597
1791 | f 599 600 586
1792 | f 586 600 587
1793 | f 601 602 604
1794 | f 604 602 603
1795 | f 605 606 608
1796 | f 608 606 607
1797 | f 609 607 200
1798 | f 200 607 606
1799 | f 610 611 613
1800 | f 613 611 612
1801 | f 600 614 587
1802 | f 587 614 592
1803 | f 596 605 597
1804 | f 597 605 608
1805 | f 602 615 603
1806 | f 603 615 583
1807 | f 616 614 584
1808 | f 584 614 600
1809 | f 611 618 617
1810 | f 617 618 590
1811 | f 620 621 619
1812 | f 619 621 622
1813 | f 610 623 611
1814 | f 611 623 618
1815 | f 624 589 618
1816 | f 618 589 590
1817 | f 617 590 591
1818 | f 591 590 588
1819 | f 581 625 582
1820 | f 582 625 626
1821 | f 599 627 581
1822 | f 581 627 625
1823 | f 591 594 617
1824 | f 617 594 628
1825 | f 601 610 598
1826 | f 598 610 613
1827 | f 581 584 599
1828 | f 599 584 600
1829 | f 585 629 586
1830 | f 586 629 630
1831 | f 615 616 583
1832 | f 583 616 584
1833 | f 587 592 588
1834 | f 588 592 591
1835 | f 582 626 631
1836 | f 631 626 632
1837 | f 624 620 633
1838 | f 633 620 619
1839 | f 621 620 604
1840 | f 604 620 623
1841 | f 631 632 621
1842 | f 621 632 622
1843 | f 610 601 623
1844 | f 623 601 604
1845 | f 589 624 634
1846 | f 634 624 633
1847 | f 586 630 599
1848 | f 599 630 627
1849 | f 203 202 628
1850 | f 628 202 612
1851 | f 629 585 634
1852 | f 634 585 589
1853 | f 609 593 614
1854 | f 614 593 592
1855 | f 620 624 623
1856 | f 623 624 618
1857 | f 631 621 603
1858 | f 603 621 604
1859 | f 582 631 583
1860 | f 583 631 603
1861 | f 634 633 636
1862 | f 636 633 635
1863 | f 634 636 629
1864 | f 629 636 637
1865 | f 629 637 630
1866 | f 630 637 638
1867 | f 630 638 627
1868 | f 627 638 639
1869 | f 633 619 635
1870 | f 635 619 640
1871 | f 619 622 640
1872 | f 640 622 641
1873 | f 641 622 642
1874 | f 642 622 632
1875 | f 644 635 643
1876 | f 643 635 640
1877 | f 645 646 648
1878 | f 648 646 647
1879 | f 649 650 646
1880 | f 646 650 647
1881 | f 647 650 652
1882 | f 652 650 651
1883 | f 652 653 655
1884 | f 655 653 654
1885 | f 651 656 652
1886 | f 652 656 653
1887 | f 655 648 652
1888 | f 652 648 647
1889 | f 648 657 645
1890 | f 645 657 658
1891 | f 650 649 651
1892 | f 651 649 659
1893 | f 649 637 659
1894 | f 659 637 636
1895 | f 649 660 637
1896 | f 637 660 638
1897 | f 661 660 646
1898 | f 646 660 649
1899 | f 662 661 645
1900 | f 645 661 646
1901 | f 651 659 656
1902 | f 656 659 644
1903 | f 644 659 635
1904 | f 635 659 636
1905 | f 663 662 658
1906 | f 658 662 645
1907 | f 664 661 665
1908 | f 665 661 662
1909 | f 663 666 662
1910 | f 662 666 665
1911 | f 667 664 668
1912 | f 668 664 665
1913 | f 664 667 670
1914 | f 670 667 669
1915 | f 666 671 665
1916 | f 665 671 668
1917 | f 661 664 660
1918 | f 660 664 670
1919 | f 660 670 638
1920 | f 638 670 639
1921 | f 658 672 663
1922 | f 663 672 673
1923 | f 658 657 672
1924 | f 672 657 674
1925 | f 653 656 675
1926 | f 675 656 644
1927 | f 653 675 654
1928 | f 654 675 676
1929 | f 675 644 677
1930 | f 677 644 643
1931 | f 676 675 678
1932 | f 678 675 677
1933 | f 679 680 676
1934 | f 676 680 654
1935 | f 674 680 672
1936 | f 672 680 679
1937 | f 627 639 625
1938 | f 625 639 681
1939 | f 626 682 632
1940 | f 632 682 642
1941 | f 681 682 625
1942 | f 625 682 626
1943 | f 669 683 681
1944 | f 681 683 682
1945 | f 667 684 669
1946 | f 669 684 683
1947 | f 682 683 642
1948 | f 642 683 685
1949 | f 681 639 669
1950 | f 669 639 670
1951 | f 686 641 685
1952 | f 685 641 642
1953 | f 641 686 640
1954 | f 640 686 643
1955 | f 686 687 643
1956 | f 643 687 677
1957 | f 687 686 688
1958 | f 688 686 685
1959 | f 689 687 690
1960 | f 690 687 688
1961 | f 683 684 685
1962 | f 685 684 688
1963 | f 684 691 688
1964 | f 688 691 690
1965 | f 687 689 677
1966 | f 677 689 678
1967 | f 693 678 692
1968 | f 692 678 689
1969 | f 692 695 694
1970 | f 694 695 696
1971 | f 692 689 695
1972 | f 695 689 690
1973 | f 692 694 693
1974 | f 693 694 673
1975 | f 693 673 679
1976 | f 679 673 672
1977 | f 679 676 693
1978 | f 693 676 678
1979 | f 684 667 691
1980 | f 691 667 668
1981 | f 696 691 671
1982 | f 671 691 668
1983 | f 696 695 691
1984 | f 691 695 690
1985 | f 694 696 666
1986 | f 666 696 671
1987 | f 666 663 694
1988 | f 694 663 673
1989 | f 648 655 657
1990 | f 657 655 674
1991 | f 674 655 680
1992 | f 680 655 654
1993 | f 593 162 594
1994 | f 594 162 290
1995 | f 598 597 601
1996 | f 601 597 602
1997 | f 615 608 616
1998 | f 616 608 607
1999 | f 607 609 616
2000 | f 616 609 614
2001 | f 697 595 613
2002 | f 613 595 598
2003 | f 597 608 602
2004 | f 602 608 615
2005 | f 594 290 628
2006 | f 628 290 203
2007 | f 628 612 617
2008 | f 617 612 611
2009 | f 593 609 162
2010 | f 162 609 200
2011 | f 105 29 698
2012 | f 698 29 699
2013 | f 268 126 701
2014 | f 701 126 700
2015 | f 126 125 700
2016 | f 700 125 702
2017 | f 252 703 250
2018 | f 250 703 704
2019 | f 32 268 705
2020 | f 705 268 701
2021 | f 288 105 706
2022 | f 706 105 698
2023 | f 29 32 699
2024 | f 699 32 705
2025 | f 707 251 706
2026 | f 706 251 288
2027 | f 250 704 125
2028 | f 125 704 702
2029 | f 703 252 707
2030 | f 707 252 251
2031 | f 706 708 707
2032 | f 707 708 709
2033 | f 707 709 703
2034 | f 703 709 710
2035 | f 703 710 704
2036 | f 704 710 711
2037 | f 704 711 702
2038 | f 702 711 712
2039 | f 706 698 708
2040 | f 708 698 713
2041 | f 698 699 713
2042 | f 713 699 714
2043 | f 699 705 714
2044 | f 714 705 715
2045 | f 717 708 716
2046 | f 716 708 713
2047 | f 719 720 718
2048 | f 718 720 721
2049 | f 722 723 719
2050 | f 719 723 720
2051 | f 724 725 723
2052 | f 723 725 720
2053 | f 724 726 725
2054 | f 725 726 727
2055 | f 728 721 725
2056 | f 725 721 720
2057 | f 721 728 729
2058 | f 729 728 730
2059 | f 729 731 721
2060 | f 721 731 718
2061 | f 724 723 732
2062 | f 732 723 722
2063 | f 722 710 732
2064 | f 732 710 709
2065 | f 722 733 710
2066 | f 710 733 711
2067 | f 719 734 722
2068 | f 722 734 733
2069 | f 734 719 735
2070 | f 735 719 718
2071 | f 724 732 726
2072 | f 726 732 717
2073 | f 717 732 708
2074 | f 708 732 709
2075 | f 731 736 718
2076 | f 718 736 735
2077 | f 737 734 738
2078 | f 738 734 735
2079 | f 736 739 735
2080 | f 735 739 738
2081 | f 740 737 741
2082 | f 741 737 738
2083 | f 737 740 743
2084 | f 743 740 742
2085 | f 739 744 738
2086 | f 738 744 741
2087 | f 734 737 733
2088 | f 733 737 743
2089 | f 733 743 711
2090 | f 711 743 712
2091 | f 736 731 746
2092 | f 746 731 745
2093 | f 731 729 745
2094 | f 745 729 730
2095 | f 727 726 747
2096 | f 747 726 717
2097 | f 748 727 749
2098 | f 749 727 747
2099 | f 747 717 750
2100 | f 750 717 716
2101 | f 749 747 751
2102 | f 751 747 750
2103 | f 749 752 748
2104 | f 748 752 753
2105 | f 730 753 745
2106 | f 745 753 752
2107 | f 728 725 748
2108 | f 748 725 727
2109 | f 730 728 753
2110 | f 753 728 748
2111 | f 700 702 754
2112 | f 754 702 712
2113 | f 705 701 715
2114 | f 715 701 755
2115 | f 754 755 700
2116 | f 700 755 701
2117 | f 742 756 754
2118 | f 754 756 755
2119 | f 740 757 742
2120 | f 742 757 756
2121 | f 755 756 715
2122 | f 715 756 758
2123 | f 754 712 742
2124 | f 742 712 743
2125 | f 759 714 758
2126 | f 758 714 715
2127 | f 714 759 713
2128 | f 713 759 716
2129 | f 759 760 716
2130 | f 716 760 750
2131 | f 760 759 761
2132 | f 761 759 758
2133 | f 762 760 763
2134 | f 763 760 761
2135 | f 756 757 758
2136 | f 758 757 761
2137 | f 757 764 761
2138 | f 761 764 763
2139 | f 760 762 750
2140 | f 750 762 751
2141 | f 766 751 765
2142 | f 765 751 762
2143 | f 767 765 769
2144 | f 769 765 768
2145 | f 765 762 768
2146 | f 768 762 763
2147 | f 765 767 766
2148 | f 766 767 746
2149 | f 766 746 752
2150 | f 752 746 745
2151 | f 752 749 766
2152 | f 766 749 751
2153 | f 757 740 764
2154 | f 764 740 741
2155 | f 769 764 744
2156 | f 744 764 741
2157 | f 769 768 764
2158 | f 764 768 763
2159 | f 767 769 739
2160 | f 739 769 744
2161 | f 739 736 767
2162 | f 767 736 746
2163 | f 219 217 208
2164 | f 208 217 209
2165 | f 209 217 210
2166 | f 210 217 193
2167 | f 198 580 199
2168 | f 199 580 511
2169 | f 76 75 289
2170 | f 289 75 272
2171 | f 277 207 291
2172 | f 291 207 184
2173 | f 290 183 203
2174 | f 203 183 84
2175 | f 490 580 142
2176 | f 142 580 198
2177 | f 142 198 143
2178 | f 143 198 197
2179 | f 149 276 72
2180 | f 72 276 73
2181 | f 104 103 279
2182 | f 279 103 770
2183 | f 770 596 279
2184 | f 279 596 595
2185 | f 605 596 771
2186 | f 771 596 770
2187 | f 771 770 772
2188 | f 772 770 103
2189 | f 772 103 773
2190 | f 773 103 102
2191 | f 155 774 101
2192 | f 101 774 102
2193 | f 773 102 97
2194 | f 97 102 774
2195 | f 775 773 100
2196 | f 100 773 97
2197 | f 773 775 772
2198 | f 772 775 776
2199 | f 772 776 771
2200 | f 771 776 777
2201 | f 200 606 197
2202 | f 197 606 777
2203 | f 189 71 778
2204 | f 778 71 74
2205 | f 97 774 98
2206 | f 98 774 155
2207 | f 183 161 184
2208 | f 184 161 291
2209 | f 777 276 197
2210 | f 197 276 143
2211 | f 777 606 771
2212 | f 771 606 605
2213 | f 613 612 697
2214 | f 697 612 202
2215 | f 595 697 279
2216 | f 279 697 202
2217 | f 73 276 776
2218 | f 776 276 777
2219 | f 776 775 73
2220 | f 73 775 74
2221 | f 100 778 775
2222 | f 775 778 74
2223 | f 778 100 189
2224 | f 189 100 99
2225 | f 245 242 99
2226 | f 99 242 189
2227 | f 131 33 256
2228 | f 28 1 95
2229 | f 95 1 62
2230 | f 27 2 28
2231 | f 28 2 1
2232 | f 265 115 266
2233 | f 266 115 114
2234 | f 95 62 69
2235 | f 69 62 64
2236 | f 62 61 64
2237 | f 64 61 65
2238 | f 61 13 65
2239 | f 65 13 130
2240 | f 13 12 130
2241 | f 130 12 14
2242 | f 31 11 32
2243 | f 32 11 268
2244 | f 15 12 31
2245 | f 31 12 11
2246 | f 31 30 15
2247 | f 15 30 231
2248 | f 30 106 231
2249 | f 231 106 232
2250 | f 287 237 106
2251 | f 106 237 232
2252 | f 254 90 287
2253 | f 287 90 237
2254 | f 253 44 254
2255 | f 254 44 90
2256 | f 253 249 44
2257 | f 44 249 7
2258 | f 267 127 268
2259 | f 268 127 126
2260 | f 10 3 241
2261 | f 241 3 116
2262 | f 3 2 116
2263 | f 116 2 114
2264 | f 2 27 114
2265 | f 114 27 266
2266 | f 27 26 266
2267 | f 266 26 113
2268 | f 26 286 113
2269 | f 113 286 110
2270 | f 286 243 110
2271 | f 110 243 255
2272 | f 243 242 255
2273 | f 255 242 256
2274 | f 242 245 256
2275 | f 256 245 131
2276 | f 245 244 131
2277 | f 131 244 132
2278 | f 265 37 115
2279 | f 115 37 41
2280 | f 41 37 42
2281 | f 42 37 40
2282 | f 92 91 40
2283 | f 40 91 42
2284 | f 236 233 92
2285 | f 92 233 91
2286 | f 191 194 236
2287 | f 236 194 233
2288 | f 258 285 244
2289 | f 244 285 132
2290 | f 257 55 258
2291 | f 258 55 285
2292 | f 257 154 201
2293 | f 201 154 185
2294 | f 56 201 204
2295 | f 204 201 180
2296 | f 55 257 56
2297 | f 56 257 201
2298 | f 204 85 56
2299 | f 56 85 52
2300 | f 86 122 53
2301 | f 53 122 79
2302 | f 212 215 86
2303 | f 86 215 122
2304 | f 211 216 212
2305 | f 212 216 215
2306 | f 192 280 211
2307 | f 211 280 216
2308 | f 191 240 192
2309 | f 192 240 280
2310 | f 249 124 7
2311 | f 7 124 8
2312 | f 124 127 8
2313 | f 8 127 241
2314 | f 127 267 241
2315 | f 241 267 10
2316 | f 10 267 9
2317 | f 9 267 11
--------------------------------------------------------------------------------
/postprocess/reprojection.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pickle
3 |
4 | import cv2
5 | import numpy as np
6 | import torch
7 | import xlrd
8 | # from termcolor import cprint
9 |
10 | # from utils.mano_wrist import hands_mean, rot_pose_beta_to_mesh
11 |
12 |
# One color per joint of the 21-joint hand skeleton: root first, then four
# joints per finger (thumb, index, middle, ring, little).
# NOTE: plot_hand() reverses the channel order before handing these to cv2,
# so the values here are presumably RGB — confirm against the source images.
COLOR_JOINTS = [
    [1.0, 0.0, 0.0],
    [0.0, 0.4, 0.0],
    [0.0, 0.6, 0.0],
    [0.0, 0.8, 0.0],
    [0.0, 1.0, 0.0],  # thumb
    [0.0, 0.0, 0.6],
    [0.0, 0.0, 1.0],
    [0.2, 0.2, 1.0],
    [0.4, 0.4, 1.0],  # index
    [0.0, 0.4, 0.4],
    [0.0, 0.6, 0.6],
    [0.0, 0.8, 0.8],
    [0.0, 1.0, 1.0],  # middle
    [0.4, 0.4, 0.0],
    [0.6, 0.6, 0.0],
    [0.8, 0.8, 0.0],
    [1.0, 1.0, 0.0],  # ring
    [0.4, 0.0, 0.4],
    [0.6, 0.0, 0.6],
    [0.8, 0.0, 0.8],
    [1.0, 0.0, 1.0],
]  # little
36 |
37 |
def plot_hand(image, coords_hw, vis=None, linewidth=3):
    """Draw a 21-joint hand skeleton onto ``image`` in place.

    Args:
        image: image array that the cv2 drawing calls modify in place.
        coords_hw: (21, 2) per-joint pixel coordinates; column 0 is used
            as x and column 1 as y in the cv2 calls below.
        vis: optional per-joint visibility mask; bones touching a hidden
            joint are skipped. ``None`` means every joint is visible.
        linewidth: bone thickness; joint circles use radius ``2 * linewidth``.
    """
    colors = np.array(COLOR_JOINTS)
    colors = colors[:, ::-1]  # flip channel order for cv2 (assumed RGB -> BGR)

    # Each finger f is the chain 0 -> 4f+1 -> 4f+2 -> 4f+3 -> 4f+4 and each
    # bone is colored by its distal joint. Generate the table instead of
    # spelling out all 20 entries by hand.
    bones = []
    for finger in range(5):
        chain = [0] + list(range(4 * finger + 1, 4 * finger + 5))
        for proximal, distal in zip(chain[:-1], chain[1:]):
            bones.append(((proximal, distal), colors[distal, :]))

    if vis is None:
        vis = np.ones_like(coords_hw[:, 0]) == 1.0

    for (joint_a, joint_b), color in bones:
        if not (vis[joint_a] and vis[joint_b]):
            continue

        ax, ay = int(coords_hw[joint_a, 0]), int(coords_hw[joint_a, 1])
        bx, by = int(coords_hw[joint_b, 0]), int(coords_hw[joint_b, 1])
        cv2.line(image, (ax, ay), (bx, by), color=color * 255.0, thickness=linewidth)

    # Joint markers drawn on top of the bones.
    for i in range(coords_hw.shape[0]):
        cx = int(coords_hw[i, 0])
        cy = int(coords_hw[i, 1])
        cv2.circle(image, (cx, cy), radius=2 * linewidth, thickness=-1, color=colors[i, :] * 255.0)
87 |
88 |
89 | # def test():
90 | # aa = pickle.load(open("20220508/2022-05-08_08-56-25/test.pkl", "rb"))
91 |
92 | # all_data = rot_pose_beta_to_mesh(aa["root"].view(1, 3), aa["pose"].contiguous().view(1, 45))
93 | # ref_joints = all_data[0, :21][[0, 13, 14, 15, 16, 1, 2, 3, 17, 4, 5, 6, 18, 10, 11, 12, 19, 7, 8, 9, 20]]
94 | # all_verts = all_data[0, 21:]
95 |
96 | # img = cv2.imread("20220508/2022-05-08_08-56-25/3.jpg")
97 | # h = img.shape[0]
98 | # w = img.shape[1]
99 |
100 | # scale_x = 100 * 30
101 | # scale_y = 100 * 27
102 | # ref_joints[:, 0] = ref_joints[:, 0] * scale_x
103 | # ref_joints[:, 1] = ref_joints[:, 1] * scale_y
104 | # u_shift = 0
105 | # v_shift = 0
106 | # # ref_joints[:, :2] = ref_joints[:, :2] / ref_joints[:, 2:]
107 |
108 | # ref_joints[:, 0] += u_shift
109 | # ref_joints[:, 1] += v_shift
110 |
111 | # # scale_x, scale_y
112 | # ref_joints[:, 0] = ref_joints[:, 0] - 30
113 | # ref_joints[:, 1] = ref_joints[:, 1] + 72
114 |
115 | # # import pdb; pdb.set_trace()
116 |
117 | # ref_joints[:, 0] = ref_joints[:, 0] + w // 2
118 | # ref_joints[:, 1] = h // 2 - ref_joints[:, 1] * 2
119 | # ref_joints = ref_joints.detach().cpu().numpy()
120 |
121 | # plot_hand(img, ref_joints)
122 |
123 | # cv2.circle(img, (w // 2, h // 2), radius=6, thickness=-1, color=(0, 0, 255))
124 |
125 | # cv2.imwrite("tmp.jpg", img)
126 |
127 |
def generate_2d(path, plot=False):
    """Load per-joint screen positions exported next to ``path + "/0.png"``.

    Reads rows 1..26 of the first sheet in ``ExcelData.xls`` (column 8 holds
    comma-separated floats), reorders them into the 21-joint layout used by
    plot_hand, flips the vertical axis, and scales to pixel coordinates.

    Args:
        path: directory containing ``0.png`` and ``ExcelData.xls``.
        plot: when True, also draws the skeleton and saves it to ``tmp.jpg``.

    Returns:
        (21, 2) float tensor of joint pixel positions.
    """
    img = cv2.imread(path + "/0.png")
    cv2.imwrite("tmp.jpg", img)

    excel_path = path + "/ExcelData.xls"
    screen_pos = []
    with xlrd.open_workbook(excel_path) as wb:
        sheet = wb.sheet_by_index(0)
        for row_idx, row in enumerate(sheet.get_rows()):
            if 1 <= row_idx <= 26:
                values = [float(x) for x in row[8].value.split(",")]
                screen_pos.append(torch.tensor(values, dtype=torch.float))
    stacked = torch.stack(screen_pos)
    print(stacked.shape)

    # Reorder into root-then-finger-chains joint ordering.
    reorder = [0, 22, 23, 24, 25, 17, 18, 19, 20, 12, 13, 14, 15, 7, 8, 9, 10, 2, 3, 4, 5]
    ref_joints = stacked[reorder]

    ref_joints[:, 1] = 1 - ref_joints[:, 1]  # image origin is top-left: flip v
    ref_joints[:, 0] *= img.shape[1]
    ref_joints[:, 1] *= img.shape[0]

    if plot:
        print(ref_joints)
        plot_hand(img, ref_joints)
        cv2.imwrite("tmp.jpg", img)
    return ref_joints
155 |
156 |
if __name__ == '__main__':
    # Machine-specific path to one captured export folder; adjust before running.
    data_dir = r"C:\Users\FengWang\Downloads\newhand\Hand\Build_Hand\ExportData\2022-08-02_17-07-21"
    generate_2d(data_dir, True)
160 |
--------------------------------------------------------------------------------
/postprocess/retargeting/basis.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from pytorch3d.transforms import axis_angle_to_quaternion,\
3 | quaternion_invert, quaternion_multiply, matrix_to_quaternion
4 |
MANO_INDEX = []  # NOTE(review): unused in this module
# Parent joint for each of the 16 rotation entries; -1 marks the wrist root.
MANO_ROT_PARENT = [-1, 0, 1, 2, 0, 4, 5, 0, 7, 8, 0, 10, 11, 0, 13, 14]
7 |
8 |
class FingerConfig:
    """Index bookkeeping for one finger.

    Attributes:
        name: finger label, used for logging.
        pos_index: four indices into the joint-position array forming the
            finger chain that starts at the wrist (position index 0).
        rot_index: three indices into the 16-entry rotation array matching
            the first three joints of that chain.
    """

    def __init__(self, name, pos_index, rot_index):
        self.name = name
        self.pos_index = pos_index
        self.rot_index = rot_index
15 |
16 |
# Per-finger joint index tables (see FingerConfig and MANO_ROT_PARENT).
INDEX = FingerConfig("index", [1, 2, 3, 16], [1, 2, 3])
MIDDLE = FingerConfig("middle", [4, 5, 6, 17], [4, 5, 6])
PINKY = FingerConfig("pinky", [7, 8, 9, 18], [7, 8, 9])
RING = FingerConfig("ring", [10, 11, 12, 19], [10, 11, 12])
THUMB = FingerConfig("thumb", [13, 14, 15, 20], [13, 14, 15])

# Global wrist orientation quaternion (w, x, y, z); overwritten by
# calc_hand_rotation() before the joint offsets are computed.
WRIST_ROTATION = torch.tensor([ 0.7598, -0.6481, 0.0243, 0.0458], dtype=torch.float)
24 |
25 |
def norm(val: torch.Tensor) -> torch.Tensor:
    """Return ``val`` scaled to unit length; zero tensors are returned as-is.

    The original computed ``val.norm()`` twice; the magnitude is now
    evaluated once.
    """
    magnitude = val.norm()
    return val / magnitude if magnitude > 0 else val
31 |
32 |
def look_rotation(forward, up) -> torch.Tensor:
    """Return the unit quaternion of the frame with columns (right, up, forward).

    ``right`` is derived as ``up x forward`` after normalizing both inputs.
    The rotation matrix is assembled with ``torch.stack`` instead of the
    original per-row ``torch.tensor([...])`` loop, which rebuilt each row
    from Python scalars and therefore detached any autograd history.
    """
    forward = norm(forward)
    up = norm(up)
    right = norm(torch.cross(up, forward))
    # Row i is (right[i], up[i], forward[i]) -> basis vectors are the columns.
    rot_mat = torch.stack((right, up, forward), dim=1)
    return norm(matrix_to_quaternion(rot_mat))
41 |
42 |
def convert_local_to_global(rotation):
    """Compose each joint's quaternion with its parent's, in place.

    Joints are visited root-to-leaf; since every parent index in
    MANO_ROT_PARENT precedes its children, each parent is already global
    when a child is processed. Returns the mutated ``rotation``.
    """
    for child in range(rotation.shape[0]):
        parent = MANO_ROT_PARENT[child]
        if parent < 0:
            continue  # wrist root is already global
        rotation[child] = norm(quaternion_multiply(rotation[parent], rotation[child]))
    return rotation
48 |
49 |
def convert_global_to_local(rotation):
    """Undo convert_local_to_global in place and drop the wrist entry.

    Joints are visited leaf-to-root so each child is localized against a
    still-global parent. Returns ``rotation[1:]`` (the 15 finger joints).
    """
    for child in reversed(range(16)):
        parent = MANO_ROT_PARENT[child]
        if parent < 0:
            continue
        localized = quaternion_multiply(quaternion_invert(rotation[parent]), rotation[child])
        rotation[child] = norm(localized)
    return rotation[1:]
56 |
57 |
def calc_hand_rotation(wrist, index, middle):
    """Estimate the hand orientation from three joint positions.

    Stores the resulting quaternion in the module-global ``WRIST_ROTATION``
    (later read by BasisHelper.init_joint_info) instead of returning it.

    Args:
        wrist: wrist joint position.
        index: index-finger joint position used as one palm direction.
        middle: middle-finger joint position used as the dominant direction.
    """
    global WRIST_ROTATION

    # Directions from the wrist toward the two finger joints.
    dir1 = index - wrist
    dir2 = middle - wrist

    print(norm(dir1), norm(dir2))
    # Palm normal (second) and a blended pointing direction weighted toward
    # the middle finger (prim); forward completes the orthogonal frame.
    second = norm(torch.cross(dir2, dir1))
    prim = norm(2 * dir2 + dir1)
    forward = norm(torch.cross(prim, second))
    up = second

    # NOTE(review): arguments here are (up, forward) although look_rotation's
    # signature is (forward, up) — possibly an intentional axis swap; confirm.
    wrist_rotation = norm(look_rotation(up, forward))
    print("hand direction", forward)
    print("wrist", wrist_rotation)
    WRIST_ROTATION = wrist_rotation
74 |
75 |
class BasisHelper:
    """Static helpers that compute per-joint quaternion offsets ("basis")
    between each joint's current global rotation and the rotation implied by
    the finger-bone directions."""

    @staticmethod
    def init_joint_info(name, curr_rot, prim, second):
        """Return the quaternion offset from the bone-derived frame to
        ``curr_rot``.

        Args:
            name: joint label, used only for logging.
            curr_rot: the joint's current global quaternion.
            prim: primary (bone) direction of the joint.
            second: secondary direction (bending-plane normal component).
        """
        forward = norm(torch.cross(second, prim))
        up = second

        # Rotation implied purely by the bone geometry.
        desired_rot = look_rotation(forward, up)

        # NOTE(review): computed and printed but not used in the offset below.
        real_curr_rot = norm(quaternion_multiply(curr_rot, WRIST_ROTATION))

        print(name, forward, second, desired_rot, curr_rot, real_curr_rot)
        offset = norm(quaternion_multiply(quaternion_invert(desired_rot), curr_rot))
        # print(name, offset)
        return offset

    @staticmethod
    def init_finger_info(fc: FingerConfig, pos, rot):
        """Compute the three joint offsets of one finger.

        Args:
            fc: index tables for the finger.
            pos: joint positions; ``pos[0]`` is the wrist.
            rot: global per-joint quaternions (see convert_local_to_global).

        Returns:
            (3, 4) tensor stacking the offsets of the finger's three joints.
        """
        pos0: torch.Tensor = pos[fc.pos_index[0]]
        pos1: torch.Tensor = pos[fc.pos_index[1]]
        pos2: torch.Tensor = pos[fc.pos_index[2]]
        pos3: torch.Tensor = pos[fc.pos_index[3]]
        # Normalized bone directions along the finger chain, wrist outward.
        f0prim = norm(pos0 - pos[0])
        f1prim = norm(pos1 - pos0)
        f2prim = norm(pos2 - pos1)
        f3prim = norm(pos3 - pos2)

        # Bending-axis candidates between consecutive bones.
        rot_dir_0 = norm(torch.cross(f1prim, f0prim))
        rot_dir_1 = norm(torch.cross(f2prim, f1prim))
        rot_dir_2 = norm(torch.cross(f3prim, f2prim))

        print(fc.name + " rotation dir", rot_dir_1, rot_dir_2)

        # NOTE(review): all three secondaries use rot_dir_1 (not rot_dir_0/2);
        # looks deliberate (shared bending axis per finger) — confirm.
        f1second = norm(torch.cross(rot_dir_1, f1prim))
        f2second = norm(torch.cross(rot_dir_1, f2prim))
        f3second = norm(torch.cross(rot_dir_1, f3prim))

        offset1 = BasisHelper.init_joint_info(fc.name + "_1", rot[fc.rot_index[0]], f1prim, f1second)
        offset2 = BasisHelper.init_joint_info(fc.name + "_2", rot[fc.rot_index[1]], f2prim, f2second)
        offset3 = BasisHelper.init_joint_info(fc.name + "_3", rot[fc.rot_index[2]], f3prim, f3second)
        return torch.stack([offset1, offset2, offset3])

    @staticmethod
    def get_basis(rotation, position):
        """Compute the (15, 4) offset-quaternion basis for all five fingers.

        Side effect: updates the module-global WRIST_ROTATION via
        calc_hand_rotation before the offsets are computed.

        Args:
            rotation: per-joint axis-angle rotations (with hands_mean applied).
            position: joint positions; indices 0/1/4 are used for the wrist frame.

        Returns:
            (15, 4) tensor of offsets in finger order INDEX, MIDDLE, PINKY,
            RING, THUMB (three joints each).
        """
        # print(position)
        calc_hand_rotation(position[0], position[1], position[4])
        # quaternion in w,x,y,z order
        rot_quat = axis_angle_to_quaternion(rotation)
        rot_quat = convert_local_to_global(rot_quat)
        # print(rot_quat)
        res = [BasisHelper.init_finger_info(INDEX, position, rot_quat),
               BasisHelper.init_finger_info(MIDDLE, position, rot_quat),
               BasisHelper.init_finger_info(PINKY, position, rot_quat),
               BasisHelper.init_finger_info(RING, position, rot_quat),
               BasisHelper.init_finger_info(THUMB, position, rot_quat)]

        res = torch.concat(res)
        return res

    # print(rotation.shape, position.shape)
    # print(rotation, position)
146 |
147 |
def test_look_at():
    """Smoke test: print the look_rotation quaternion of an arbitrary frame."""
    reference = torch.tensor([1., 1., 1.])
    forward = torch.tensor([-1., 0.5, 0.25])
    # Any vector orthogonal to `forward` works as the up direction.
    up = torch.cross(forward, reference)

    rot = look_rotation(forward, up)
    print(forward)
    print(up)
    print(rot)
159 |
160 |
if __name__ == "__main__":
    # Manual smoke test; prints the resulting quaternion to stdout.
    test_look_at()
    # bh.axis_angle_to_quaternion(aa)
164 |
165 |
166 |
--------------------------------------------------------------------------------
/postprocess/retargeting/mesh_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import csv
3 | import numpy as np
4 | import torch
5 | from pytorch3d.io import load_objs_as_meshes, load_obj, save_obj
6 | import matplotlib.pyplot as plt
7 | # Data structures and functions for rendering
8 | from pytorch3d.structures import Meshes
9 | from pytorch3d.vis.plotly_vis import AxisArgs, plot_batch_individually, plot_scene
10 | from pytorch3d.vis.texture_vis import texturesuv_image_matplotlib
11 | from pytorch3d.renderer import (
12 | look_at_view_transform,
13 | FoVPerspectiveCameras,
14 | PointLights,
15 | DirectionalLights,
16 | Materials,
17 | RasterizationSettings,
18 | MeshRenderer,
19 | MeshRasterizer,
20 | SoftPhongShader,
21 | TexturesUV,
22 | TexturesVertex
23 | )
24 |
DATA_DIR = r"./data"  # root directory for the template and output OBJ meshes
26 |
27 |
def modify_vertices(verts):
    """Write ``data/test_hand.obj`` as ``data/hand_02.obj`` with new vertices.

    Every line of the template mesh is copied except the contiguous block of
    ``v x y z`` lines, which is replaced by ``verts``. Prints the number of
    template vertex lines that were replaced.

    Fixes over the original: the replacement block is also flushed when the
    template ends inside its vertex block (previously the new vertices were
    silently dropped); the output is opened ``"w"`` (no read access needed);
    the input is streamed instead of materialized via ``readlines()``.

    Args:
        verts: iterable of (x, y, z) vertex coordinates.
    """
    # Set paths
    obj_filename = os.path.join(DATA_DIR, r"hand_02.obj")
    out_filename = os.path.join(DATA_DIR, r"test_hand.obj")

    def write_verts(fout):
        # Emit the replacement vertex block in OBJ format.
        for v in verts:
            fout.write("v {:.6f} {:.6f} {:.6f}\n".format(v[0], v[1], v[2]))

    with open(obj_filename) as fin, open(out_filename, 'w') as fout:
        cnt = 0
        in_vert_block = False
        for line in fin:
            if line.startswith("v "):
                # Skip template vertices; remember we are inside the block.
                cnt += 1
                in_vert_block = True
            else:
                if in_vert_block:
                    in_vert_block = False
                    write_verts(fout)
                fout.write(line)
        if in_vert_block:
            # Template ended with vertex lines: still emit the replacement.
            write_verts(fout)
    print(cnt)
47 |
48 |
def render_mesh_image():
    """Render ``data/test_hand.obj`` with a textured Phong shader and show it
    in a matplotlib window."""
    # Prefer the GPU when one is available.
    if torch.cuda.is_available():
        device = torch.device("cuda:0")
        torch.cuda.set_device(device)
    else:
        device = torch.device("cpu")

    obj_filename = os.path.join(DATA_DIR, "test_hand.obj")
    # obj_filename = os.path.join(DATA_DIR, "hand_wrist/hand_02.obj")
    hand_mesh = load_objs_as_meshes([obj_filename], device=device)

    # Camera one unit from the origin, no elevation/azimuth offset.
    cam_R, cam_T = look_at_view_transform(1.0, 0, 0)
    cameras = FoVPerspectiveCameras(device=device, R=cam_R, T=cam_T)

    # 512x512 output; one face per pixel and zero blur are enough for a
    # visualization-only render (bin_size/max_faces_per_bin left at defaults
    # so the coarse-to-fine rasterizer is used).
    raster_settings = RasterizationSettings(
        image_size=512,
        blur_radius=0.0,
        faces_per_pixel=1,
    )

    # Single point light placed in front of the mesh (on the -z side).
    lights = PointLights(device=device, location=[[0.0, 0.0, -3.0]])

    # Rasterizer + textured soft-Phong shader: interpolates per-vertex UVs,
    # samples the texture image, and applies Phong lighting.
    renderer = MeshRenderer(
        rasterizer=MeshRasterizer(cameras=cameras, raster_settings=raster_settings),
        shader=SoftPhongShader(device=device, cameras=cameras, lights=lights),
    )

    images = renderer(hand_mesh)
    plt.figure(figsize=(10, 10))
    plt.imshow(images[0, ..., :3].cpu().numpy())
    plt.show()
    # plt.axis("off")
103 | # plt.axis("off")
104 |
105 |
if __name__ == "__main__":
    # Render the most recently written test mesh (see modify_vertices).
    render_mesh_image()
108 |
--------------------------------------------------------------------------------
/postprocess/test/from_unity.obj:
--------------------------------------------------------------------------------
1 | v -0.009470 -0.034797 -0.022181
2 | v -0.013170 -0.045750 -0.028193
3 | v -0.016258 -0.037084 -0.031780
4 | v -0.013425 -0.027323 -0.024329
5 | v -0.032364 -0.050295 -0.038884
6 | v -0.042952 -0.052014 -0.034426
7 | v -0.039463 -0.036617 -0.035857
8 | v -0.030544 -0.037973 -0.040172
9 | v -0.015585 -0.021383 -0.023578
10 | v -0.017655 -0.029367 -0.033015
11 | v -0.016292 -0.020724 -0.018416
12 | v -0.016384 -0.013808 -0.006268
13 | v -0.015060 -0.014010 -0.013021
14 | v -0.014007 -0.008195 -0.000427
15 | v -0.018543 -0.020791 0.002197
16 | v -0.010341 -0.012387 0.007521
17 | v -0.000310 -0.038479 0.011441
18 | v 0.010196 -0.045971 0.008946
19 | v 0.018295 -0.030824 0.011460
20 | v 0.006122 -0.024704 0.012660
21 | v 0.029834 -0.045427 0.001615
22 | v 0.024240 -0.036656 0.007375
23 | v 0.016268 -0.051997 0.006146
24 | v 0.021929 -0.058967 0.001125
25 | v -0.000790 -0.046487 -0.021827
26 | v -0.004059 -0.056713 -0.025499
27 | v -0.007549 -0.053415 -0.025245
28 | v -0.004453 -0.043160 -0.021365
29 | v -0.022498 -0.018222 -0.017082
30 | v -0.027894 -0.024463 -0.008461
31 | v -0.021271 -0.021499 -0.010856
32 | v -0.018488 -0.018934 -0.021116
33 | v -0.006573 -0.081310 -0.038971
34 | v -0.012065 -0.092984 -0.035041
35 | v -0.017229 -0.090682 -0.033026
36 | v -0.011092 -0.080650 -0.036628
37 | v -0.036409 -0.075357 -0.030535
38 | v -0.034182 -0.084683 -0.026406
39 | v -0.043171 -0.087401 -0.016778
40 | v -0.044045 -0.077160 -0.017414
41 | v -0.041045 -0.063542 -0.033482
42 | v -0.047326 -0.065055 -0.022186
43 | v -0.048388 -0.050906 -0.024116
44 | v -0.043701 -0.034335 -0.027914
45 | v -0.000613 -0.098693 -0.036252
46 | v 0.004937 -0.090954 -0.039809
47 | v 0.002043 0.039273 0.009123
48 | v 0.007735 0.039534 0.009057
49 | v 0.006971 0.034039 0.005214
50 | v 0.001515 0.034993 0.006222
51 | v 0.015404 -0.100017 -0.024019
52 | v 0.017351 -0.101098 -0.013781
53 | v 0.016866 -0.107932 -0.016360
54 | v 0.011087 -0.106016 -0.024998
55 | v 0.020803 -0.091844 -0.022309
56 | v 0.019323 -0.091689 -0.011047
57 | v -0.001750 0.033869 0.019476
58 | v -0.003021 0.036616 0.015483
59 | v -0.003786 0.032323 0.014056
60 | v -0.001924 0.029512 0.018029
61 | v -0.012308 -0.017957 -0.017124
62 | v -0.007256 -0.024888 -0.018783
63 | v -0.003039 -0.000375 -0.010467
64 | v -0.004617 -0.011458 -0.013400
65 | v -0.010643 -0.009449 -0.011686
66 | v -0.009172 -0.001055 -0.008160
67 | v 0.011801 -0.027731 -0.017667
68 | v 0.006565 -0.038084 -0.019487
69 | v 0.002292 -0.033525 -0.017904
70 | v 0.006346 -0.021178 -0.016142
71 | v 0.016932 -0.045392 -0.025800
72 | v 0.021354 -0.034662 -0.023142
73 | v 0.026889 -0.039853 -0.027139
74 | v 0.022524 -0.046455 -0.029167
75 | v 0.026968 -0.015867 -0.015923
76 | v 0.030406 -0.009874 -0.013273
77 | v 0.030627 -0.011023 -0.013137
78 | v 0.028910 -0.018257 -0.016922
79 | v 0.012695 -0.114155 -0.016865
80 | v 0.006154 -0.111431 -0.025946
81 | v 0.040681 -0.058146 -0.011532
82 | v 0.042261 -0.063456 -0.021378
83 | v 0.050142 -0.053742 -0.020765
84 | v 0.046177 -0.050718 -0.010834
85 | v 0.013621 -0.098142 -0.004445
86 | v 0.016175 -0.106182 -0.004886
87 | v 0.002984 0.031726 0.022706
88 | v 0.002993 0.027584 0.020987
89 | v -0.046428 -0.052536 -0.011254
90 | v -0.043196 -0.033567 -0.017993
91 | v -0.045288 -0.068278 -0.007885
92 | v -0.042621 -0.079393 -0.005592
93 | v -0.043311 -0.088434 -0.004916
94 | v 0.002281 -0.010045 -0.014002
95 | v -0.003389 -0.033158 -0.018992
96 | v 0.002807 -0.049338 -0.023293
97 | v 0.027278 -0.060704 -0.037995
98 | v 0.021852 -0.069796 -0.040007
99 | v 0.015518 -0.061778 -0.037107
100 | v 0.021731 -0.055649 -0.035322
101 | v 0.041765 -0.061724 -0.030936
102 | v 0.037978 -0.055985 -0.035166
103 | v 0.044034 -0.047736 -0.033169
104 | v 0.048482 -0.052594 -0.029271
105 | v -0.029074 -0.016263 -0.016031
106 | v -0.033624 -0.027469 -0.007191
107 | v 0.011087 -0.097284 -0.033308
108 | v 0.005321 -0.102457 -0.032884
109 | v 0.000568 -0.109351 -0.031108
110 | v -0.006993 -0.068640 -0.032581
111 | v -0.014442 -0.080186 -0.035110
112 | v -0.018588 -0.077841 -0.034282
113 | v -0.009404 -0.067587 -0.031074
114 | v -0.019887 -0.055741 -0.034789
115 | v -0.029014 -0.064001 -0.036954
116 | v -0.021619 -0.044902 -0.038456
117 | v -0.025138 -0.089093 -0.030244
118 | v -0.022502 -0.099457 -0.032278
119 | v -0.031835 -0.096799 -0.029610
120 | v -0.013691 -0.103509 -0.033921
121 | v -0.005466 -0.106995 -0.033847
122 | v 0.014362 -0.114141 -0.001941
123 | v -0.036542 -0.093586 -0.023753
124 | v -0.027646 -0.030173 -0.039712
125 | v -0.024087 -0.022413 -0.039036
126 | v -0.017298 -0.023285 -0.035452
127 | v -0.019843 -0.029659 -0.036957
128 | v -0.011186 -0.001085 0.004572
129 | v -0.011499 0.000347 -0.002989
130 | v -0.015048 -0.008407 -0.007277
131 | v 0.002611 -0.074125 -0.039510
132 | v 0.011849 -0.082637 -0.040448
133 | v 0.003715 0.001192 -0.009567
134 | v -0.002269 0.025405 0.018193
135 | v -0.004634 0.027682 0.013593
136 | v -0.008491 0.012037 0.008753
137 | v -0.005518 0.010653 0.014481
138 | v 0.010524 0.006057 -0.001589
139 | v 0.006022 0.006533 -0.005286
140 | v 0.006983 0.009416 -0.002191
141 | v 0.011684 0.010751 0.001730
142 | v 0.034382 -0.017922 -0.017480
143 | v 0.033797 -0.024254 -0.020547
144 | v 0.012618 -0.010726 0.010722
145 | v -0.000774 -0.009511 0.014808
146 | v -0.003446 -0.019392 0.012548
147 | v 0.016858 -0.032483 -0.020452
148 | v 0.022369 -0.023605 -0.018954
149 | v 0.025541 -0.025899 -0.020351
150 | v 0.006738 -0.008228 -0.013183
151 | v 0.009314 -0.000069 -0.009084
152 | v 0.011544 -0.006420 -0.011238
153 | v 0.009862 -0.012954 -0.014159
154 | v 0.035062 -0.074355 -0.020879
155 | v 0.034602 -0.072696 -0.032344
156 | v -0.001753 0.038388 0.011585
157 | v -0.002408 0.034091 0.009424
158 | v 0.011599 -0.042532 -0.022171
159 | v -0.008401 -0.031810 0.010031
160 | v 0.006749 -0.052374 -0.025478
161 | v 0.044969 -0.031370 -0.007539
162 | v 0.046006 -0.030035 -0.013753
163 | v 0.046105 -0.022793 -0.015718
164 | v 0.047528 -0.022283 -0.010772
165 | v 0.015029 0.024835 0.009108
166 | v 0.012047 0.027440 0.004744
167 | v 0.012827 0.032439 0.007195
168 | v 0.015624 0.029817 0.011423
169 | v 0.000235 0.006576 -0.006214
170 | v -0.006168 0.006242 -0.004830
171 | v -0.003579 0.012126 -0.002291
172 | v 0.002166 0.011144 -0.002833
173 | v -0.009301 0.007036 0.000829
174 | v -0.007825 0.012383 0.002450
175 | v 0.013781 0.022476 0.015223
176 | v 0.008408 0.021305 0.020653
177 | v 0.007256 0.008455 0.015684
178 | v 0.011362 0.011153 0.009516
179 | v 0.014905 -0.072215 0.001828
180 | v 0.022039 -0.077742 -0.004712
181 | v 0.029153 -0.064775 -0.005590
182 | v 0.036838 -0.052706 -0.005662
183 | v 0.043285 -0.040434 -0.006497
184 | v 0.038096 -0.034449 0.002798
185 | v 0.033040 -0.069637 -0.010847
186 | v 0.014306 -0.002732 -0.000554
187 | v 0.008273 -0.001971 0.010786
188 | v 0.014253 -0.005446 0.005130
189 | v 0.012053 -0.055195 -0.029846
190 | v -0.003990 0.029116 0.008705
191 | v -0.026273 -0.085831 0.008252
192 | v -0.014040 -0.091144 0.009158
193 | v -0.012896 -0.082384 0.009212
194 | v -0.026333 -0.075954 0.007651
195 | v 0.005885 0.029015 0.002314
196 | v -0.000184 0.030035 0.003795
197 | v 0.039948 -0.026105 -0.022881
198 | v 0.039118 -0.020106 -0.020252
199 | v 0.042035 -0.019970 -0.019857
200 | v 0.045240 -0.025464 -0.023104
201 | v 0.024442 -0.081405 -0.008729
202 | v 0.054237 -0.047771 -0.019005
203 | v 0.050334 -0.046027 -0.010886
204 | v 0.015444 -0.088941 -0.003766
205 | v 0.009376 -0.084425 0.002462
206 | v 0.005916 -0.093501 0.002645
207 | v 0.031627 -0.020667 0.006792
208 | v 0.010195 -0.066084 0.005836
209 | v 0.004456 -0.079388 0.005550
210 | v -0.000011 -0.090001 0.005876
211 | v -0.002406 -0.097972 0.006416
212 | v 0.005991 -0.101925 0.002279
213 | v 0.003188 0.023141 0.020678
214 | v 0.009165 0.025991 0.021557
215 | v 0.005294 -0.111203 0.005382
216 | v -0.004875 -0.105941 0.007731
217 | v -0.002436 -0.073875 0.008324
218 | v -0.011021 -0.066957 0.010300
219 | v 0.003404 -0.061174 0.008277
220 | v -0.004617 -0.053418 0.010342
221 | v 0.023464 -0.018372 0.013952
222 | v 0.013872 0.040701 0.025681
223 | v 0.014149 0.032481 0.020051
224 | v 0.015796 0.035292 0.015278
225 | v 0.015617 0.043129 0.022876
226 | v 0.014307 0.027257 0.016852
227 | v 0.009324 0.030477 0.022948
228 | v -0.015443 -0.045156 0.008461
229 | v 0.014335 -0.002292 -0.008397
230 | v -0.024973 -0.058195 0.006528
231 | v -0.024436 -0.031983 0.003854
232 | v -0.031741 -0.040876 0.000738
233 | v -0.037104 -0.070669 0.003302
234 | v -0.039790 -0.053714 -0.000387
235 | v -0.037299 -0.090056 0.003544
236 | v -0.035795 -0.080886 0.003851
237 | v -0.040084 -0.038105 -0.007201
238 | v 0.005216 0.047914 0.016205
239 | v 0.010436 0.047507 0.016039
240 | v -0.028230 -0.094076 0.008834
241 | v -0.021180 -0.035276 -0.038678
242 | v 0.005119 -0.063172 -0.032932
243 | v 0.001502 -0.060779 -0.029431
244 | v 0.018044 -0.075012 -0.040485
245 | v 0.009697 -0.067207 -0.038056
246 | v 0.000774 0.046578 0.018188
247 | v 0.033084 -0.006260 -0.009843
248 | v 0.031581 -0.009168 -0.007733
249 | v -0.035425 -0.028290 -0.037139
250 | v -0.031559 -0.020879 -0.037991
251 | v -0.035429 -0.014749 -0.026974
252 | v -0.035347 -0.016984 -0.033815
253 | v -0.039754 -0.025242 -0.030449
254 | v -0.039500 -0.023447 -0.022737
255 | v -0.004785 -0.069240 -0.034070
256 | v -0.001509 -0.070457 -0.036004
257 | v 0.027704 -0.083758 -0.021871
258 | v 0.027154 -0.081387 -0.033161
259 | v -0.009343 0.005558 0.007232
260 | v -0.007700 -0.003915 0.011271
261 | v -0.006181 0.003434 0.013074
262 | v 0.000132 0.007727 0.017002
263 | v 0.014768 -0.000096 -0.004536
264 | v 0.010975 0.003901 0.004245
265 | v -0.022847 -0.071479 -0.035458
266 | v -0.014058 -0.063388 -0.031428
267 | v -0.017399 -0.025554 -0.031323
268 | v -0.015240 -0.021599 -0.028671
269 | v 0.014794 -0.019235 -0.016144
270 | v 0.032078 -0.008017 0.002679
271 | v 0.024670 -0.007443 0.007623
272 | v 0.018501 -0.011438 -0.013391
273 | v 0.000239 0.041722 0.025084
274 | v -0.000731 0.044738 0.021129
275 | v -0.000121 0.000415 0.015432
276 | v 0.031348 -0.030457 -0.024721
277 | v 0.036344 -0.016218 0.002041
278 | v 0.035139 -0.005894 -0.003258
279 | v 0.052411 -0.047095 -0.027487
280 | v -0.016786 -0.099623 0.009420
281 | v 0.013618 0.038393 0.011445
282 | v 0.014463 0.045915 0.017659
283 | v 0.010487 0.038613 0.027548
284 | v 0.004635 0.038352 0.027553
285 | v 0.018889 -0.088360 -0.033792
286 | v -0.000705 -0.058842 -0.027260
287 | v -0.037261 -0.025954 -0.013088
288 | v -0.032844 -0.014882 -0.020174
289 | v 0.021802 -0.007938 -0.011516
290 | v 0.046730 -0.038037 -0.009787
291 | v 0.043091 -0.025941 -0.000306
292 | v 0.046604 -0.017220 -0.003639
293 | v 0.041964 -0.011999 -0.001154
294 | v 0.035931 -0.008104 -0.003174
295 | v 0.011983 0.041878 0.029533
296 | v 0.005808 0.041271 0.030145
297 | v 0.001015 0.044792 0.028987
298 | v -0.000340 0.047880 0.025382
299 | v 0.001177 0.049175 0.021089
300 | v 0.015186 0.044468 0.027743
301 | v 0.016235 0.047206 0.025133
302 | v 0.014482 0.048957 0.019629
303 | v 0.016162 0.048678 0.029758
304 | v 0.013343 0.048329 0.034128
305 | v 0.003134 0.056795 0.036511
306 | v 0.003067 0.051153 0.034858
307 | v 0.004916 0.052271 0.036622
308 | v 0.005593 0.056643 0.038857
309 | v 0.007490 0.060474 0.041819
310 | v 0.004789 0.061361 0.039433
311 | v 0.008162 0.049224 0.035659
312 | v 0.009822 0.054821 0.038848
313 | v 0.013920 0.054446 0.037703
314 | v 0.015078 0.058595 0.041206
315 | v 0.011420 0.058980 0.042513
316 | v 0.012423 0.050546 0.035839
317 | v 0.007599 0.047139 0.034676
318 | v 0.007921 0.064072 0.041756
319 | v 0.009085 0.062384 0.043456
320 | v 0.012429 0.061653 0.043826
321 | v 0.012883 0.064749 0.043228
322 | v 0.000360 0.051627 0.029193
323 | v 0.002424 0.058131 0.034283
324 | v 0.005479 0.062887 0.036586
325 | v 0.008570 0.065393 0.039164
326 | v 0.004534 0.060501 0.031532
327 | v 0.007221 0.063708 0.034249
328 | v 0.010233 0.065854 0.036222
329 | v 0.008017 0.061909 0.029273
330 | v 0.009885 0.064318 0.032539
331 | v 0.004606 0.053188 0.021139
332 | v 0.001285 0.052029 0.023939
333 | v 0.011178 0.065967 0.034330
334 | v 0.013669 0.065852 0.041236
335 | v 0.016338 0.053625 0.035044
336 | v 0.017185 0.059520 0.038754
337 | v 0.017398 0.054512 0.033077
338 | v 0.018566 0.060253 0.036831
339 | v 0.017211 0.062760 0.041906
340 | v 0.015237 0.061124 0.042746
341 | v 0.005263 0.049636 0.018117
342 | v 0.010753 0.049529 0.017706
343 | v 0.010586 0.052994 0.019742
344 | v 0.012581 0.061680 0.028027
345 | v 0.014618 0.052344 0.021402
346 | v 0.016932 0.051336 0.027148
347 | v 0.017953 0.056845 0.031356
348 | v 0.016905 0.059237 0.029135
349 | v 0.018288 0.061315 0.034404
350 | v 0.016773 0.062517 0.032649
351 | v 0.013464 0.064323 0.031836
352 | v 0.017206 0.064533 0.036753
353 | v 0.017724 0.063720 0.039696
354 | v 0.013952 0.066216 0.037764
355 | v 0.016471 0.064498 0.034809
356 | v 0.013677 0.065525 0.035094
357 | v 0.022035 0.017316 -0.025135
358 | v 0.028249 0.016054 -0.026663
359 | v 0.027496 0.015729 -0.022527
360 | v 0.022302 0.016831 -0.021316
361 | v 0.021566 0.030059 -0.018518
362 | v 0.018880 0.025025 -0.021422
363 | v 0.018945 0.024705 -0.015683
364 | v 0.023136 0.029028 -0.012297
365 | v 0.028448 0.031956 -0.017130
366 | v 0.029964 0.029896 -0.011035
367 | v 0.022644 0.024916 -0.007612
368 | v 0.018526 0.020857 -0.012032
369 | v 0.016610 0.002377 -0.003538
370 | v 0.020142 0.004511 0.001749
371 | v 0.030192 -0.003871 -0.013686
372 | v 0.032896 -0.001991 -0.011661
373 | v 0.019594 0.020297 -0.023665
374 | v 0.019458 0.020057 -0.019011
375 | v 0.035859 0.014115 -0.019867
376 | v 0.031202 0.012433 -0.022233
377 | v 0.032353 0.015230 -0.023280
378 | v 0.036732 0.018288 -0.021609
379 | v 0.018354 -0.001907 -0.010382
380 | v 0.024436 -0.003466 -0.012057
381 | v 0.016567 0.000171 -0.007719
382 | v 0.038687 0.018193 -0.015202
383 | v 0.037455 0.022523 -0.009943
384 | v 0.034671 0.003812 0.000049
385 | v 0.036777 0.002215 -0.005457
386 | v 0.018188 0.017380 -0.016269
387 | v 0.025418 0.013149 -0.022326
388 | v 0.020610 0.014279 -0.020345
389 | v 0.030168 0.024393 -0.006474
390 | v 0.036951 0.027658 -0.014933
391 | v 0.035657 0.028682 -0.035539
392 | v 0.037948 0.025188 -0.024369
393 | v 0.036823 0.020467 -0.026396
394 | v 0.035789 0.024089 -0.037696
395 | v 0.038598 0.022947 -0.018808
396 | v 0.035334 0.029424 -0.020947
397 | v 0.022611 0.019351 -0.038783
398 | v 0.028864 0.018160 -0.039861
399 | v 0.019349 0.022414 -0.037592
400 | v 0.027286 0.004482 0.003270
401 | v 0.020568 0.031425 -0.032287
402 | v 0.018341 0.025997 -0.035419
403 | v 0.033520 0.016498 -0.027018
404 | v 0.033556 0.020069 -0.039627
405 | v 0.033179 0.031955 -0.033479
406 | v 0.026713 0.033693 -0.030352
407 | v 0.031439 0.033265 -0.039303
408 | v 0.025577 0.034513 -0.037184
409 | v 0.019591 0.031935 -0.038154
410 | v 0.018087 0.026475 -0.039492
411 | v 0.019502 0.022785 -0.040370
412 | v 0.034073 0.030235 -0.041230
413 | v 0.034825 0.025639 -0.042666
414 | v 0.032679 0.020983 -0.042269
415 | v 0.032832 0.028235 -0.048465
416 | v 0.029773 0.031879 -0.048553
417 | v 0.015071 0.024371 -0.057427
418 | v 0.016119 0.026688 -0.051895
419 | v 0.017676 0.028438 -0.053118
420 | v 0.016877 0.026402 -0.058248
421 | v 0.017835 0.030609 -0.047203
422 | v 0.019110 0.030886 -0.049368
423 | v 0.023583 0.032409 -0.049064
424 | v 0.022282 0.029321 -0.054396
425 | v 0.027133 0.028882 -0.054621
426 | v 0.026019 0.027354 -0.060144
427 | v 0.021225 0.028016 -0.060220
428 | v 0.026936 0.031465 -0.050870
429 | v 0.017236 0.025368 -0.061751
430 | v 0.016643 0.023049 -0.062749
431 | v 0.023885 0.034152 -0.046282
432 | v 0.017025 0.026533 -0.043054
433 | v 0.015548 0.023631 -0.051343
434 | v 0.015388 0.021061 -0.057487
435 | v 0.018086 0.020354 -0.063090
436 | v 0.017312 0.019758 -0.051745
437 | v 0.017357 0.018447 -0.057673
438 | v 0.019857 0.018506 -0.062112
439 | v 0.020955 0.016676 -0.052318
440 | v 0.021005 0.016288 -0.058132
441 | v 0.022694 0.019322 -0.042317
442 | v 0.018647 0.022056 -0.042217
443 | v 0.021240 0.017153 -0.060641
444 | v 0.020503 0.024473 -0.065296
445 | v 0.021456 0.021238 -0.065396
446 | v 0.020501 0.025809 -0.063985
447 | v 0.029120 0.028020 -0.054607
448 | v 0.027419 0.025622 -0.060499
449 | v 0.030881 0.025708 -0.054825
450 | v 0.028456 0.023496 -0.061178
451 | v 0.025194 0.024541 -0.064622
452 | v 0.024108 0.025772 -0.064130
453 | v 0.023184 0.020345 -0.040797
454 | v 0.028772 0.019400 -0.041622
455 | v 0.028357 0.018590 -0.042970
456 | v 0.026304 0.016607 -0.053172
457 | v 0.032505 0.020637 -0.044291
458 | v 0.033702 0.024632 -0.047480
459 | v 0.031673 0.022962 -0.054318
460 | v 0.030224 0.019161 -0.054110
461 | v 0.029596 0.021628 -0.060956
462 | v 0.029012 0.019133 -0.058911
463 | v 0.025142 0.016262 -0.058335
464 | v 0.026674 0.020742 -0.064575
465 | v 0.025843 0.022927 -0.065005
466 | v 0.022373 0.019501 -0.064307
467 | v 0.027035 0.018805 -0.062315
468 | v 0.023581 0.017552 -0.062026
469 | v 0.031711 0.004803 -0.020777
470 | v 0.033950 0.000441 -0.024427
471 | v 0.034929 0.000804 -0.023458
472 | v 0.033036 0.004595 -0.019509
473 | v 0.040780 0.013903 -0.019983
474 | v 0.035820 0.012670 -0.019568
475 | v 0.038014 0.011712 -0.015544
476 | v 0.043492 0.012419 -0.014542
477 | v 0.044840 0.012370 -0.024081
478 | v 0.049633 0.010625 -0.017337
479 | v 0.044406 0.008587 -0.010088
480 | v 0.038541 0.008959 -0.012147
481 | v 0.037075 -0.004516 -0.004991
482 | v 0.043382 -0.005243 -0.003342
483 | v 0.032912 0.009054 -0.019685
484 | v 0.034336 0.008214 -0.017186
485 | v 0.042757 -0.002591 -0.027142
486 | v 0.038310 -0.001650 -0.026296
487 | v 0.038803 -0.001195 -0.026882
488 | v 0.042970 -0.001249 -0.027954
489 | v 0.031189 -0.009739 -0.012558
490 | v 0.034318 -0.013640 -0.016001
491 | v 0.032738 -0.006221 -0.008982
492 | v 0.048676 -0.000966 -0.024896
493 | v 0.052671 0.002534 -0.019359
494 | v 0.050451 -0.011946 -0.013701
495 | v 0.047099 -0.014232 -0.017928
496 | v 0.034814 0.006731 -0.015048
497 | v 0.033860 0.000080 -0.023033
498 | v 0.032395 0.003471 -0.018724
499 | v 0.049751 0.005648 -0.012876
500 | v 0.050877 0.006268 -0.023646
501 | v 0.038306 0.006850 -0.041322
502 | v 0.046631 0.002404 -0.029558
503 | v 0.042217 -0.001612 -0.029107
504 | v 0.035888 0.003036 -0.041307
505 | v 0.048039 0.001492 -0.027348
506 | v 0.048029 0.007550 -0.027408
507 | v 0.024555 0.005933 -0.033437
508 | v 0.027733 0.002042 -0.035931
509 | v 0.025711 0.009931 -0.031640
510 | v 0.049123 -0.008291 -0.006381
511 | v 0.040933 -0.015546 -0.019514
512 | v 0.030788 0.015459 -0.033132
513 | v 0.027412 0.012955 -0.031746
514 | v 0.038170 -0.001827 -0.027364
515 | v 0.032159 0.000790 -0.039392
516 | v 0.039047 0.011129 -0.039124
517 | v 0.036480 0.015113 -0.035271
518 | v 0.035999 0.011491 -0.043539
519 | v 0.033159 0.015354 -0.039850
520 | v 0.027825 0.016016 -0.037631
521 | v 0.024755 0.013302 -0.036154
522 | v 0.023316 0.010136 -0.035852
523 | v 0.035418 0.007370 -0.045603
524 | v 0.033315 0.004154 -0.045367
525 | v 0.030880 0.001741 -0.041068
526 | v 0.032701 0.007340 -0.049280
527 | v 0.031768 0.012609 -0.049385
528 | v 0.015678 0.015697 -0.053434
529 | v 0.018932 0.015538 -0.048644
530 | v 0.019882 0.015724 -0.049462
531 | v 0.016938 0.016360 -0.054443
532 | v 0.024071 0.016617 -0.044181
533 | v 0.023758 0.016443 -0.046316
534 | v 0.027386 0.015882 -0.047639
535 | v 0.023781 0.015785 -0.053058
536 | v 0.027232 0.011202 -0.054784
537 | v 0.023912 0.011886 -0.058686
538 | v 0.020670 0.016296 -0.057532
539 | v 0.029751 0.013164 -0.050284
540 | v 0.016050 0.016158 -0.057557
541 | v 0.014148 0.016415 -0.057335
542 | v 0.029153 0.016106 -0.045657
543 | v 0.021507 0.014160 -0.040818
544 | v 0.016984 0.013849 -0.047198
545 | v 0.013421 0.013883 -0.052001
546 | v 0.012739 0.013155 -0.056314
547 | v 0.016265 0.010073 -0.046766
548 | v 0.013536 0.010684 -0.051552
549 | v 0.012881 0.010170 -0.055441
550 | v 0.016926 0.006882 -0.047212
551 | v 0.014696 0.007624 -0.051727
552 | v 0.022484 0.006468 -0.038606
553 | v 0.021171 0.010407 -0.039123
554 | v 0.013933 0.008203 -0.054819
555 | v 0.016913 0.014880 -0.060943
556 | v 0.015438 0.010907 -0.060661
557 | v 0.018105 0.015297 -0.059767
558 | v 0.028329 0.009701 -0.054724
559 | v 0.024143 0.009830 -0.059902
560 | v 0.028250 0.006940 -0.054391
561 | v 0.023905 0.007106 -0.059527
562 | v 0.020893 0.011670 -0.061957
563 | v 0.021038 0.012616 -0.061170
564 | v 0.024144 0.006193 -0.036063
565 | v 0.027091 0.002594 -0.038070
566 | v 0.025379 0.002981 -0.040441
567 | v 0.019364 0.003690 -0.049467
568 | v 0.029094 0.002023 -0.043323
569 | v 0.030655 0.004361 -0.048867
570 | v 0.026293 0.004565 -0.053412
571 | v 0.023114 0.003164 -0.052041
572 | v 0.022554 0.005111 -0.058118
573 | v 0.021052 0.004330 -0.055823
574 | v 0.016794 0.004666 -0.053526
575 | v 0.019225 0.006859 -0.059802
576 | v 0.020225 0.008957 -0.061670
577 | v 0.015053 0.008300 -0.058833
578 | v 0.018484 0.005486 -0.057798
579 | v 0.015383 0.006602 -0.056917
580 | v 0.037468 -0.015487 -0.018634
581 | v 0.062295 -0.012067 -0.007006
582 | v 0.065502 -0.012557 -0.009379
583 | v 0.064176 -0.015679 -0.012598
584 | v 0.060479 -0.014806 -0.011122
585 | v 0.060965 -0.020475 0.000147
586 | v 0.059785 -0.016376 -0.001435
587 | v 0.056974 -0.018904 -0.004434
588 | v 0.058087 -0.022891 -0.002515
589 | v 0.064820 -0.023380 -0.000064
590 | v 0.062338 -0.025906 -0.002909
591 | v 0.056725 -0.024849 -0.003728
592 | v 0.055094 -0.021170 -0.006564
593 | v 0.047624 -0.030023 -0.012474
594 | v 0.049381 -0.034825 -0.009316
595 | v 0.055360 -0.042209 -0.025065
596 | v 0.051692 -0.038602 -0.028102
597 | v 0.057032 -0.032772 -0.024025
598 | v 0.059304 -0.036596 -0.022075
599 | v 0.060388 -0.013524 -0.004073
600 | v 0.057813 -0.015586 -0.007833
601 | v 0.068227 -0.025781 -0.015145
602 | v 0.066052 -0.021807 -0.016534
603 | v 0.067456 -0.017823 -0.014033
604 | v 0.070311 -0.021361 -0.013203
605 | v 0.049033 -0.033968 -0.026929
606 | v 0.046409 -0.030864 -0.025240
607 | v 0.049472 -0.027400 -0.023492
608 | v 0.054161 -0.028956 -0.023548
609 | v 0.047965 -0.025234 -0.019201
610 | v 0.068442 -0.028428 -0.011891
611 | v 0.066010 -0.030234 -0.007259
612 | v 0.058320 -0.042562 -0.014440
613 | v 0.059051 -0.040735 -0.018919
614 | v 0.056031 -0.017823 -0.010216
615 | v 0.062774 -0.018821 -0.015586
616 | v 0.059326 -0.017304 -0.014177
617 | v 0.061339 -0.028462 -0.003416
618 | v 0.067596 -0.027030 -0.005978
619 | v 0.077581 -0.015209 -0.001755
620 | v 0.072482 -0.021837 -0.008146
621 | v 0.071977 -0.017541 -0.010477
622 | v 0.077162 -0.012591 -0.002868
623 | v 0.070410 -0.025082 -0.010402
624 | v 0.069901 -0.024230 -0.003487
625 | v 0.067759 -0.006409 0.000491
626 | v 0.071605 -0.007496 -0.002761
627 | v 0.066108 -0.007912 0.002700
628 | v 0.053753 -0.040176 -0.009014
629 | v 0.067333 -0.014262 0.006070
630 | v 0.065874 -0.010241 0.005204
631 | v 0.068910 -0.014318 -0.010585
632 | v 0.074918 -0.009931 -0.003504
633 | v 0.075942 -0.017603 0.001115
634 | v 0.071211 -0.017909 0.004921
635 | v 0.078693 -0.014941 0.004250
636 | v 0.073350 -0.015423 0.007430
637 | v 0.069377 -0.011259 0.008968
638 | v 0.067726 -0.008309 0.007335
639 | v 0.067791 -0.005951 0.005177
640 | v 0.080252 -0.012463 0.001256
641 | v 0.079420 -0.009902 -0.000287
642 | v 0.076735 -0.007471 -0.001515
643 | v 0.082430 -0.009709 0.004396
644 | v 0.081228 -0.011301 0.008832
645 | v 0.076944 -0.000695 0.016234
646 | v 0.074692 -0.003247 0.014142
647 | v 0.075459 -0.004195 0.014426
648 | v 0.077236 -0.001524 0.017007
649 | v 0.072804 -0.007750 0.012062
650 | v 0.074356 -0.007840 0.012644
651 | v 0.076564 -0.010356 0.011356
652 | v 0.078835 -0.006845 0.014482
653 | v 0.083593 -0.007098 0.012046
654 | v 0.084847 -0.004286 0.014758
655 | v 0.080630 -0.004540 0.016878
656 | v 0.080718 -0.010433 0.010727
657 | v 0.079154 -0.000673 0.018646
658 | v 0.079167 0.000659 0.019006
659 | v 0.075779 -0.011627 0.010686
660 | v 0.070030 -0.005463 0.009749
661 | v 0.073880 -0.001039 0.013457
662 | v 0.076964 0.001593 0.016321
663 | v 0.080159 0.002373 0.017946
664 | v 0.073557 0.000028 0.011057
665 | v 0.077174 0.003214 0.014629
666 | v 0.080387 0.003856 0.016077
667 | v 0.074637 0.001007 0.008062
668 | v 0.078088 0.003436 0.012098
669 | v 0.071038 -0.002996 0.003457
670 | v 0.069838 -0.003813 0.006824
671 | v 0.080396 0.003671 0.014237
672 | v 0.082741 -0.000666 0.019022
673 | v 0.083335 0.001755 0.017512
674 | v 0.082431 -0.002075 0.018800
675 | v 0.084409 -0.006925 0.010844
676 | v 0.085980 -0.003413 0.014022
677 | v 0.085029 -0.005610 0.008582
678 | v 0.086265 -0.001730 0.012637
679 | v 0.085577 -0.001383 0.016860
680 | v 0.084399 -0.002777 0.017027
681 | v 0.069732 -0.004343 0.002204
682 | v 0.072928 -0.005492 -0.000710
683 | v 0.074401 -0.003898 0.000894
684 | v 0.077588 0.000503 0.005563
685 | v 0.078327 -0.005757 0.000215
686 | v 0.081155 -0.007951 0.001891
687 | v 0.083976 -0.003653 0.006719
688 | v 0.081658 -0.001340 0.005426
689 | v 0.085517 0.000082 0.011502
690 | v 0.083371 0.001792 0.010045
691 | v 0.080455 0.003116 0.010066
692 | v 0.084670 0.001894 0.014615
693 | v 0.085499 0.000698 0.015849
694 | v 0.083026 0.002861 0.015732
695 | v 0.083557 0.002792 0.013212
696 | v 0.082136 0.003370 0.013552
697 | v 0.056973 -0.042972 -0.021276
698 | v -0.020897 -0.002894 -0.024196
699 | v -0.016125 -0.006170 -0.023648
700 | v -0.012408 -0.010952 -0.038978
701 | v -0.009839 -0.010504 -0.032227
702 | v -0.017602 -0.008498 -0.042804
703 | v -0.026306 -0.003279 -0.039824
704 | v -0.023057 -0.006614 -0.042448
705 | v -0.011494 -0.008944 -0.026822
706 | v -0.025292 -0.001145 -0.027329
707 | v -0.028064 -0.001111 -0.033916
708 | v -0.019837 0.008720 -0.028009
709 | v -0.021754 0.008557 -0.035923
710 | v -0.019035 0.006296 -0.042304
711 | v -0.015591 0.000520 -0.044140
712 | v -0.012066 -0.002956 -0.044014
713 | v -0.015893 0.003265 -0.024266
714 | v -0.011766 -0.000886 -0.023646
715 | v -0.008070 -0.005246 -0.027340
716 | v -0.009080 0.007112 -0.023891
717 | v -0.003346 0.012872 -0.029131
718 | v 0.008424 0.012785 -0.044392
719 | v 0.002473 0.009859 -0.044564
720 | v 0.002160 0.011977 -0.042672
721 | v 0.007391 0.014800 -0.043017
722 | v -0.002960 0.010240 -0.043210
723 | v -0.001808 0.011326 -0.041442
724 | v -0.003586 0.012540 -0.036335
725 | v 0.001505 0.014942 -0.036902
726 | v -0.001933 0.012872 -0.031504
727 | v 0.002053 0.013647 -0.030838
728 | v 0.006480 0.017132 -0.037148
729 | v 0.009886 0.017379 -0.041656
730 | v 0.010708 0.018665 -0.037444
731 | v 0.012211 0.015529 -0.042304
732 | v -0.005688 0.015236 -0.036390
733 | v -0.006632 0.004870 -0.045297
734 | v 0.002198 0.006340 -0.045442
735 | v 0.009412 0.009324 -0.044331
736 | v 0.013633 0.012795 -0.041650
737 | v 0.003185 0.002468 -0.044112
738 | v 0.009540 0.005576 -0.042222
739 | v 0.013200 0.008808 -0.039087
740 | v 0.004153 -0.000188 -0.040279
741 | v 0.009432 0.003741 -0.038828
742 | v -0.006300 -0.005063 -0.039610
743 | v -0.007294 -0.001174 -0.044211
744 | v 0.011965 0.006909 -0.037525
745 | v 0.013246 0.018376 -0.037096
746 | v 0.014706 0.015322 -0.036205
747 | v 0.001878 0.012338 -0.027790
748 | v 0.006439 0.016387 -0.030927
749 | v 0.007343 0.014565 -0.028882
750 | v 0.001549 0.009137 -0.025497
751 | v 0.008184 0.012154 -0.027509
752 | v 0.011299 0.017132 -0.031593
753 | v 0.009356 0.018096 -0.033187
754 | v -0.008945 -0.006876 -0.040088
755 | v -0.007696 -0.007742 -0.033589
756 | v -0.005746 -0.005623 -0.032785
757 | v 0.003807 -0.000984 -0.033817
758 | v -0.005350 -0.003065 -0.026626
759 | v -0.007215 0.002286 -0.023497
760 | v 0.002569 0.005493 -0.025178
761 | v 0.003227 0.001420 -0.027972
762 | v 0.008905 0.008720 -0.028119
763 | v 0.008999 0.006264 -0.029937
764 | v 0.009730 0.003924 -0.034351
765 | v 0.012388 0.011024 -0.031112
766 | v 0.012322 0.014350 -0.030550
767 | v 0.014033 0.011014 -0.035106
768 | v 0.011353 0.008510 -0.031760
769 | v 0.012242 0.007978 -0.034656
770 | v 0.047274 -0.043146 -0.031191
771 | v 0.043148 -0.038237 -0.030409
772 | v 0.039410 -0.044149 -0.033209
773 | v 0.033299 -0.052297 -0.035749
774 | v 0.032717 -0.063157 -0.036862
775 | v 0.027956 -0.048828 -0.033113
776 | v 0.034305 -0.040812 -0.030794
777 | v 0.039039 -0.032822 -0.026901
778 | v 0.018246 -0.052615 -0.031002
779 | v 0.006830 -0.122918 -0.017047
780 | v 0.000490 -0.120157 -0.024493
781 | v -0.045556 -0.099100 -0.015428
782 | v -0.045210 -0.100193 -0.005424
783 | v -0.004449 -0.118249 -0.028984
784 | v -0.026199 -0.109099 -0.031898
785 | v -0.034531 -0.106667 -0.028299
786 | v -0.017898 -0.112825 -0.032748
787 | v -0.009838 -0.116153 -0.031617
788 | v -0.039663 -0.101371 0.003432
789 | v -0.032130 -0.104391 0.008664
790 | v -0.040661 -0.102085 -0.023231
791 | v 0.008804 -0.121381 -0.001730
792 | v 0.000746 -0.119370 0.004764
793 | v -0.009909 -0.114302 0.008561
794 | v -0.021241 -0.109359 0.010233
795 | v 0.000947 -0.132126 -0.016954
796 | v -0.005124 -0.130108 -0.026454
797 | v -0.049077 -0.109549 -0.014754
798 | v -0.048635 -0.110201 -0.006125
799 | v -0.009658 -0.128279 -0.030158
800 | v -0.029885 -0.119087 -0.030852
801 | v -0.037520 -0.115288 -0.027303
802 | v -0.022195 -0.122521 -0.031249
803 | v -0.015258 -0.125926 -0.031773
804 | v -0.042696 -0.111614 0.003367
805 | v -0.036314 -0.114249 0.008794
806 | v -0.044657 -0.111504 -0.022122
807 | v 0.002626 -0.130117 -0.005383
808 | v -0.004515 -0.128410 0.006424
809 | v -0.014316 -0.123974 0.009786
810 | v -0.025858 -0.118961 0.011263
811 | v -0.004802 -0.143206 -0.016587
812 | v -0.010189 -0.141352 -0.025256
813 | v -0.052673 -0.120494 -0.014591
814 | v -0.052170 -0.120704 -0.005697
815 | v -0.014880 -0.139198 -0.028421
816 | v -0.034462 -0.130389 -0.029251
817 | v -0.042000 -0.126598 -0.026168
818 | v -0.027219 -0.133839 -0.029401
819 | v -0.020498 -0.137042 -0.029385
820 | v -0.046400 -0.123541 0.003323
821 | v -0.041248 -0.125745 0.009001
822 | v -0.049047 -0.122911 -0.020733
823 | v -0.001783 -0.142601 -0.006169
824 | v -0.008953 -0.140075 0.007424
825 | v -0.019543 -0.135278 0.010951
826 | v -0.031254 -0.130182 0.012482
827 | v -0.011958 -0.156932 -0.016123
828 | v -0.017722 -0.154797 -0.023589
829 | v -0.057104 -0.134342 -0.014643
830 | v -0.057317 -0.136026 -0.004712
831 | v -0.021934 -0.152746 -0.025683
832 | v -0.040557 -0.143199 -0.027130
833 | v -0.047279 -0.139986 -0.024612
834 | v -0.033488 -0.147838 -0.027044
835 | v -0.026943 -0.150867 -0.026532
836 | v -0.051316 -0.138233 0.003249
837 | v -0.047421 -0.139968 0.009269
838 | v -0.054238 -0.135479 -0.020300
839 | v -0.007987 -0.156793 -0.006566
840 | v -0.014764 -0.154438 0.008256
841 | v -0.026096 -0.149292 0.012067
842 | v -0.037943 -0.144103 0.013920
843 | f 2 3 1
844 | f 1 3 4
845 | f 5 6 8
846 | f 8 6 7
847 | f 9 4 10
848 | f 10 4 3
849 | f 12 13 11
850 | f 11 13 9
851 | f 14 12 16
852 | f 16 12 15
853 | f 18 19 17
854 | f 17 19 20
855 | f 22 23 21
856 | f 21 23 24
857 | f 26 27 25
858 | f 25 27 28
859 | f 30 31 29
860 | f 29 31 32
861 | f 33 34 36
862 | f 36 34 35
863 | f 37 38 40
864 | f 40 38 39
865 | f 41 42 6
866 | f 6 42 43
867 | f 6 43 7
868 | f 7 43 44
869 | f 45 34 46
870 | f 46 34 33
871 | f 48 49 47
872 | f 47 49 50
873 | f 52 53 51
874 | f 51 53 54
875 | f 56 52 55
876 | f 55 52 51
877 | f 58 59 57
878 | f 57 59 60
879 | f 61 62 4
880 | f 4 62 1
881 | f 64 65 63
882 | f 63 65 66
883 | f 68 69 67
884 | f 67 69 70
885 | f 72 73 71
886 | f 71 73 74
887 | f 76 77 75
888 | f 75 77 78
889 | f 53 79 54
890 | f 54 79 80
891 | f 82 83 81
892 | f 81 83 84
893 | f 85 86 52
894 | f 52 86 53
895 | f 57 60 87
896 | f 87 60 88
897 | f 89 90 43
898 | f 43 90 44
899 | f 43 42 89
900 | f 89 42 91
901 | f 40 39 92
902 | f 92 39 93
903 | f 94 70 64
904 | f 64 70 69
905 | f 13 61 9
906 | f 9 61 4
907 | f 25 28 69
908 | f 69 28 95
909 | f 96 25 68
910 | f 68 25 69
911 | f 98 99 97
912 | f 97 99 100
913 | f 101 102 104
914 | f 104 102 103
915 | f 106 30 105
916 | f 105 30 29
917 | f 51 54 107
918 | f 107 54 108
919 | f 54 80 108
920 | f 108 80 109
921 | f 111 112 110
922 | f 110 112 113
923 | f 115 5 114
924 | f 114 5 116
925 | f 115 41 5
926 | f 5 41 6
927 | f 36 35 111
928 | f 111 35 117
929 | f 35 118 117
930 | f 117 118 119
931 | f 34 120 35
932 | f 35 120 118
933 | f 34 45 120
934 | f 120 45 121
935 | f 125 126 124
936 | f 124 126 127
937 | f 128 129 14
938 | f 14 129 130
939 | f 33 131 46
940 | f 46 131 132
941 | f 133 94 63
942 | f 63 94 64
943 | f 134 135 137
944 | f 137 135 136
945 | f 139 140 138
946 | f 138 140 141
947 | f 77 142 78
948 | f 78 142 143
949 | f 144 145 20
950 | f 20 145 146
951 | f 148 149 147
952 | f 147 149 72
953 | f 151 152 150
954 | f 150 152 153
955 | f 82 101 83
956 | f 83 101 104
957 | f 101 82 155
958 | f 155 82 154
959 | f 156 157 58
960 | f 58 157 59
961 | f 158 68 147
962 | f 147 68 67
963 | f 159 146 15
964 | f 15 146 16
965 | f 68 158 96
966 | f 96 158 160
967 | f 129 66 130
968 | f 130 66 65
969 | f 162 163 161
970 | f 161 163 164
971 | f 166 167 165
972 | f 165 167 168
973 | f 170 171 169
974 | f 169 171 172
975 | f 174 171 173
976 | f 173 171 170
977 | f 176 177 175
978 | f 175 177 178
979 | f 180 181 179
980 | f 179 181 24
981 | f 182 183 21
982 | f 21 183 184
983 | f 81 185 82
984 | f 82 185 154
985 | f 186 187 188
986 | f 188 187 144
987 | f 158 71 160
988 | f 160 71 189
989 | f 190 135 157
990 | f 157 135 59
991 | f 169 172 139
992 | f 139 172 140
993 | f 147 72 158
994 | f 158 72 71
995 | f 192 193 191
996 | f 191 193 194
997 | f 167 166 49
998 | f 49 166 195
999 | f 196 190 50
1000 | f 50 190 157
1001 | f 197 198 200
1002 | f 200 198 199
1003 | f 21 24 182
1004 | f 182 24 181
1005 | f 146 159 20
1006 | f 20 159 17
1007 | f 182 181 81
1008 | f 81 181 185
1009 | f 201 185 180
1010 | f 180 185 181
1011 | f 202 203 83
1012 | f 83 203 84
1013 | f 205 206 204
1014 | f 204 206 85
1015 | f 207 22 184
1016 | f 184 22 21
1017 | f 179 24 208
1018 | f 208 24 23
1019 | f 208 209 179
1020 | f 179 209 205
1021 | f 209 210 205
1022 | f 205 210 206
1023 | f 212 206 211
1024 | f 211 206 210
1025 | f 179 205 180
1026 | f 180 205 204
1027 | f 213 176 88
1028 | f 88 176 214
1029 | f 86 85 212
1030 | f 212 85 206
1031 | f 193 217 218
1032 | f 217 219 218
1033 | f 218 219 220
1034 | f 219 18 220
1035 | f 220 18 17
1036 | f 19 221 20
1037 | f 20 221 144
1038 | f 222 223 225
1039 | f 225 223 224
1040 | f 175 226 176
1041 | f 176 226 214
1042 | f 227 87 214
1043 | f 214 87 88
1044 | f 220 17 228
1045 | f 228 17 159
1046 | f 134 213 60
1047 | f 60 213 88
1048 | f 138 229 151
1049 | f 151 229 152
1050 | f 193 192 210
1051 | f 210 192 211
1052 | f 230 194 218
1053 | f 218 194 193
1054 | f 231 232 228
1055 | f 228 232 230
1056 | f 233 234 91
1057 | f 91 234 89
1058 | f 235 236 93
1059 | f 93 236 92
1060 | f 230 232 234
1061 | f 234 232 237
1062 | f 47 238 48
1063 | f 48 238 239
1064 | f 240 191 235
1065 | f 235 191 236
1066 | f 5 8 116
1067 | f 116 8 241
1068 | f 242 243 189
1069 | f 189 243 160
1070 | f 244 245 98
1071 | f 98 245 99
1072 | f 156 246 47
1073 | f 47 246 238
1074 | f 76 247 77
1075 | f 77 247 248
1076 | f 250 125 249
1077 | f 249 125 124
1078 | f 252 253 251
1079 | f 251 253 254
1080 | f 111 110 36
1081 | f 36 110 255
1082 | f 256 33 255
1083 | f 255 33 36
1084 | f 257 258 154
1085 | f 154 258 155
1086 | f 63 169 133
1087 | f 133 169 139
1088 | f 66 170 63
1089 | f 63 170 169
1090 | f 173 170 129
1091 | f 129 170 66
1092 | f 129 128 173
1093 | f 173 128 259
1094 | f 128 260 259
1095 | f 259 260 261
1096 | f 213 134 262
1097 | f 262 134 137
1098 | f 263 264 186
1099 | f 186 264 187
1100 | f 112 265 113
1101 | f 113 265 266
1102 | f 11 267 268
1103 | f 234 233 230
1104 | f 230 233 194
1105 | f 153 269 70
1106 | f 70 269 67
1107 | f 175 178 165
1108 | f 165 178 141
1109 | f 150 153 94
1110 | f 94 153 70
1111 | f 271 221 270
1112 | f 270 221 207
1113 | f 269 153 272
1114 | f 272 153 152
1115 | f 121 45 109
1116 | f 109 45 108
1117 | f 47 50 156
1118 | f 156 50 157
1119 | f 57 273 58
1120 | f 58 273 274
1121 | f 145 275 260
1122 | f 260 275 261
1123 | f 276 149 143
1124 | f 143 149 78
1125 | f 148 269 75
1126 | f 75 269 272
1127 | f 207 277 270
1128 | f 270 277 278
1129 | f 195 196 49
1130 | f 49 196 50
1131 | f 59 135 60
1132 | f 60 135 134
1133 | f 83 104 202
1134 | f 202 104 279
1135 | f 149 148 78
1136 | f 78 148 75
1137 | f 247 278 248
1138 | f 248 278 277
1139 | f 183 182 84
1140 | f 84 182 81
1141 | f 48 239 281
1142 | f 281 239 282
1143 | f 227 223 283
1144 | f 283 223 222
1145 | f 223 226 224
1146 | f 224 226 168
1147 | f 281 282 224
1148 | f 224 282 225
1149 | f 165 168 175
1150 | f 175 168 226
1151 | f 264 263 138
1152 | f 138 263 229
1153 | f 14 16 128
1154 | f 128 16 260
1155 | f 220 228 218
1156 | f 218 228 230
1157 | f 87 227 284
1158 | f 284 227 283
1159 | f 260 16 145
1160 | f 145 16 146
1161 | f 244 98 258
1162 | f 258 98 155
1163 | f 58 274 156
1164 | f 156 274 246
1165 | f 107 285 51
1166 | f 51 285 55
1167 | f 243 286 160
1168 | f 160 286 96
1169 | f 234 237 89
1170 | f 89 237 90
1171 | f 46 132 107
1172 | f 107 132 285
1173 | f 253 252 249
1174 | f 249 252 250
1175 | f 286 26 96
1176 | f 96 26 25
1177 | f 106 105 287
1178 | f 287 105 288
1179 | f 272 152 289
1180 | f 289 152 229
1181 | f 138 151 139
1182 | f 139 151 133
1183 | f 183 290 161
1184 | f 161 290 162
1185 | f 291 292 277
1186 | f 277 292 293
1187 | f 151 150 133
1188 | f 133 150 94
1189 | f 262 275 177
1190 | f 177 275 187
1191 | f 144 187 145
1192 | f 145 187 275
1193 | f 273 57 284
1194 | f 284 57 87
1195 | f 174 136 190
1196 | f 190 136 135
1197 | f 111 117 112
1198 | f 112 117 38
1199 | f 117 119 38
1200 | f 38 119 123
1201 | f 112 38 265
1202 | f 265 38 37
1203 | f 287 288 254
1204 | f 254 288 251
1205 | f 228 159 231
1206 | f 231 159 15
1207 | f 223 227 226
1208 | f 226 227 214
1209 | f 147 67 148
1210 | f 148 67 269
1211 | f 224 168 281
1212 | f 281 168 167
1213 | f 199 163 200
1214 | f 200 163 162
1215 | f 45 46 108
1216 | f 108 46 107
1217 | f 281 167 48
1218 | f 48 167 49
1219 | f 161 164 291
1220 | f 291 164 292
1221 | f 38 123 39
1222 | f 293 294 277
1223 | f 277 294 248
1224 | f 284 283 296
1225 | f 296 283 295
1226 | f 284 296 273
1227 | f 273 296 297
1228 | f 274 273 298
1229 | f 298 273 297
1230 | f 246 274 299
1231 | f 299 274 298
1232 | f 283 222 295
1233 | f 295 222 300
1234 | f 222 225 300
1235 | f 300 225 301
1236 | f 301 225 302
1237 | f 302 225 282
1238 | f 303 304 300
1239 | f 300 304 295
1240 | f 306 307 305
1241 | f 305 307 308
1242 | f 309 310 308
1243 | f 308 310 305
1244 | f 308 307 312
1245 | f 312 307 311
1246 | f 313 314 312
1247 | f 312 314 315
1248 | f 311 316 312
1249 | f 312 316 313
1250 | f 317 304 311
1251 | f 311 304 316
1252 | f 315 309 312
1253 | f 312 309 308
1254 | f 318 319 321
1255 | f 321 319 320
1256 | f 319 318 309
1257 | f 309 318 310
1258 | f 311 307 317
1259 | f 317 307 306
1260 | f 306 297 317
1261 | f 317 297 296
1262 | f 306 322 297
1263 | f 297 322 298
1264 | f 305 323 306
1265 | f 306 323 322
1266 | f 323 305 324
1267 | f 324 305 310
1268 | f 304 317 295
1269 | f 295 317 296
1270 | f 318 325 310
1271 | f 310 325 324
1272 | f 323 324 326
1273 | f 326 324 327
1274 | f 324 325 327
1275 | f 327 325 328
1276 | f 326 327 329
1277 | f 329 327 330
1278 | f 329 331 326
1279 | f 326 331 332
1280 | f 333 330 328
1281 | f 328 330 327
1282 | f 323 326 322
1283 | f 322 326 332
1284 | f 322 332 298
1285 | f 298 332 299
1286 | f 325 318 334
1287 | f 334 318 321
1288 | f 313 316 335
1289 | f 335 316 304
1290 | f 335 336 313
1291 | f 313 336 314
1292 | f 304 303 335
1293 | f 335 303 337
1294 | f 336 335 338
1295 | f 338 335 337
1296 | f 336 339 314
1297 | f 314 339 340
1298 | f 246 299 238
1299 | f 238 299 341
1300 | f 239 342 282
1301 | f 282 342 302
1302 | f 342 239 341
1303 | f 341 239 238
1304 | f 341 331 342
1305 | f 342 331 343
1306 | f 331 329 343
1307 | f 343 329 344
1308 | f 342 343 302
1309 | f 302 343 345
1310 | f 341 299 331
1311 | f 331 299 332
1312 | f 346 301 345
1313 | f 345 301 302
1314 | f 346 303 301
1315 | f 301 303 300
1316 | f 347 337 346
1317 | f 346 337 303
1318 | f 347 346 348
1319 | f 348 346 345
1320 | f 349 347 350
1321 | f 350 347 348
1322 | f 343 344 345
1323 | f 345 344 348
1324 | f 344 351 348
1325 | f 348 351 350
1326 | f 337 347 338
1327 | f 338 347 349
1328 | f 353 338 352
1329 | f 352 338 349
1330 | f 352 355 354
1331 | f 354 355 356
1332 | f 352 349 355
1333 | f 355 349 350
1334 | f 352 354 353
1335 | f 353 354 334
1336 | f 353 334 339
1337 | f 339 334 321
1338 | f 339 336 353
1339 | f 353 336 338
1340 | f 329 330 344
1341 | f 344 330 351
1342 | f 356 351 333
1343 | f 333 351 330
1344 | f 356 355 351
1345 | f 351 355 350
1346 | f 354 356 328
1347 | f 328 356 333
1348 | f 328 325 354
1349 | f 354 325 334
1350 | f 309 315 319
1351 | f 319 315 320
1352 | f 320 340 321
1353 | f 321 340 339
1354 | f 320 315 340
1355 | f 340 315 314
1356 | f 137 136 261
1357 | f 261 136 259
1358 | f 140 166 141
1359 | f 141 166 165
1360 | f 172 171 195
1361 | f 195 171 196
1362 | f 171 174 196
1363 | f 196 174 190
1364 | f 178 177 264
1365 | f 264 177 187
1366 | f 172 195 140
1367 | f 140 195 166
1368 | f 137 261 262
1369 | f 262 261 275
1370 | f 178 264 141
1371 | f 141 264 138
1372 | f 262 177 213
1373 | f 213 177 176
1374 | f 136 174 259
1375 | f 259 174 173
1376 | f 358 359 357
1377 | f 357 359 360
1378 | f 362 363 361
1379 | f 361 363 364
1380 | f 361 364 365
1381 | f 365 364 366
1382 | f 367 368 370
1383 | f 370 368 369
1384 | f 76 371 247
1385 | f 247 371 372
1386 | f 373 374 362
1387 | f 362 374 363
1388 | f 376 377 375
1389 | f 375 377 378
1390 | f 289 229 380
1391 | f 380 229 379
1392 | f 381 379 263
1393 | f 263 379 229
1394 | f 383 384 382
1395 | f 382 384 385
1396 | f 386 368 374
1397 | f 374 368 363
1398 | f 76 289 371
1399 | f 371 289 380
1400 | f 377 376 359
1401 | f 359 376 387
1402 | f 388 386 360
1403 | f 360 386 374
1404 | f 389 383 366
1405 | f 366 383 390
1406 | f 392 393 391
1407 | f 391 393 394
1408 | f 383 382 390
1409 | f 390 382 395
1410 | f 396 365 390
1411 | f 390 365 366
1412 | f 389 366 367
1413 | f 367 366 364
1414 | f 357 397 358
1415 | f 358 397 398
1416 | f 373 399 357
1417 | f 357 399 397
1418 | f 389 367 400
1419 | f 400 367 370
1420 | f 382 385 375
1421 | f 375 385 372
1422 | f 357 360 373
1423 | f 373 360 374
1424 | f 361 401 362
1425 | f 362 401 402
1426 | f 387 388 359
1427 | f 359 388 360
1428 | f 363 368 364
1429 | f 364 368 367
1430 | f 358 398 403
1431 | f 403 398 404
1432 | f 396 392 405
1433 | f 405 392 391
1434 | f 392 395 393
1435 | f 393 395 378
1436 | f 393 403 394
1437 | f 394 403 404
1438 | f 375 378 382
1439 | f 382 378 395
1440 | f 365 396 406
1441 | f 406 396 405
1442 | f 362 402 373
1443 | f 373 402 399
1444 | f 271 270 400
1445 | f 400 270 384
1446 | f 401 361 406
1447 | f 406 361 365
1448 | f 381 369 386
1449 | f 386 369 368
1450 | f 392 396 395
1451 | f 395 396 390
1452 | f 393 378 403
1453 | f 403 378 377
1454 | f 403 377 358
1455 | f 358 377 359
1456 | f 406 405 408
1457 | f 408 405 407
1458 | f 406 408 401
1459 | f 401 408 409
1460 | f 401 409 402
1461 | f 402 409 410
1462 | f 402 410 399
1463 | f 399 410 411
1464 | f 405 391 407
1465 | f 407 391 412
1466 | f 391 394 412
1467 | f 412 394 413
1468 | f 394 404 413
1469 | f 413 404 414
1470 | f 415 416 412
1471 | f 412 416 407
1472 | f 418 419 417
1473 | f 417 419 420
1474 | f 421 422 418
1475 | f 418 422 419
1476 | f 419 422 424
1477 | f 424 422 423
1478 | f 425 426 424
1479 | f 424 426 427
1480 | f 423 428 424
1481 | f 424 428 425
1482 | f 427 420 424
1483 | f 424 420 419
1484 | f 420 429 417
1485 | f 417 429 430
1486 | f 423 422 431
1487 | f 431 422 421
1488 | f 421 409 431
1489 | f 431 409 408
1490 | f 421 432 409
1491 | f 409 432 410
1492 | f 418 433 421
1493 | f 421 433 432
1494 | f 434 433 417
1495 | f 417 433 418
1496 | f 431 416 423
1497 | f 423 416 428
1498 | f 416 431 407
1499 | f 407 431 408
1500 | f 435 434 430
1501 | f 430 434 417
1502 | f 433 434 436
1503 | f 436 434 437
1504 | f 434 435 437
1505 | f 437 435 438
1506 | f 436 437 439
1507 | f 439 437 440
1508 | f 439 441 436
1509 | f 436 441 442
1510 | f 438 443 437
1511 | f 437 443 440
1512 | f 436 442 433
1513 | f 433 442 432
1514 | f 432 442 410
1515 | f 410 442 411
1516 | f 435 430 445
1517 | f 445 430 444
1518 | f 430 429 444
1519 | f 444 429 446
1520 | f 425 428 447
1521 | f 447 428 416
1522 | f 426 425 448
1523 | f 448 425 447
1524 | f 416 415 447
1525 | f 447 415 449
1526 | f 447 449 448
1527 | f 448 449 450
1528 | f 451 452 448
1529 | f 448 452 426
1530 | f 446 452 444
1531 | f 444 452 451
1532 | f 399 411 397
1533 | f 397 411 453
1534 | f 398 454 404
1535 | f 404 454 414
1536 | f 454 398 453
1537 | f 453 398 397
1538 | f 453 441 454
1539 | f 454 441 455
1540 | f 441 439 455
1541 | f 455 439 456
1542 | f 454 455 414
1543 | f 414 455 457
1544 | f 441 453 442
1545 | f 442 453 411
1546 | f 458 413 457
1547 | f 457 413 414
1548 | f 458 415 413
1549 | f 413 415 412
1550 | f 458 459 415
1551 | f 415 459 449
1552 | f 459 458 460
1553 | f 460 458 457
1554 | f 461 459 462
1555 | f 462 459 460
1556 | f 455 456 457
1557 | f 457 456 460
1558 | f 456 463 460
1559 | f 460 463 462
1560 | f 459 461 449
1561 | f 449 461 450
1562 | f 465 450 464
1563 | f 464 450 461
1564 | f 464 467 466
1565 | f 466 467 468
1566 | f 464 461 467
1567 | f 467 461 462
1568 | f 464 466 465
1569 | f 465 466 445
1570 | f 465 445 451
1571 | f 451 445 444
1572 | f 451 448 465
1573 | f 465 448 450
1574 | f 439 440 456
1575 | f 456 440 463
1576 | f 468 463 443
1577 | f 443 463 440
1578 | f 468 467 463
1579 | f 463 467 462
1580 | f 466 468 438
1581 | f 438 468 443
1582 | f 438 435 466
1583 | f 466 435 445
1584 | f 420 427 429
1585 | f 429 427 446
1586 | f 446 427 452
1587 | f 452 427 426
1588 | f 370 369 188
1589 | f 188 369 186
1590 | f 371 376 372
1591 | f 372 376 375
1592 | f 380 379 387
1593 | f 387 379 388
1594 | f 379 381 388
1595 | f 388 381 386
1596 | f 385 384 278
1597 | f 278 384 270
1598 | f 380 387 371
1599 | f 371 387 376
1600 | f 400 370 271
1601 | f 271 370 188
1602 | f 385 278 372
1603 | f 372 278 247
1604 | f 400 384 389
1605 | f 389 384 383
1606 | f 369 381 186
1607 | f 186 381 263
1608 | f 221 271 144
1609 | f 144 271 188
1610 | f 221 19 207
1611 | f 207 19 22
1612 | f 19 18 22
1613 | f 22 18 23
1614 | f 18 219 23
1615 | f 23 219 208
1616 | f 469 470 472
1617 | f 472 470 471
1618 | f 474 475 473
1619 | f 473 475 476
1620 | f 473 476 477
1621 | f 477 476 478
1622 | f 480 481 479
1623 | f 479 481 482
1624 | f 483 484 474
1625 | f 474 484 475
1626 | f 486 487 485
1627 | f 485 487 488
1628 | f 489 490 77
1629 | f 77 490 142
1630 | f 491 489 248
1631 | f 248 489 77
1632 | f 493 494 492
1633 | f 492 494 495
1634 | f 496 480 484
1635 | f 484 480 475
1636 | f 486 497 487
1637 | f 487 497 471
1638 | f 498 496 472
1639 | f 472 496 484
1640 | f 499 493 478
1641 | f 478 493 500
1642 | f 501 502 504
1643 | f 504 502 503
1644 | f 493 492 500
1645 | f 500 492 505
1646 | f 506 477 500
1647 | f 500 477 478
1648 | f 499 478 479
1649 | f 479 478 476
1650 | f 469 507 470
1651 | f 470 507 508
1652 | f 483 509 469
1653 | f 469 509 507
1654 | f 479 482 499
1655 | f 499 482 510
1656 | f 492 495 485
1657 | f 485 495 511
1658 | f 483 469 484
1659 | f 484 469 472
1660 | f 473 512 474
1661 | f 474 512 513
1662 | f 497 498 471
1663 | f 471 498 472
1664 | f 475 480 476
1665 | f 476 480 479
1666 | f 470 508 514
1667 | f 514 508 515
1668 | f 506 502 516
1669 | f 516 502 501
1670 | f 503 502 488
1671 | f 488 502 505
1672 | f 514 515 503
1673 | f 503 515 504
1674 | f 485 488 492
1675 | f 492 488 505
1676 | f 477 506 517
1677 | f 517 506 516
1678 | f 474 513 483
1679 | f 483 513 509
1680 | f 510 292 494
1681 | f 494 292 164
1682 | f 512 473 517
1683 | f 517 473 477
1684 | f 491 481 496
1685 | f 496 481 480
1686 | f 502 506 505
1687 | f 505 506 500
1688 | f 503 488 514
1689 | f 514 488 487
1690 | f 514 487 470
1691 | f 470 487 471
1692 | f 517 516 519
1693 | f 519 516 518
1694 | f 517 519 512
1695 | f 512 519 520
1696 | f 512 520 513
1697 | f 513 520 521
1698 | f 513 521 509
1699 | f 509 521 522
1700 | f 516 501 518
1701 | f 518 501 523
1702 | f 501 504 523
1703 | f 523 504 524
1704 | f 524 504 525
1705 | f 525 504 515
1706 | f 526 527 523
1707 | f 523 527 518
1708 | f 529 530 528
1709 | f 528 530 531
1710 | f 532 533 529
1711 | f 529 533 530
1712 | f 530 533 535
1713 | f 535 533 534
1714 | f 536 537 535
1715 | f 535 537 538
1716 | f 534 539 535
1717 | f 535 539 536
1718 | f 538 531 535
1719 | f 535 531 530
1720 | f 540 541 531
1721 | f 531 541 528
1722 | f 533 532 534
1723 | f 534 532 542
1724 | f 532 520 542
1725 | f 542 520 519
1726 | f 532 543 520
1727 | f 520 543 521
1728 | f 529 544 532
1729 | f 532 544 543
1730 | f 544 529 545
1731 | f 545 529 528
1732 | f 534 542 539
1733 | f 539 542 527
1734 | f 527 542 518
1735 | f 518 542 519
1736 | f 541 546 528
1737 | f 528 546 545
1738 | f 547 544 548
1739 | f 548 544 545
1740 | f 546 549 545
1741 | f 545 549 548
1742 | f 550 547 551
1743 | f 551 547 548
1744 | f 547 550 553
1745 | f 553 550 552
1746 | f 549 554 548
1747 | f 548 554 551
1748 | f 544 547 543
1749 | f 543 547 553
1750 | f 543 553 521
1751 | f 521 553 522
1752 | f 546 541 556
1753 | f 556 541 555
1754 | f 541 540 555
1755 | f 555 540 557
1756 | f 536 539 558
1757 | f 558 539 527
1758 | f 537 536 559
1759 | f 559 536 558
1760 | f 527 526 558
1761 | f 558 526 560
1762 | f 559 558 561
1763 | f 561 558 560
1764 | f 559 562 537
1765 | f 537 562 563
1766 | f 557 563 555
1767 | f 555 563 562
1768 | f 509 522 507
1769 | f 507 522 564
1770 | f 508 565 515
1771 | f 515 565 525
1772 | f 565 508 564
1773 | f 564 508 507
1774 | f 564 552 565
1775 | f 565 552 566
1776 | f 552 550 566
1777 | f 566 550 567
1778 | f 565 566 525
1779 | f 525 566 568
1780 | f 564 522 552
1781 | f 552 522 553
1782 | f 569 524 568
1783 | f 568 524 525
1784 | f 569 526 524
1785 | f 524 526 523
1786 | f 570 560 569
1787 | f 569 560 526
1788 | f 570 569 571
1789 | f 571 569 568
1790 | f 572 570 573
1791 | f 573 570 571
1792 | f 566 567 568
1793 | f 568 567 571
1794 | f 567 574 571
1795 | f 571 574 573
1796 | f 570 572 560
1797 | f 560 572 561
1798 | f 576 561 575
1799 | f 575 561 572
1800 | f 575 578 577
1801 | f 577 578 579
1802 | f 575 572 578
1803 | f 578 572 573
1804 | f 575 577 576
1805 | f 576 577 556
1806 | f 576 556 562
1807 | f 562 556 555
1808 | f 562 559 576
1809 | f 576 559 561
1810 | f 550 551 567
1811 | f 567 551 574
1812 | f 579 574 554
1813 | f 554 574 551
1814 | f 579 578 574
1815 | f 574 578 573
1816 | f 579 554 577
1817 | f 577 554 549
1818 | f 549 546 577
1819 | f 577 546 556
1820 | f 531 538 540
1821 | f 540 538 557
1822 | f 557 538 563
1823 | f 563 538 537
1824 | f 481 294 482
1825 | f 482 294 293
1826 | f 580 486 511
1827 | f 511 486 485
1828 | f 490 489 497
1829 | f 497 489 498
1830 | f 489 491 498
1831 | f 498 491 496
1832 | f 163 495 164
1833 | f 164 495 494
1834 | f 580 490 486
1835 | f 486 490 497
1836 | f 482 293 510
1837 | f 510 293 292
1838 | f 495 163 511
1839 | f 511 163 199
1840 | f 494 493 510
1841 | f 510 493 499
1842 | f 481 491 294
1843 | f 294 491 248
1844 | f 582 583 581
1845 | f 581 583 584
1846 | f 586 587 585
1847 | f 585 587 588
1848 | f 585 588 589
1849 | f 589 588 590
1850 | f 592 593 591
1851 | f 591 593 594
1852 | f 595 596 598
1853 | f 598 596 597
1854 | f 599 600 586
1855 | f 586 600 587
1856 | f 601 602 604
1857 | f 604 602 603
1858 | f 605 606 608
1859 | f 608 606 607
1860 | f 609 607 200
1861 | f 200 607 606
1862 | f 610 611 613
1863 | f 613 611 612
1864 | f 600 614 587
1865 | f 587 614 592
1866 | f 596 605 597
1867 | f 597 605 608
1868 | f 602 615 603
1869 | f 603 615 583
1870 | f 616 614 584
1871 | f 584 614 600
1872 | f 611 618 617
1873 | f 617 618 590
1874 | f 620 621 619
1875 | f 619 621 622
1876 | f 610 623 611
1877 | f 611 623 618
1878 | f 624 589 618
1879 | f 618 589 590
1880 | f 617 590 591
1881 | f 591 590 588
1882 | f 581 625 582
1883 | f 582 625 626
1884 | f 599 627 581
1885 | f 581 627 625
1886 | f 591 594 617
1887 | f 617 594 628
1888 | f 601 610 598
1889 | f 598 610 613
1890 | f 581 584 599
1891 | f 599 584 600
1892 | f 585 629 586
1893 | f 586 629 630
1894 | f 615 616 583
1895 | f 583 616 584
1896 | f 587 592 588
1897 | f 588 592 591
1898 | f 582 626 631
1899 | f 631 626 632
1900 | f 624 620 633
1901 | f 633 620 619
1902 | f 621 620 604
1903 | f 604 620 623
1904 | f 631 632 621
1905 | f 621 632 622
1906 | f 610 601 623
1907 | f 623 601 604
1908 | f 589 624 634
1909 | f 634 624 633
1910 | f 586 630 599
1911 | f 599 630 627
1912 | f 203 202 628
1913 | f 628 202 612
1914 | f 629 585 634
1915 | f 634 585 589
1916 | f 609 593 614
1917 | f 614 593 592
1918 | f 620 624 623
1919 | f 623 624 618
1920 | f 631 621 603
1921 | f 603 621 604
1922 | f 582 631 583
1923 | f 583 631 603
1924 | f 634 633 636
1925 | f 636 633 635
1926 | f 634 636 629
1927 | f 629 636 637
1928 | f 629 637 630
1929 | f 630 637 638
1930 | f 630 638 627
1931 | f 627 638 639
1932 | f 633 619 635
1933 | f 635 619 640
1934 | f 619 622 640
1935 | f 640 622 641
1936 | f 641 622 642
1937 | f 642 622 632
1938 | f 644 635 643
1939 | f 643 635 640
1940 | f 645 646 648
1941 | f 648 646 647
1942 | f 649 650 646
1943 | f 646 650 647
1944 | f 647 650 652
1945 | f 652 650 651
1946 | f 652 653 655
1947 | f 655 653 654
1948 | f 651 656 652
1949 | f 652 656 653
1950 | f 655 648 652
1951 | f 652 648 647
1952 | f 648 657 645
1953 | f 645 657 658
1954 | f 650 649 651
1955 | f 651 649 659
1956 | f 649 637 659
1957 | f 659 637 636
1958 | f 649 660 637
1959 | f 637 660 638
1960 | f 661 660 646
1961 | f 646 660 649
1962 | f 662 661 645
1963 | f 645 661 646
1964 | f 651 659 656
1965 | f 656 659 644
1966 | f 644 659 635
1967 | f 635 659 636
1968 | f 663 662 658
1969 | f 658 662 645
1970 | f 664 661 665
1971 | f 665 661 662
1972 | f 663 666 662
1973 | f 662 666 665
1974 | f 667 664 668
1975 | f 668 664 665
1976 | f 664 667 670
1977 | f 670 667 669
1978 | f 666 671 665
1979 | f 665 671 668
1980 | f 661 664 660
1981 | f 660 664 670
1982 | f 660 670 638
1983 | f 638 670 639
1984 | f 658 672 663
1985 | f 663 672 673
1986 | f 658 657 672
1987 | f 672 657 674
1988 | f 653 656 675
1989 | f 675 656 644
1990 | f 653 675 654
1991 | f 654 675 676
1992 | f 675 644 677
1993 | f 677 644 643
1994 | f 676 675 678
1995 | f 678 675 677
1996 | f 679 680 676
1997 | f 676 680 654
1998 | f 674 680 672
1999 | f 672 680 679
2000 | f 627 639 625
2001 | f 625 639 681
2002 | f 626 682 632
2003 | f 632 682 642
2004 | f 681 682 625
2005 | f 625 682 626
2006 | f 669 683 681
2007 | f 681 683 682
2008 | f 667 684 669
2009 | f 669 684 683
2010 | f 682 683 642
2011 | f 642 683 685
2012 | f 681 639 669
2013 | f 669 639 670
2014 | f 686 641 685
2015 | f 685 641 642
2016 | f 641 686 640
2017 | f 640 686 643
2018 | f 686 687 643
2019 | f 643 687 677
2020 | f 687 686 688
2021 | f 688 686 685
2022 | f 689 687 690
2023 | f 690 687 688
2024 | f 683 684 685
2025 | f 685 684 688
2026 | f 684 691 688
2027 | f 688 691 690
2028 | f 687 689 677
2029 | f 677 689 678
2030 | f 693 678 692
2031 | f 692 678 689
2032 | f 692 695 694
2033 | f 694 695 696
2034 | f 692 689 695
2035 | f 695 689 690
2036 | f 692 694 693
2037 | f 693 694 673
2038 | f 693 673 679
2039 | f 679 673 672
2040 | f 679 676 693
2041 | f 693 676 678
2042 | f 684 667 691
2043 | f 691 667 668
2044 | f 696 691 671
2045 | f 671 691 668
2046 | f 696 695 691
2047 | f 691 695 690
2048 | f 694 696 666
2049 | f 666 696 671
2050 | f 666 663 694
2051 | f 694 663 673
2052 | f 648 655 657
2053 | f 657 655 674
2054 | f 674 655 680
2055 | f 680 655 654
2056 | f 593 162 594
2057 | f 594 162 290
2058 | f 598 597 601
2059 | f 601 597 602
2060 | f 615 608 616
2061 | f 616 608 607
2062 | f 607 609 616
2063 | f 616 609 614
2064 | f 697 595 613
2065 | f 613 595 598
2066 | f 597 608 602
2067 | f 602 608 615
2068 | f 594 290 628
2069 | f 628 290 203
2070 | f 628 612 617
2071 | f 617 612 611
2072 | f 593 609 162
2073 | f 162 609 200
2074 | f 105 29 698
2075 | f 698 29 699
2076 | f 268 126 701
2077 | f 701 126 700
2078 | f 126 125 700
2079 | f 700 125 702
2080 | f 252 703 250
2081 | f 250 703 704
2082 | f 32 268 705
2083 | f 705 268 701
2084 | f 288 105 706
2085 | f 706 105 698
2086 | f 29 32 699
2087 | f 699 32 705
2088 | f 707 251 706
2089 | f 706 251 288
2090 | f 250 704 125
2091 | f 125 704 702
2092 | f 703 252 707
2093 | f 707 252 251
2094 | f 706 708 707
2095 | f 707 708 709
2096 | f 707 709 703
2097 | f 703 709 710
2098 | f 703 710 704
2099 | f 704 710 711
2100 | f 704 711 702
2101 | f 702 711 712
2102 | f 706 698 708
2103 | f 708 698 713
2104 | f 698 699 713
2105 | f 713 699 714
2106 | f 699 705 714
2107 | f 714 705 715
2108 | f 717 708 716
2109 | f 716 708 713
2110 | f 719 720 718
2111 | f 718 720 721
2112 | f 722 723 719
2113 | f 719 723 720
2114 | f 724 725 723
2115 | f 723 725 720
2116 | f 724 726 725
2117 | f 725 726 727
2118 | f 728 721 725
2119 | f 725 721 720
2120 | f 721 728 729
2121 | f 729 728 730
2122 | f 729 731 721
2123 | f 721 731 718
2124 | f 724 723 732
2125 | f 732 723 722
2126 | f 722 710 732
2127 | f 732 710 709
2128 | f 722 733 710
2129 | f 710 733 711
2130 | f 719 734 722
2131 | f 722 734 733
2132 | f 734 719 735
2133 | f 735 719 718
2134 | f 724 732 726
2135 | f 726 732 717
2136 | f 717 732 708
2137 | f 708 732 709
2138 | f 731 736 718
2139 | f 718 736 735
2140 | f 737 734 738
2141 | f 738 734 735
2142 | f 736 739 735
2143 | f 735 739 738
2144 | f 740 737 741
2145 | f 741 737 738
2146 | f 737 740 743
2147 | f 743 740 742
2148 | f 739 744 738
2149 | f 738 744 741
2150 | f 734 737 733
2151 | f 733 737 743
2152 | f 733 743 711
2153 | f 711 743 712
2154 | f 736 731 746
2155 | f 746 731 745
2156 | f 731 729 745
2157 | f 745 729 730
2158 | f 727 726 747
2159 | f 747 726 717
2160 | f 748 727 749
2161 | f 749 727 747
2162 | f 747 717 750
2163 | f 750 717 716
2164 | f 749 747 751
2165 | f 751 747 750
2166 | f 749 752 748
2167 | f 748 752 753
2168 | f 730 753 745
2169 | f 745 753 752
2170 | f 728 725 748
2171 | f 748 725 727
2172 | f 730 728 753
2173 | f 753 728 748
2174 | f 700 702 754
2175 | f 754 702 712
2176 | f 705 701 715
2177 | f 715 701 755
2178 | f 754 755 700
2179 | f 700 755 701
2180 | f 742 756 754
2181 | f 754 756 755
2182 | f 740 757 742
2183 | f 742 757 756
2184 | f 755 756 715
2185 | f 715 756 758
2186 | f 754 712 742
2187 | f 742 712 743
2188 | f 759 714 758
2189 | f 758 714 715
2190 | f 714 759 713
2191 | f 713 759 716
2192 | f 759 760 716
2193 | f 716 760 750
2194 | f 760 759 761
2195 | f 761 759 758
2196 | f 762 760 763
2197 | f 763 760 761
2198 | f 756 757 758
2199 | f 758 757 761
2200 | f 757 764 761
2201 | f 761 764 763
2202 | f 760 762 750
2203 | f 750 762 751
2204 | f 766 751 765
2205 | f 765 751 762
2206 | f 767 765 769
2207 | f 769 765 768
2208 | f 765 762 768
2209 | f 768 762 763
2210 | f 765 767 766
2211 | f 766 767 746
2212 | f 766 746 752
2213 | f 752 746 745
2214 | f 752 749 766
2215 | f 766 749 751
2216 | f 757 740 764
2217 | f 764 740 741
2218 | f 769 764 744
2219 | f 744 764 741
2220 | f 769 768 764
2221 | f 764 768 763
2222 | f 767 769 739
2223 | f 739 769 744
2224 | f 739 736 767
2225 | f 767 736 746
2226 | f 219 217 208
2227 | f 208 217 209
2228 | f 209 217 210
2229 | f 210 217 193
2230 | f 198 580 199
2231 | f 199 580 511
2232 | f 76 75 289
2233 | f 289 75 272
2234 | f 277 207 291
2235 | f 291 207 184
2236 | f 290 183 203
2237 | f 203 183 84
2238 | f 490 580 142
2239 | f 142 580 198
2240 | f 142 198 143
2241 | f 143 198 197
2242 | f 149 276 72
2243 | f 72 276 73
2244 | f 104 103 279
2245 | f 279 103 770
2246 | f 770 596 279
2247 | f 279 596 595
2248 | f 605 596 771
2249 | f 771 596 770
2250 | f 771 770 772
2251 | f 772 770 103
2252 | f 772 103 773
2253 | f 773 103 102
2254 | f 155 774 101
2255 | f 101 774 102
2256 | f 773 102 97
2257 | f 97 102 774
2258 | f 775 773 100
2259 | f 100 773 97
2260 | f 773 775 772
2261 | f 772 775 776
2262 | f 772 776 771
2263 | f 771 776 777
2264 | f 200 606 197
2265 | f 197 606 777
2266 | f 189 71 778
2267 | f 778 71 74
2268 | f 97 774 98
2269 | f 98 774 155
2270 | f 183 161 184
2271 | f 184 161 291
2272 | f 777 276 197
2273 | f 197 276 143
2274 | f 777 606 771
2275 | f 771 606 605
2276 | f 613 612 697
2277 | f 697 612 202
2278 | f 595 697 279
2279 | f 279 697 202
2280 | f 73 276 776
2281 | f 776 276 777
2282 | f 776 775 73
2283 | f 73 775 74
2284 | f 100 778 775
2285 | f 775 778 74
2286 | f 778 100 189
2287 | f 189 100 99
2288 | f 245 242 99
2289 | f 99 242 189
2290 | f 131 33 256
2291 | f 28 1 95
2292 | f 95 1 62
2293 | f 27 2 28
2294 | f 28 2 1
2295 | f 265 115 266
2296 | f 266 115 114
2297 | f 95 62 69
2298 | f 69 62 64
2299 | f 62 61 64
2300 | f 64 61 65
2301 | f 61 13 65
2302 | f 65 13 130
2303 | f 13 12 130
2304 | f 130 12 14
2305 | f 31 11 32
2306 | f 32 11 268
2307 | f 15 12 31
2308 | f 31 12 11
2309 | f 31 30 15
2310 | f 15 30 231
2311 | f 30 106 231
2312 | f 231 106 232
2313 | f 287 237 106
2314 | f 106 237 232
2315 | f 254 90 287
2316 | f 287 90 237
2317 | f 253 44 254
2318 | f 254 44 90
2319 | f 253 249 44
2320 | f 44 249 7
2321 | f 267 127 268
2322 | f 268 127 126
2323 | f 10 3 241
2324 | f 241 3 116
2325 | f 3 2 116
2326 | f 116 2 114
2327 | f 2 27 114
2328 | f 114 27 266
2329 | f 27 26 266
2330 | f 266 26 113
2331 | f 26 286 113
2332 | f 113 286 110
2333 | f 286 243 110
2334 | f 110 243 255
2335 | f 243 242 255
2336 | f 255 242 256
2337 | f 242 245 256
2338 | f 256 245 131
2339 | f 245 244 131
2340 | f 131 244 132
2341 | f 265 37 115
2342 | f 115 37 41
2343 | f 41 37 42
2344 | f 42 37 40
2345 | f 92 91 40
2346 | f 40 91 42
2347 | f 236 233 92
2348 | f 92 233 91
2349 | f 191 194 236
2350 | f 236 194 233
2351 | f 258 285 244
2352 | f 244 285 132
2353 | f 257 55 258
2354 | f 258 55 285
2355 | f 257 154 201
2356 | f 201 154 185
2357 | f 56 201 204
2358 | f 204 201 180
2359 | f 55 257 56
2360 | f 56 257 201
2361 | f 204 85 56
2362 | f 56 85 52
2363 | f 86 122 53
2364 | f 53 122 79
2365 | f 212 215 86
2366 | f 86 215 122
2367 | f 211 216 212
2368 | f 212 216 215
2369 | f 192 280 211
2370 | f 211 280 216
2371 | f 191 240 192
2372 | f 192 240 280
2373 | f 249 124 7
2374 | f 7 124 8
2375 | f 124 127 8
2376 | f 8 127 241
2377 | f 127 267 241
2378 | f 241 267 10
2379 | f 10 267 9
2380 | f 9 267 11
2381 | f 80 79 780
2382 | f 780 79 779
2383 | f 39 781 93
2384 | f 93 781 782
2385 | f 80 780 109
2386 | f 109 780 783
2387 | f 118 784 119
2388 | f 119 784 785
2389 | f 118 120 784
2390 | f 784 120 786
2391 | f 121 787 120
2392 | f 120 787 786
2393 | f 235 93 788
2394 | f 788 93 782
2395 | f 235 788 240
2396 | f 240 788 789
2397 | f 109 783 121
2398 | f 121 783 787
2399 | f 123 119 790
2400 | f 790 119 785
2401 | f 39 123 781
2402 | f 781 123 790
2403 | f 122 791 79
2404 | f 79 791 779
2405 | f 122 215 791
2406 | f 791 215 792
2407 | f 215 216 792
2408 | f 792 216 793
2409 | f 216 280 793
2410 | f 793 280 794
2411 | f 240 789 280
2412 | f 280 789 794
2413 | f 779 795 780
2414 | f 780 795 796
2415 | f 781 797 782
2416 | f 782 797 798
2417 | f 780 796 783
2418 | f 783 796 799
2419 | f 784 800 785
2420 | f 785 800 801
2421 | f 784 786 800
2422 | f 800 786 802
2423 | f 787 803 786
2424 | f 786 803 802
2425 | f 788 782 804
2426 | f 804 782 798
2427 | f 788 804 789
2428 | f 789 804 805
2429 | f 783 799 787
2430 | f 787 799 803
2431 | f 790 785 806
2432 | f 806 785 801
2433 | f 781 790 797
2434 | f 797 790 806
2435 | f 791 807 779
2436 | f 779 807 795
2437 | f 791 792 807
2438 | f 807 792 808
2439 | f 793 809 792
2440 | f 792 809 808
2441 | f 793 794 809
2442 | f 809 794 810
2443 | f 789 805 794
2444 | f 794 805 810
2445 | f 795 811 796
2446 | f 796 811 812
2447 | f 797 813 798
2448 | f 798 813 814
2449 | f 799 796 815
2450 | f 815 796 812
2451 | f 801 800 817
2452 | f 817 800 816
2453 | f 800 802 816
2454 | f 816 802 818
2455 | f 802 803 818
2456 | f 818 803 819
2457 | f 798 814 804
2458 | f 804 814 820
2459 | f 804 820 805
2460 | f 805 820 821
2461 | f 799 815 803
2462 | f 803 815 819
2463 | f 806 801 822
2464 | f 822 801 817
2465 | f 797 806 813
2466 | f 813 806 822
2467 | f 807 823 795
2468 | f 795 823 811
2469 | f 807 808 823
2470 | f 823 808 824
2471 | f 808 809 824
2472 | f 824 809 825
2473 | f 809 810 825
2474 | f 825 810 826
2475 | f 805 821 810
2476 | f 810 821 826
2477 | f 811 827 812
2478 | f 812 827 828
2479 | f 813 829 814
2480 | f 814 829 830
2481 | f 815 812 831
2482 | f 831 812 828
2483 | f 817 816 833
2484 | f 833 816 832
2485 | f 816 818 832
2486 | f 832 818 834
2487 | f 818 819 834
2488 | f 834 819 835
2489 | f 814 830 820
2490 | f 820 830 836
2491 | f 820 836 821
2492 | f 821 836 837
2493 | f 815 831 819
2494 | f 819 831 835
2495 | f 822 817 838
2496 | f 838 817 833
2497 | f 813 822 829
2498 | f 829 822 838
2499 | f 823 839 811
2500 | f 811 839 827
2501 | f 823 824 839
2502 | f 839 824 840
2503 | f 824 825 840
2504 | f 840 825 841
2505 | f 825 826 841
2506 | f 841 826 842
2507 | f 821 837 826
2508 | f 826 837 842
--------------------------------------------------------------------------------
/postprocess/tmp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DART2022/DART/4b12efc3aa19a1be782447c44c04b91a1d38ddad/postprocess/tmp.jpg
--------------------------------------------------------------------------------
/postprocess/utils/ico_sphere.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
2 |
3 |
4 | import torch
5 |
6 | from pytorch3d.ops.subdivide_meshes import SubdivideMeshes
7 | from pytorch3d.structures.meshes import Meshes
8 |
9 | # Vertex coordinates for a level 0 ico-sphere.
10 | _ico_verts0 = [
11 | [-0.5257, 0.8507, 0.0000],
12 | [0.5257, 0.8507, 0.0000],
13 | [-0.5257, -0.8507, 0.0000],
14 | [0.5257, -0.8507, 0.0000],
15 | [0.0000, -0.5257, 0.8507],
16 | [0.0000, 0.5257, 0.8507],
17 | [0.0000, -0.5257, -0.8507],
18 | [0.0000, 0.5257, -0.8507],
19 | [0.8507, 0.0000, -0.5257],
20 | [0.8507, 0.0000, 0.5257],
21 | [-0.8507, 0.0000, -0.5257],
22 | [-0.8507, 0.0000, 0.5257],
23 | ]
24 |
25 |
26 | # Faces for level 0 ico-sphere
27 | _ico_faces0 = [
28 | [0, 11, 5],
29 | [0, 5, 1],
30 | [0, 1, 7],
31 | [0, 7, 10],
32 | [0, 10, 11],
33 | [1, 5, 9],
34 | [5, 11, 4],
35 | [11, 10, 2],
36 | [10, 7, 6],
37 | [7, 1, 8],
38 | [3, 9, 4],
39 | [3, 4, 2],
40 | [3, 2, 6],
41 | [3, 6, 8],
42 | [3, 8, 9],
43 | [4, 9, 5],
44 | [2, 4, 11],
45 | [6, 2, 10],
46 | [8, 6, 7],
47 | [9, 8, 1],
48 | ]
49 |
50 |
def ico_sphere(level: int = 0, device=None):
    """
    Build a unit ico-sphere mesh with consistently oriented faces.

    Args:
        level: number of recursive subdivision steps; each step replaces
            every face with four new faces.
        device: torch.device on which the output tensors are allocated
            (defaults to CPU).

    Returns:
        Meshes object holding the sphere's verts and faces.
    """
    if device is None:
        device = torch.device("cpu")
    if level < 0:
        raise ValueError("level must be >= 0.")

    if level == 0:
        # Base case: the hard-coded level-0 icosahedron.
        verts = torch.tensor(_ico_verts0, dtype=torch.float32, device=device)
        faces = torch.tensor(_ico_faces0, dtype=torch.int64, device=device)
        return Meshes(verts=[verts], faces=[faces])

    # Recursive case: subdivide the coarser sphere, then re-project the
    # vertices back onto the unit sphere.
    coarser = ico_sphere(level - 1, device)
    refined = SubdivideMeshes()(coarser)
    verts = refined.verts_list()[0]
    verts = verts / verts.norm(p=2, dim=1, keepdim=True)
    faces = refined.faces_list()[0]
    return Meshes(verts=[verts], faces=[faces])
81 |
--------------------------------------------------------------------------------
/postprocess/utils/libs.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 |
4 | """
5 | Ex-Post Density Estimation (XPDE).
6 | """
7 | import torch
8 | import numpy as np
9 |
10 |
class LatentSpaceSampler(object):
    """Ex-post density estimation (XPDE): fit a Gaussian over encoder
    latents and draw new latent samples from it."""

    def __init__(self, encoder):
        self.encoder = encoder
        self.z_cov = None  # latent covariance, estimated lazily from data

    def get_z_covariance(self, batches_of_xs):
        """Takes one or more batches of xs of shape batches X data_dims"""
        latents = self.encoder(batches_of_xs).detach().cpu().numpy()
        original_shape = latents.shape
        flat = latents.reshape(original_shape[0], -1)
        # np.cov treats rows as variables, so transpose: one row per
        # latent dimension, one column per sample.
        self.z_cov = np.cov(flat.T)
        # shape of self.z_cov: [latent_dim, latent_dim]
        return self.z_cov, original_shape

    def get_zs(self, batches_of_xs):
        """batches_of_xs are only used to compute variance of Z on the fly"""
        num_smpls = batches_of_xs.shape[0]
        self.z_cov, z_shape = self.get_z_covariance(batches_of_xs)
        mean = np.zeros(np.prod(z_shape[1:]), )

        try:
            flat_samples = np.random.multivariate_normal(
                mean, cov=self.z_cov, size=num_smpls)
        except np.linalg.LinAlgError as e:
            print(self.z_cov)
            print(e)
            # Covariance was singular / not PSD; retry with a small
            # diagonal jitter to make sampling well-posed.
            jittered = self.z_cov + 1e-5 * np.eye(self.z_cov.shape[0])
            flat_samples = np.random.multivariate_normal(
                mean, cov=jittered, size=num_smpls)

        return np.reshape(flat_samples, (num_smpls,) + z_shape[1:])
69 |
--------------------------------------------------------------------------------
/postprocess/utils/losses/geodesic.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright (C) 2019 Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG),
4 | # acting on behalf of its Max Planck Institute for Intelligent Systems and the
5 | # Max Planck Institute for Biological Cybernetics. All rights reserved.
6 | #
7 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights
8 | # on this computer program. You can only use this computer program if you have closed a license agreement
9 | # with MPG or you get the right to use the computer program from someone who is authorized to grant you that right.
10 | # Any use of the computer program without a valid license is prohibited and liable to prosecution.
11 | # Contact: ps-license@tuebingen.mpg.de
12 | #
13 | #
14 | # If you use this code in a research publication please consider citing the following:
15 | #
16 | # Expressive Body Capture: 3D Hands, Face, and Body from a Single Image
17 | #
18 | #
19 | # Code Developed by:
20 | # Nima Ghorbani
21 | #
22 | # 2020.12.12
23 |
24 | """
25 | @date: 2022.04.08 week15 星期五
26 | @func: 把geodesic_loss_R加入到VPoser的训练中(因为当前的KL loss和 V2V loss无法使得VAE学习到合理的复杂手势).
27 | """
28 |
29 |
30 | import torch.nn.functional as F
31 | import torch
32 | from torch import nn
33 |
34 | import numpy as np
35 |
36 | # numpy implementation of yi zhou's method
def norm(v):
    """Return *v* rescaled to unit Euclidean length."""
    length = np.linalg.norm(v)
    return v / length
39 |
def gs(M):
    """Gram-Schmidt: orthonormalize the first two columns of M and complete
    them with their cross product to a 3x3 rotation matrix (columns e1,e2,e3)."""
    a1 = M[:, 0]
    a2 = M[:, 1]
    e1 = a1 / np.linalg.norm(a1)
    # Remove the component of a2 along e1 before normalizing.
    r2 = a2 - np.dot(e1, a2) * e1
    e2 = r2 / np.linalg.norm(r2)
    e3 = np.cross(e1, e2)
    return np.vstack([e1, e2, e3]).T
47 |
48 | # input sz bszx3x2
def bgs(d6s):
    """Batched Gram-Schmidt: map 6D rotation representations (bsz x 3 x 2)
    to rotation matrices (bsz x 3 x 3), following Yi Zhou's method."""
    bsz = d6s.shape[0]
    col0 = d6s[:, :, 0]
    col1 = d6s[:, :, 1]
    b1 = F.normalize(col0, p=2, dim=1)
    # Projection of col1 onto b1, removed before normalizing.
    proj = torch.bmm(b1.view(bsz, 1, -1), col1.view(bsz, -1, 1)).view(bsz, 1) * b1
    b2 = F.normalize(col1 - proj, p=2, dim=1)
    b3 = torch.cross(b1, b2, dim=1)
    return torch.stack([b1, b2, b3], dim=1).permute(0, 2, 1)
58 |
59 |
class geodesic_loss_R(nn.Module):
    """Geodesic distance between batches of rotation matrices.

    The distance is the rotation angle of m1 * m2^T, i.e.
    acos((trace(m1 m2^T) - 1) / 2).

    Args:
        reduction: 'mean' averages over all angles; 'batchmean' sums any
            per-sample dims then averages over the batch; anything else
            returns the per-sample angles unreduced.
    """

    def __init__(self, reduction='batchmean'):
        super(geodesic_loss_R, self).__init__()

        self.reduction = reduction
        self.eps = 1e-6  # kept for interface compatibility (unused here)

    # batch geodesic loss for rotation matrices
    def bgdR(self, m1, m2):
        """Return per-sample geodesic angles (radians) for (batch, 3, 3) inputs."""
        m = torch.bmm(m1, m2.transpose(1, 2))  # relative rotation, batch*3*3

        # trace -> cos(angle); clamp to [-1, 1] so fp error cannot push
        # acos out of its domain (replaces the deprecated
        # m1.new(np.ones(batch)) min/max construction).
        cos = (m[:, 0, 0] + m[:, 1, 1] + m[:, 2, 2] - 1) / 2
        cos = torch.clamp(cos, -1.0, 1.0)

        return torch.acos(cos)

    def forward(self, ypred, ytrue):
        theta = self.bgdR(ypred, ytrue)
        if self.reduction == 'mean':
            return torch.mean(theta)
        if self.reduction == 'batchmean':
            # Original summed over dim=theta.shape[1:], which is an empty
            # tuple for the usual 1-D theta and is version-dependent in
            # torch. Flatten per sample, sum, then average over the batch;
            # for 1-D theta this equals torch.mean(theta).
            return torch.mean(theta.reshape(theta.shape[0], -1).sum(dim=1))
        else:
            return theta
--------------------------------------------------------------------------------
/postprocess/utils/losses/loss.py:
--------------------------------------------------------------------------------
1 | # coding: UTF-8
2 | import torch
3 | import torch.nn as nn
4 |
5 | from opts.opts import INFO
6 | from .ssim import ssim
7 | from .percep_loss import VGG19
8 |
9 | """
10 | @date: 2020.05.06
11 | @author: samuel ko
12 | @target: transport the loss of original keras repo to pytorch.
13 | """
14 |
15 |
def get_loss_from_name(name):
    """Map a loss name to its wrapper module.

    Args:
        name: 'l1' or 'l2'.

    Returns:
        The corresponding loss wrapper module.

    Raises:
        ValueError: for an unknown name. (The original silently returned
        None, deferring the failure to the first forward call.)
    """
    if name == "l1":
        return L1LossWrapper()
    elif name == 'l2':
        return L2LossWrapper()
    raise ValueError("unknown loss name: {!r}".format(name))
21 |
22 |
class TotalLoss(nn.Module):
    """Reconstruction loss plus embedding regularization (port of the
    original keras repo's loss).

    total = recon(pred, gt) + beta * ||embedding||^2
            [+ grad_pen_weight * gradient penalty]
            [- beta * entropy_qz]
            [+ regularization_loss]
    """

    def __init__(self,
                 apply_grad_pen=False,
                 grad_pen_weight=None,
                 entropy_qz=None,
                 regularization_loss=None,
                 beta=1e-4,
                 loss='l2'):
        super(TotalLoss, self).__init__()

        # Component losses
        self.loss = get_loss_from_name(loss)
        self.embed_loss = EmbeddingLoss()
        self.grad_loss = GradPenLoss()

        # Extra parameters
        self.apply_grad_pen = apply_grad_pen
        self.grad_pen_weight = grad_pen_weight
        self.entropy_qz = entropy_qz
        self.regularization_loss = regularization_loss
        self.beta = beta

    def forward(self, pred_img, gt_img, embedding):
        # Per-sample reconstruction term, averaged over spatial dims.
        total = self.loss(pred_img, gt_img).mean(dim=[1, 2])
        # L2 penalty on the latent embedding.
        total = total + self.beta * self.embed_loss(embedding)

        if self.apply_grad_pen:
            total = total + self.grad_pen_weight * self.grad_loss(
                self.entropy_qz, embedding, pred_img)
        if self.entropy_qz is not None:
            total = total - self.beta * self.entropy_qz
        if self.regularization_loss is not None:
            total = total + self.regularization_loss

        return total.mean()
62 |
63 |
64 | # Wrapper of the L1Loss so that the format matches what is expected
65 |
class L1LossWrapper(nn.Module):
    """L1 loss wrapper reducing over the channel dimension (dim=1) so the
    output format matches what TotalLoss expects."""

    def __init__(self):
        super(L1LossWrapper, self).__init__()

    def forward(self, pred_img, gt_img):
        diff = pred_img - gt_img
        return diff.abs().mean(dim=1)
72 |
73 |
class L2LossWrapper(nn.Module):
    """Squared-error loss wrapper reducing over the channel dimension (dim=1)
    so the output format matches what TotalLoss expects."""

    def __init__(self):
        super(L2LossWrapper, self).__init__()

    def forward(self, pred_img, gt_img):
        sq_err = (pred_img - gt_img).pow(2)
        return sq_err.mean(dim=1)
80 |
81 |
class EmbeddingLoss(nn.Module):
    """Per-sample mean squared magnitude of the embedding (dim=1 reduction)."""

    def forward(self, embedding):
        return embedding.pow(2).mean(dim=1)
85 |
86 |
class GradPenLoss(nn.Module):
    """Gradient penalty: mean squared gradient of sum(y_pred^2) w.r.t. the
    embedding, optionally weighted by entropy_qz.

    Fixes the original, which squared/multiplied the *tuple* returned by
    torch.autograd.grad (a TypeError at runtime) and passed a non-scalar
    output without grad_outputs (a RuntimeError).
    """

    def forward(self, entropy_qz, embedding, y_pred):
        # grad() needs a scalar output, so differentiate sum(y_pred^2);
        # it returns a tuple, so take the gradient tensor itself with [0].
        # create_graph=True keeps the penalty differentiable for training.
        grads = torch.autograd.grad(
            (y_pred ** 2).sum(), embedding, create_graph=True)[0]
        if entropy_qz is not None:
            # No batch shape is there so mean across everything is ok
            return torch.mean((entropy_qz * grads) ** 2)
        return torch.mean(grads ** 2)
94 |
--------------------------------------------------------------------------------
/postprocess/utils/losses/percep_loss.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import torch
4 | import torch.nn as nn
5 | import torchvision
6 |
7 | """
8 | @date: 2020.04.08 week15 星期三
9 | @readme: perceptual loss from synsin.
10 | """
11 |
12 |
13 | # VGG architecture, used for the perceptual loss using a pretrained VGG network
class VGG19(torch.nn.Module):
    """Frozen VGG19 feature extractor for the synsin-style perceptual loss.

    Splits torchvision's pretrained vgg19.features into five stages and
    returns the activation after each stage.
    """

    # (start, end) layer indices of vgg19.features for slice1..slice5
    _SLICE_BOUNDS = [(0, 2), (2, 7), (7, 12), (12, 21), (21, 30)]

    def __init__(self, requires_grad=False):
        super().__init__()
        pretrained = torchvision.models.vgg19(
            pretrained=True
        ).features
        for slice_idx, (start, end) in enumerate(self._SLICE_BOUNDS, start=1):
            stage = torch.nn.Sequential()
            for layer_idx in range(start, end):
                stage.add_module(str(layer_idx), pretrained[layer_idx])
            setattr(self, "slice%d" % slice_idx, stage)
        if not requires_grad:
            # Freeze all weights: this network is a fixed feature extractor.
            for param in self.parameters():
                param.requires_grad = False

    def forward(self, X):
        # Feed the input through the five stages, keeping each intermediate
        # relu activation.
        outputs = []
        hidden = X
        for slice_idx in range(1, 6):
            hidden = getattr(self, "slice%d" % slice_idx)(hidden)
            outputs.append(hidden)
        return outputs
48 |
--------------------------------------------------------------------------------
/postprocess/utils/losses/ssim.py:
--------------------------------------------------------------------------------
1 | # MIT Licence
2 |
3 | # Methods to predict the SSIM, taken from
4 | # https://github.com/Po-Hsun-Su/pytorch-ssim/blob/master/pytorch_ssim/__init__.py
5 |
6 | from math import exp
7 |
8 | import torch
9 | import torch.nn.functional as F
10 | from torch.autograd import Variable
11 |
12 |
def gaussian(window_size, sigma):
    """Return a 1-D Gaussian kernel of length *window_size* normalized to sum to 1."""
    center = window_size // 2
    weights = [
        exp(-((i - center) ** 2) / float(2 * sigma ** 2))
        for i in range(window_size)
    ]
    kernel = torch.Tensor(weights)
    return kernel / kernel.sum()
21 |
22 |
def create_window(window_size, channel):
    """Build a (channel, 1, window_size, window_size) Gaussian window.

    The same 2-D kernel is expanded across channels for use as a
    depthwise (grouped) conv2d filter in `_ssim`.
    """
    _1D_window = gaussian(window_size, 1.5).unsqueeze(1)
    # Outer product of the 1-D kernel with itself gives the 2-D Gaussian.
    _2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0)
    # FIX: torch.autograd.Variable has been a deprecated no-op wrapper since
    # PyTorch 0.4 (Variable and Tensor were merged); return the tensor directly.
    return _2D_window.expand(channel, 1, window_size, window_size).contiguous()
30 |
31 |
32 | def _ssim(
33 | img1, img2, window, window_size, channel, mask=None, size_average=True
34 | ):
35 | mu1 = F.conv2d(img1, window, padding=window_size // 2, groups=channel)
36 | mu2 = F.conv2d(img2, window, padding=window_size // 2, groups=channel)
37 |
38 | mu1_sq = mu1.pow(2)
39 | mu2_sq = mu2.pow(2)
40 | mu1_mu2 = mu1 * mu2
41 |
42 | sigma1_sq = (
43 | F.conv2d(img1 * img1, window, padding=window_size // 2, groups=channel)
44 | - mu1_sq
45 | )
46 | sigma2_sq = (
47 | F.conv2d(img2 * img2, window, padding=window_size // 2, groups=channel)
48 | - mu2_sq
49 | )
50 | sigma12 = (
51 | F.conv2d(img1 * img2, window, padding=window_size // 2, groups=channel)
52 | - mu1_mu2
53 | )
54 |
55 | C1 = (0.01) ** 2
56 | C2 = (0.03) ** 2
57 |
58 | ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / (
59 | (mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2)
60 | )
61 |
62 | if not (mask is None):
63 | b = mask.size(0)
64 | ssim_map = ssim_map.mean(dim=1, keepdim=True) * mask
65 | ssim_map = ssim_map.view(b, -1).sum(dim=1) / mask.view(b, -1).sum(
66 | dim=1
67 | ).clamp(min=1)
68 | return ssim_map
69 |
70 | import pdb
71 |
72 | pdb.set_trace
73 |
74 | if size_average:
75 | return ssim_map.mean()
76 | else:
77 | return ssim_map.mean(1).mean(1).mean(1)
78 |
79 |
class SSIM(torch.nn.Module):
    """Module wrapper around `_ssim` that caches the Gaussian window.

    The window is rebuilt only when the incoming channel count or tensor
    type no longer matches the cached one.
    """

    def __init__(self, window_size=11, size_average=True):
        super(SSIM, self).__init__()
        self.window_size = window_size
        self.size_average = size_average
        # Start with a single-channel window; forward() adapts on demand.
        self.channel = 1
        self.window = create_window(window_size, self.channel)

    def forward(self, img1, img2, mask=None):
        channel = img1.size(1)

        # Reuse the cached window only when both channel count and
        # tensor type (dtype + device class) still match.
        same_channel = channel == self.channel
        same_type = self.window.data.type() == img1.data.type()
        if same_channel and same_type:
            window = self.window
        else:
            window = create_window(self.window_size, channel)

        # Always align device and dtype with the input (no-op when cached).
        if img1.is_cuda:
            window = window.cuda(img1.get_device())
        window = window.type_as(img1)

        # Cache for the next call.
        self.window = window
        self.channel = channel

        return _ssim(
            img1,
            img2,
            window,
            self.window_size,
            channel,
            mask,
            self.size_average,
        )
115 |
116 |
def ssim(img1, img2, window_size=11, mask=None, size_average=True):
    """Functional SSIM: build a window matched to `img1` and delegate to `_ssim`."""
    channel = img1.size(1)
    window = create_window(window_size, channel)

    # Move the window to the input's device and dtype before filtering.
    if img1.is_cuda:
        window = window.cuda(img1.get_device())
    window = window.type_as(img1)

    return _ssim(img1, img2, window, window_size, channel, mask, size_average)
126 |
127 |
if __name__ == "__main__":
    import cv2
    import numpy as np

    # Smoke test: SSIM of an image with itself should be exactly 1.
    a = cv2.imread("/home/samuel/gaodaiheng/3DFace/unsupervised/1.jpg")
    a = np.expand_dims(a, 0)
    b = cv2.imread("/home/samuel/gaodaiheng/3DFace/unsupervised/1.jpg")
    b = np.expand_dims(b, 0)

    # FIX: `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` (float64) is the documented replacement.
    a, b = a.astype(float), b.astype(float)

    # NHWC -> NCHW layout for the conv-based SSIM.
    a = torch.from_numpy(np.transpose(a, (0, 3, 1, 2)))
    b = torch.from_numpy(np.transpose(b, (0, 3, 1, 2)))

    # Two identical images give ssim == 1, so a training loss should use 1 - ssim.
    print(ssim(a, b))
--------------------------------------------------------------------------------
/postprocess/utils/mano_wrist.py:
--------------------------------------------------------------------------------
1 | # coding: UTF-8
2 |
3 | import os
4 | import torch
5 | import pickle
6 | import numpy as np
7 | from torch.autograd import Variable
8 | from pytorch3d.io import load_obj, save_obj
9 | from reprojection import generate_2d
10 |
11 |
# Load the wrist-extended right-hand mesh (842 vertices vs MANO's 778);
# textures are baked into a per-face atlas for rendering.
obj_filename = os.path.join('extra_data/hand_wrist/hand_01.obj')
# obj_filename = os.path.join('extra_data/hand_mesh/hand.obj')
verts, faces, aux = load_obj(
    obj_filename,
    device="cuda",
    load_textures=True,
    create_texture_atlas=True,
    texture_atlas_size=8,
    texture_wrap=None,
)
atlas = aux.texture_atlas
mesh_num = 842  # vertex count of the wrist-extended mesh (plain MANO has 778)
keypoints_num = 16  # MANO kinematic joints: root + 15 finger joints

# MANO model parameters; latin1 encoding decodes the Python-2 pickle.
dd = pickle.load(open('extra_data/MANO_RIGHT.pkl', 'rb'),encoding='latin1')
kintree_table = dd['kintree_table']
# Map joint id -> table column, then child joint -> parent joint.
id_to_col = {kintree_table[1, i]: i for i in range(kintree_table.shape[1])}
parent = {i: id_to_col[kintree_table[0, i]] for i in range(1, kintree_table.shape[1])}

# Template vertices from the OBJ replace MANO's mean shape.
mesh_mu = verts.unsqueeze(0) # [bs, 778, 3] ---> [bs, 842, 3](hand_01/02)
posedirs = Variable(torch.from_numpy(np.expand_dims(dd['posedirs'], 0).astype(np.float32)).cuda()) # [bs, 778, 3, 135] (rot_pose_beta_mesh)
J_regressor = Variable(torch.from_numpy(np.expand_dims(dd['J_regressor'].todense(), 0).astype(np.float32)).cuda()) # [bs, 16, 778] (rot_pose_beta_mesh)
weights = Variable(torch.from_numpy(np.expand_dims(dd['weights'], 0).astype(np.float32)).cuda()) # [bs, 778, 16] (rot_pose_beta_mesh)
# hands_components = Variable(
#     torch.from_numpy(np.expand_dims(np.vstack(dd['hands_components'][:]), 0).astype(np.float32)).cuda())
hands_mean = Variable(torch.from_numpy(np.expand_dims(dd['hands_mean'], 0).astype(np.float32)).cuda()) # [bs, 45] unchanged
# Root orientation handled separately; the kinematic root stays at zero.
root_rot = Variable(torch.FloatTensor([0., 0., 0.]).unsqueeze(0).cuda())
FACES = faces[0]

# Set extra-vertex entries for J_regressor & posedirs & weights: extend the
# 778-vertex MANO arrays to the 842-vertex mesh.

# Regressor for the 842-vertex mesh; the first 778 columns are MANO's.
Final_J_regressor = torch.zeros(1, 16, 842)
Final_J_regressor[:, :, :778] = J_regressor

# Four of the added wrist vertices reuse the regressor weights of
# original MANO vertex 777 (the remaining added vertices keep zero weight).
Final_J_regressor[:, :, 793] = J_regressor[:, :, 777]
Final_J_regressor[:, :, 809] = J_regressor[:, :, 777]
Final_J_regressor[:, :, 825] = J_regressor[:, :, 777]
Final_J_regressor[:, :, 841] = J_regressor[:, :, 777]
50 |
51 | # Final_J_regressor[:, :, 788] = J_regressor[:, :, 235]
52 | # Final_J_regressor[:, :, 804] = J_regressor[:, :, 235]
53 | # Final_J_regressor[:, :, 820] = J_regressor[:, :, 235]
54 | # Final_J_regressor[:, :, 836] = J_regressor[:, :, 235]
55 |
56 | # Final_J_regressor[:, :, 787] = J_regressor[:, :, 230]
57 | # Final_J_regressor[:, :, 803] = J_regressor[:, :, 230]
58 | # Final_J_regressor[:, :, 819] = J_regressor[:, :, 230]
59 | # Final_J_regressor[:, :, 835] = J_regressor[:, :, 230]
60 |
61 | # Final_J_regressor[:, :, 781] = J_regressor[:, :, 92]
62 | # Final_J_regressor[:, :, 797] = J_regressor[:, :, 92]
63 | # Final_J_regressor[:, :, 813] = J_regressor[:, :, 92]
64 | # Final_J_regressor[:, :, 829] = J_regressor[:, :, 92]
65 |
66 | # Final_J_regressor[:, :, 780] = J_regressor[:, :, 39]
67 | # Final_J_regressor[:, :, 796] = J_regressor[:, :, 39]
68 | # Final_J_regressor[:, :, 812] = J_regressor[:, :, 39]
69 | # Final_J_regressor[:, :, 828] = J_regressor[:, :, 39]
70 |
71 | # Final_J_regressor[:, :, 789] = J_regressor[:, :, 288]
72 | # Final_J_regressor[:, :, 805] = J_regressor[:, :, 288]
73 | # Final_J_regressor[:, :, 821] = J_regressor[:, :, 288]
74 | # Final_J_regressor[:, :, 837] = J_regressor[:, :, 288]
75 |
76 | # Final_J_regressor[:, :, 784] = J_regressor[:, :, 118]
77 | # Final_J_regressor[:, :, 800] = J_regressor[:, :, 118]
78 | # Final_J_regressor[:, :, 815] = J_regressor[:, :, 118]
79 | # Final_J_regressor[:, :, 831] = J_regressor[:, :, 118]
80 |
81 | # Final_J_regressor[:, :, 783] = J_regressor[:, :, 117]
82 | # Final_J_regressor[:, :, 799] = J_regressor[:, :, 117]
83 | # Final_J_regressor[:, :, 816] = J_regressor[:, :, 117]
84 | # Final_J_regressor[:, :, 832] = J_regressor[:, :, 117]
85 |
86 | # Final_J_regressor[:, :, 785] = J_regressor[:, :, 119]
87 | # Final_J_regressor[:, :, 801] = J_regressor[:, :, 119]
88 | # Final_J_regressor[:, :, 817] = J_regressor[:, :, 119]
89 | # Final_J_regressor[:, :, 833] = J_regressor[:, :, 119]
90 |
91 | # Final_J_regressor[:, :, 786] = J_regressor[:, :, 120]
92 | # Final_J_regressor[:, :, 802] = J_regressor[:, :, 120]
93 | # Final_J_regressor[:, :, 818] = J_regressor[:, :, 120]
94 | # Final_J_regressor[:, :, 834] = J_regressor[:, :, 120]
95 |
96 | # Final_J_regressor[:, :, 782] = J_regressor[:, :, 108]
97 | # Final_J_regressor[:, :, 798] = J_regressor[:, :, 108]
98 | # Final_J_regressor[:, :, 814] = J_regressor[:, :, 108]
99 | # Final_J_regressor[:, :, 830] = J_regressor[:, :, 108]
100 |
101 | # Final_J_regressor[:, :, 778] = J_regressor[:, :, 79]
102 | # Final_J_regressor[:, :, 795] = J_regressor[:, :, 79]
103 | # Final_J_regressor[:, :, 811] = J_regressor[:, :, 79]
104 | # Final_J_regressor[:, :, 827] = J_regressor[:, :, 79]
105 |
106 | # Final_J_regressor[:, :, 779] = J_regressor[:, :, 78]
107 | # Final_J_regressor[:, :, 794] = J_regressor[:, :, 78]
108 | # Final_J_regressor[:, :, 810] = J_regressor[:, :, 78]
109 | # Final_J_regressor[:, :, 826] = J_regressor[:, :, 78]
110 |
111 | # Final_J_regressor[:, :, 790] = J_regressor[:, :, 774]
112 | # Final_J_regressor[:, :, 806] = J_regressor[:, :, 774]
113 | # Final_J_regressor[:, :, 822] = J_regressor[:, :, 774]
114 | # Final_J_regressor[:, :, 838] = J_regressor[:, :, 774]
115 |
116 | # Final_J_regressor[:, :, 791] = J_regressor[:, :, 775]
117 | # Final_J_regressor[:, :, 807] = J_regressor[:, :, 775]
118 | # Final_J_regressor[:, :, 823] = J_regressor[:, :, 775]
119 | # Final_J_regressor[:, :, 839] = J_regressor[:, :, 775]
120 |
121 | # Final_J_regressor[:, :, 792] = J_regressor[:, :, 776]
122 | # Final_J_regressor[:, :, 808] = J_regressor[:, :, 776]
123 | # Final_J_regressor[:, :, 824] = J_regressor[:, :, 776]
124 | # Final_J_regressor[:, :, 840] = J_regressor[:, :, 776]
125 |
126 |
# Pose blendshapes for the added vertices are zero (they do not deform
# with pose); the original 778 vertices keep MANO's posedirs.
Final_posedirs = torch.zeros(1, 842, 3, 135)
Final_posedirs[:, :778, :, :] = posedirs

# Skinning weights: added vertices (778..841) all copy the weights of
# MANO vertex 777.
Final_weights = torch.zeros(1, 842, 16)
Final_weights[:, :778] = weights
Final_weights[:, 778:] = weights[:, 777]

# Keep every LBS tensor on the GPU, matching the mesh loaded with device="cuda".
Final_J_regressor = Final_J_regressor.cuda()
Final_weights = Final_weights.cuda()
Final_posedirs = Final_posedirs.cuda()
137 |
138 |
def rodrigues(r):
    """Rodrigues' formula: batched axis-angle vectors -> rotation matrices.

    Args:
        r: [N, 3] axis-angle vectors; the rotation angle is the vector norm.

    Returns:
        (R, Sn): R is [N, 3, 3] rotation matrices; Sn is the [N, 3, 3]
        skew-symmetric matrix of the normalised axis. For near-zero-angle
        rows Sn contains NaNs from the normalisation -- callers here only
        use R, whose rows are patched by the Taylor fallback below.
    """
    #print(r)
    theta = torch.sqrt(torch.sum(torch.pow(r, 2), 1))

    # Skew-symmetric cross-product matrix [n]_x of each row 3-vector.
    def S(n_):
        ns = torch.split(n_, 1, 1)
        Sn_ = torch.cat([torch.zeros_like(ns[0]), -ns[2], ns[1], ns[2], torch.zeros_like(ns[0]), -ns[0], -ns[1], ns[0],
                         torch.zeros_like(ns[0])], 1)
        Sn_ = Sn_.view(-1, 3, 3)
        return Sn_

    # NOTE: rows with theta == 0 divide 0/0 here and yield NaN; they are
    # replaced with the series expansion R2 at the end.
    n = r / (theta.view(-1, 1))
    Sn = S(n)

    # R = torch.eye(3).unsqueeze(0) + torch.sin(theta).view(-1, 1, 1)*Sn\
    #  +(1.-torch.cos(theta).view(-1, 1, 1)) * torch.matmul(Sn,Sn)

    I3 = Variable(torch.eye(3).unsqueeze(0).cuda())
    #print(theta,Sn)
    # Exact Rodrigues formula: R = I + sin(t)[n]_x + (1-cos(t))[n]_x^2.
    R = I3 + torch.sin(theta).view(-1, 1, 1) * Sn \
        + (1. - torch.cos(theta).view(-1, 1, 1)) * torch.matmul(Sn, Sn)

    # Second-order Taylor expansion in the UNnormalised vector r,
    # numerically stable as theta -> 0.
    Sr = S(r)
    theta2 = theta ** 2
    R2 = I3 + (1. - theta2.view(-1, 1, 1) / 6.) * Sr \
        + (.5 - theta2.view(-1, 1, 1) / 24.) * torch.matmul(Sr, Sr)

    # Swap in the stable result for (near-)zero rotations.
    idx = np.argwhere((theta < 1e-30).data.cpu().numpy())

    if (idx.size):
        R[idx, :, :] = R2[idx, :, :]

    return R, Sn
172 |
173 |
def get_poseweights(poses, bsize):
    """Flattened pose-blendshape weights (R - I) for all non-root joints.

    Args:
        poses: [bsize, keypoints_num, 3] axis-angle joint rotations
               (joint 0 is the root and is skipped).
        bsize: batch size.

    Returns:
        [bsize, (keypoints_num - 1) * 9] rotation-matrix offsets.
    """
    # Rotation matrices for the 15 non-root joints, flattened over the batch.
    joint_rots, _ = rodrigues(poses[:, 1:, :].contiguous().view(-1, 3))
    n_rots = bsize * (keypoints_num - 1)
    # One 3x3 identity per rotation; (R - I) vanishes at the rest pose.
    identity = np.repeat(np.eye(3, dtype=np.float32)[np.newaxis], n_rots, axis=0)
    identity = Variable(torch.from_numpy(identity).cuda())
    return (joint_rots - identity).view(bsize, -1)
182 |
183 |
# NOTICE: remove shape parameter.
def rot_pose_beta_to_mesh(rots, poses):
    """Linear-blend-skinning forward pass for the wrist-extended MANO hand.

    Args:
        rots:  [batch, 3] global axis-angle rotation applied after skinning.
        poses: [batch, 45] joint axis-angles relative to MANO's mean pose
               (15 non-root joints x 3).

    Returns:
        [batch, 21 + mesh_num, 3]: 21 keypoints (16 kinematic joints plus
        5 fingertip vertices) concatenated with the posed mesh vertices,
        all translated so that keypoint 1 is the origin.
    """
    batch_size = rots.size(0)
    #print(hands_mean.shape,poses.unsqueeze(1).shape,hands_components.shape)
    #poses = (hands_mean + torch.matmul(poses.unsqueeze(1), hands_components).squeeze(1)).view(batch_size,keypoints_num - 1, 3) #if use pca
    # Add back the mean pose, then prepend the fixed zero root rotation so
    # poses becomes [batch, keypoints_num, 3].
    poses = (hands_mean + poses).view(batch_size, keypoints_num - 1, 3)
    # poses = torch.cat((poses[:,:3].contiguous().view(batch_size,1,3),poses_),1)
    poses = torch.cat((root_rot.repeat(batch_size, 1).view(batch_size, 1, 3), poses), 1)

    # Rest-pose template vertices (no shape blendshapes -- see NOTICE above).
    v_shaped = mesh_mu.repeat(batch_size, 1, 1).view(batch_size, -1).view(batch_size, mesh_num, 3)
    pose_weights = get_poseweights(poses, batch_size)

    # Pose blendshapes: offset each vertex by posedirs weighted with (R - I).
    v_posed = v_shaped + torch.matmul(Final_posedirs.repeat(batch_size, 1, 1, 1),
                                      (pose_weights.view(batch_size, 1, (keypoints_num - 1) * 9, 1)).repeat(1, mesh_num, 1, 1)).squeeze(3)

    # Rest-pose joint locations regressed from the rest-pose vertices.
    J_posed = torch.matmul(v_shaped.permute(0, 2, 1), Final_J_regressor.repeat(batch_size, 1, 1).permute(0, 2, 1))
    J_posed = J_posed.permute(0, 2, 1)
    J_posed_split = [sp.contiguous().view(batch_size, 3) for sp in torch.split(J_posed.permute(1, 0, 2), 1, 0)]

    pose = poses.permute(1, 0, 2)
    pose_split = torch.split(pose, 1, 0)

    # Per-joint rotation matrices from the axis-angle poses.
    angle_matrix = []
    for i in range(keypoints_num):
        #print(i, pose_split[i])
        out, tmp = rodrigues(pose_split[i].contiguous().view(-1, 3))
        angle_matrix.append(out)

    # with_zeros = lambda x: torch.cat((x,torch.FloatTensor([[[0.0, 0.0, 0.0, 1.0]]]).repeat(batch_size,1,1)),1)

    # Append the homogeneous row [0, 0, 0, 1] to a [batch, 3, 4] transform.
    with_zeros = lambda x: \
        torch.cat((x, Variable(torch.FloatTensor([[[0.0, 0.0, 0.0, 1.0]]]).repeat(batch_size, 1, 1).cuda())), 1)

    # Pad a [batch, 4, 1] column into a [batch, 4, 4] matrix (zeros elsewhere).
    pack = lambda x: torch.cat((Variable(torch.zeros(batch_size, 4, 3).cuda()), x), 2)

    # Forward kinematics: compose each joint's local transform onto its parent's.
    results = {}
    results[0] = with_zeros(torch.cat((angle_matrix[0], J_posed_split[0].view(batch_size, 3, 1)), 2))

    for i in range(1, kintree_table.shape[1]):
        tmp = with_zeros(torch.cat((angle_matrix[i],
                                    (J_posed_split[i] - J_posed_split[parent[i]]).view(batch_size, 3, 1)), 2))
        results[i] = torch.matmul(results[parent[i]], tmp)

    results_global = results

    # Convert global transforms to skinning transforms by subtracting the
    # transformed rest-pose joint position (standard LBS correction).
    results2 = []

    for i in range(len(results)):
        vec = (torch.cat((J_posed_split[i], Variable(torch.zeros(batch_size, 1).cuda())), 1)).view(batch_size, 4, 1)
        results2.append((results[i] - pack(torch.matmul(results[i], vec))).unsqueeze(0))

    results = torch.cat(results2, 0)

    # Blend the per-joint 4x4 transforms with the skinning weights.
    T = torch.matmul(results.permute(1, 2, 3, 0),
                     Final_weights.repeat(batch_size, 1, 1).permute(0, 2, 1).unsqueeze(1).repeat(1, 4, 1, 1))
    Ts = torch.split(T, 1, 2)
    rest_shape_h = torch.cat((v_posed, Variable(torch.ones(batch_size, mesh_num, 1).cuda())), 2)
    rest_shape_hs = torch.split(rest_shape_h, 1, 2)

    # Apply the blended transforms to the homogeneous rest vertices.
    v = Ts[0].contiguous().view(batch_size, 4, mesh_num) * rest_shape_hs[0].contiguous().view(-1, 1, mesh_num) \
        + Ts[1].contiguous().view(batch_size, 4, mesh_num) * rest_shape_hs[1].contiguous().view(-1, 1, mesh_num) \
        + Ts[2].contiguous().view(batch_size, 4, mesh_num) * rest_shape_hs[2].contiguous().view(-1, 1, mesh_num) \
        + Ts[3].contiguous().view(batch_size, 4, mesh_num) * rest_shape_hs[3].contiguous().view(-1, 1, mesh_num)

    # v = v.permute(0,2,1)[:,:,:3]
    # Global rotation, applied to both vertices and keypoints below.
    Rots = rodrigues(rots)[0]

    # Collect the 16 kinematic joint positions from the global transforms.
    Jtr = []

    for j_id in range(len(results_global)):
        Jtr.append(results_global[j_id][:, :3, 3:4])

    #definition as frankmocap smplx @meshlab
    # Jtr.append(v[:, :3, 333].unsqueeze(2)) #index
    # Jtr.append(v[:, :3, 444].unsqueeze(2)) #middle
    # Jtr.append(v[:, :3, 672].unsqueeze(2)) #pinky
    # Jtr.append(v[:, :3, 555].unsqueeze(2)) #ring
    # Jtr.append(v[:, :3, 745].unsqueeze(2)) #thumb

    # 2022.05.11 lixin.yang
    # Fingertip keypoints taken directly from posed mesh vertices.
    Jtr.append(v[:, :3, 745].unsqueeze(2)) #thumb
    Jtr.append(v[:, :3, 317].unsqueeze(2)) #index
    Jtr.append(v[:, :3, 444].unsqueeze(2)) #middle
    Jtr.append(v[:, :3, 556].unsqueeze(2)) #ring
    Jtr.append(v[:, :3, 673].unsqueeze(2)) #pinky

    Jtr = torch.cat(Jtr, 2) # .permute(0,2,1)

    v = torch.matmul(Rots, v[:, :3, :]).permute(0, 2, 1) # .contiguous().view(batch_size,-1)
    Jtr = torch.matmul(Rots, Jtr).permute(0, 2, 1) # .contiguous().view(batch_size,-1)

    #translate to be same as smplx
    root=Jtr[:,1].clone().unsqueeze(1)
    Jtr-=root
    v-=root

    return torch.cat((Jtr, v), 1)
--------------------------------------------------------------------------------
/postprocess/utils/utils.py:
--------------------------------------------------------------------------------
1 | # coding: UTF-8
2 | """
3 | @author: samuel ko
4 | @date: 2019.12.13
5 | @readme: Miscellaneous utility classes and functions.
6 | """
7 |
8 | import re
9 | import importlib
10 | import torch
11 | import os
12 | import sys
13 | import types
14 | from typing import Any, List, Tuple, Union
15 |
16 |
def SaveModel(encoder, decoder, dir, epoch):
    """Save encoder/decoder state dicts to '<dir>/_params_<epoch>.pt'."""
    checkpoint = {
        'encoder': encoder.state_dict(),
        'decoder': decoder.state_dict(),
    }
    filename = '_params_{}.pt'.format(epoch)
    torch.save(checkpoint, os.path.join(dir, filename))
22 |
--------------------------------------------------------------------------------