├── .gitignore ├── README.md ├── download_scripts ├── README.md └── download.py ├── eval_part_seg_folder.py ├── eval_part_seg_h5.py ├── main_cls.py ├── main_part_seg.py ├── misc ├── __pycache__ │ ├── modelnet40_pcl_datasets.cpython-36.pyc │ ├── shapenet_test_from_list.cpython-36.pyc │ ├── shapenetcore_partanno_datasets.cpython-36.pyc │ ├── transforms.cpython-36.pyc │ └── utils.cpython-36.pyc ├── modelnet40_pcl_datasets.py ├── shapenet_test_from_list.py ├── shapenetcore_partanno_datasets.py ├── transforms.py └── utils.py ├── models ├── __init__.py ├── __init__.pyc ├── __pycache__ │ ├── __init__.cpython-36.pyc │ └── pointnet.cpython-36.pyc ├── pointnet.py └── pointnet.pyc ├── run.sh └── tools ├── obj_data ├── 0_labels_mask.obj └── display.png ├── pics └── display.png ├── test.lua └── visualizations ├── build.sh ├── render_balls_so.cpp ├── render_balls_so.so └── show3d_balls.py /.gitignore: -------------------------------------------------------------------------------- 1 | datasets/ 2 | test_results_folder/ 3 | models_checkpoint/*.pth 4 | 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This project largely borrows code from [pointnet.pytorch](https://github.com/fxia22/pointnet.pytorch) by fxia22. 2 | ### All the steps to run this project are listed in [run.sh](https://github.com/eriche2016/pointnet2.pytorch/blob/master/run.sh), except for the visualization steps 3 | 4 | ### visualization steps 5 | Download the OpenCV binary from [Download opencv-3.2.0-vc14.exe (123.5 MB)](https://sourceforge.net/projects/opencvlibrary/files/opencv-win/) 6 | and install it. Go to the ```opencv/build/python/2.7``` folder. 7 | Copy ```cv2.pyd``` to ```C:/Python27/lib/site-packages```. 8 | Then build ```render_balls_so.cpp``` 9 | and run ```python display_results.py```; 
see [here](https://github.com/eriche2016/pointnet2.pytorch/tree/master/tools/visualizations) 10 | 11 | ### demo images 12 | part segmentation result: 13 | ![seg](https://github.com/eriche2016/pointnet2.pytorch/blob/master/tools/pics/display.png) -------------------------------------------------------------------------------- /download_scripts/README.md: -------------------------------------------------------------------------------- 1 | ### note that in order to run the project without error, we need to only modify 2 | ### the files in train_files.txt and test_files.txt to be base file name(without parent dir as prefix), e.g., like below: 3 | ```bash 4 | ply_data_train0.h5 5 | ply_data_train1.h5 6 | ply_data_train2.h5 7 | ply_data_train3.h5 8 | ply_data_train4.h5 9 | ``` 10 | -------------------------------------------------------------------------------- /download_scripts/download.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | 5 | """ 6 | Modification of https://github.com/stanfordnlp/treelstm/blob/master/scripts/download.py 7 | Downloads the following: 8 | - point cloud data version of modelnet40 dataset 9 | """ 10 | 11 | from __future__ import print_function 12 | import os 13 | import sys 14 | import gzip 15 | import json 16 | import shutil 17 | import zipfile 18 | import argparse 19 | import subprocess 20 | from six.moves import urllib 21 | 22 | from IPython.core.debugger import Tracer 23 | debug_here = Tracer() 24 | 25 | parser = argparse.ArgumentParser(description='Download dataset for pointnet.') 26 | parser.add_argument('--dataset', required=True, help=' can be [modelnet40_pcl, shapenetcore_partanno, ??]') 27 | 28 | def download(url, dirpath): 29 | filename = url.split('/')[-1] 30 | filepath = os.path.join(dirpath, filename) 31 | # print(url) 32 | u = urllib.request.urlopen(url) 33 | # './datasets/modelnet40_ply_hdf5_2048.zip' 34 | # print(filepath) 35 | f = open(filepath, 'wb') 36 | filesize = int(u.headers["Content-Length"]) 37 | print("Downloading: %s Bytes: %s" % (filename, filesize)) 38 | 39 | downloaded = 0 40 | block_sz = 8192 41 | status_width = 70 42 | while True: 43 | buf = u.read(block_sz) 44 | if not buf: 45 | print('') 46 | break 47 | else: 48 | print('', end='\r') 49 | downloaded += len(buf) 50 | f.write(buf) 51 | status = (("[%-" + str(status_width + 1) + "s] %3.2f%%") % 52 | ('=' * int(float(downloaded) / filesize * status_width) + '>', downloaded * 100. 
/ filesize)) 53 | print(status, end='') 54 | sys.stdout.flush() 55 | f.close() 56 | return filepath 57 | 58 | def unzip(filepath): 59 | 60 | print("Extracting: " + filepath) 61 | dirpath = os.path.dirname(filepath) 62 | with zipfile.ZipFile(filepath) as zf: 63 | zf.extractall(dirpath) 64 | os.remove(filepath) 65 | 66 | 67 | def download_modelnet40_pcl(dirpath): 68 | """ 69 | dirpath = './datasets/' 70 | """ 71 | data_folder = 'modelnet40_pcl' 72 | data_dir = os.path.join(dirpath, data_folder) 73 | if os.path.exists(data_dir): 74 | print('Found modelnet40_pcl - skip') 75 | return 76 | url = 'https://shapenet.cs.stanford.edu/media/modelnet40_ply_hdf5_2048.zip' 77 | file_path = download(url, dirpath) 78 | unzip(file_path) 79 | 80 | # download original shapenetcore_partanno dataset 81 | def download_shapenetcore_partanno(dirpath): 82 | """ 83 | dirpath = './datasets/' 84 | """ 85 | data_folder = 'shapenetcore_partanno' 86 | data_dir = os.path.join(dirpath, data_folder) 87 | if os.path.exists(data_dir): 88 | print('Found shapenetcore_partanno - skip') 89 | return 90 | url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_v0.zip' 91 | file_path = download(url, dirpath) 92 | unzip(file_path) 93 | 94 | # download original shapenetcore_partanno dataset 95 | def download_shapenetcore_partanno_h5(dirpath): 96 | """ 97 | dirpath = './datasets/' 98 | """ 99 | data_folder = 'shapenet_part_seg_hdf5_dataset' 100 | data_dir = os.path.join(dirpath, data_folder) 101 | ''' 102 | if os.path.exists(data_dir): 103 | print('Found shapenetcore_partanno_h5 - skip') 104 | return 105 | ''' 106 | url = 'https://shapenet.cs.stanford.edu/media/shapenet_part_seg_hdf5_data.zip' 107 | # file_path = download(url, dirpath) 108 | filename = url.split('/')[-1] 109 | file_path = os.path.join(dirpath, filename) 110 | # print(url) 111 | unzip(file_path) 112 | 113 | # download original shapenetcore_partanno dataset 114 | def download_shapenet_partanno_seg_bench_v0(dirpath): 115 | """ 116 | dirpath = './datasets/' 117 | """ 118 | data_folder = 'shapenetcore_partanno_segmentation_benchmark_v0' 119 | data_dir = os.path.join(dirpath, data_folder) 120 | if os.path.exists(data_dir): 121 | print('Found shapenetcore_partanno_h5 - skip') 122 | return 123 | url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' 124 | file_path = download(url, dirpath) 125 | # print(url) 126 | unzip(file_path) 127 | 128 | 129 | 130 | 131 | def download_celeb_a(dirpath): 132 | data_dir = 'celebA' 133 | if os.path.exists(os.path.join(dirpath, data_dir)): 134 | print('Found Celeb-A - skip') 135 | return 136 | url = 'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=1&pv=1' 137 | filepath = download(url, dirpath) 138 | zip_dir = '' 139 | with zipfile.ZipFile(filepath) as zf: 140 | zip_dir = zf.namelist()[0] 141 | zf.extractall(dirpath) 142 | os.remove(filepath) 143 | os.rename(os.path.join(dirpath, zip_dir), os.path.join(dirpath, data_dir)) 144 | 145 | def _list_categories(tag): 146 | url = 'http://lsun.cs.princeton.edu/htbin/list.cgi?tag=' + tag 147 | f = urllib.request.urlopen(url) 148 | return json.loads(f.read()) 149 | 150 | def _download_lsun(out_dir, category, set_name, tag): 151 | url = 'http://lsun.cs.princeton.edu/htbin/download.cgi?tag={tag}' \ 152 | '&category={category}&set={set_name}'.format(**locals()) 153 | print(url) 154 | if set_name == 'test': 155 | out_name = 'test_lmdb.zip' 156 | else: 157 | out_name = 
'{category}_{set_name}_lmdb.zip'.format(**locals()) 158 | out_path = os.path.join(out_dir, out_name) 159 | cmd = ['curl', url, '-o', out_path] 160 | print('Downloading', category, set_name, 'set') 161 | subprocess.call(cmd) 162 | 163 | def download_lsun(dirpath): 164 | data_dir = os.path.join(dirpath, 'lsun') 165 | if os.path.exists(data_dir): 166 | print('Found LSUN - skip') 167 | return 168 | else: 169 | os.mkdir(data_dir) 170 | 171 | tag = 'latest' 172 | #categories = _list_categories(tag) 173 | categories = ['bedroom'] 174 | 175 | for category in categories: 176 | _download_lsun(data_dir, category, 'train', tag) 177 | _download_lsun(data_dir, category, 'val', tag) 178 | _download_lsun(data_dir, '', 'test', tag) 179 | 180 | def download_mnist(dirpath): 181 | data_dir = os.path.join(dirpath, 'mnist') 182 | if os.path.exists(data_dir): 183 | print('Found MNIST - skip') 184 | return 185 | else: 186 | os.mkdir(data_dir) 187 | url_base = 'http://yann.lecun.com/exdb/mnist/' 188 | file_names = ['train-images-idx3-ubyte.gz','train-labels-idx1-ubyte.gz','t10k-images-idx3-ubyte.gz','t10k-labels-idx1-ubyte.gz'] 189 | for file_name in file_names: 190 | url = (url_base+file_name).format(**locals()) 191 | print(url) 192 | out_path = os.path.join(data_dir,file_name) 193 | cmd = ['curl', url, '-o', out_path] 194 | print('Downloading ', file_name) 195 | subprocess.call(cmd) 196 | cmd = ['gzip', '-d', out_path] 197 | print('Decompressing ', file_name) 198 | subprocess.call(cmd) 199 | 200 | def prepare_data_dir(path = './datasets/'): 201 | if not os.path.exists(path): 202 | os.mkdir(path) 203 | 204 | if __name__ == '__main__': 205 | opt = parser.parse_args() 206 | prepare_data_dir() 207 | 208 | if opt.dataset == 'celebA': 209 | download_celeb_a('./datasets/') 210 | elif opt.dataset == 'lsun': 211 | download_lsun('./datasets/') 212 | elif opt.dataset == 'mnist': 213 | download_mnist('./datasets/') 214 | # for experiment on pointnet 215 | elif opt.dataset == 'modelnet40_pcl': 216 | download_modelnet40_pcl('./datasets/') 217 | elif opt.dataset == 'shapenetcore_partanno': 218 | download_shapenetcore_partanno('./datasets/') 219 | elif opt.dataset == 'shapenetcore_partanno_h5': 220 | debug_here() 221 | download_shapenetcore_partanno_h5('./datasets/') 222 | elif opt.dataset == 'shapenetcore_partanno_ben_v0': 223 | download_shapenet_partanno_seg_bench_v0('./datasets/') 224 | else: 225 | print('not supported dataset dowloading') 226 | 227 | 228 | -------------------------------------------------------------------------------- /eval_part_seg_folder.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import argparse 3 | import random 4 | import time 5 | import os 6 | import json 7 | import numpy as np 8 | 9 | import torch 10 | import torch.nn as nn 11 | import torch.nn.parallel # for multi-GPU training 12 | import torch.backends.cudnn as cudnn 13 | import torch.optim as optim 14 | import torchvision.transforms as transforms # using transforms 15 | import torch.utils.data 16 | 17 | from torch.autograd import Variable 18 | 19 | import models.pointnet as pointnet 20 | import misc.shapenetcore_partanno_datasets as shapenetcore_partanno_dset 21 | import misc.utils as utils 22 | 23 | import misc.transforms as pc_transforms 24 | 25 | import misc.shapenet_test_from_list as shpnt_t_frm_lst 26 | 27 | # import my_modules.utils as mutils 28 | 29 | from IPython.core.debugger import Tracer 30 | debug_here = Tracer() 31 | 32 | parser = 
argparse.ArgumentParser() 33 | 34 | # specify data and datapath 35 | parser.add_argument('--dataset', default='shapenetcore_partanno', help='shapenetcore_partanno | ?? ') 36 | # ply data dir 37 | parser.add_argument('--ply_data_dir', default='./datasets/raw_datasets/PartAnnotation', help='path to ply data') 38 | parser.add_argument('--h5_data_dir', default='./datasets/shapenet_part_seg_hdf5_dataset', help='path to h5 data') 39 | # number of workers for loading data 40 | parser.add_argument('--workers', type=int, help='number of data loading workers', default=2) 41 | # loading data 42 | parser.add_argument('--batch_size', type=int, default=1, help='input batch size') 43 | parser.add_argument('--num_points', type=int, default=2048, help='input batch size') 44 | 45 | parser.add_argument('--print_freq', type=int, default=25, help='number of iterations to print ') 46 | parser.add_argument('--pretrained_model', type=str, default = './models_checkpoint/model_best.pth', help='model path') 47 | parser.add_argument('--test_results_dir', type=str, default = None, help='model path') 48 | 49 | # cuda stuff 50 | parser.add_argument('--gpu_id' , type=str, default='1', help='which gpu to use, used only when ngpu is 1') 51 | parser.add_argument('--cuda' , action='store_true', help='enables cuda') 52 | parser.add_argument('--ngpu' , type=int, default=1, help='number of GPUs to use') 53 | 54 | ##################################################################### 55 | ## global setting 56 | ##################################################################### 57 | opt = parser.parse_args() 58 | print(opt) 59 | if opt.test_results_dir is None: 60 | opt.test_results_folder = 'test_results_folder' 61 | 62 | # make dir 63 | os.system('mkdir {0}'.format(opt.test_results_dir)) 64 | 65 | 66 | os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpu_id 67 | 68 | ngpu = int(opt.ngpu) 69 | # opt.manualSeed = random.randint(1, 10000) # fix seed 70 | opt.manualSeed = 123456 71 | 72 | if torch.cuda.is_available() and not opt.cuda: 73 | print("WARNING: You have a CUDA device, so you should probably run with --cuda") 74 | else: 75 | if ngpu == 1: 76 | print('so we use 1 gpu to training') 77 | print('setting gpu on gpuid {0}'.format(opt.gpu_id)) 78 | 79 | if opt.cuda: 80 | torch.cuda.manual_seed(opt.manualSeed) 81 | 82 | cudnn.benchmark = True 83 | print("Random Seed: ", opt.manualSeed) 84 | random.seed(opt.manualSeed) 85 | torch.manual_seed(opt.manualSeed) 86 | ############################################################## 87 | ## 88 | ############################################################## 89 | def printout(flog, data): 90 | print(data) 91 | flog.write(data + '\n') 92 | 93 | def output_color_point_cloud(data, seg, out_file): 94 | with open(out_file, 'w') as f: 95 | l = len(seg) 96 | for i in range(l): 97 | color = color_map[seg[i]] 98 | f.write('v %f %f %f %f %f %f\n' % (data[i][0], data[i][1], data[i][2], color[0], color[1], color[2])) 99 | 100 | def output_color_point_cloud_red_blue(data, seg, out_file): 101 | with open(out_file, 'w') as f: 102 | l = len(seg) 103 | for i in range(l): 104 | if seg[i] == 1: 105 | color = [0, 0, 1] 106 | elif seg[i] == 0: 107 | color = [1, 0, 0] 108 | else: 109 | color = [0, 0, 0] 110 | 111 | f.write('v %f %f %f %f %f %f\n' % (data[i][0], data[i][1], data[i][2], color[0], color[1], color[2])) 112 | 113 | def predict(model, test_loader): 114 | # switch to evaluate mode 115 | model.eval() 116 | 117 | ################Note############################## 118 | # each sample may have different number 
of points 119 | # so just use batch of size 1 120 | ################################################## 121 | debug_here() 122 | for i, (points_data, _seg_data, labels) in enumerate(test_loader, 0): 123 | if i%10 == 0: 124 | print('{0}/{1}'.format(i, len(test_loader))) 125 | # print(points_data.size()) 126 | 127 | points_data = Variable(points_data, volatile=True) 128 | points_data = points_data.transpose(2, 1) 129 | _seg_data = Variable(_seg_data, volatile=True) 130 | 131 | if opt.cuda: 132 | points_data = points_data.cuda() 133 | _seg_data = _seg_data.long().cuda() # must be long cuda tensor 134 | 135 | # forward, backward optimize 136 | pred, _ = model(points_data) 137 | pred = pred.view(-1, opt.num_seg_classes) 138 | _seg_data = _seg_data.view(-1, 1)[:, 0] # min is already 0 139 | pred_choice = pred.data.max(1)[1] 140 | 141 | print('finished loading') 142 | 143 | def main(): 144 | global opt 145 | ################################################ 146 | # should specify it to be 3000?????????????? 147 | ################################################ 148 | MAX_NUM_POINTS = 3000 # the max number of points in the all testing data shapes 149 | 150 | pc_transform_all = transforms.Compose([ 151 | pc_transforms.Normalize_PC(), 152 | pc_transforms.Augment2PointNum(MAX_NUM_POINTS), 153 | ]) 154 | 155 | 156 | # part id(1, 2, ..) 157 | # for each label_id, there is a set of part id(encoding using 1, 2, 3, 4) 158 | label_ids2pid = json.load(open(os.path.join(opt.h5_data_dir, 'overallid_to_catid_partid.json'), 'r')) 159 | 160 | label_id2pid_set = {} 161 | for idx in range(len(label_ids2pid)): # 50 162 | label_id, pid = label_ids2pid[idx] # objid = '02691156' 163 | if not label_id in label_id2pid_set.keys(): 164 | label_id2pid_set[label_id] = [] 165 | label_id2pid_set[label_id].append(idx) # 0, 1, 2, ... 
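# Illustration (example values partly hypothetical): overallid_to_catid_partid.json is a list of
# [category_id, part_id] pairs, so after this loop label_id2pid_set maps each category id to the
# overall part ids (0..49) that belong to it, e.g. label_id2pid_set['02691156'] == [0, 1, 2, 3]
# for the airplane category, matching the '02691156_1: 0', '02691156_2: 1' style mapping loaded below.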
166 | 167 | 168 | all_label_names2label_ids = os.path.join(opt.h5_data_dir, 'all_object_categories.txt') 169 | fin = open(all_label_names2label_ids, 'r') 170 | lines = [line.rstrip() for line in fin.readlines()] 171 | debug_here() 172 | label_ids = [line.split()[1] for line in lines] 173 | label_names = [line.split()[0] for line in lines] 174 | # 175 | label_ids2ids = {label_ids[i]:i for i in range(len(label_ids))} 176 | fin.close() 177 | 178 | color_map_file_path = os.path.join(opt.h5_data_dir, 'part_color_mapping.json') 179 | # 50 color map 180 | # color_map[0] = [0.65, 0.95, 0.05] 181 | color_map = json.load(open(color_map_file_path, 'r')) 182 | NUM_LABELS = len(label_ids) # 16 183 | NUM_PARTS = len(label_ids2pid) # 50 184 | opt.num_seg_classes = NUM_PARTS 185 | 186 | # 02691156_1: 0 187 | # 02691156_2: 1 188 | label_id_pid2pid_in_set = json.load(open(os.path.join(opt.h5_data_dir, 'catid_partid_to_overallid.json'), 'r')) 189 | 190 | # call predict 191 | test_ply_data_list_path = os.path.join(opt.ply_data_dir, 'test_ply_file_list.txt') 192 | 193 | test_dataset = shpnt_t_frm_lst.PlyFileList(opt.ply_data_dir, test_ply_data_list_path, 194 | label_id_pid2pid_in_set, label_ids2ids, label_ids, transform=pc_transform_all) 195 | 196 | ################Note############################## 197 | # each sample may have different number of points 198 | # so just use batch of size 1 199 | ################################################## 200 | test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=1, shuffle=True, num_workers=0, pin_memory=True) 201 | 202 | ######################################################################## 203 | ## 204 | ######################################################################## 205 | assert opt.pretrained_model != '', 'must specify the pre-trained model' 206 | print("Loading Pretrained Model from {0}".format(opt.pretrained_model)) 207 | model = pointnet.PointNetDenseCls(num_points=opt.num_points, k=opt.num_seg_classes) 208 | model.load_state_dict(torch.load(opt.pretrained_model)) 209 | 210 | if opt.cuda: 211 | print('shift model and criterion to GPU .. ') 212 | model = model.cuda() 213 | 214 | predict(model, test_loader) 215 | 216 | if __name__ == '__main__': 217 | main() 218 | -------------------------------------------------------------------------------- /eval_part_seg_h5.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import argparse 3 | import random 4 | import time 5 | import os 6 | import json 7 | import numpy as np 8 | 9 | import torch 10 | import torch.nn as nn 11 | import torch.nn.parallel # for multi-GPU training 12 | import torch.backends.cudnn as cudnn 13 | import torch.optim as optim 14 | import torch.utils.data 15 | 16 | from torch.autograd import Variable 17 | 18 | import models.pointnet as pointnet 19 | import misc.shapenetcore_partanno_datasets as shapenetcore_partanno_dset 20 | import misc.utils as utils 21 | 22 | # import my_modules.utils as mutils 23 | 24 | from IPython.core.debugger import Tracer 25 | debug_here = Tracer() 26 | 27 | parser = argparse.ArgumentParser() 28 | 29 | # specify data and datapath 30 | parser.add_argument('--dataset', default='shapenetcore_partanno', help='shapenetcore_partanno | ?? 
') 31 | # ply data dir 32 | parser.add_argument('--ply_data_dir', default='./datasets/raw_datasets/PartAnnotation', help='path to ply data') 33 | parser.add_argument('--h5_data_dir', default='./datasets/shapenet_part_seg_hdf5_dataset', help='path to h5 data') 34 | # number of workers for loading data 35 | parser.add_argument('--workers', type=int, help='number of data loading workers', default=2) 36 | # loading data 37 | parser.add_argument('--batch_size', type=int, default=1, help='input batch size') 38 | parser.add_argument('--num_points', type=int, default=2048, help='input batch size') 39 | 40 | parser.add_argument('--print_freq', type=int, default=25, help='number of iterations to print ') 41 | parser.add_argument('--pretrained_model', type=str, default = './models_checkpoint/model_best.pth', help='model path') 42 | parser.add_argument('--test_results_dir', type=str, default = None, help='test path') 43 | parser.add_argument('--output_verbose', type=bool, default=True, help='output verbose') 44 | # cuda stuff 45 | parser.add_argument('--gpu_id' , type=str, default='1', help='which gpu to use, used only when ngpu is 1') 46 | parser.add_argument('--cuda' , action='store_true', help='enables cuda') 47 | parser.add_argument('--ngpu' , type=int, default=1, help='number of GPUs to use') 48 | 49 | ##################################################################### 50 | ## global setting 51 | ##################################################################### 52 | opt = parser.parse_args() 53 | print(opt) 54 | if opt.test_results_dir is None: 55 | opt.test_results_dir = 'test_results_folder' 56 | 57 | # make dir 58 | os.system('mkdir {0}'.format(opt.test_results_dir)) 59 | 60 | 61 | os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpu_id 62 | 63 | ngpu = int(opt.ngpu) 64 | # opt.manualSeed = random.randint(1, 10000) # fix seed 65 | opt.manualSeed = 123456 66 | 67 | if torch.cuda.is_available() and not opt.cuda: 68 | print("WARNING: You have a CUDA device, so you should probably run with --cuda") 69 | else: 70 | if ngpu == 1: 71 | print('so we use 1 gpu to training') 72 | print('setting gpu on gpuid {0}'.format(opt.gpu_id)) 73 | 74 | if opt.cuda: 75 | torch.cuda.manual_seed(opt.manualSeed) 76 | 77 | cudnn.benchmark = True 78 | print("Random Seed: ", opt.manualSeed) 79 | random.seed(opt.manualSeed) 80 | torch.manual_seed(opt.manualSeed) 81 | ############################################################## 82 | ## 83 | ############################################################## 84 | def printout(flog, data): 85 | print(data) 86 | flog.write(data + '\n') 87 | 88 | def output_color_point_cloud(data, seg, color_map, out_file): 89 | with open(out_file, 'w') as f: 90 | l = seg.size(0) 91 | for i in range(l): 92 | color = color_map[seg[i]] 93 | f.write('v %f %f %f %f %f %f\n' % (data[0][i][0], data[0][i][1], data[0][i][2], color[0], color[1], color[2])) 94 | 95 | def output_point_cloud_label_mask(data, seg, out_file): 96 | with open(out_file, 'w') as f: 97 | l = seg.size(0) 98 | for i in range(l): 99 | f.write('v %f %f %f %f\n' % (data[0][i][0], data[0][i][1], data[0][i][2], seg[i])) 100 | 101 | 102 | 103 | def output_color_point_cloud_red_blue(data, seg, out_file): 104 | with open(out_file, 'w') as f: 105 | l = len(seg) 106 | for i in range(l): 107 | if seg[i] == 1: 108 | color = [0, 0, 1] 109 | elif seg[i] == 0: 110 | color = [1, 0, 0] 111 | else: 112 | color = [0, 0, 0] 113 | 114 | f.write('v %f %f %f %f %f %f\n' % (data[0][i][0], data[0][i][1], data[0][i][2], color[0], color[1], color[2])) 115 | 116 
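# Note on the output files (example coordinates hypothetical): each helper above writes one
# OBJ-style vertex line per point, with either a part label or an RGB colour appended after the
# xyz coordinates. With color_map[0] == [0.65, 0.95, 0.05], a line written by
# output_color_point_cloud() looks like:
#     v 0.112000 -0.043000 0.871000 0.650000 0.950000 0.050000
# and tools/obj_data/0_labels_mask.obj in the repository tree presumably comes from
# output_point_cloud_label_mask() with shape_idx == 0.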
| def predict(model, test_loader,color_map, opt): 117 | ################################################## 118 | # switch to evaluate mode 119 | ################################################## 120 | model.eval() 121 | ################################################## 122 | ## log file 123 | ################################################## 124 | # debug_here() 125 | flog = open(os.path.join(opt.test_results_dir, 'log.txt'), 'w') 126 | 127 | ################Note############################## 128 | # each sample may have different number of points 129 | # so just use batch of size 1 130 | ################################################## 131 | # debug_here() 132 | total_acc = 0.0 133 | total_seen = 0 134 | total_acc_iou = 0.0 135 | total_per_label_acc = np.zeros(opt.num_labels).astype(np.float32) 136 | total_per_label_iou = np.zeros(opt.num_labels).astype(np.float32) 137 | total_per_label_seen = np.zeros(opt.num_labels).astype(np.int32) 138 | # currently only support batch size equal to 1 139 | for shape_idx, (points_data, _labels, _seg_data) in enumerate(test_loader): 140 | if shape_idx%10 == 0: 141 | print('{0}/{1}'.format(shape_idx, len(test_loader))) 142 | 143 | points_data = Variable(points_data, volatile=True) 144 | points_data = points_data.transpose(2, 1) 145 | _labels = _labels.long() 146 | _seg_data = _seg_data.long() 147 | labels_onehot = utils.labels_batch2one_hot_batch(_labels, opt.num_labels) 148 | labels_onehot = Variable(labels_onehot, volatile=True) # we dnonot calculate the gradients here 149 | 150 | _seg_data = Variable(_seg_data, volatile=True) 151 | ################################################## 152 | ## 153 | ################################################## 154 | 155 | cur_gt_label = _labels[0][0] 156 | cur_label_one_hot = np.zeros((1, opt.num_labels), dtype=np.float32) 157 | cur_label_one_hot[0, cur_gt_label] = 1 158 | # ex: [12, 13, 14, 15] 159 | iou_pids = opt.label_id2pid_set[opt.label_ids[cur_gt_label]] 160 | # [0, 1, .., 11, 16, ..., 49] 161 | non_part_labels = list(set(np.arange(opt.num_seg_classes)).difference(set(iou_pids))) 162 | 163 | if opt.cuda: 164 | points_data = points_data.cuda() 165 | labels_onehot = labels_onehot.float().cuda() 166 | _seg_data = _seg_data.cuda() # must be long cuda tensor 167 | 168 | pred_seg, _, _ = model(points_data, labels_onehot) 169 | pred_seg = pred_seg.view(-1, opt.num_seg_classes) 170 | mini = np.min(pred_seg.data.numpy()) 171 | # debug_here() 172 | pred_seg[:, torch.from_numpy(np.array(non_part_labels))] = mini - 1000 173 | pred_seg_choice = pred_seg.data.max(1)[1] 174 | 175 | ################################################################## 176 | ## groundtruth segment mask 177 | ################################################################## 178 | _seg_data = _seg_data.view(-1, 1)[:, 0] # min is already 0 179 | 180 | seg_acc = np.mean(pred_seg_choice.numpy() == _seg_data.data.long().numpy()) 181 | total_acc = seg_acc + total_acc 182 | 183 | total_seen += 1 184 | 185 | total_per_label_seen[cur_gt_label] += 1 186 | total_per_label_acc[cur_gt_label] += seg_acc 187 | ############################################ 188 | ## 189 | ############################################ 190 | mask = np.int32(pred_seg_choice.numpy() == _seg_data.data.long().numpy()) 191 | total_iou = 0.0 192 | iou_log = '' 193 | 194 | for pid in iou_pids: 195 | n_pred = np.sum(pred_seg_choice.numpy() == pid) 196 | n_gt = np.sum(_seg_data.data.long().numpy() == pid) 197 | n_intersect = np.sum(np.int32(_seg_data.data.long().numpy() == pid) * mask) 
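# Worked example with made-up counts: if for one part n_pred = 900, n_gt = 1000 and
# n_intersect = 850, then n_union = 900 + 1000 - 850 = 1050 and the part IoU is
# 850 / 1050 ≈ 0.810; a part that appears in neither the prediction nor the ground
# truth (n_union == 0) is counted as IoU 1 below.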
198 | n_union = n_pred + n_gt - n_intersect 199 | iou_log += '_' + str(n_pred)+'_'+str(n_gt)+'_'+str(n_intersect)+'_'+str(n_union)+'_' 200 | if n_union == 0: 201 | total_iou += 1 202 | iou_log += '_1\n' 203 | else: 204 | total_iou += n_intersect * 1.0 / n_union 205 | iou_log += '_'+str(n_intersect * 1.0 / n_union)+'\n' 206 | 207 | 208 | 209 | avg_iou = total_iou / len(iou_pids) 210 | total_acc_iou += avg_iou 211 | total_per_label_iou[cur_gt_label] += avg_iou 212 | # debug_here() 213 | ######################################## 214 | ## transpose data 215 | ######################################## 216 | points_data = points_data.transpose(1, 2) 217 | if opt.output_verbose: 218 | output_point_cloud_label_mask(points_data.data, _seg_data.data.long(), os.path.join(opt.test_results_dir, str(shape_idx)+'_labels_mask.obj')) 219 | 220 | output_color_point_cloud(points_data.data, _seg_data.data.long(), color_map, os.path.join(opt.test_results_dir, str(shape_idx)+'_gt.obj')) 221 | output_color_point_cloud(points_data.data, pred_seg_choice, color_map, os.path.join(opt.test_results_dir, str(shape_idx)+'_pred.obj')) 222 | output_color_point_cloud_red_blue(points_data.data, np.int32(_seg_data.data.long().numpy() == pred_seg_choice.numpy()), 223 | os.path.join(opt.test_results_dir, str(shape_idx)+'_diff.obj')) 224 | 225 | with open(os.path.join(opt.test_results_dir, str(shape_idx)+'.log'), 'w') as fout: 226 | # fout.write('Total Point: %d\n\n' % ori_point_num) 227 | fout.write('Ground Truth: %s\n' % opt.label_names[cur_gt_label]) 228 | # fout.write('Predict: %s\n\n' % opt.label_names[label_pred_val]) 229 | fout.write('Accuracy: %f\n' % seg_acc) 230 | fout.write('IoU: %f\n\n' % avg_iou) 231 | fout.write('IoU details: %s\n' % iou_log) 232 | 233 | printout(flog, 'Accuracy: %f' % (total_acc / total_seen)) 234 | printout(flog, 'IoU: %f' % (total_acc_iou / total_seen)) 235 | 236 | for idx in range(opt.num_labels): 237 | printout(flog, '\t ' + opt.label_ids[idx] + ' Total Number: ' + str(total_per_label_seen[idx])) 238 | if total_per_label_acc[idx] > 0: 239 | printout(flog, '\t ' + opt.label_ids[idx] + ' Accuracy: ' + \ 240 | str(total_per_label_acc[idx] / total_per_label_acc[idx])) 241 | printout(flog, '\t ' + opt.label_ids[idx] + ' IoU: '+ \ 242 | str(total_per_label_iou[idx] / total_per_label_acc[idx])) 243 | 244 | 245 | 246 | 247 | 248 | print('finished prediction') 249 | 250 | def main(): 251 | global opt 252 | 253 | # part id(1, 2, ..) 254 | # for each label_id, there is a set of part id(encoding using 1, 2, 3, 4) 255 | label_ids2pid = json.load(open(os.path.join(opt.h5_data_dir, 'overallid_to_catid_partid.json'), 'r')) 256 | 257 | label_id2pid_set = {} 258 | for idx in range(len(label_ids2pid)): # 50 259 | label_id, pid = label_ids2pid[idx] # objid = '02691156' 260 | if not label_id in label_id2pid_set.keys(): 261 | label_id2pid_set[label_id] = [] 262 | label_id2pid_set[label_id].append(idx) # 0, 1, 2, ... 
263 | opt.label_id2pid_set = label_id2pid_set 264 | 265 | 266 | all_label_names2label_ids = os.path.join(opt.h5_data_dir, 'all_object_categories.txt') 267 | fin = open(all_label_names2label_ids, 'r') 268 | lines = [line.rstrip() for line in fin.readlines()] 269 | # debug_here() 270 | label_ids = [line.split()[1] for line in lines] 271 | opt.label_ids = label_ids 272 | label_names = [line.split()[0] for line in lines] 273 | opt.label_names = label_names 274 | # 275 | label_ids2ids = {label_ids[i]:i for i in range(len(label_ids))} 276 | fin.close() 277 | 278 | color_map_file_path = os.path.join(opt.h5_data_dir, 'part_color_mapping.json') 279 | # 50 color map 280 | # color_map[0] = [0.65, 0.95, 0.05] 281 | color_map = json.load(open(color_map_file_path, 'r')) 282 | NUM_LABELS = len(label_ids) # 16 283 | NUM_PARTS = len(label_ids2pid) # 50 284 | opt.num_seg_classes = NUM_PARTS 285 | opt.num_labels = NUM_LABELS 286 | 287 | # 02691156_1: 0 288 | # 02691156_2: 1 289 | label_id_pid2pid_in_set = json.load(open(os.path.join(opt.h5_data_dir, 'catid_partid_to_overallid.json'), 'r')) 290 | 291 | #################################################################### 292 | # dataset 293 | #################################################################### 294 | test_dataset = shapenetcore_partanno_dset.Shapenetcore_Part_Dataset(opt.h5_data_dir, mode='test') 295 | test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=opt.batch_size, 296 | shuffle=True, num_workers=int(opt.workers)) 297 | 298 | ######################################################################## 299 | ## 300 | ######################################################################## 301 | assert opt.pretrained_model != '', 'must specify the pre-trained model' 302 | print("Loading Pretrained Model from {0}".format(opt.pretrained_model)) 303 | model = pointnet.PointNetPartDenseCls(num_points=opt.num_points, k=opt.num_seg_classes) 304 | model.load_state_dict(torch.load(opt.pretrained_model)) 305 | 306 | if opt.cuda: 307 | print('shift model and criterion to GPU .. ') 308 | model = model.cuda() 309 | 310 | predict(model, test_loader, color_map, opt) 311 | 312 | if __name__ == '__main__': 313 | main() 314 | -------------------------------------------------------------------------------- /main_cls.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import argparse 3 | import random 4 | import time 5 | import os 6 | import numpy as np 7 | 8 | import torch 9 | import torch.nn as nn 10 | import torch.nn.parallel # for multi-GPU training 11 | import torch.backends.cudnn as cudnn 12 | import torch.optim as optim 13 | import torch.utils.data 14 | 15 | from torch.autograd import Variable 16 | 17 | import models.pointnet as pointnet 18 | import misc.modelnet40_pcl_datasets as modelnet40_dset 19 | import misc.utils as utils 20 | 21 | # import my_modules.utils as mutils 22 | 23 | from IPython.core.debugger import Tracer 24 | debug_here = Tracer() 25 | 26 | 27 | parser = argparse.ArgumentParser() 28 | 29 | # specify data and datapath 30 | parser.add_argument('--dataset', default='modelnet40_pcl', help='modelnet40_pcl | ?? 
') 31 | parser.add_argument('--data_dir', default='./datasets/modelnet40_ply_hdf5_2048', help='path to dataset') 32 | # number of workers for loading data 33 | parser.add_argument('--workers', type=int, help='number of data loading workers', default=2) 34 | # loading data 35 | parser.add_argument('--batchSize', type=int, default=32, help='input batch size') 36 | # parser.add_argument('--imageSize', type=int, default=64, help='the height / width of the input image to network') 37 | # parser.add_argument('--nc', type=int, default=3, help='input image channels') 38 | # spicify noise dimension to the Generator 39 | 40 | # on network 41 | parser.add_argument('--num_classes', type=int, default=40, help='number of classes') 42 | parser.add_argument('--num_points', type=int, default=2048, help='number of points per example') 43 | # spcify optimization stuff 44 | parser.add_argument('--adam', action='store_true', help='Whether to use adam (default is rmsprop)') 45 | parser.add_argument('--lr', '--learning-rate', default=0.001, type=float, 46 | help='initial learning rate') 47 | parser.add_argument('--momentum', default=0.9, type=float, help='momentum') 48 | parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float, 49 | help='weight decay (default: 1e-4)') 50 | 51 | parser.add_argument('--max_epochs', type=int, default=140, help='number of epochs to train for') 52 | # parser.add_argument('--workers', type=int, help='number of data loading workers', default=4) 53 | parser.add_argument('--nepoch', type=int, default=25, help='number of epochs to train for') 54 | parser.add_argument('--print_freq', type=int, default=25, help='number of iterations to print ') 55 | parser.add_argument('--checkpoint_folder', default=None, help='check point path') 56 | parser.add_argument('--model', type=str, default = '', help='model path') 57 | 58 | # cuda stuff 59 | parser.add_argument('--gpu_id' , type=str, default='1', help='which gpu to use, used only when ngpu is 1') 60 | parser.add_argument('--cuda' , action='store_true', help='enables cuda') 61 | parser.add_argument('--ngpu' , type=int, default=1, help='number of GPUs to use') 62 | # clamp parameters into a cube 63 | parser.add_argument('--gradient_clip', type=float, default=0.01) 64 | 65 | # resume training from a checkpoint 66 | parser.add_argument('--init_model', default='', help="model to resume training") 67 | parser.add_argument('--optim_state_from', default='', help="optim state to resume training") 68 | 69 | opt = parser.parse_args() 70 | print(opt) 71 | 72 | if opt.checkpoint_folder is None: 73 | opt.checkpoint_folder = 'models_checkpoint' 74 | 75 | # make dir 76 | os.system('mkdir {0}'.format(opt.checkpoint_folder)) 77 | 78 | # dataset 79 | if opt.dataset == 'modelnet40_pcl': 80 | train_dataset = modelnet40_dset.Modelnet40_PCL_Dataset(opt.data_dir, npoints=2048, train=True) 81 | test_dataset = modelnet40_dset.Modelnet40_PCL_Dataset(opt.data_dir, npoints=2048, train=False) 82 | else: 83 | print('not supported dataset, so exit') 84 | exit() 85 | 86 | print('number of train samples is: ', len(train_dataset)) 87 | print('number of test samples is: ', len(test_dataset)) 88 | print('finished loading data') 89 | 90 | os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpu_id 91 | 92 | ngpu = int(opt.ngpu) 93 | # opt.manualSeed = random.randint(1, 10000) # fix seed 94 | opt.manualSeed = 123456 95 | 96 | if torch.cuda.is_available() and not opt.cuda: 97 | print("WARNING: You have a CUDA device, so you should probably run with --cuda") 98 | else: 99 | if ngpu == 
1: 100 | print('so we use 1 gpu to training') 101 | print('setting gpu on gpuid {0}'.format(opt.gpu_id)) 102 | 103 | if opt.cuda: 104 | torch.cuda.manual_seed(opt.manualSeed) 105 | 106 | cudnn.benchmark = True 107 | print("Random Seed: ", opt.manualSeed) 108 | random.seed(opt.manualSeed) 109 | torch.manual_seed(opt.manualSeed) 110 | 111 | 112 | def train(train_loader, model, criterion, optimizer, epoch, opt): 113 | """ 114 | train for one epoch on the training set 115 | """ 116 | batch_time = utils.AverageMeter() 117 | losses = utils.AverageMeter() 118 | top1 = utils.AverageMeter() 119 | 120 | # training mode 121 | model.train() 122 | 123 | end = time.time() 124 | for i, (input_points, labels) in enumerate(train_loader): 125 | # bz x 2048 x 3 126 | input_points = Variable(input_points) 127 | input_points = input_points.transpose(2, 1) 128 | labels = Variable(labels[:, 0]) 129 | 130 | # print(points.size()) 131 | # print(labels.size()) 132 | # shift data to GPU 133 | if opt.cuda: 134 | input_points = input_points.cuda() 135 | labels = labels.long().cuda() # must be long cuda tensor 136 | 137 | # forward, backward optimize 138 | output, _ = model(input_points) 139 | # debug_here() 140 | loss = criterion(output, labels) 141 | ############################## 142 | # measure accuracy 143 | ############################## 144 | prec1 = utils.accuracy(output.data, labels.data, topk=(1,))[0] 145 | losses.update(loss.data[0], input_points.size(0)) 146 | top1.update(prec1[0], input_points.size(0)) 147 | 148 | ############################## 149 | # compute gradient and do sgd 150 | ############################## 151 | optimizer.zero_grad() 152 | loss.backward() 153 | ############################## 154 | # gradient clip stuff 155 | ############################## 156 | utils.clip_gradient(optimizer, opt.gradient_clip) 157 | 158 | optimizer.step() 159 | 160 | # measure elapsed time 161 | batch_time.update(time.time() - end) 162 | end = time.time() 163 | if i % opt.print_freq == 0: 164 | print('Epoch: [{0}][{1}/{2}]\t' 165 | 'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t' 166 | 'Loss {loss.val:.4f} ({loss.avg:.4f})\t' 167 | 'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format( 168 | epoch, i, len(train_loader), batch_time=batch_time, 169 | loss=losses, top1=top1)) 170 | 171 | 172 | def validate(test_loader, model, criterion, epoch, opt): 173 | """Perform validation on the validation set""" 174 | batch_time = utils.AverageMeter() 175 | losses = utils.AverageMeter() 176 | top1 = utils.AverageMeter() 177 | 178 | # switch to evaluate mode 179 | model.eval() 180 | 181 | end = time.time() 182 | # tested_samples = 0 183 | for i, (input_points, labels) in enumerate(test_loader): 184 | # tested_samples = tested_samples + input_points.size(0) 185 | 186 | if opt.cuda: 187 | input_points = input_points.cuda() 188 | labels = labels.long().cuda(async=True) 189 | input_points = input_points.transpose(2, 1) 190 | input_var = Variable(input_points, volatile=True) 191 | target_var = Variable(labels[:, 0], volatile=True) 192 | 193 | # compute output 194 | output, _ = model(input_var) 195 | loss = criterion(output, target_var) 196 | 197 | # measure accuracy and record loss 198 | prec1 = utils.accuracy(output.data, target_var.data, topk=(1,))[0] 199 | losses.update(loss.data[0], input_points.size(0)) 200 | top1.update(prec1[0], input_points.size(0)) 201 | 202 | # measure elapsed time 203 | batch_time.update(time.time() - end) 204 | end = time.time() 205 | 206 | if i % opt.print_freq == 0: 207 | print('Test: [{0}/{1}]\t' 208 | 
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t' 209 | 'Loss {loss.val:.4f} ({loss.avg:.4f})\t' 210 | 'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format( 211 | i, len(test_loader), batch_time=batch_time, loss=losses, 212 | top1=top1)) 213 | 214 | print(' * Prec@1 {top1.avg:.3f}'.format(top1=top1)) 215 | # print(tested_samples) 216 | return top1.avg 217 | 218 | def main(): 219 | global opt 220 | best_prec1 = 0 221 | # only used when we resume training from some checkpoint model 222 | resume_epoch = 0 223 | # train data loader 224 | # for loader, droplast by default is set to false 225 | train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=opt.batchSize, 226 | shuffle=True, num_workers=int(opt.workers)) 227 | test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=opt.batchSize, 228 | shuffle=True, num_workers=int(opt.workers)) 229 | 230 | 231 | # create model 232 | # for modelnet40, opt.num_points is set to be 2048, opt.num_classes is 40 233 | model = pointnet.PointNetCls(num_points = opt.num_points, k = opt.num_classes) 234 | if opt.init_model != '': 235 | print('loading pretrained model from {0}'.format(opt.init_model)) 236 | model.load_state_dict(torch.load(opt.init_model)) 237 | 238 | criterion = nn.CrossEntropyLoss() 239 | 240 | if opt.cuda: 241 | print('shift model and criterion to GPU .. ') 242 | model = model.cuda() 243 | # define loss function (criterion) and pptimizer 244 | criterion = criterion.cuda() 245 | # optimizer 246 | 247 | optimizer = optim.SGD(model.parameters(), opt.lr, 248 | momentum=opt.momentum, 249 | weight_decay=opt.weight_decay) 250 | 251 | if opt.optim_state_from != '': 252 | print('loading optim_state_from {0}'.format(opt.optim_state_from)) 253 | optim_state = torch.load(opt.optim_state_from) 254 | resume_epoch = optim_state['epoch'] 255 | best_prec1 = optim_state['best_prec1'] 256 | # configure optimzer 257 | optimizer.load_state_dict(optim_state['optim_state_best']) 258 | 259 | for epoch in range(resume_epoch, opt.max_epochs): 260 | ################################# 261 | # train for one epoch 262 | # debug_here() 263 | ################################# 264 | train(train_loader, model, criterion, optimizer, epoch, opt) 265 | 266 | 267 | ################################# 268 | # validate 269 | ################################# 270 | prec1 = validate(test_loader, model, criterion, epoch, opt) 271 | 272 | ################################## 273 | # save checkpoints 274 | ################################## 275 | if best_prec1 < prec1: 276 | best_prec1 = prec1 277 | path_checkpoint = '{0}/model_best.pth'.format(opt.checkpoint_folder) 278 | utils.save_checkpoint(model.state_dict(), path_checkpoint) 279 | 280 | # save optim state 281 | path_optim_state = '{0}/optim_state_best.pth'.format(opt.checkpoint_folder) 282 | optim_state = {} 283 | optim_state['epoch'] = epoch + 1 # because epoch starts from 0 284 | optim_state['best_prec1'] = best_prec1 285 | optim_state['optim_state_best'] = optimizer.state_dict() 286 | utils.save_checkpoint(optim_state, path_optim_state) 287 | # problem, should we store latest optim state or model, currently, we donot 288 | 289 | print('best accuracy: ', best_prec1) 290 | 291 | 292 | if __name__ == '__main__': 293 | main() 294 | -------------------------------------------------------------------------------- /main_part_seg.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import argparse 3 | import random 4 | import time 5 | import os 
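# misc/utils.py is not included in this excerpt. Below is a minimal sketch (an assumption,
# not the repository's actual code) of what utils.labels_batch2one_hot_batch(labels, num_classes),
# used in train()/validate() further down, presumably does with a (batch, 1) LongTensor of
# category ids:
import torch

def labels_batch2one_hot_batch(labels, num_classes):
    # sketch only; the real helper lives in misc/utils.py
    one_hot = torch.zeros(labels.size(0), num_classes)
    one_hot.scatter_(1, labels.view(-1, 1), 1)  # put a 1 at each (row, category) position
    return one_hot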
6 | import numpy as np 7 | 8 | import torch 9 | import torch.nn as nn 10 | import torch.nn.parallel # for multi-GPU training 11 | import torch.backends.cudnn as cudnn 12 | import torch.optim as optim 13 | import torch.utils.data 14 | 15 | from torch.autograd import Variable 16 | 17 | import models.pointnet as pointnet 18 | import misc.shapenetcore_partanno_datasets as shapenetcore_partanno_dset 19 | import misc.utils as utils 20 | 21 | # import my_modules.utils as mutils 22 | 23 | from IPython.core.debugger import Tracer 24 | debug_here = Tracer() 25 | 26 | 27 | parser = argparse.ArgumentParser() 28 | 29 | # specify data and datapath 30 | parser.add_argument('--dataset', default='shapenetcore_partanno', help='shapenetcore_partanno | ?? ') 31 | parser.add_argument('--data_dir', default='./datasets/shapenet_part_seg_hdf5_dataset', help='path to dataset') 32 | # number of workers for loading data 33 | parser.add_argument('--workers', type=int, help='number of data loading workers', default=2) 34 | # loading data 35 | parser.add_argument('--batch_size', type=int, default=32, help='input batch size') 36 | 37 | # on network 38 | # spcify optimization stuff 39 | parser.add_argument('--adam', action='store_true', help='Whether to use adam (default is rmsprop)') 40 | parser.add_argument('--lr', '--learning-rate', default=0.001, type=float, 41 | help='initial learning rate') 42 | parser.add_argument('--momentum', default=0.9, type=float, help='momentum') 43 | parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float, 44 | help='weight decay (default: 1e-4)') 45 | 46 | parser.add_argument('--max_epochs', type=int, default=140, help='number of epochs to train for') 47 | # parser.add_argument('--workers', type=int, help='number of data loading workers', default=4) 48 | parser.add_argument('--nepoch', type=int, default=25, help='number of epochs to train for') 49 | parser.add_argument('--print_freq', type=int, default=25, help='number of iterations to print ') 50 | parser.add_argument('--checkpoint_folder', default=None, help='check point path') 51 | parser.add_argument('--model', type=str, default = '', help='model path') 52 | 53 | # cuda stuff 54 | parser.add_argument('--gpu_id' , type=str, default='1', help='which gpu to use, used only when ngpu is 1') 55 | parser.add_argument('--cuda' , action='store_true', help='enables cuda') 56 | parser.add_argument('--ngpu' , type=int, default=1, help='number of GPUs to use') 57 | # clamp parameters into a cube 58 | parser.add_argument('--gradient_clip', type=float, default=0.01) 59 | 60 | # resume training from a checkpoint 61 | parser.add_argument('--init_model', default='', help="model to resume training") 62 | parser.add_argument('--optim_state_from', default='', help="optim state to resume training") 63 | 64 | opt = parser.parse_args() 65 | print(opt) 66 | 67 | os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpu_id 68 | 69 | ngpu = int(opt.ngpu) 70 | # opt.manualSeed = random.randint(1, 10000) # fix seed 71 | opt.manualSeed = 123456 72 | 73 | if torch.cuda.is_available() and not opt.cuda: 74 | print("WARNING: You have a CUDA device, so you should probably run with --cuda") 75 | else: 76 | if ngpu == 1: 77 | print('so we use 1 gpu to training') 78 | print('setting gpu on gpuid {0}'.format(opt.gpu_id)) 79 | 80 | if opt.cuda: 81 | torch.cuda.manual_seed(opt.manualSeed) 82 | 83 | cudnn.benchmark = True 84 | print("Random Seed: ", opt.manualSeed) 85 | random.seed(opt.manualSeed) 86 | torch.manual_seed(opt.manualSeed) 87 | 88 | if opt.checkpoint_folder is 
None: 89 | opt.checkpoint_folder = 'models_checkpoint' 90 | 91 | # make dir 92 | os.system('mkdir {0}'.format(opt.checkpoint_folder)) 93 | 94 | # dataset 95 | if opt.dataset == 'shapenetcore_partanno': 96 | train_dataset = shapenetcore_partanno_dset.Shapenetcore_Part_Dataset(opt.data_dir, mode='train') 97 | val_dataset = shapenetcore_partanno_dset.Shapenetcore_Part_Dataset(opt.data_dir, mode='val') 98 | # we can add test_dataset here 99 | else: 100 | print('not supported dataset, so exit') 101 | exit() 102 | 103 | print('number of train samples is: ', len(train_dataset)) 104 | print('number of test samples is: ', len(val_dataset)) 105 | print('finished loading data') 106 | 107 | def train(train_loader, model, criterion, optimizer, epoch, opt): 108 | """ 109 | train for one epoch on the training set 110 | """ 111 | # training mode 112 | model.train() 113 | 114 | for i, (input_points, _labels, segs) in enumerate(train_loader): 115 | # bz x 2048 x 3 116 | input_points = Variable(input_points) 117 | input_points = input_points.transpose(2, 1) 118 | ############### 119 | ## 120 | ############### 121 | _labels = _labels.long() 122 | segs = segs.long() 123 | labels_onehot = utils.labels_batch2one_hot_batch(_labels, opt.num_classes) 124 | labels_onehot = Variable(labels_onehot) # we dnonot calculate the gradients here 125 | # labels_onehot.requires_grad = True 126 | segs = Variable(segs) 127 | 128 | if opt.cuda: 129 | input_points = input_points.cuda() 130 | segs = segs.cuda() # must be long cuda tensor 131 | labels_onehot = labels_onehot.float().cuda() # this will be feed into the network 132 | 133 | optimizer.zero_grad() 134 | # forward, backward optimize 135 | # pred, _ = model(input_points, labels_onehot) 136 | pred, _, _ = model(input_points, labels_onehot) 137 | pred = pred.view(-1, opt.num_seg_classes) 138 | segs = segs.view(-1, 1)[:, 0] 139 | # debug_here() 140 | loss = criterion(pred, segs) 141 | loss.backward() 142 | ############################## 143 | # gradient clip stuff 144 | ############################## 145 | utils.clip_gradient(optimizer, opt.gradient_clip) 146 | optimizer.step() 147 | pred_choice = pred.data.max(1)[1] 148 | correct = pred_choice.eq(segs.data).cpu().sum() 149 | 150 | if i % opt.print_freq == 0: 151 | print('[%d: %d] train loss: %f accuracy: %f' %(i, len(train_loader), loss.data[0], correct/float(opt.batch_size * opt.num_points))) 152 | 153 | 154 | def validate(val_loader, model, criterion, epoch, opt): 155 | """Perform validation on the validation set""" 156 | # switch to evaluate mode 157 | model.eval() 158 | 159 | top1 = utils.AverageMeter() 160 | 161 | for i, (input_points, _labels, segs) in enumerate(val_loader): 162 | # bz x 2048 x 3 163 | input_points = Variable(input_points, volatile=True) 164 | input_points = input_points.transpose(2, 1) 165 | _labels = _labels.long() # this will be feed to the network 166 | segs = segs.long() 167 | labels_onehot = utils.labels_batch2one_hot_batch(_labels, opt.num_classes) 168 | segs = Variable(segs, volatile=True) 169 | labels_onehot = Variable(labels_onehot, volatile=True) 170 | 171 | if opt.cuda: 172 | input_points = input_points.cuda() 173 | segs = segs.cuda() # must be long cuda tensor 174 | labels_onehot = labels_onehot.float().cuda() # this will be feed into the network 175 | 176 | # forward, backward optimize 177 | pred, _, _ = model(input_points, labels_onehot) 178 | pred = pred.view(-1, opt.num_seg_classes) 179 | segs = segs.view(-1, 1)[:, 0] # min is already 0 180 | # debug_here() 181 | loss = criterion(pred, 
segs) 182 | 183 | pred_choice = pred.data.max(1)[1] 184 | correct = pred_choice.eq(segs.data).cpu().sum() 185 | 186 | acc = correct/float(opt.batch_size * opt.num_points) 187 | top1.update(acc, input_points.size(0)) 188 | 189 | if i % opt.print_freq == 0: 190 | print('[%d: %d] val loss: %f accuracy: %f' %(i, len(val_loader), loss.data[0], acc)) 191 | # print(tested_samples) 192 | return top1.avg 193 | 194 | def main(): 195 | global opt 196 | best_prec1 = 0 197 | # only used when we resume training from some checkpoint model 198 | resume_epoch = 0 199 | # train data loader 200 | # for loader, droplast by default is set to false 201 | train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=opt.batch_size, 202 | shuffle=True, num_workers=int(opt.workers)) 203 | val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=opt.batch_size, 204 | shuffle=True, num_workers=int(opt.workers)) 205 | 206 | 207 | # create model 208 | # for modelnet40, opt.num_points is set to be 2048, opt.num_classes is 40 209 | opt.num_seg_classes = train_dataset.num_seg_classes 210 | opt.num_points = train_dataset.num_points 211 | opt.num_classes = train_dataset.num_classes 212 | 213 | model = pointnet.PointNetPartDenseCls(num_points=opt.num_points, k=opt.num_seg_classes) 214 | 215 | if opt.init_model != '': 216 | print('loading pretrained model from {0}'.format(opt.init_model)) 217 | 218 | model.load_state_dict(torch.load(opt.init_model)) 219 | # segmentation loss 220 | criterion = nn.NLLLoss() 221 | 222 | if opt.cuda: 223 | print('shift model and criterion to GPU .. ') 224 | model = model.cuda() 225 | # define loss function (criterion) and pptimizer 226 | criterion = criterion.cuda() 227 | # optimizer 228 | 229 | optimizer = optim.SGD(model.parameters(), opt.lr, 230 | momentum=opt.momentum, 231 | weight_decay=opt.weight_decay) 232 | 233 | if opt.optim_state_from != '': 234 | print('loading optim_state_from {0}'.format(opt.optim_state_from)) 235 | optim_state = torch.load(opt.optim_state_from) 236 | resume_epoch = optim_state['epoch'] 237 | best_prec1 = optim_state['best_prec1'] 238 | # configure optimzer 239 | optimizer.load_state_dict(optim_state['optim_state_best']) 240 | 241 | for epoch in range(resume_epoch, opt.max_epochs): 242 | ################################# 243 | # train for one epoch 244 | # debug_here() 245 | ################################# 246 | train(train_loader, model, criterion, optimizer, epoch, opt) 247 | 248 | ################################# 249 | # validate 250 | ################################# 251 | prec1 = validate(val_loader, model, criterion, epoch, opt) 252 | ################################## 253 | # save checkpoints 254 | ################################## 255 | if best_prec1 < prec1: 256 | best_prec1 = prec1 257 | path_checkpoint = '{0}/model_best.pth'.format(opt.checkpoint_folder) 258 | utils.save_checkpoint(model.state_dict(), path_checkpoint) 259 | 260 | # save optim state 261 | path_optim_state = '{0}/optim_state_best.pth'.format(opt.checkpoint_folder) 262 | optim_state = {} 263 | optim_state['epoch'] = epoch + 1 # because epoch starts from 0 264 | optim_state['best_prec1'] = best_prec1 265 | optim_state['optim_state_best'] = optimizer.state_dict() 266 | utils.save_checkpoint(optim_state, path_optim_state) 267 | # problem, should we store latest optim state or model, currently, we donot 268 | 269 | print('best accuracy: ', best_prec1) 270 | 271 | 272 | if __name__ == '__main__': 273 | main() 274 | 
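# A minimal sketch (toy shapes, not the real configuration) of the per-point reshape used in
# train()/validate() above: the network output is treated as (batch, num_points, num_seg_classes)
# log-probabilities and the part labels as (batch, num_points); both are flattened to per-point
# rows before the NLLLoss and the accuracy are computed.
import torch
import torch.nn as nn
import torch.nn.functional as F

batch_size, num_points, num_seg_classes = 2, 4, 5    # toy sizes for illustration only
pred = F.log_softmax(torch.randn(batch_size, num_points, num_seg_classes), dim=2)
segs = (torch.rand(batch_size, num_points) * num_seg_classes).long()

pred_flat = pred.view(-1, num_seg_classes)           # (batch_size*num_points, num_seg_classes)
segs_flat = segs.view(-1, 1)[:, 0]                   # (batch_size*num_points,)
loss = nn.NLLLoss()(pred_flat, segs_flat)
correct = pred_flat.max(1)[1].eq(segs_flat).sum()
accuracy = float(correct) / (batch_size * num_points)  # same formula as in train()/validate()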
-------------------------------------------------------------------------------- /misc/__pycache__/modelnet40_pcl_datasets.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/misc/__pycache__/modelnet40_pcl_datasets.cpython-36.pyc -------------------------------------------------------------------------------- /misc/__pycache__/shapenet_test_from_list.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/misc/__pycache__/shapenet_test_from_list.cpython-36.pyc -------------------------------------------------------------------------------- /misc/__pycache__/shapenetcore_partanno_datasets.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/misc/__pycache__/shapenetcore_partanno_datasets.cpython-36.pyc -------------------------------------------------------------------------------- /misc/__pycache__/transforms.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/misc/__pycache__/transforms.cpython-36.pyc -------------------------------------------------------------------------------- /misc/__pycache__/utils.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/misc/__pycache__/utils.cpython-36.pyc -------------------------------------------------------------------------------- /misc/modelnet40_pcl_datasets.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import torch.utils.data as data 3 | import os 4 | import os.path 5 | import errno 6 | import torch 7 | import json 8 | import h5py 9 | 10 | from IPython.core.debugger import Tracer 11 | debug_here = Tracer() 12 | 13 | import numpy as np 14 | import sys 15 | 16 | import json 17 | 18 | 19 | class Modelnet40_PCL_Dataset(data.Dataset): 20 | def __init__(self, data_dir, npoints = 2048, train = True): 21 | self.npoints = npoints 22 | self.data_dir = data_dir 23 | self.train = train 24 | # train files 25 | self.train_files_path = os.path.join(self.data_dir, 'train_files.txt') 26 | self.test_files_path = os.path.join(self.data_dir, 'test_files.txt') 27 | 28 | self.train_files_list = [line.rstrip() for line in open(self.train_files_path)] 29 | self.test_files_list = [line.rstrip() for line in open(self.test_files_path)] 30 | # loading train files 31 | if self.train: 32 | print('loading training data ') 33 | self.train_data = [] 34 | self.train_labels = [] 35 | for file_name in self.train_files_list: 36 | file_path = os.path.join(self.data_dir, file_name) 37 | file_data = h5py.File(file_path) 38 | data = file_data['data'][:] 39 | labels = file_data['label'][:] 40 | self.train_data.append(data) 41 | self.train_labels.append(labels) 42 | self.train_data = np.concatenate(self.train_data) 43 | self.train_labels = np.concatenate(self.train_labels) 44 | else: 45 | print('loading test data ') 46 | self.test_data = [] 47 | self.test_labels = [] 48 | 49 | 
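# Layout assumption (inferred from the keys read below and npoints=2048, not stated in this file):
# each HDF5 file listed in train_files.txt / test_files.txt holds a 'data' array of point clouds
# with shape (num_shapes, 2048, 3) and a 'label' array of class ids with shape (num_shapes, 1).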
for file_name in self.test_files_list: 50 | file_path = os.path.join(self.data_dir, file_name) 51 | file_data = h5py.File(file_path) 52 | data = file_data['data'][:] 53 | labels = file_data['label'][:] 54 | self.test_data.append(data) 55 | self.test_labels.append(labels) 56 | self.test_data = np.concatenate(self.test_data) 57 | self.test_labels = np.concatenate(self.test_labels) 58 | 59 | 60 | 61 | def __getitem__(self, index): 62 | if self.train: 63 | points, label = self.train_data[index], self.train_labels[index] 64 | else: 65 | points, label = self.test_data[index], self.test_labels[index] 66 | 67 | return points, label 68 | 69 | 70 | def __len__(self): 71 | if self.train: 72 | return self.train_data.shape[0] 73 | else: 74 | return self.test_data.shape[0] 75 | 76 | if __name__ == '__main__': 77 | print('test') 78 | d = Modelnet40_PCL_Dataset(data_dir='../datasets/modelnet40_ply_hdf5_2048', train=True) 79 | print(len(d)) 80 | print(d[0]) 81 | 82 | points, label = d[0] 83 | # debug_here() 84 | print(points) 85 | print(points.shape) 86 | print(points.dtype) 87 | print(label.shape) 88 | print(label.dtype) 89 | 90 | d = Modelnet40_PCL_Dataset(data_dir = '../datasets/modelnet40_ply_hdf5_2048', train=False) 91 | print(len(d)) 92 | points, label = d[0] 93 | print(points) 94 | print(points.shape) 95 | print(points.dtype) 96 | print(label.shape) 97 | print(label.dtype) 98 | -------------------------------------------------------------------------------- /misc/shapenet_test_from_list.py: -------------------------------------------------------------------------------- 1 | import torch.utils.data as data 2 | import os 3 | import numpy as np 4 | 5 | # this function can be optimized 6 | def default_flist_reader(ply_data_dir, flist): 7 | """ 8 | flist format: pts_file seg_file label for each line 9 | """ 10 | ffiles = open(flist, 'r') 11 | lines = [line.rstrip() for line in ffiles.readlines()] 12 | 13 | pts_files = [os.path.join(ply_data_dir, line.split()[0]) for line in lines] 14 | seg_files = [os.path.join(ply_data_dir, line.split()[1]) for line in lines] 15 | labels = [line.split()[2] for line in lines] 16 | ffiles.close() 17 | 18 | all_data = [] 19 | for pts_file_path, seg_file_path, label_id in zip(pts_files, seg_files, labels): 20 | all_data.append((pts_file_path, seg_file_path, label_id)) 21 | 22 | return all_data # (pts_file_path, seg_file_path, label_id) 23 | 24 | def default_loader(pts_file_path, seg_file_path): 25 | with open(pts_file_path, 'r') as f: 26 | pts_str = [item.rstrip() for item in f.readlines()] 27 | pts = np.array([np.float32(s.split()) for s in pts_str], dtype=np.float32) 28 | 29 | with open(seg_file_path, 'r') as f: 30 | part_ids = np.array([int(item.rstrip()) for item in f.readlines()], dtype=np.uint8) 31 | 32 | return pts, part_ids 33 | 34 | class PlyFileList(data.Dataset): 35 | def __init__(self, ply_data_dir, 36 | test_ply_file_list_path, 37 | label_id_pid2pid_in_set, 38 | label_ids2ids, label_ids, 39 | flist_reader=default_flist_reader, 40 | transform=None, target_transform=None, 41 | loader=default_loader): 42 | 43 | self.ply_data_full_paths = flist_reader(ply_data_dir, test_ply_file_list_path) 44 | self.label_id_pid2pid_in_set = label_id_pid2pid_in_set 45 | self.label_ids2ids = label_ids2ids 46 | self.label_ids = label_ids 47 | self.transform = transform 48 | self.target_transform = target_transform 49 | self.loader = loader 50 | 51 | def __getitem__(self, index): 52 | pts_file_path, seg_file_path, label_id = self.ply_data_full_paths[index] 53 | 54 | cur_gt_label = 
self.label_ids2ids[label_id] 55 | 56 | pts_data, part_ids= self.loader(pts_file_path, seg_file_path) 57 | # convert to seg_data 58 | seg_data = np.array([self.label_id_pid2pid_in_set[self.label_ids[cur_gt_label]+'_'+str(x)] for x in part_ids]) 59 | 60 | if self.transform is not None: 61 | pts_data = self.transform(pts_data) 62 | if self.target_transform is not None: 63 | seg_data = self.target_transform(seg_data) 64 | 65 | return pts_data, seg_data, label_id 66 | 67 | def __len__(self): 68 | return len(self.ply_data_full_paths) -------------------------------------------------------------------------------- /misc/shapenetcore_partanno_datasets.py: -------------------------------------------------------------------------------- 1 | 2 | from __future__ import print_function 3 | import torch.utils.data as data 4 | import os 5 | import os.path 6 | import errno 7 | import torch 8 | import json 9 | import h5py 10 | 11 | from IPython.core.debugger import Tracer 12 | debug_here = Tracer() 13 | 14 | import numpy as np 15 | import sys 16 | 17 | import json 18 | 19 | 20 | class Shapenetcore_Part_Dataset(data.Dataset): 21 | def __init__(self, data_dir, num_points=2048, mode='train'): 22 | self.num_points = num_points 23 | self.data_dir = data_dir 24 | self.mode = mode 25 | self.color_map = json.load(open(os.path.join(self.data_dir, 'part_color_mapping.json'), 'r')) 26 | 27 | self.categories2folders = {} 28 | with open(os.path.join(self.data_dir, 'all_object_categories.txt'), 'r') as f: 29 | for line in f: 30 | ls = line.strip().split() 31 | self.categories2folders[ls[0]] = ls[1] 32 | 33 | print(self.categories2folders) 34 | # debug_here() 35 | # category 36 | self.all_cats = json.load(open(os.path.join(self.data_dir, 'overallid_to_catid_partid.json'), 'r')) 37 | self.num_classes = len(self.categories2folders) 38 | 39 | self.num_seg_classes = len(self.all_cats) 40 | 41 | self.train_files_path = os.path.join(self.data_dir, 'train_hdf5_file_list.txt') 42 | self.val_files_path = os.path.join(self.data_dir, 'val_hdf5_file_list.txt') 43 | self.test_files_path = os.path.join(self.data_dir, 'test_hdf5_file_list.txt') 44 | 45 | self.train_files_list = [line.rstrip() for line in open(self.train_files_path)] 46 | self.val_files_list = [line.rstrip() for line in open(self.val_files_path)] 47 | self.test_files_list = [line.rstrip() for line in open(self.test_files_path)] 48 | 49 | # loading train data 50 | if self.mode == 'train': 51 | print('loading train data ') 52 | self.train_data = [] 53 | self.train_labels = [] 54 | self.train_segs = [] 55 | 56 | for file_name in self.train_files_list: 57 | file_path = os.path.join(self.data_dir, file_name) 58 | file_data = h5py.File(file_path) 59 | data = file_data['data'][:] 60 | labels = file_data['label'][:] 61 | segs = file_data['pid'][:] 62 | 63 | self.train_data.append(data) 64 | self.train_labels.append(labels) 65 | self.train_segs.append(segs) 66 | 67 | self.train_data = np.concatenate(self.train_data) 68 | self.train_labels = np.concatenate(self.train_labels) 69 | self.train_segs = np.concatenate(self.train_segs) 70 | 71 | self.num_points = self.train_data.shape[1] # will be 2048 72 | # debug_here() 73 | # print('hello') 74 | elif self.mode == 'val': # validation 75 | print('loading val data') 76 | self.val_data = [] 77 | self.val_labels = [] 78 | self.val_segs = [] 79 | for file_name in self.val_files_list: 80 | file_path = os.path.join(self.data_dir, file_name) 81 | file_data = h5py.File(file_path) 82 | data = file_data['data'][:] 83 | labels = 
file_data['label'][:] 84 | segs = file_data['pid'][:] 85 | self.val_data.append(data) 86 | self.val_labels.append(labels) 87 | self.val_segs.append(segs) 88 | 89 | self.val_data = np.concatenate(self.val_data) 90 | self.val_labels = np.concatenate(self.val_labels) 91 | self.val_segs = np.concatenate(self.val_segs) 92 | self.num_points = self.val_data.shape[1] 93 | else: # test 94 | # debug_here() 95 | print('loading test data ') 96 | self.test_data = [] 97 | self.test_labels = [] 98 | self.test_segs = [] 99 | for file_name in self.test_files_list: 100 | file_path = os.path.join(self.data_dir, file_name) 101 | file_data = h5py.File(file_path) 102 | data = file_data['data'][:] 103 | labels = file_data['label'][:] 104 | segs = file_data['pid'][:] 105 | self.test_data.append(data) 106 | self.test_labels.append(labels) 107 | self.test_segs.append(segs) 108 | 109 | self.test_data = np.concatenate(self.test_data) 110 | self.test_labels = np.concatenate(self.test_labels) 111 | self.test_segs = np.concatenate(self.test_segs) 112 | self.num_points = self.test_data.shape[1] 113 | 114 | def __getitem__(self, index): 115 | if self.mode == 'train': 116 | points, label, segs = self.train_data[index], self.train_labels[index], self.train_segs[index] 117 | elif self.mode == 'val': 118 | points, label, segs = self.val_data[index], self.val_labels[index], self.val_segs[index] 119 | else: # test 120 | points, label, segs = self.test_data[index], self.test_labels[index], self.test_segs[index] 121 | 122 | 123 | return points, label, segs 124 | 125 | def __len__(self): 126 | if self.mode == 'train': 127 | return self.train_data.shape[0] 128 | elif self.mode == 'val': 129 | return self.val_data.shape[0] 130 | else: 131 | return self.test_data.shape[0] 132 | 133 | 134 | if __name__ == '__main__': 135 | print('test') 136 | # debug_here() 137 | data = Shapenetcore_Part_Dataset(data_dir='../datasets/shapenet_part_seg_hdf5_dataset', mode='train') 138 | # debug_here() 139 | 140 | print(len(data)) 141 | print(data[0]) 142 | 143 | data = Shapenetcore_Part_Dataset(data_dir = '../datasets/shapenet_part_seg_hdf5_dataset', mode='val') 144 | print(len(data)) 145 | points, label, segs = data[0] 146 | # debug_here() 147 | data = Shapenetcore_Part_Dataset(data_dir = '../datasets/shapenet_part_seg_hdf5_dataset', mode='test') 148 | print(len(data)) 149 | points, label, segs = data[0] 150 | # debug_here() 151 | -------------------------------------------------------------------------------- /misc/transforms.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | import math 3 | import random 4 | import numpy as np 5 | 6 | import torch 7 | 8 | 9 | class Normalize_PC(object): 10 | def __init__(self): 11 | """ 12 | do nothing 13 | """ 14 | pass 15 | def __call__(self, pc): 16 | l = pc.shape[0] 17 | centroid = np.mean(pc, axis=0) 18 | pc = pc - centroid 19 | m = np.max(np.sqrt(np.sum(pc**2, axis=1))) 20 | pc = pc/m 21 | 22 | return pc 23 | 24 | class Augment2PointNum(object): 25 | def __init__(self, num_points): 26 | self.num_points = num_points 27 | 28 | def __call__(self, pc): 29 | assert(pc.shape[0] <= self.num_points) 30 | cur_len = pc.shape[0] 31 | res = np.array(pc) 32 | ################################### 33 | # copy over and slice 34 | ################################### 35 | while cur_len < self.num_points: 36 | res = np.concatenate((res, pc)) 37 | cur_len += pc.shape[0] 38 | 39 | return res[:self.num_points, :] 40 | 41 | 42 | 
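# Usage sketch (added for illustration; not part of the original transforms.py).
# A minimal example of chaining the two transforms above on a raw (N, 3) cloud:
# Normalize_PC centers the points and rescales them into the unit sphere, and
# Augment2PointNum tiles the cloud up to a fixed point count. The random array
# below is a hypothetical stand-in for points read from a .pts file.
if __name__ == '__main__':
    demo_pc = np.random.rand(1500, 3).astype(np.float32)  # hypothetical 1500-point cloud
    normalize = Normalize_PC()
    pad_to_2048 = Augment2PointNum(num_points=2048)
    out = pad_to_2048(normalize(demo_pc))
    print(out.shape)  # (2048, 3): normalized, then tiled and sliced to 2048 points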
-------------------------------------------------------------------------------- /misc/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import os 4 | import time 5 | 6 | class AverageMeter(object): 7 | """Computes and stores the average and current value""" 8 | def __init__(self): 9 | self.reset() 10 | 11 | def reset(self): 12 | self.val = 0 13 | self.avg = 0 14 | self.sum = 0 15 | self.count = 0 16 | 17 | def update(self, val, n=1): 18 | self.val = val 19 | self.sum += val * n # val*n: how many samples predicted correctly among the n samples 20 | self.count += n # totoal samples has been through 21 | self.avg = self.sum / self.count 22 | 23 | def accuracy(output, target, topk=(1,)): 24 | """Computes the precision@k for the specified values of k""" 25 | maxk = max(topk) 26 | batch_size = target.size(0) 27 | 28 | _, pred = output.topk(maxk, 1, True, True) 29 | pred = pred.t() 30 | correct = pred.eq(target.view(1, -1).expand_as(pred)) 31 | 32 | res = [] 33 | for k in topk: 34 | # top k 35 | correct_k = correct[:k].view(-1).float().sum(0) 36 | res.append(correct_k.mul_(100.0 / batch_size)) 37 | return res 38 | 39 | def save_checkpoint(model, output_path): 40 | 41 | ## if not os.path.exists(output_dir): 42 | ## os.makedirs("model/") 43 | torch.save(model, output_path) 44 | 45 | print("Checkpoint saved to {}".format(output_path)) 46 | 47 | 48 | # do gradient clip 49 | def clip_gradient(optimizer, grad_clip): 50 | assert grad_clip>0, 'gradient clip value must be greater than 0' 51 | 52 | for group in optimizer.param_groups: 53 | for param in group['params']: 54 | param.grad.data.clamp_(-grad_clip, grad_clip) 55 | 56 | # input labels: bz x 1 57 | # output: bz x num_classes is one hot 58 | def labels_batch2one_hot_batch(labels_batch, num_classes): 59 | bz = labels_batch.size(0) 60 | labels_onehot = torch.FloatTensor(bz, num_classes).type_as(labels_batch).zero_() 61 | labels_onehot.scatter_(1, labels_batch, 1) 62 | return labels_onehot 63 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/models/__init__.pyc -------------------------------------------------------------------------------- /models/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/models/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/pointnet.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/models/__pycache__/pointnet.cpython-36.pyc -------------------------------------------------------------------------------- /models/pointnet.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import argparse 3 | import os 4 | import random 5 | import torch 6 
| import torch.nn as nn 7 | import torch.nn.parallel 8 | import torch.backends.cudnn as cudnn 9 | import torch.optim as optim 10 | import torch.utils.data 11 | from torch.autograd import Variable 12 | from PIL import Image 13 | import numpy as np 14 | import matplotlib.pyplot as plt 15 | import pdb 16 | import torch.nn.functional as F 17 | 18 | from IPython.core.debugger import Tracer 19 | debug_here = Tracer() 20 | 21 | # transform on raw input data 22 | # spatial transformer network 23 | class STN3d(nn.Module): 24 | # for modelnet40, a 3d shape is with 2048 points 25 | def __init__(self, num_points = 2500): 26 | super(STN3d, self).__init__() 27 | self.num_points = num_points 28 | self.conv1 = nn.Conv1d(3, 64, 1) 29 | self.conv2 = nn.Conv1d(64, 128, 1) 30 | self.conv3 = nn.Conv1d(128, 1024, 1) 31 | self.mp1 = nn.MaxPool1d(num_points) 32 | self.fc1 = nn.Linear(1024, 512) 33 | self.fc2 = nn.Linear(512, 256) 34 | self.fc3 = nn.Linear(256, 9) 35 | self.relu = nn.ReLU() 36 | 37 | self.bn1 = nn.BatchNorm1d(64) 38 | self.bn2 = nn.BatchNorm1d(128) 39 | self.bn3 = nn.BatchNorm1d(1024) 40 | self.bn4 = nn.BatchNorm1d(512) 41 | self.bn5 = nn.BatchNorm1d(256) 42 | 43 | 44 | def forward(self, x): 45 | # x (bz x 3 x 2048) -> conv(3, 64) -> conv(64, 128) -> conv(128, 1024) -> max_pool(2048) -> 1024 -> fc(1024, 512) 46 | # -> fc(512, 256) -> fc(256, 9) 47 | 48 | batchsize = x.size()[0] 49 | x = F.relu(self.bn1(self.conv1(x))) 50 | x = F.relu(self.bn2(self.conv2(x))) 51 | x = F.relu(self.bn3(self.conv3(x))) 52 | x = self.mp1(x) 53 | x = x.view(-1, 1024) 54 | 55 | x = F.relu(self.bn4(self.fc1(x))) 56 | x = F.relu(self.bn5(self.fc2(x))) 57 | x = self.fc3(x) # bz x 9 58 | # identity transform 59 | # bz x 9 60 | iden = Variable(torch.from_numpy(np.array([1,0,0,0,1,0,0,0,1]).astype(np.float32))).view(1,9).repeat(batchsize,1) 61 | if x.is_cuda: 62 | iden = iden.cuda() 63 | x = x + iden 64 | x = x.view(-1, 3, 3) # bz x 3 x 3 65 | return x 66 | 67 | # 128 x 128 transform 68 | class Feats_STN3d(nn.Module): 69 | # for modelnet40, a 3d shape is with 2048 points 70 | def __init__(self, num_points = 2500): 71 | super(Feats_STN3d, self).__init__() 72 | self.conv1 = nn.Conv1d(128, 256, 1) 73 | self.conv2 = nn.Conv1d(256, 1024, 1) 74 | self.mp1 = nn.MaxPool1d(num_points) 75 | 76 | self.fc1 = nn.Linear(1024, 512) 77 | self.fc2 = nn.Linear(512, 256) 78 | self.fc3 = nn.Linear(256, 128*128) 79 | 80 | self.bn1 = nn.BatchNorm1d(256) 81 | self.bn2 = nn.BatchNorm1d(1024) 82 | self.bn3 = nn.BatchNorm1d(512) 83 | self.bn4 = nn.BatchNorm1d(256) 84 | 85 | def forward(self, x): 86 | batchsize = x.size()[0] 87 | x = F.relu(self.bn1(self.conv1(x))) # bz x 256 x 2048 88 | x = F.relu(self.bn2(self.conv2(x))) # bz x 1024 x 2048 89 | x = self.mp1(x) # bz x 1024 x 1 90 | x = x.view(-1, 1024) 91 | 92 | x = F.relu(self.bn3(self.fc1(x))) # bz x 512 93 | x = F.relu(self.bn4(self.fc2(x))) # bz x 256 94 | x = self.fc3(x) # bz x (128*128) 95 | # identity transform 96 | # bz x (128*128) 97 | iden = Variable(torch.from_numpy(np.eye(128).astype(np.float32))).view(1,128*128).repeat(batchsize,1) 98 | if x.is_cuda: 99 | iden = iden.cuda() 100 | x = x + iden 101 | x = x.view(-1, 128, 128) # bz x 3 x 3 102 | return x 103 | 104 | class PointNetfeat(nn.Module): 105 | def __init__(self, num_points = 2500, global_feat = True): 106 | super(PointNetfeat, self).__init__() 107 | self.stn = STN3d(num_points = num_points) # bz x 3 x 3 108 | self.conv1 = torch.nn.Conv1d(3, 64, 1) 109 | self.conv2 = torch.nn.Conv1d(64, 128, 1) 110 | self.conv3 = torch.nn.Conv1d(128, 
1024, 1) 111 | self.bn1 = nn.BatchNorm1d(64) 112 | self.bn2 = nn.BatchNorm1d(128) 113 | self.bn3 = nn.BatchNorm1d(1024) 114 | self.mp1 = torch.nn.MaxPool1d(num_points) 115 | self.num_points = num_points 116 | self.global_feat = global_feat 117 | def forward(self, x): 118 | batchsize = x.size()[0] 119 | trans = self.stn(x) # regressing the transforming parameters using STN 120 | x = x.transpose(2,1) # bz x 2048 x 3 121 | x = torch.bmm(x, trans) # (bz x 2048 x 3) x (bz x 3 x 3) 122 | x = x.transpose(2,1) # bz x 3 x 2048 123 | x = F.relu(self.bn1(self.conv1(x))) 124 | pointfeat = x # bz x 64 x 2048 125 | x = F.relu(self.bn2(self.conv2(x))) # bz x 128 x 2048 126 | x = self.bn3(self.conv3(x)) # bz x 1024 x 2048 127 | x = self.mp1(x) 128 | x = x.view(-1, 1024) # bz x 1024 129 | if self.global_feat: # using global feats for classification 130 | return x, trans 131 | else: 132 | x = x.view(-1, 1024, 1).repeat(1, 1, self.num_points) 133 | return torch.cat([x, pointfeat], 1), trans 134 | 135 | class PointNetCls(nn.Module): 136 | # on modelnet40, it is set to be 2048 137 | def __init__(self, num_points = 2500, k = 2): 138 | super(PointNetCls, self).__init__() 139 | self.num_points = num_points 140 | self.feat = PointNetfeat(num_points, global_feat=True) # bz x 1024 141 | self.fc1 = nn.Linear(1024, 512) 142 | self.fc2 = nn.Linear(512, 256) 143 | self.fc3 = nn.Linear(256, k) 144 | self.bn1 = nn.BatchNorm1d(512) 145 | self.bn2 = nn.BatchNorm1d(256) 146 | def forward(self, x): 147 | x, trans = self.feat(x) 148 | x = F.relu(self.bn1(self.fc1(x))) 149 | x = F.relu(self.bn2(self.fc2(x))) 150 | x = self.fc3(x) # bz x 40 151 | return F.log_softmax(x), trans 152 | 153 | # part segmentation 154 | class PointNetPartDenseCls(nn.Module): 155 | ################################### 156 | ## Note that we must use up all the modules defined in __init___, 157 | ## otherwise, when gradient clippling, it will cause errors like 158 | ## param.grad.data.clamp_(-grad_clip, grad_clip) 159 | ## AttributeError: 'NoneType' object has no attribute 'data' 160 | #################################### 161 | def __init__(self, num_points = 2500, k = 2): 162 | super(PointNetPartDenseCls, self).__init__() 163 | self.num_points = num_points 164 | self.k = k 165 | # T1 166 | self.stn1 = STN3d(num_points = num_points) # bz x 3 x 3, after transform => bz x 2048 x 3 167 | 168 | self.conv1 = torch.nn.Conv1d(3, 64, 1) 169 | self.conv2 = torch.nn.Conv1d(64, 128, 1) 170 | self.conv3 = torch.nn.Conv1d(128, 128, 1) 171 | self.bn1 = nn.BatchNorm1d(64) 172 | self.bn2 = nn.BatchNorm1d(128) 173 | self.bn3 = nn.BatchNorm1d(128) 174 | 175 | # T2 176 | self.stn2 = Feats_STN3d(num_points = num_points) 177 | 178 | self.conv4 = torch.nn.Conv1d(128, 128, 1) 179 | self.conv5 = torch.nn.Conv1d(128, 512, 1) 180 | self.conv6 = torch.nn.Conv1d(512, 2048, 1) 181 | self.bn4 = nn.BatchNorm1d(128) 182 | self.bn5 = nn.BatchNorm1d(512) 183 | self.bn6 = nn.BatchNorm1d(2048) 184 | # pool layer 185 | self.mp1 = torch.nn.MaxPool1d(num_points) 186 | 187 | # MLP(256, 256, 128) 188 | self.conv7 = torch.nn.Conv1d(3024-16, 256, 1) 189 | self.conv8 = torch.nn.Conv1d(256, 256, 1) 190 | self.conv9 = torch.nn.Conv1d(256, 128, 1) 191 | self.bn7 = nn.BatchNorm1d(256) 192 | self.bn8 = nn.BatchNorm1d(256) 193 | self.bn9 = nn.BatchNorm1d(128) 194 | # last layer 195 | self.conv10 = torch.nn.Conv1d(128, self.k, 1) # 50 196 | self.bn10 = nn.BatchNorm1d(self.k) 197 | 198 | def forward(self, x, one_hot_labels): 199 | batch_size = x.size()[0] 200 | # T1 201 | trans_1 = self.stn1(x) # 
regressing the transforming parameters using STN 202 | x = x.transpose(2,1) # bz x 2048 x 3 203 | x = torch.bmm(x, trans_1) # (bz x 2048 x 3) x (bz x 3 x 3) 204 | # change back 205 | x = x.transpose(2,1) # bz x 3 x 2048 206 | out1 = F.relu(self.bn1(self.conv1(x))) # bz x 64 x 2048 207 | out2 = F.relu(self.bn2(self.conv2(out1))) # bz x 128 x 2048 208 | out3 = F.relu(self.bn3(self.conv3(out2))) # bz x 128 x 2048 209 | ####################################################################### 210 | # T2, currently has bugs so now remove this temporately 211 | trans_2 = self.stn2(out3) # regressing the transforming parameters using STN 212 | out3_t = out3.transpose(2,1) # bz x 2048 x 128 213 | out3_trsf = torch.bmm(out3_t, trans_2) # (bz x 2048 x 128) x (bz x 128 x 3) 214 | # change back 215 | out3_trsf = out3_trsf.transpose(2,1) # bz x 128 x 2048 216 | 217 | out4 = F.relu(self.bn4(self.conv4(out3_trsf))) # bz x 128 x 2048 218 | out5 = F.relu(self.bn5(self.conv5(out4))) # bz x 512 x 2048 219 | out6 = F.relu(self.bn6(self.conv6(out5))) # bz x 2048 x 2048 220 | out6 = self.mp1(out6) # bz x 2048 221 | 222 | # concat out1, out2, ..., out5 223 | out6 = out6.view(-1, 2048, 1).repeat(1, 1, self.num_points) 224 | # out6 = x 225 | # cetegories is 16 226 | # one_hot_labels: bz x 16 227 | one_hot_labels = one_hot_labels.unsqueeze(2).repeat(1, 1, self.num_points) 228 | # 64 + 128 * 3 + 512 + 2048 + 16 229 | # point_feats = torch.cat([out1, out2, out3, out4, out5, out6, one_hot_labels], 1) 230 | point_feats = torch.cat([out1, out2, out3, out4, out5, out6], 1) 231 | # Then feed point_feats to MLP(256, 256, 128) 232 | mlp = F.relu(self.bn7(self.conv7(point_feats))) 233 | mlp = F.relu(self.bn8(self.conv8(mlp))) 234 | mlp = F.relu(self.bn9(self.conv9(mlp))) 235 | 236 | # last layer 237 | pred_out = F.relu(self.bn10(self.conv10(mlp))) # bz x 50(self.k) x 2048 238 | pred_out = pred_out.transpose(2,1).contiguous() 239 | pred_out = F.log_softmax(pred_out.view(-1,self.k)) 240 | pred_out = pred_out.view(batch_size, self.num_points, self.k) 241 | return pred_out, trans_1, trans_2 242 | 243 | # regular segmentation 244 | class PointNetDenseCls(nn.Module): 245 | def __init__(self, num_points = 2500, k = 2): 246 | super(PointNetDenseCls, self).__init__() 247 | self.num_points = num_points 248 | self.k = k 249 | self.feat = PointNetfeat(num_points, global_feat=False) 250 | self.conv1 = torch.nn.Conv1d(1088, 512, 1) 251 | self.conv2 = torch.nn.Conv1d(512, 256, 1) 252 | self.conv3 = torch.nn.Conv1d(256, 128, 1) 253 | self.conv4 = torch.nn.Conv1d(128, self.k, 1) 254 | self.bn1 = nn.BatchNorm1d(512) 255 | self.bn2 = nn.BatchNorm1d(256) 256 | self.bn3 = nn.BatchNorm1d(128) 257 | 258 | def forward(self, x): 259 | batchsize = x.size()[0] 260 | x, trans = self.feat(x) 261 | x = F.relu(self.bn1(self.conv1(x))) 262 | x = F.relu(self.bn2(self.conv2(x))) 263 | x = F.relu(self.bn3(self.conv3(x))) 264 | x = self.conv4(x) 265 | x = x.transpose(2,1).contiguous() 266 | x = F.log_softmax(x.view(-1,self.k)) 267 | x = x.view(batchsize, self.num_points, self.k) 268 | return x, trans 269 | 270 | 271 | if __name__ == '__main__': 272 | sim_data = Variable(torch.rand(32,3,2500)) 273 | 274 | trans = STN3d() 275 | out = trans(sim_data) 276 | print('stn', out.size()) 277 | 278 | pointfeat = PointNetfeat(global_feat=True) 279 | out, _ = pointfeat(sim_data) 280 | print('global feat', out.size()) 281 | 282 | pointfeat = PointNetfeat(global_feat=False) 283 | out, _ = pointfeat(sim_data) 284 | print('point feat', out.size()) 285 | 286 | cls = PointNetCls(k 
= 5) 287 | out, _ = cls(sim_data) 288 | print('class', out.size()) 289 | 290 | seg = PointNetDenseCls(k = 3) 291 | out, _ = seg(sim_data) 292 | print('seg', out.size()) 293 | 294 | part_seg = PointNetPartDenseCls(k=7) 295 | one_hot_labels = torch.rand(32, 16) 296 | debug_here() 297 | out, _, _ = part_seg(sim_data, one_hot_labels) 298 | print('seg', out.size()) 299 | 300 | 301 | -------------------------------------------------------------------------------- /models/pointnet.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/models/pointnet.pyc -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # download modelnet40 pcl data 4 | if [ 1 -eq 0 ]; then 5 | python ./download_scripts/download.py --dataset modelnet40_pcl 6 | fi 7 | 8 | # run classification experiments on modelnet40 point cloud 9 | if [ 1 -eq 0 ]; then 10 | python main_cls.py --cuda --gpu_id 3 11 | fi 12 | 13 | if [ 1 -eq 0 ]; then 14 | python main_cls.py --cuda \ 15 | --gpu_id 3 \ 16 | --init_model ./models_checkpoint/model_best.pth \ 17 | --optim_state_from ./models_checkpoint/optim_state_best.pth 18 | fi 19 | 20 | # download original seg data 21 | if [ 1 -eq 0 ]; then 22 | python ./download_scripts/download.py --dataset shapenetcore_partanno 23 | fi 24 | # download seg h5 data 25 | if [ 1 -eq 0 ]; then 26 | python ./download_scripts/download.py --dataset shapenetcore_partanno_h5 27 | fi 28 | 29 | if [ 1 -eq 0 ]; then 30 | python ./download_scripts/download.py --dataset shapenetcore_partanno_ben_v0 31 | fi 32 | ########################################### 33 | # run part segmentation experiments on shapenetcore_partanno 34 | ########################################### 35 | if [ 1 -eq 1 ]; then 36 | python main_part_seg.py --cuda --gpu_id 2 37 | fi 38 | ########################################### 39 | # test part seg, note data from folder 40 | ########################################### 41 | if [ 1 -eq 0 ]; then 42 | python ./eval_part_seg_folder.py 43 | fi 44 | if [ 1 -eq 0 ]; then 45 | python eval_part_seg_h5.py 46 | fi 47 | 48 | 49 | ############################################ 50 | ## visualization 51 | ############################################ 52 | if [ 1 -eq 0 ]; then 53 | echo 'build cpp code for visualization of 3d point data' 54 | cd ./tools/visualizations/ 55 | sh build.sh 56 | cd .. 57 | cd ..
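# note (added for clarity, not in the original script): build.sh under
# tools/visualizations/ is expected to compile render_balls_so.cpp into
# render_balls_so.so, which show3d_balls.py presumably loads to render the
# point clouds; run this block once before the visualization block below.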
58 | fi 59 | if [ 1 -eq 0 ]; then 60 | python ./tools/visualizations/show3d_balls.py 61 | fi 62 | -------------------------------------------------------------------------------- /tools/obj_data/0_labels_mask.obj: -------------------------------------------------------------------------------- 1 | v -0.197974 0.072958 0.039085 12.000000 2 | v -0.151842 -0.636771 0.389456 14.000000 3 | v -0.210818 0.084790 -0.238980 12.000000 4 | v -0.199472 0.513102 -0.085569 12.000000 5 | v -0.201681 0.015124 -0.309976 12.000000 6 | v 0.404456 -0.110784 0.066326 13.000000 7 | v -0.179926 0.499566 0.318685 12.000000 8 | v -0.209058 -0.170883 -0.319224 13.000000 9 | v -0.138025 -0.081465 0.294776 12.000000 10 | v 0.502299 -0.670752 0.309661 14.000000 11 | v -0.168674 -0.065963 0.260702 12.000000 12 | v 0.486142 -0.110784 0.238010 13.000000 13 | v -0.111346 0.717982 0.276241 12.000000 14 | v 0.502299 -0.111140 0.037287 13.000000 15 | v -0.214993 0.619090 0.029518 12.000000 16 | v -0.090002 0.698679 0.329731 12.000000 17 | v 0.502299 -0.315644 -0.342365 14.000000 18 | v 0.406628 -0.110784 -0.121347 13.000000 19 | v -0.289620 0.670670 0.019819 12.000000 20 | v -0.276196 0.671456 -0.170363 12.000000 21 | v -0.202355 -0.595376 0.360005 14.000000 22 | v -0.194979 0.425875 -0.299416 12.000000 23 | v 0.477361 -0.488976 -0.319187 14.000000 24 | v -0.205987 0.355385 -0.180754 12.000000 25 | v 0.389366 -0.110784 0.022122 13.000000 26 | v 0.038976 -0.170883 -0.269423 13.000000 27 | v -0.154295 -0.616850 0.305579 14.000000 28 | v -0.118423 -0.289789 0.326623 14.000000 29 | v 0.018981 -0.170883 0.223088 13.000000 30 | v -0.202355 -0.190654 0.367607 14.000000 31 | v -0.256444 0.649046 -0.080345 12.000000 32 | v -0.170733 0.097334 -0.290991 12.000000 33 | v 0.120381 -0.170883 0.008455 13.000000 34 | v -0.183783 -0.110784 0.170460 13.000000 35 | v -0.195784 0.230694 0.031671 12.000000 36 | v -0.132727 -0.139111 0.389924 13.000000 37 | v -0.156785 -0.549169 -0.400704 14.000000 38 | v -0.153546 -0.247757 0.306778 14.000000 39 | v 0.496177 -0.561451 -0.319449 14.000000 40 | v 0.418367 -0.584929 -0.362866 14.000000 41 | v 0.443961 -0.363031 -0.403307 14.000000 42 | v -0.217090 0.641557 -0.375991 12.000000 43 | v -0.202355 -0.624358 0.318853 14.000000 44 | v -0.183090 0.458564 -0.349742 12.000000 45 | v 0.389778 -0.110784 -0.230462 13.000000 46 | v -0.202599 -0.170883 -0.136419 13.000000 47 | v -0.126043 -0.187490 -0.399543 14.000000 48 | v 0.303299 -0.110784 0.280117 13.000000 49 | v -0.084516 -0.110784 -0.070404 13.000000 50 | v -0.200577 0.288677 -0.334764 12.000000 51 | v -0.149015 -0.713514 -0.401247 14.000000 52 | v -0.012061 -0.110784 0.062244 13.000000 53 | v -0.247177 0.107051 -0.116723 12.000000 54 | v 0.107051 -0.170883 0.003793 13.000000 55 | v -0.238134 -0.134880 0.306141 13.000000 56 | v 0.452198 -0.110784 -0.326601 13.000000 57 | v -0.178896 0.032311 -0.189497 12.000000 58 | v -0.212540 -0.126005 -0.386662 13.000000 59 | v -0.217614 -0.100093 -0.002441 12.000000 60 | v -0.197993 0.238239 0.293728 12.000000 61 | v -0.198835 0.116131 -0.090362 12.000000 62 | v -0.150457 -0.031963 -0.340362 12.000000 63 | v -0.196177 0.152153 0.019089 12.000000 64 | v -0.144653 -0.110784 0.028263 13.000000 65 | v -0.149408 0.180593 -0.303067 12.000000 66 | v -0.245978 -0.031588 0.162147 12.000000 67 | v -0.208047 -0.472781 -0.344631 14.000000 68 | v -0.245829 0.384274 0.163233 12.000000 69 | v -0.196533 -0.765469 0.305111 14.000000 70 | v 0.089789 -0.110784 -0.088789 13.000000 71 | v -0.233154 0.255332 -0.122564 12.000000 72 | v 
-0.033592 -0.110784 0.033206 13.000000 73 | v -0.230027 -0.033629 -0.002928 12.000000 74 | v -0.146282 0.249772 -0.308272 12.000000 75 | v 0.040250 -0.170883 -0.395256 13.000000 76 | v 0.176455 -0.110784 -0.396436 13.000000 77 | v -0.199004 0.038920 0.012218 12.000000 78 | v -0.202355 -0.305253 0.345589 14.000000 79 | v 0.348738 -0.110784 0.383015 13.000000 80 | v -0.183858 0.717982 -0.153082 12.000000 81 | v -0.139860 0.225040 0.302003 12.000000 82 | v -0.138456 0.549499 0.283992 12.000000 83 | v -0.162776 -0.028237 -0.349180 12.000000 84 | v -0.191122 0.570037 -0.239130 12.000000 85 | v 0.380342 -0.110784 0.286632 13.000000 86 | v 0.437370 -0.207410 0.306890 14.000000 87 | v 0.450233 -0.170883 0.125264 13.000000 88 | v 0.289051 -0.170883 -0.248903 13.000000 89 | v 0.287666 -0.170883 -0.386213 13.000000 90 | v -0.223568 0.394814 0.131761 12.000000 91 | v 0.438138 -0.274024 0.306684 14.000000 92 | v -0.206100 0.301296 0.189800 12.000000 93 | v 0.353026 -0.170883 0.203917 13.000000 94 | v 0.459594 -0.418898 0.306216 14.000000 95 | v -0.208047 -0.258466 -0.390070 14.000000 96 | v -0.202056 -0.466809 -0.400442 14.000000 97 | v -0.125294 0.717982 0.288692 12.000000 98 | v -0.208047 -0.274605 -0.369045 14.000000 99 | v 0.085202 -0.170883 -0.030263 13.000000 100 | v -0.228847 -0.116644 0.332427 13.000000 101 | v -0.180244 -0.110784 -0.385577 13.000000 102 | v -0.255171 -0.135273 -0.264967 13.000000 103 | v -0.287580 0.696076 0.066326 12.000000 104 | v -0.081446 0.717982 0.374422 12.000000 105 | v -0.127541 -0.110784 -0.094331 13.000000 106 | v 0.487303 -0.411391 -0.403887 14.000000 107 | v 0.154363 -0.170883 -0.162724 13.000000 108 | v 0.437464 -0.200015 -0.318101 14.000000 109 | v -0.284715 0.622254 -0.044249 12.000000 110 | v -0.171201 0.368977 -0.204269 12.000000 111 | v -0.199959 -0.021085 0.043054 12.000000 112 | v -0.202355 -0.728997 0.366053 14.000000 113 | v 0.119876 -0.170883 -0.192661 13.000000 114 | v -0.251951 -0.161859 0.264821 13.000000 115 | v -0.202205 0.364110 0.045993 12.000000 116 | v -0.181835 -0.265206 -0.318344 14.000000 117 | v 0.464855 -0.170883 -0.096465 13.000000 118 | v -0.194698 0.110402 -0.299191 12.000000 119 | v -0.179083 0.165540 -0.291328 12.000000 120 | v 0.441789 -0.605860 -0.403887 14.000000 121 | v 0.104729 -0.110784 -0.158605 13.000000 122 | v 0.502299 -0.613799 -0.374006 14.000000 123 | v -0.152479 -0.653284 -0.401041 14.000000 124 | v -0.190972 0.237396 0.310541 12.000000 125 | v -0.164798 -0.760470 -0.401397 14.000000 126 | v 0.125324 -0.134468 0.389006 13.000000 127 | v -0.150344 0.325841 0.266524 12.000000 128 | v 0.502299 -0.769850 0.353321 14.000000 129 | v -0.088785 0.622254 -0.362136 12.000000 130 | v -0.200202 0.493425 -0.102120 12.000000 131 | v -0.146188 -0.110784 -0.187250 13.000000 132 | v -0.232217 0.004078 -0.062728 12.000000 133 | v 0.470322 -0.210874 0.389624 14.000000 134 | v -0.167887 -0.193013 -0.337816 14.000000 135 | v -0.289190 0.642605 0.032232 12.000000 136 | v 0.502299 -0.416408 -0.385371 14.000000 137 | v -0.255826 0.542815 0.028001 12.000000 138 | v 0.096641 -0.110784 0.338549 13.000000 139 | v -0.161915 0.091661 0.320632 12.000000 140 | v -0.147798 0.365364 0.313574 12.000000 141 | v -0.171725 0.717982 0.075163 12.000000 142 | v 0.079361 -0.110784 0.281390 13.000000 143 | v -0.189175 -0.610035 0.305617 14.000000 144 | v -0.254628 0.147660 0.018284 12.000000 145 | v 0.147042 -0.170883 -0.087872 13.000000 146 | v -0.228155 0.623228 0.333944 12.000000 147 | v -0.077889 -0.110784 0.091938 13.000000 148 | v -0.207841 0.549162 
0.002482 12.000000 149 | v -0.193406 0.355572 0.307377 12.000000 150 | v -0.182547 -0.674422 -0.318663 14.000000 151 | v -0.118423 -0.760994 0.367363 14.000000 152 | v -0.133756 0.663387 0.086059 12.000000 153 | v -0.290126 -0.157178 -0.018711 13.000000 154 | v -0.202355 -0.242196 0.388913 14.000000 155 | v -0.076054 -0.110784 0.320538 13.000000 156 | v -0.221808 0.622254 0.218913 12.000000 157 | v -0.142219 0.622254 0.071531 12.000000 158 | v -0.187527 0.609410 0.313986 12.000000 159 | v -0.123515 -0.505134 0.305954 14.000000 160 | v -0.226339 -0.110784 -0.011260 13.000000 161 | v 0.313222 -0.170883 -0.052543 13.000000 162 | v -0.202355 -0.651955 0.381723 14.000000 163 | v 0.323819 -0.170883 0.145615 13.000000 164 | v -0.121306 0.622254 0.303389 12.000000 165 | v -0.228997 -0.078581 -0.221175 12.000000 166 | v 0.042197 -0.110784 0.136647 13.000000 167 | v 0.474459 -0.690485 0.305355 14.000000 168 | v -0.074612 0.717982 -0.403194 12.000000 169 | v -0.195802 0.091025 0.302546 12.000000 170 | v -0.138306 0.585689 0.296892 12.000000 171 | v -0.173279 0.700158 -0.403887 12.000000 172 | v -0.269924 0.622254 -0.069224 12.000000 173 | v -0.136265 0.645582 -0.063271 12.000000 174 | v 0.159773 -0.110784 -0.136119 13.000000 175 | v 0.361825 -0.110784 -0.350940 13.000000 176 | v -0.252007 -0.110784 -0.219790 13.000000 177 | v -0.202355 -0.404857 0.324545 14.000000 178 | v 0.496851 -0.775766 0.389006 14.000000 179 | v 0.173048 -0.170883 0.137359 13.000000 180 | v -0.172661 0.437314 -0.351315 12.000000 181 | v 0.440141 -0.593410 -0.403887 14.000000 182 | v 0.208283 -0.110784 -0.278859 13.000000 183 | v 0.259713 -0.170883 0.304643 13.000000 184 | v 0.461185 -0.460350 0.306085 14.000000 185 | v -0.188444 0.342223 -0.238007 12.000000 186 | v -0.217090 0.046353 0.130918 12.000000 187 | v -0.196533 0.085857 0.281259 12.000000 188 | v 0.186546 -0.110784 0.212229 13.000000 189 | v 0.501906 -0.170883 0.113562 13.000000 190 | v -0.248824 0.717982 -0.166637 12.000000 191 | v -0.208346 0.086400 0.051704 12.000000 192 | v -0.183520 0.497563 -0.349573 12.000000 193 | v 0.193455 -0.170883 0.126350 13.000000 194 | v -0.171669 0.629743 -0.201985 12.000000 195 | v -0.118423 -0.518127 0.345964 14.000000 196 | v -0.084423 -0.110784 -0.276200 13.000000 197 | v 0.418367 -0.557014 0.320988 14.000000 198 | v -0.198985 0.415484 0.186318 12.000000 199 | v -0.132689 0.717982 -0.345586 12.000000 200 | v -0.208047 -0.299187 -0.353973 14.000000 201 | v 0.033041 -0.110784 0.216891 13.000000 202 | v -0.215143 -0.170883 0.035565 13.000000 203 | v -0.113012 0.659306 -0.249596 12.000000 204 | v 0.028192 -0.170883 0.217359 13.000000 205 | v -0.205501 0.655205 0.389006 12.000000 206 | v -0.206381 0.513795 -0.240160 12.000000 207 | v 0.441826 -0.673017 -0.403887 14.000000 208 | v 0.086456 -0.170883 -0.014817 13.000000 209 | v -0.175451 0.717982 -0.109009 12.000000 210 | v -0.227518 0.641707 0.133408 12.000000 211 | v 0.498780 -0.110784 -0.075084 13.000000 212 | v 0.442369 -0.746091 0.389006 14.000000 213 | v 0.502299 -0.477687 -0.371891 14.000000 214 | v 0.476444 -0.110784 0.080124 13.000000 215 | v -0.258859 0.697799 -0.089033 12.000000 216 | v -0.213476 0.390939 0.191260 12.000000 217 | v -0.205632 0.302382 -0.073568 12.000000 218 | v -0.245286 0.326946 0.167352 12.000000 219 | v -0.207616 0.200869 -0.071059 12.000000 220 | v 0.452124 -0.273313 -0.318382 14.000000 221 | v -0.119883 -0.723905 0.305242 14.000000 222 | v -0.143324 -0.059503 0.308463 12.000000 223 | v 0.483446 -0.170883 -0.107306 13.000000 224 | v 0.363379 -0.170883 
0.174111 13.000000 225 | v -0.245941 0.195196 0.159826 12.000000 226 | v -0.202355 -0.679963 0.335984 14.000000 227 | v -0.251389 0.622254 -0.187119 12.000000 228 | v -0.151955 -0.668768 -0.361818 14.000000 229 | v 0.248517 -0.110784 0.326062 13.000000 230 | v -0.221901 0.097765 0.190736 12.000000 231 | v -0.187658 0.278193 0.172257 12.000000 232 | v 0.104243 -0.110784 0.298409 13.000000 233 | v -0.218925 -0.110878 0.356617 13.000000 234 | v -0.271160 0.671962 0.183884 12.000000 235 | v -0.235194 0.524542 -0.063121 12.000000 236 | v 0.156366 -0.110784 0.319659 13.000000 237 | v -0.215386 0.128956 0.191523 12.000000 238 | v 0.476275 -0.110784 0.267854 13.000000 239 | v 0.402153 -0.141826 -0.402876 13.000000 240 | v -0.122860 -0.350318 0.306441 14.000000 241 | v 0.491422 -0.431424 0.389006 14.000000 242 | v -0.253355 -0.110784 -0.198521 13.000000 243 | v 0.134816 -0.110784 0.139474 13.000000 244 | v -0.219561 0.619090 -0.087273 12.000000 245 | v -0.185842 -0.575848 0.389662 14.000000 246 | v 0.173197 -0.110784 -0.278016 13.000000 247 | v -0.164180 -0.455145 -0.400405 14.000000 248 | v 0.345836 -0.110784 0.078289 13.000000 249 | v -0.251595 0.339003 0.011301 12.000000 250 | v -0.193518 -0.520861 0.389830 14.000000 251 | v 0.460979 -0.631004 -0.319711 14.000000 252 | v -0.227237 -0.023949 0.133296 12.000000 253 | v 0.102520 -0.139279 -0.402951 13.000000 254 | v -0.195128 0.426811 0.277870 12.000000 255 | v 0.099225 -0.110784 0.206763 13.000000 256 | v 0.501888 -0.110784 -0.132412 13.000000 257 | v -0.185973 0.215210 0.266843 12.000000 258 | v -0.200651 -0.170883 0.245162 13.000000 259 | v -0.097847 -0.110784 0.342593 13.000000 260 | v -0.023818 -0.118741 -0.403625 13.000000 261 | v 0.466652 -0.409631 0.306253 14.000000 262 | v -0.238695 -0.024455 0.180701 12.000000 263 | v -0.145121 0.039576 -0.331001 12.000000 264 | v -0.171145 0.219517 -0.216289 12.000000 265 | v 0.049087 -0.170883 0.017142 13.000000 266 | v -0.255564 -0.039938 0.030098 12.000000 267 | v -0.281045 0.706898 0.128147 12.000000 268 | v 0.470827 -0.170883 -0.067052 13.000000 269 | v -0.227930 0.231761 -0.223778 12.000000 270 | v -0.121474 -0.170883 0.058406 13.000000 271 | v 0.015424 -0.170883 -0.167854 13.000000 272 | v -0.217034 0.622254 -0.241601 12.000000 273 | v -0.098128 0.675613 0.389006 12.000000 274 | v 0.436846 -0.369116 -0.318738 14.000000 275 | v 0.502299 -0.262510 -0.362548 14.000000 276 | v -0.223025 0.187295 -0.063083 12.000000 277 | v -0.232461 -0.156055 -0.336131 13.000000 278 | v -0.181929 -0.233060 0.389194 14.000000 279 | v 0.375867 -0.113218 0.389006 13.000000 280 | v -0.220460 0.622254 0.085479 12.000000 281 | v -0.197356 0.565694 0.283431 12.000000 282 | v -0.123159 -0.282618 0.390485 14.000000 283 | v -0.118423 -0.267734 0.325444 14.000000 284 | v 0.502299 -0.614847 -0.364083 14.000000 285 | v -0.239182 -0.079255 0.142226 12.000000 286 | v -0.208047 -0.283067 -0.374886 14.000000 287 | v 0.299330 -0.110784 -0.095042 13.000000 288 | v -0.082719 -0.110784 0.072804 13.000000 289 | v -0.212128 -0.083674 -0.182551 12.000000 290 | v 0.232004 -0.110784 -0.306269 13.000000 291 | v -0.209526 0.581664 0.131555 12.000000 292 | v 0.275627 -0.127110 -0.403363 13.000000 293 | v -0.119995 -0.170883 0.263885 13.000000 294 | v -0.202131 0.410616 -0.331001 12.000000 295 | v -0.153733 -0.170883 0.138725 13.000000 296 | v -0.181854 -0.576410 -0.319337 14.000000 297 | v 0.272931 -0.170883 -0.261428 13.000000 298 | v 0.476500 -0.352827 -0.318681 14.000000 299 | v 0.497338 -0.778967 0.305074 14.000000 300 | v -0.221059 0.232847 
-0.122078 12.000000 301 | v -0.135853 -0.110784 0.295825 13.000000 302 | v -0.200726 0.124351 0.187591 12.000000 303 | v -0.205519 0.492489 0.132715 12.000000 304 | v 0.222119 -0.170883 0.368487 13.000000 305 | v -0.262810 -0.170883 -0.101745 13.000000 306 | v -0.150382 0.631540 -0.403887 12.000000 307 | v -0.137782 0.630829 -0.006205 12.000000 308 | v -0.236299 0.707890 0.055092 12.000000 309 | v 0.502299 -0.187901 0.314828 14.000000 310 | v -0.225852 0.403876 -0.227372 12.000000 311 | v -0.182453 0.717982 -0.236191 12.000000 312 | v 0.024336 -0.110784 -0.349424 13.000000 313 | v 0.023081 -0.170883 0.003381 13.000000 314 | v -0.195185 -0.077102 0.026672 12.000000 315 | v -0.269737 0.703640 -0.206740 12.000000 316 | v -0.199678 0.641313 -0.100884 12.000000 317 | v 0.271696 -0.110784 0.353228 13.000000 318 | v -0.198592 0.154512 -0.092272 12.000000 319 | v 0.308223 -0.110784 0.313256 13.000000 320 | v -0.214094 0.154643 0.191335 12.000000 321 | v 0.199465 -0.170883 0.336715 13.000000 322 | v -0.045967 -0.110784 -0.114270 13.000000 323 | v 0.154531 -0.170883 0.055429 13.000000 324 | v 0.418367 -0.357302 0.335910 14.000000 325 | v -0.208047 -0.601779 -0.340062 14.000000 326 | v 0.418367 -0.405662 -0.353412 14.000000 327 | v -0.138924 0.622254 -0.049079 12.000000 328 | v -0.008391 -0.170883 0.322954 13.000000 329 | v -0.151449 0.464443 -0.300408 12.000000 330 | v 0.502299 -0.136677 -0.271838 13.000000 331 | v 0.213975 -0.110784 0.377267 13.000000 332 | v -0.044900 -0.110784 0.328215 13.000000 333 | v -0.206886 -0.042691 -0.240104 12.000000 334 | v -0.196795 -0.501539 0.389886 14.000000 335 | v 0.032424 -0.112993 0.389006 13.000000 336 | v 0.418367 -0.534696 0.389755 14.000000 337 | v -0.130031 0.688063 0.129345 12.000000 338 | v 0.238183 -0.170883 0.241118 13.000000 339 | v -0.162046 -0.599532 0.305635 14.000000 340 | v -0.120314 -0.110784 -0.173789 13.000000 341 | v 0.235936 -0.170883 -0.043930 13.000000 342 | v 0.111619 -0.110784 0.160574 13.000000 343 | v -0.167606 -0.725421 0.389175 14.000000 344 | v -0.201812 0.622254 0.051292 12.000000 345 | v 0.056089 -0.170883 0.033393 13.000000 346 | v -0.078450 -0.170883 -0.371348 13.000000 347 | v -0.221827 0.521041 -0.122190 12.000000 348 | v 0.502299 -0.510058 -0.336056 14.000000 349 | v -0.208047 -0.588261 -0.363241 14.000000 350 | v -0.219074 -0.110784 -0.150760 13.000000 351 | v -0.200427 -0.508728 -0.316641 14.000000 352 | v 0.023624 -0.170883 -0.371591 13.000000 353 | v -0.146600 -0.527357 0.305879 14.000000 354 | v 0.026732 -0.126604 -0.403363 13.000000 355 | v -0.252026 -0.017135 0.041818 12.000000 356 | v -0.249086 0.639347 0.271916 12.000000 357 | v -0.231787 0.582787 -0.002685 12.000000 358 | v -0.224092 0.010781 -0.003321 12.000000 359 | v -0.255901 -0.092960 0.027496 12.000000 360 | v -0.208047 -0.604288 -0.366330 14.000000 361 | v 0.418367 -0.394878 0.351655 14.000000 362 | v -0.213046 -0.055441 -0.066903 12.000000 363 | v -0.140103 -0.110784 -0.057560 13.000000 364 | v -0.238527 0.004321 0.141384 12.000000 365 | v -0.119490 -0.253299 0.390654 14.000000 366 | v -0.171819 0.086793 -0.219303 12.000000 367 | v -0.118423 -0.240511 0.326492 14.000000 368 | v -0.231206 0.538078 -0.210541 12.000000 369 | v -0.184531 -0.534416 0.305841 14.000000 370 | v -0.176050 0.629406 0.320295 12.000000 371 | v -0.250734 0.663181 -0.281742 12.000000 372 | v -0.090526 0.622254 0.342051 12.000000 373 | v -0.118423 -0.345244 0.383914 14.000000 374 | v 0.279672 -0.170883 0.293316 13.000000 375 | v -0.084460 0.704427 -0.362267 12.000000 376 | v -0.143492 
0.237396 -0.323100 12.000000 377 | v -0.175002 -0.687490 -0.329353 14.000000 378 | v 0.276170 -0.143267 -0.402839 13.000000 379 | v -0.272489 -0.136284 -0.190452 13.000000 380 | v -0.136696 0.694204 0.032401 12.000000 381 | v -0.193705 0.409811 -0.239973 12.000000 382 | v -0.201887 0.694429 -0.240759 12.000000 383 | v -0.279641 -0.168074 0.137920 13.000000 384 | v 0.077470 -0.110784 -0.250513 13.000000 385 | v -0.192994 -0.366419 -0.400124 14.000000 386 | v -0.284977 0.688288 0.090965 12.000000 387 | v 0.475190 -0.191683 -0.403475 14.000000 388 | v 0.425145 -0.605692 0.305617 14.000000 389 | v 0.446245 -0.423673 0.306197 14.000000 390 | v 0.272276 -0.110784 0.008006 13.000000 391 | v 0.502299 -0.375350 0.335647 14.000000 392 | v 0.333573 -0.110784 0.240631 13.000000 393 | v -0.171220 0.190272 -0.216813 12.000000 394 | v -0.238508 0.622254 -0.159597 12.000000 395 | v -0.208047 -0.348128 -0.325983 14.000000 396 | v -0.201850 -0.087980 -0.180211 12.000000 397 | v 0.458882 -0.535689 -0.403887 14.000000 398 | v -0.164573 0.622254 -0.287752 12.000000 399 | v -0.208047 -0.501951 -0.323287 14.000000 400 | v -0.204340 0.639965 -0.075234 12.000000 401 | v -0.203198 -0.110784 0.063649 13.000000 402 | v -0.174028 0.622254 0.216255 12.000000 403 | v -0.105411 -0.110784 0.008942 13.000000 404 | v -0.214319 -0.061188 -0.119288 12.000000 405 | v 0.063203 -0.170883 -0.374942 13.000000 406 | v -0.206942 0.710249 0.386048 12.000000 407 | v 0.496140 -0.681798 -0.319898 14.000000 408 | v 0.485262 -0.253355 0.389137 14.000000 409 | v 0.418367 -0.453554 0.308612 14.000000 410 | v -0.192357 0.046671 0.308743 12.000000 411 | v -0.198629 -0.013278 -0.093563 12.000000 412 | v -0.040163 -0.170883 -0.221643 13.000000 413 | v 0.111919 -0.170883 0.056646 13.000000 414 | v -0.191946 0.676362 0.179727 12.000000 415 | v -0.208047 -0.574144 -0.382712 14.000000 416 | v 0.420932 -0.170883 -0.295241 13.000000 417 | v 0.091942 -0.110784 -0.108504 13.000000 418 | v -0.017284 -0.170883 -0.031555 13.000000 419 | v 0.391987 -0.170883 -0.204437 13.000000 420 | v 0.297926 -0.170883 -0.194346 13.000000 421 | v -0.217052 0.203640 -0.120430 12.000000 422 | v -0.203366 0.102801 -0.315985 12.000000 423 | v -0.227293 0.271190 -0.225294 12.000000 424 | v -0.191440 -0.021310 0.272122 12.000000 425 | v -0.202355 -0.737554 0.378372 14.000000 426 | v -0.236730 0.638730 0.139287 12.000000 427 | v -0.194155 -0.725777 -0.317352 14.000000 428 | v -0.140234 -0.689549 0.389287 14.000000 429 | v -0.131491 0.667113 0.113431 12.000000 430 | v 0.488595 -0.591744 0.305673 14.000000 431 | v -0.210144 0.215023 -0.116292 12.000000 432 | v 0.032611 -0.110784 -0.082087 13.000000 433 | v -0.223923 0.540718 -0.003303 12.000000 434 | v -0.183183 -0.241092 -0.316379 14.000000 435 | v 0.167281 -0.110784 -0.080008 13.000000 436 | v -0.211773 0.043732 0.053819 12.000000 437 | v 0.122834 -0.110784 -0.340811 13.000000 438 | v -0.146151 -0.539770 -0.400686 14.000000 439 | v -0.213027 0.697237 0.371070 12.000000 440 | v -0.118423 -0.369808 0.346619 14.000000 441 | v -0.165978 -0.110784 0.002370 13.000000 442 | v 0.455699 -0.652142 -0.403887 14.000000 443 | v 0.248180 -0.156673 0.390485 13.000000 444 | v -0.254385 -0.124320 0.254505 13.000000 445 | v 0.418367 -0.298064 -0.397634 14.000000 446 | v 0.419210 -0.110784 -0.026387 13.000000 447 | v -0.143885 0.040137 0.309211 12.000000 448 | v 0.204969 -0.170883 -0.388085 13.000000 449 | v 0.452479 -0.551359 -0.403887 14.000000 450 | v 0.152341 -0.110784 0.248682 13.000000 451 | v 0.112031 -0.110784 -0.358560 13.000000 452 | v 
-0.051883 -0.170883 -0.309376 13.000000 453 | v -0.185337 0.056201 -0.236640 12.000000 454 | v 0.357257 -0.110784 -0.093638 13.000000 455 | v -0.196270 -0.473661 0.306047 14.000000 456 | v -0.252400 -0.133569 -0.275882 13.000000 457 | v -0.179364 0.429151 -0.350678 12.000000 458 | v -0.226657 0.310002 -0.194683 12.000000 459 | v 0.438924 -0.462465 -0.403550 14.000000 460 | v 0.277462 -0.110784 0.288542 13.000000 461 | v 0.351865 -0.110784 -0.306737 13.000000 462 | v 0.418367 -0.412570 -0.384229 14.000000 463 | v -0.192807 0.007092 0.308144 12.000000 464 | v -0.091949 0.700701 -0.338097 12.000000 465 | v 0.458115 -0.170883 -0.176279 13.000000 466 | v -0.149015 -0.226282 -0.364533 14.000000 467 | v 0.502299 -0.556358 0.381985 14.000000 468 | v -0.007642 -0.110784 -0.101483 13.000000 469 | v -0.181274 -0.348615 -0.319393 14.000000 470 | v 0.115420 -0.170883 -0.109421 13.000000 471 | v -0.105261 0.685779 0.270868 12.000000 472 | v -0.190448 0.094189 -0.182139 12.000000 473 | v -0.191103 -0.648229 0.305486 14.000000 474 | v -0.264008 0.634985 0.215656 12.000000 475 | v -0.253411 0.654138 -0.272755 12.000000 476 | v 0.134292 -0.119846 -0.403588 13.000000 477 | v -0.126661 -0.170883 -0.234524 13.000000 478 | v 0.486011 -0.220497 0.306852 14.000000 479 | v -0.211698 0.329679 -0.117491 12.000000 480 | v -0.202355 -0.187284 0.321643 14.000000 481 | v 0.001869 -0.170883 -0.363091 13.000000 482 | v -0.222856 0.282911 -0.189722 12.000000 483 | v 0.466053 -0.601835 0.389006 14.000000 484 | v 0.498630 -0.110784 0.332483 13.000000 485 | v 0.052737 -0.170883 0.370172 13.000000 486 | v 0.153670 -0.170883 -0.147652 13.000000 487 | v 0.458152 -0.170883 -0.010286 13.000000 488 | v -0.194866 0.622254 0.310391 12.000000 489 | v 0.020554 -0.170883 -0.291927 13.000000 490 | v -0.189474 -0.075998 -0.238437 12.000000 491 | v -0.198892 0.483315 0.136029 12.000000 492 | v -0.131940 -0.170883 0.162578 13.000000 493 | v -0.173897 -0.052913 -0.196611 12.000000 494 | v -0.208047 -0.297334 -0.326732 14.000000 495 | v 0.112218 -0.170883 0.285266 13.000000 496 | v 0.502299 -0.520486 0.346825 14.000000 497 | v -0.011387 -0.170883 -0.154542 13.000000 498 | v -0.176855 -0.170883 0.165292 13.000000 499 | v -0.183614 0.457890 0.317000 12.000000 500 | v -0.238658 -0.083730 0.054119 12.000000 501 | v 0.294107 -0.110784 -0.242332 13.000000 502 | v -0.202355 -0.510208 0.339467 14.000000 503 | v -0.241429 -0.050610 0.001060 12.000000 504 | v -0.219355 0.029054 -0.234243 12.000000 505 | v 0.502299 -0.282262 -0.395106 14.000000 506 | v -0.040631 -0.110784 -0.235442 13.000000 507 | v -0.212540 -0.110784 0.081828 13.000000 508 | v -0.240811 0.708920 0.177949 12.000000 509 | v 0.302401 -0.170883 0.014072 13.000000 510 | v -0.102265 -0.170883 0.005703 13.000000 511 | v 0.495597 -0.692639 0.389006 14.000000 512 | v -0.119677 -0.693144 0.389250 14.000000 513 | v -0.195334 0.667038 0.389006 12.000000 514 | v 0.456074 -0.110784 0.338606 13.000000 515 | v -0.257212 0.713282 0.242747 12.000000 516 | v -0.183240 -0.312068 0.390504 14.000000 517 | v 0.481705 -0.457972 0.306104 14.000000 518 | v -0.134880 0.690123 0.072691 12.000000 519 | v -0.226601 0.078687 0.189257 12.000000 520 | v -0.284827 -0.167138 0.094129 13.000000 521 | v -0.118423 -0.356047 0.331641 14.000000 522 | v -0.130124 0.699559 -0.143477 12.000000 523 | v -0.216809 0.717982 -0.331038 12.000000 524 | v -0.208047 -0.443668 -0.360526 14.000000 525 | v -0.171351 0.708208 -0.217880 12.000000 526 | v -0.211773 -0.170883 0.007912 13.000000 527 | v 0.418367 -0.375556 -0.379979 14.000000 
528 | v -0.217314 -0.110784 0.294552 13.000000 529 | v 0.423272 -0.137875 0.389006 13.000000 530 | v -0.125331 0.622254 -0.314057 12.000000 531 | v 0.459706 -0.170883 0.275249 13.000000 532 | v 0.502299 -0.118909 -0.080701 13.000000 533 | v -0.189287 0.409212 0.176170 12.000000 534 | v -0.230926 0.301127 -0.208313 12.000000 535 | v -0.118423 -0.215180 0.386329 14.000000 536 | v -0.282581 0.677111 0.113600 12.000000 537 | v -0.025953 -0.170883 -0.341410 13.000000 538 | v -0.224504 0.026676 -0.191856 12.000000 539 | v -0.141077 0.209201 0.304961 12.000000 540 | v -0.237872 -0.151112 -0.319749 13.000000 541 | v -0.235587 0.433982 -0.063177 12.000000 542 | v 0.297570 -0.110784 -0.009238 13.000000 543 | v 0.388898 -0.170883 -0.018056 13.000000 544 | v -0.282450 -0.110784 0.072130 13.000000 545 | v 0.478148 -0.170883 -0.188055 13.000000 546 | v 0.020348 -0.170883 0.081716 13.000000 547 | v 0.184712 -0.110784 0.014334 13.000000 548 | v -0.255845 0.655692 0.025848 12.000000 549 | v -0.167700 0.131053 -0.291403 12.000000 550 | v 0.502299 -0.154239 0.130693 13.000000 551 | v 0.270235 -0.110784 -0.279046 13.000000 552 | v -0.138474 0.618135 0.283861 12.000000 553 | v -0.197675 -0.110784 0.349277 13.000000 554 | v -0.043571 -0.110784 0.051722 13.000000 555 | v -0.282730 0.702479 -0.127039 12.000000 556 | v -0.176799 -0.425620 -0.325983 14.000000 557 | v 0.117423 -0.110784 -0.204737 13.000000 558 | v -0.067666 -0.110784 0.182461 13.000000 559 | v 0.502299 -0.405811 0.365023 14.000000 560 | v -0.153228 0.378526 -0.343376 12.000000 561 | v 0.446881 -0.147143 -0.402708 13.000000 562 | v -0.115652 0.645114 -0.236153 12.000000 563 | v -0.116026 -0.170883 0.372718 13.000000 564 | v -0.200483 0.565113 -0.180117 12.000000 565 | v 0.158276 -0.110784 0.130993 13.000000 566 | v -0.252812 -0.028948 -0.111106 12.000000 567 | v -0.214562 0.338254 -0.183562 12.000000 568 | v -0.105055 -0.170883 0.096113 13.000000 569 | v -0.208047 -0.320381 -0.392354 14.000000 570 | v -0.202355 -0.685917 0.375770 14.000000 571 | v 0.159174 -0.110784 0.362196 13.000000 572 | v -0.168524 -0.170883 -0.209829 13.000000 573 | v -0.256182 -0.170883 0.077934 13.000000 574 | v -0.242889 0.018270 0.174298 12.000000 575 | v 0.068314 -0.138044 -0.403007 13.000000 576 | v 0.302569 -0.110784 -0.376440 13.000000 577 | v -0.195559 -0.091743 -0.341972 12.000000 578 | v -0.200539 0.501944 -0.180117 12.000000 579 | v 0.418610 -0.615072 -0.319655 14.000000 580 | v -0.138961 0.645788 0.282214 12.000000 581 | v 0.418367 -0.287823 -0.376590 14.000000 582 | v -0.221770 0.094245 -0.003003 12.000000 583 | v -0.167700 0.549817 -0.291403 12.000000 584 | v 0.164679 -0.170883 -0.273130 13.000000 585 | v -0.158414 -0.110784 -0.395106 13.000000 586 | v -0.198536 0.704857 -0.092796 12.000000 587 | v 0.430892 -0.170883 -0.053029 13.000000 588 | v -0.286288 0.676942 -0.093451 12.000000 589 | v 0.062960 -0.170883 0.161623 13.000000 590 | v 0.502299 -0.266423 -0.332817 14.000000 591 | v 0.385753 -0.170883 -0.232259 13.000000 592 | v -0.181910 0.015592 -0.186969 12.000000 593 | v -0.221059 0.040381 -0.063327 12.000000 594 | v 0.396012 -0.170883 0.206519 13.000000 595 | v -0.210425 0.707104 0.377473 12.000000 596 | v -0.203479 -0.021478 -0.076358 12.000000 597 | v -0.143230 0.589958 0.308350 12.000000 598 | v 0.502299 -0.319314 -0.388965 14.000000 599 | v 0.418367 -0.691197 -0.364570 14.000000 600 | v 0.502299 -0.346274 0.386029 14.000000 601 | v 0.125642 -0.170883 -0.173995 13.000000 602 | v -0.019026 -0.170883 -0.079110 13.000000 603 | v 0.133094 -0.110784 -0.278185 
13.000000 604 | v -0.224429 0.623096 0.343062 12.000000 605 | v -0.037280 -0.110784 -0.208987 13.000000 606 | v -0.208047 -0.519868 -0.389396 14.000000 607 | v -0.138961 -0.223137 0.390279 14.000000 608 | v -0.178166 -0.073788 -0.350847 12.000000 609 | v -0.269381 0.622254 0.047023 12.000000 610 | v 0.158463 -0.110784 0.386909 13.000000 611 | v 0.418367 -0.419947 0.363525 14.000000 612 | v 0.141669 -0.170883 -0.305183 13.000000 613 | v -0.078750 -0.110784 0.214083 13.000000 614 | v -0.178934 0.433944 -0.291309 12.000000 615 | v -0.199622 0.214724 -0.084708 12.000000 616 | v -0.202355 -0.368797 0.322766 14.000000 617 | v -0.157178 -0.110784 -0.084577 13.000000 618 | v 0.488407 -0.486430 0.306010 14.000000 619 | v -0.184756 -0.110784 -0.137561 13.000000 620 | v -0.238901 0.507018 -0.121216 12.000000 621 | v -0.192245 0.413705 0.273171 12.000000 622 | v -0.195540 0.408407 0.303183 12.000000 623 | v -0.240848 0.684356 0.144398 12.000000 624 | v -0.179102 -0.170883 0.016880 13.000000 625 | v 0.502299 -0.216172 -0.324429 14.000000 626 | v -0.084404 -0.110784 -0.362735 13.000000 627 | v -0.118179 0.635079 -0.223235 12.000000 628 | v 0.250764 -0.170883 0.152430 13.000000 629 | v 0.299386 -0.170883 -0.298162 13.000000 630 | v 0.030739 -0.110784 -0.218797 13.000000 631 | v -0.136078 0.653146 -0.069786 12.000000 632 | v -0.255283 0.473448 0.032176 12.000000 633 | v 0.152322 -0.110784 0.116071 13.000000 634 | v -0.258223 0.649439 0.238722 12.000000 635 | v -0.279510 0.674583 -0.151715 12.000000 636 | v -0.212372 -0.008260 -0.067408 12.000000 637 | v -0.199416 -0.089160 -0.099442 12.000000 638 | v -0.230233 0.221688 -0.203108 12.000000 639 | v -0.124114 -0.110784 0.284592 13.000000 640 | v -0.253093 0.302401 0.039235 12.000000 641 | v -0.209638 0.405149 -0.181521 12.000000 642 | v 0.343066 -0.170883 0.327784 13.000000 643 | v 0.004602 -0.170883 -0.310874 13.000000 644 | v -0.173822 0.356789 -0.224171 12.000000 645 | v -0.191852 -0.000022 0.272647 12.000000 646 | v 0.255557 -0.170883 -0.309114 13.000000 647 | v -0.141545 0.207347 0.275942 12.000000 648 | v 0.447724 -0.570419 -0.319487 14.000000 649 | v -0.150251 -0.170883 -0.326283 13.000000 650 | v 0.420426 -0.110784 0.184914 13.000000 651 | v -0.213570 0.710474 -0.384622 12.000000 652 | v -0.189587 -0.257605 0.390317 14.000000 653 | v 0.155149 -0.170883 0.144136 13.000000 654 | v -0.186011 -0.102059 0.266880 12.000000 655 | v -0.271946 0.717982 0.114405 12.000000 656 | v 0.419116 -0.456137 -0.319075 14.000000 657 | v -0.216247 0.212402 -0.184348 12.000000 658 | v -0.183015 -0.110784 0.356186 13.000000 659 | v 0.502299 -0.476301 -0.320909 14.000000 660 | v 0.145563 -0.170883 0.092294 13.000000 661 | v 0.502299 -0.322178 -0.366180 14.000000 662 | v 0.093328 -0.170883 0.113937 13.000000 663 | v -0.124826 -0.482180 -0.399487 14.000000 664 | v -0.196944 0.378507 -0.301887 12.000000 665 | v -0.198124 0.015424 0.292773 12.000000 666 | v -0.022377 -0.170883 0.099895 13.000000 667 | v -0.152966 -0.512585 0.305916 14.000000 668 | v -0.289489 0.682278 -0.038707 12.000000 669 | v 0.478503 -0.779099 0.325612 14.000000 670 | v 0.201824 -0.110784 -0.388460 13.000000 671 | v -0.198180 0.078705 0.292343 12.000000 672 | v -0.157646 -0.736318 -0.401322 14.000000 673 | v -0.288778 0.622516 -0.058609 12.000000 674 | v 0.184955 -0.134149 -0.403119 13.000000 675 | v -0.185898 0.360515 0.156530 12.000000 676 | v 0.418367 -0.340227 -0.374212 14.000000 677 | v -0.193144 -0.110784 -0.023972 13.000000 678 | v -0.245847 0.499941 -0.067689 12.000000 679 | v 0.071160 -0.170883 
0.322692 13.000000 680 | v -0.188856 0.619090 0.148723 12.000000 681 | v 0.209594 -0.110784 0.283431 13.000000 682 | v -0.138811 0.717982 -0.194234 12.000000 683 | v 0.008328 -0.170883 -0.018468 13.000000 684 | v -0.130218 -0.715405 0.305280 14.000000 685 | v 0.418367 -0.458440 0.320220 14.000000 686 | v 0.351903 -0.110784 0.352685 13.000000 687 | v -0.160866 0.616038 0.261544 12.000000 688 | v -0.138493 0.524710 0.298409 12.000000 689 | v -0.202393 -0.170883 -0.259837 13.000000 690 | v 0.218431 -0.125762 0.389006 13.000000 691 | v 0.418367 -0.490175 0.329675 14.000000 692 | v -0.289789 0.717982 -0.022100 12.000000 693 | v -0.233228 0.289239 0.185700 12.000000 694 | v 0.307605 -0.110784 -0.262926 13.000000 695 | v -0.094589 -0.170883 -0.361986 13.000000 696 | v -0.227799 0.622254 0.194443 12.000000 697 | v 0.502299 -0.647911 0.337108 14.000000 698 | v -0.196458 0.032798 0.281072 12.000000 699 | v -0.169909 0.622254 0.133670 12.000000 700 | v -0.221770 0.370438 -0.122171 12.000000 701 | v -0.252531 -0.007567 -0.111499 12.000000 702 | v 0.041785 -0.163619 0.390710 13.000000 703 | v -0.202355 -0.218850 0.327185 14.000000 704 | v -0.118423 -0.212372 0.307452 14.000000 705 | v -0.057669 -0.110784 -0.163492 13.000000 706 | v 0.419078 -0.398435 0.306291 14.000000 707 | v 0.193829 -0.170883 -0.391418 13.000000 708 | v 0.334772 -0.170883 0.302902 13.000000 709 | v -0.161035 -0.621025 -0.400948 14.000000 710 | v 0.502299 -0.217988 -0.401828 14.000000 711 | v 0.310301 -0.110784 -0.065236 13.000000 712 | v -0.187246 0.437988 0.267835 12.000000 713 | v -0.240568 0.717982 -0.223422 12.000000 714 | v -0.187920 0.675182 0.313686 12.000000 715 | v -0.139991 -0.655269 -0.378649 14.000000 716 | v 0.418367 -0.464001 -0.337497 14.000000 717 | v -0.195615 0.036617 -0.240216 12.000000 718 | v 0.205924 -0.110784 -0.063589 13.000000 719 | v 0.502299 -0.112020 0.066551 13.000000 720 | v 0.201150 -0.110784 0.034236 13.000000 721 | v -0.207410 0.201655 -0.240029 12.000000 722 | v -0.188089 -0.763933 -0.317465 14.000000 723 | v -0.289545 -0.162139 0.024069 13.000000 724 | v -0.151936 -0.309560 -0.360676 14.000000 725 | v 0.026058 -0.110784 0.304287 13.000000 726 | v -0.143604 0.281974 -0.323961 12.000000 727 | v -0.230439 0.360140 0.134606 12.000000 728 | v 0.502299 -0.235325 -0.387131 14.000000 729 | v -0.164424 -0.137314 -0.403026 13.000000 730 | v -0.191796 -0.105186 0.309455 12.000000 731 | v -0.102471 -0.170883 -0.171411 13.000000 732 | v 0.418367 -0.272564 -0.342197 14.000000 733 | v -0.200052 0.615252 0.043185 12.000000 734 | v -0.194960 0.676717 -0.403887 12.000000 735 | v 0.406347 -0.170883 -0.051101 13.000000 736 | v -0.226020 -0.105860 0.132790 12.000000 737 | v -0.287842 0.688812 -0.078717 12.000000 738 | v -0.143548 0.260706 -0.318569 12.000000 739 | v -0.201251 0.534671 -0.104647 12.000000 740 | v -0.268820 0.717982 0.155594 12.000000 741 | v -0.118423 -0.472444 0.352966 14.000000 742 | v 0.502299 -0.169591 0.206500 13.000000 743 | v -0.208047 -0.492964 -0.351034 14.000000 744 | v 0.238370 -0.110784 0.103771 13.000000 745 | v -0.231188 0.600517 0.187254 12.000000 746 | v -0.111795 0.691246 0.240950 12.000000 747 | v -0.177211 0.622254 -0.170980 12.000000 748 | v -0.075605 -0.110784 -0.387655 13.000000 749 | v 0.262484 -0.170883 0.178960 13.000000 750 | v -0.163001 0.601697 0.261263 12.000000 751 | v -0.147536 -0.318490 -0.366929 14.000000 752 | v -0.171388 -0.055347 0.320969 12.000000 753 | v 0.240748 -0.170883 -0.393197 13.000000 754 | v -0.288553 0.652566 -0.065068 12.000000 755 | v -0.232124 -0.016067 
-0.122695 12.000000 756 | v 0.264619 -0.110784 0.286614 13.000000 757 | v 0.418367 -0.217614 -0.334221 14.000000 758 | v -0.173354 -0.110784 0.072411 13.000000 759 | v -0.192769 0.189579 0.141496 12.000000 760 | v 0.124762 -0.170883 -0.023411 13.000000 761 | v 0.418367 -0.390927 -0.399862 14.000000 762 | v -0.116195 0.683177 -0.403887 12.000000 763 | v 0.418367 -0.284022 0.382697 14.000000 764 | v -0.203422 0.717982 -0.065349 12.000000 765 | v -0.207485 -0.019568 0.131910 12.000000 766 | v -0.208047 -0.637744 -0.388666 14.000000 767 | v -0.228960 0.104842 -0.003059 12.000000 768 | v 0.055415 -0.170883 -0.173901 13.000000 769 | v -0.164424 -0.110784 -0.069730 13.000000 770 | v -0.150831 0.622254 0.169074 12.000000 771 | v -0.022265 -0.170883 0.168494 13.000000 772 | v 0.468019 -0.110784 0.362215 13.000000 773 | v -0.015993 -0.170883 0.272965 13.000000 774 | v -0.223792 0.681941 0.057077 12.000000 775 | v 0.418367 -0.671295 0.331285 14.000000 776 | v 0.250895 -0.110784 0.339186 13.000000 777 | v -0.024942 -0.170883 -0.175736 13.000000 778 | v -0.097435 0.650469 -0.316566 12.000000 779 | v -0.189980 0.239044 0.177162 12.000000 780 | v 0.251813 -0.170883 -0.375335 13.000000 781 | v 0.301109 -0.110784 0.098659 13.000000 782 | v -0.012697 -0.110784 -0.392523 13.000000 783 | v 0.243912 -0.110784 -0.044136 13.000000 784 | v 0.002449 -0.110784 -0.101314 13.000000 785 | v -0.196795 0.564814 0.137658 12.000000 786 | v 0.247057 -0.110784 0.368955 13.000000 787 | v -0.021965 -0.170883 0.334131 13.000000 788 | v -0.060252 -0.165884 0.390785 13.000000 789 | v 0.136558 -0.170883 0.334711 13.000000 790 | v -0.203329 -0.110784 -0.397184 13.000000 791 | v 0.478241 -0.375799 0.389006 14.000000 792 | v -0.094477 -0.110784 -0.170512 13.000000 793 | v -0.136246 -0.110784 -0.213106 13.000000 794 | v -0.218531 0.609392 0.191186 12.000000 795 | v -0.248431 -0.110784 0.144305 13.000000 796 | v -0.148435 0.442313 -0.304321 12.000000 797 | v -0.185767 0.488520 0.157448 12.000000 798 | v -0.103969 0.652097 0.275923 12.000000 799 | v -0.143642 0.717982 0.168325 12.000000 800 | v -0.181892 0.659175 -0.234000 12.000000 801 | v 0.418367 -0.531046 0.362795 14.000000 802 | v 0.061687 -0.170883 -0.304602 13.000000 803 | v -0.132483 0.680818 -0.116386 12.000000 804 | v -0.272677 0.664361 0.175384 12.000000 805 | v -0.288553 0.641370 -0.065124 12.000000 806 | v 0.502299 -0.615072 0.326099 14.000000 807 | v 0.177466 -0.110784 0.292118 13.000000 808 | v -0.256145 0.536655 -0.079615 12.000000 809 | v -0.118423 -0.367037 0.386928 14.000000 810 | v 0.130903 -0.163113 0.390691 13.000000 811 | v -0.246615 0.717982 0.141796 12.000000 812 | v 0.273156 -0.170883 -0.363391 13.000000 813 | v -0.202805 0.377328 -0.329391 12.000000 814 | v 0.140733 -0.166521 0.390804 13.000000 815 | v -0.153658 -0.110784 -0.014293 13.000000 816 | v 0.138261 -0.110784 -0.351727 13.000000 817 | v -0.200127 0.289014 0.043260 12.000000 818 | v -0.147967 -0.517940 0.389119 14.000000 819 | v -0.147143 -0.110784 -0.242612 13.000000 820 | v -0.149970 0.619090 0.272216 12.000000 821 | v -0.273032 0.671756 -0.188243 12.000000 822 | v 0.306033 -0.110784 -0.030282 13.000000 823 | v 0.446451 -0.110784 -0.263282 13.000000 824 | v 0.502299 -0.538460 0.313649 14.000000 825 | v 0.502299 -0.697712 0.311814 14.000000 826 | v -0.216247 0.272220 0.130806 12.000000 827 | v 0.422130 -0.170883 -0.099442 13.000000 828 | v 0.455924 -0.560870 0.305767 14.000000 829 | v -0.060383 -0.110784 0.004991 13.000000 830 | v -0.156860 0.622254 -0.097626 12.000000 831 | v 0.449727 -0.693462 
-0.324860 14.000000 832 | v -0.134936 0.674939 -0.086842 12.000000 833 | v -0.153340 0.585745 0.317618 12.000000 834 | v -0.154613 -0.086595 0.263903 12.000000 835 | v 0.356864 -0.110784 0.150202 13.000000 836 | v 0.279147 -0.150606 0.389006 13.000000 837 | v 0.317079 -0.110784 0.348435 13.000000 838 | v -0.199247 0.699446 0.186524 12.000000 839 | v -0.185318 0.076272 0.161379 12.000000 840 | v -0.244499 0.154887 0.050374 12.000000 841 | v -0.194136 0.247413 -0.343301 12.000000 842 | v 0.484832 -0.110784 -0.184198 13.000000 843 | v 0.418367 -0.295611 -0.338228 14.000000 844 | v -0.162945 -0.459283 0.390036 14.000000 845 | v 0.502299 -0.115914 0.355961 13.000000 846 | v -0.255489 0.424658 -0.107399 12.000000 847 | v 0.404419 -0.162739 0.389044 13.000000 848 | v -0.226694 0.573145 -0.003359 12.000000 849 | v -0.191496 0.717982 -0.268112 12.000000 850 | v 0.494923 -0.460593 0.306085 14.000000 851 | v -0.145533 -0.170883 0.176769 13.000000 852 | v 0.477436 -0.294469 0.306628 14.000000 853 | v -0.176331 0.000764 0.320183 12.000000 854 | v -0.194192 0.365158 0.139643 12.000000 855 | v -0.151805 -0.500397 -0.361500 14.000000 856 | v 0.016098 -0.170883 -0.102419 13.000000 857 | v -0.172942 0.411159 0.320782 12.000000 858 | v -0.195241 -0.045237 0.027589 12.000000 859 | v -0.165753 -0.028274 0.260908 12.000000 860 | v 0.418367 -0.457204 0.380619 14.000000 861 | v -0.243657 -0.048476 0.149846 12.000000 862 | v -0.198910 0.210586 -0.095585 12.000000 863 | v -0.202355 -0.183015 0.375844 14.000000 864 | v 0.488651 -0.170883 -0.020789 13.000000 865 | v -0.216566 0.560788 -0.236378 12.000000 866 | v -0.146132 0.216072 0.312132 12.000000 867 | v -0.203479 -0.041305 -0.325290 12.000000 868 | v 0.417899 -0.170883 0.196297 13.000000 869 | v -0.232349 0.695608 0.056422 12.000000 870 | v -0.175807 0.121786 -0.193522 12.000000 871 | v -0.241242 -0.110784 0.215375 13.000000 872 | v 0.008815 -0.170883 -0.007347 13.000000 873 | v -0.029192 -0.161334 -0.402240 13.000000 874 | v 0.491515 -0.170883 0.197570 13.000000 875 | v 0.269374 -0.110784 0.296443 13.000000 876 | v 0.418367 -0.409762 -0.342197 14.000000 877 | v 0.502299 -0.599944 0.339392 14.000000 878 | v -0.244874 -0.066974 -0.066940 12.000000 879 | v -0.103595 0.693961 0.277440 12.000000 880 | v 0.121149 -0.170883 0.305205 13.000000 881 | v -0.224167 0.313522 0.190268 12.000000 882 | v -0.245585 0.622254 0.263978 12.000000 883 | v -0.201513 0.055864 -0.309564 12.000000 884 | v -0.096892 0.622254 0.308725 12.000000 885 | v -0.152273 0.151348 -0.342646 12.000000 886 | v 0.502299 -0.504160 0.312507 14.000000 887 | v -0.273706 -0.170883 0.083195 13.000000 888 | v 0.418367 -0.570138 0.315090 14.000000 889 | v -0.178821 0.526264 -0.231398 12.000000 890 | v -0.230701 -0.020448 -0.214323 12.000000 891 | v 0.083629 -0.170883 0.002857 13.000000 892 | v -0.231806 0.232697 -0.002685 12.000000 893 | v 0.369034 -0.110784 -0.066828 13.000000 894 | v -0.197113 0.708358 -0.403887 12.000000 895 | v 0.439093 -0.483378 -0.403644 14.000000 896 | v -0.126061 0.654494 -0.172647 12.000000 897 | v -0.145739 0.525328 -0.309582 12.000000 898 | v 0.445365 -0.639579 0.305523 14.000000 899 | v 0.254864 -0.110784 0.199611 13.000000 900 | v 0.049686 -0.110784 -0.395031 13.000000 901 | v -0.121549 0.717982 0.209739 12.000000 902 | v -0.198817 -0.093915 0.012686 12.000000 903 | v -0.089309 0.649289 0.331922 12.000000 904 | v 0.231892 -0.110784 0.069172 13.000000 905 | v -0.167606 -0.219130 0.389587 14.000000 906 | v -0.136434 -0.185355 -0.382150 14.000000 907 | v -0.209713 -0.170883 0.180551 
13.000000 908 | v 0.340650 -0.110784 -0.357025 13.000000 909 | v 0.068539 -0.170883 -0.099349 13.000000 910 | v -0.050629 -0.110784 -0.174819 13.000000 911 | v -0.146768 0.345256 -0.334951 12.000000 912 | v 0.336775 -0.170883 -0.068812 13.000000 913 | v -0.149801 -0.064240 0.315109 12.000000 914 | v -0.245192 0.204520 0.154171 12.000000 915 | v -0.289246 0.663218 -0.045597 12.000000 916 | v -0.202355 -0.246540 0.355287 14.000000 917 | v -0.155942 -0.617393 -0.356033 14.000000 918 | v -0.283685 -0.165921 0.104950 13.000000 919 | v 0.442107 -0.471115 0.389437 14.000000 920 | v -0.203797 0.247038 0.005721 12.000000 921 | v 0.252618 -0.170883 -0.277979 13.000000 922 | v -0.186666 -0.050367 0.314660 12.000000 923 | v 0.102576 -0.170883 0.359219 13.000000 924 | v 0.258684 -0.110784 -0.119176 13.000000 925 | v 0.501457 -0.537467 -0.319374 14.000000 926 | v 0.237003 -0.170883 -0.192137 13.000000 927 | v 0.418367 -0.679215 -0.366536 14.000000 928 | v -0.127990 0.639198 0.144005 12.000000 929 | v 0.122853 -0.170883 -0.303254 13.000000 930 | v -0.222838 -0.170883 0.309661 13.000000 931 | v 0.246851 -0.170883 0.127529 13.000000 932 | v 0.418367 -0.421070 -0.355209 14.000000 933 | v -0.145233 0.717982 0.250760 12.000000 934 | v 0.393579 -0.110784 -0.250663 13.000000 935 | v -0.044188 -0.110784 -0.264050 13.000000 936 | v -0.235400 0.048001 0.138257 12.000000 937 | v 0.111039 -0.170883 -0.209024 13.000000 938 | v 0.354205 -0.170883 -0.308515 13.000000 939 | v -0.255602 0.399027 0.029836 12.000000 940 | v -0.073807 -0.110784 -0.300745 13.000000 941 | v -0.071261 -0.110784 0.102104 13.000000 942 | v -0.286868 -0.117674 -0.087816 13.000000 943 | v -0.237497 -0.055216 0.140036 12.000000 944 | v 0.329791 -0.170883 -0.091747 13.000000 945 | v -0.200726 0.264825 0.009747 12.000000 946 | v -0.180244 -0.170883 0.140672 13.000000 947 | v 0.136969 -0.110784 -0.212057 13.000000 948 | v -0.217708 -0.014907 0.191298 12.000000 949 | v -0.142818 -0.393473 0.306309 14.000000 950 | v -0.245098 0.318258 0.153479 12.000000 951 | v -0.158208 0.266472 0.319621 12.000000 952 | v -0.108331 0.662619 -0.403887 12.000000 953 | v -0.288216 0.715023 0.059829 12.000000 954 | v 0.453247 -0.213607 0.389999 14.000000 955 | v 0.468918 -0.691178 -0.403887 14.000000 956 | v -0.238639 0.197143 -0.064094 12.000000 957 | v 0.154625 -0.110784 -0.324523 13.000000 958 | v -0.157216 -0.393979 0.306309 14.000000 959 | v 0.314757 -0.110784 0.288823 13.000000 960 | v 0.173834 -0.170883 -0.025844 13.000000 961 | v -0.147049 -0.399596 0.389512 14.000000 962 | v -0.195259 0.141163 0.027683 12.000000 963 | v 0.340819 -0.110784 0.158346 13.000000 964 | v -0.262492 0.653820 0.221759 12.000000 965 | v -0.290276 0.676811 0.001509 12.000000 966 | v -0.001951 -0.170883 -0.102513 13.000000 967 | v -0.139560 -0.716435 -0.379473 14.000000 968 | v -0.245941 0.646668 -0.296364 12.000000 969 | v 0.286730 -0.110784 -0.050352 13.000000 970 | v 0.106209 -0.110784 -0.126852 13.000000 971 | v -0.195746 0.195440 0.022384 12.000000 972 | v -0.129806 -0.110784 0.247296 13.000000 973 | v 0.094807 -0.170377 0.390935 13.000000 974 | v 0.481705 -0.397836 -0.403887 14.000000 975 | v 0.430817 -0.429327 -0.403213 14.000000 976 | v 0.354786 -0.141919 0.389006 13.000000 977 | v -0.247514 0.531020 -0.068962 12.000000 978 | v -0.201138 -0.170883 -0.116012 13.000000 979 | v 0.016921 -0.170883 -0.251356 13.000000 980 | v -0.198948 0.391575 -0.089444 12.000000 981 | v 0.499042 -0.110784 0.387509 13.000000 982 | v -0.227593 0.157639 -0.122939 12.000000 983 | v -0.290013 0.665184 
0.008904 12.000000 984 | v 0.423946 -0.308212 0.390373 14.000000 985 | v -0.189306 0.118509 0.146065 12.000000 986 | v -0.197918 -0.682510 0.305373 14.000000 987 | v -0.263709 -0.110784 -0.221175 13.000000 988 | v 0.332319 -0.170883 -0.037377 13.000000 989 | v 0.105834 -0.162027 -0.402221 13.000000 990 | v 0.455718 -0.218531 -0.318176 14.000000 991 | v -0.199191 0.013720 -0.337235 12.000000 992 | v -0.034022 -0.110784 0.222021 13.000000 993 | v 0.212046 -0.110784 -0.076788 13.000000 994 | v 0.230338 -0.170883 -0.220707 13.000000 995 | v 0.413649 -0.110784 -0.297937 13.000000 996 | v -0.097285 -0.110784 0.300431 13.000000 997 | v -0.118423 -0.266049 0.382341 14.000000 998 | v 0.050547 -0.110784 0.166266 13.000000 999 | v -0.235999 -0.001108 -0.001412 12.000000 1000 | v -0.269157 -0.170883 -0.041440 13.000000 1001 | v 0.260743 -0.110784 -0.343676 13.000000 1002 | v 0.284539 -0.110784 0.240313 13.000000 1003 | v -0.185823 0.370999 0.165199 12.000000 1004 | v -0.152067 0.622254 0.366483 12.000000 1005 | v 0.409511 -0.170883 0.190437 13.000000 1006 | v 0.418367 -0.366326 -0.385651 14.000000 1007 | v 0.043545 -0.110784 0.120995 13.000000 1008 | v -0.142256 0.717982 0.075593 12.000000 1009 | v -0.145533 0.126054 -0.310050 12.000000 1010 | v -0.200446 -0.179383 0.390598 14.000000 1011 | v -0.242796 0.273381 -0.119606 12.000000 1012 | v -0.191459 0.689243 0.143200 12.000000 1013 | v -0.240755 -0.016891 0.144267 12.000000 1014 | v -0.236748 0.410616 -0.001112 12.000000 1015 | v 0.023175 -0.110784 0.087763 13.000000 1016 | v 0.446114 -0.110784 0.095739 13.000000 1017 | v 0.428252 -0.249124 -0.318288 14.000000 1018 | v -0.287411 -0.136640 -0.082030 13.000000 1019 | v 0.086288 -0.110784 0.002595 13.000000 1020 | v -0.284116 0.637625 0.099127 12.000000 1021 | v -0.275429 0.636258 0.159863 12.000000 1022 | v -0.163750 0.717982 -0.214585 12.000000 1023 | v 0.433289 -0.110784 -0.310912 13.000000 1024 | v -0.204827 0.713263 0.048989 12.000000 1025 | v -0.152909 -0.068827 0.264615 12.000000 1026 | v -0.211529 0.305434 0.000067 12.000000 1027 | v -0.141002 -0.170883 -0.201816 13.000000 1028 | v -0.240324 0.285438 -0.064787 12.000000 1029 | v -0.188650 -0.566712 0.305748 14.000000 1030 | v -0.199809 0.619090 -0.226230 12.000000 1031 | v -0.204883 0.029821 -0.240366 12.000000 1032 | v 0.502299 -0.121325 0.040732 13.000000 1033 | v -0.204452 -0.013858 0.005085 12.000000 1034 | v -0.193705 -0.170883 -0.015622 13.000000 1035 | v -0.155793 0.514993 0.318629 12.000000 1036 | v -0.170209 -0.379918 0.306347 14.000000 1037 | v -0.117767 0.622254 -0.398233 12.000000 1038 | v -0.285745 0.700326 -0.098544 12.000000 1039 | v 0.418367 -0.625294 0.319584 14.000000 1040 | v -0.244106 0.369258 0.003119 12.000000 1041 | v 0.026882 -0.110784 -0.256504 13.000000 1042 | v -0.208047 -0.199528 -0.338640 14.000000 1043 | v -0.258073 0.172467 -0.101146 12.000000 1044 | v -0.254853 0.697424 0.018808 12.000000 1045 | v -0.222538 0.622254 -0.263207 12.000000 1046 | v -0.207410 0.022051 -0.071246 12.000000 1047 | v 0.081139 -0.110784 -0.138235 13.000000 1048 | v -0.242945 -0.110784 0.089860 13.000000 1049 | v 0.502299 -0.408283 -0.400161 14.000000 1050 | v -0.143230 0.622254 -0.004370 12.000000 1051 | v -0.155268 -0.779099 0.324246 14.000000 1052 | v -0.182566 -0.532562 -0.318195 14.000000 1053 | v -0.083505 -0.110784 0.137321 13.000000 1054 | v -0.207073 0.485262 0.050711 12.000000 1055 | v 0.047888 -0.110784 -0.191013 13.000000 1056 | v 0.215809 -0.170883 0.057171 13.000000 1057 | v -0.156111 0.569513 -0.296458 12.000000 1058 | v 
-0.188426 0.403314 -0.294530 12.000000 1059 | v -0.200127 -0.779099 0.343399 14.000000 1060 | v 0.105647 -0.110784 -0.184124 13.000000 1061 | v -0.252831 0.522407 0.039871 12.000000 1062 | v 0.418367 -0.658994 -0.357268 14.000000 1063 | v -0.202355 -0.274043 0.325163 14.000000 1064 | v -0.136377 0.670576 0.043990 12.000000 1065 | v -0.253542 0.024935 -0.075234 12.000000 1066 | v -0.194660 0.133094 0.305298 12.000000 1067 | v 0.120569 -0.170883 0.360492 13.000000 1068 | v 0.188100 -0.170883 -0.290729 13.000000 1069 | v 0.446619 -0.170883 -0.085887 13.000000 1070 | v -0.148603 0.256755 0.314192 12.000000 1071 | v 0.418367 -0.422025 -0.368240 14.000000 1072 | v 0.418367 -0.524437 0.328327 14.000000 1073 | v 0.418367 -0.637333 -0.369962 14.000000 1074 | v 0.418367 -0.203010 -0.400218 14.000000 1075 | v 0.163874 -0.134075 0.389006 13.000000 1076 | v -0.206849 0.717982 -0.149131 12.000000 1077 | v -0.151486 -0.257324 0.389868 14.000000 1078 | v -0.118423 -0.197843 0.383464 14.000000 1079 | v 0.042309 -0.170883 -0.323325 13.000000 1080 | v -0.143324 0.074680 0.273583 12.000000 1081 | v 0.418367 -0.579724 -0.327181 14.000000 1082 | v -0.203048 -0.302127 -0.315985 14.000000 1083 | v -0.159724 0.194166 -0.347907 12.000000 1084 | v -0.137070 0.634367 0.018808 12.000000 1085 | v 0.332225 -0.110784 -0.037752 13.000000 1086 | v -0.137126 0.711223 0.017423 12.000000 1087 | v -0.163394 0.717982 -0.401079 12.000000 1088 | v 0.502299 -0.316318 0.355943 14.000000 1089 | v -0.142612 -0.110784 0.251472 13.000000 1090 | v -0.161896 -0.779099 0.345009 14.000000 1091 | v 0.230188 -0.170883 -0.386363 13.000000 1092 | v 0.041186 -0.110784 0.108826 13.000000 1093 | v -0.253243 0.275347 0.014933 12.000000 1094 | v -0.090077 -0.170883 0.354239 13.000000 1095 | v -0.208047 -0.604288 -0.386457 14.000000 1096 | v 0.138786 -0.170883 -0.208126 13.000000 1097 | v -0.205070 -0.378065 -0.316229 14.000000 1098 | v -0.149333 0.166476 0.314753 12.000000 1099 | v 0.502299 -0.138119 0.298315 13.000000 1100 | v -0.134711 0.622254 0.164937 12.000000 1101 | v 0.454033 -0.170883 -0.267420 13.000000 1102 | v 0.239231 -0.110784 0.276054 13.000000 1103 | v 0.463451 -0.231618 -0.403325 14.000000 1104 | v 0.263158 -0.170883 -0.399394 13.000000 1105 | v 0.087318 -0.110784 0.215450 13.000000 1106 | v -0.216303 -0.170883 -0.221063 13.000000 1107 | v 0.226668 -0.110784 0.379739 13.000000 1108 | v 0.458864 -0.307463 0.389568 14.000000 1109 | v 0.305920 -0.110784 -0.343339 13.000000 1110 | v 0.427709 -0.110784 -0.120505 13.000000 1111 | v 0.067715 -0.110784 -0.140238 13.000000 1112 | v -0.136452 -0.110784 0.386011 13.000000 1113 | v -0.041286 -0.110784 -0.301625 13.000000 1114 | v -0.208047 -0.388493 -0.345754 14.000000 1115 | v -0.009327 -0.110784 0.324246 13.000000 1116 | v -0.204021 0.239924 0.133352 12.000000 1117 | v -0.151955 0.619090 -0.335700 12.000000 1118 | v -0.041979 -0.170883 -0.272531 13.000000 1119 | v -0.213944 -0.110784 -0.332854 13.000000 1120 | v -0.252381 0.116805 0.040938 12.000000 1121 | v -0.234502 0.468843 0.184726 12.000000 1122 | v -0.162495 -0.727481 -0.347121 14.000000 1123 | v -0.066637 -0.170883 0.209477 13.000000 1124 | v -0.079031 0.625661 -0.379529 12.000000 1125 | v -0.047296 -0.170883 0.243159 13.000000 1126 | v -0.259178 0.612163 -0.091429 12.000000 1127 | v 0.480806 -0.421070 -0.403887 14.000000 1128 | v -0.227013 0.335389 -0.195151 12.000000 1129 | v 0.429994 -0.170883 0.279387 13.000000 1130 | v -0.094346 -0.170883 -0.026444 13.000000 1131 | v 0.502299 -0.584161 -0.382244 14.000000 1132 | v -0.143679 
0.455681 -0.324542 12.000000 1133 | v 0.502299 -0.163656 0.322523 13.000000 1134 | v -0.192900 0.430986 0.180963 12.000000 1135 | v -0.010919 -0.110784 -0.375073 13.000000 1136 | v -0.248113 0.320786 0.047042 12.000000 1137 | v -0.073302 -0.110784 -0.399206 13.000000 1138 | v -0.088280 0.634255 -0.350098 12.000000 1139 | v -0.246765 -0.129188 -0.293275 13.000000 1140 | v -0.155736 -0.222912 0.306852 14.000000 1141 | v 0.502299 -0.161072 -0.062054 13.000000 1142 | v 0.092934 -0.110784 0.078271 13.000000 1143 | v -0.195035 0.168985 0.183303 12.000000 1144 | v 0.280009 -0.110784 -0.312709 13.000000 1145 | v 0.073838 -0.110784 -0.348057 13.000000 1146 | v 0.315993 -0.170883 -0.268337 13.000000 1147 | v -0.179008 -0.761106 -0.323942 14.000000 1148 | v -0.062799 -0.170883 0.377754 13.000000 1149 | v 0.502299 -0.120520 -0.108766 13.000000 1150 | v -0.190747 0.622254 -0.166805 12.000000 1151 | v 0.035775 -0.170883 0.331398 13.000000 1152 | v -0.189212 0.474253 0.269333 12.000000 1153 | v -0.186516 0.027874 0.169486 12.000000 1154 | v -0.188707 -0.110784 0.217940 13.000000 1155 | v 0.438250 -0.379132 0.306347 14.000000 1156 | v -0.194061 0.597653 -0.240010 12.000000 1157 | v -0.186291 -0.170883 0.194574 13.000000 1158 | v -0.075342 -0.170883 -0.114926 13.000000 1159 | v -0.224654 0.283098 -0.228926 12.000000 1160 | v -0.202355 0.093534 -0.180286 12.000000 1161 | v 0.125362 -0.169647 -0.401977 13.000000 1162 | v -0.187808 0.482435 -0.183225 12.000000 1163 | v -0.182566 0.332768 -0.186464 12.000000 1164 | v 0.395657 -0.170883 -0.375017 13.000000 1165 | v -0.217745 0.521958 -0.235479 12.000000 1166 | v -0.118423 -0.400007 0.319322 14.000000 1167 | v 0.012353 -0.137126 0.389868 13.000000 1168 | v -0.068996 -0.110784 -0.139396 13.000000 1169 | v -0.171388 0.622254 0.039553 12.000000 1170 | v 0.294425 -0.170883 0.318835 13.000000 1171 | v -0.177248 0.068614 -0.350959 12.000000 1172 | v 0.488838 -0.270786 0.389006 14.000000 1173 | v 0.502299 -0.219692 -0.366330 14.000000 1174 | v -0.160005 -0.387314 -0.400199 14.000000 1175 | v 0.251438 -0.110784 -0.095754 13.000000 1176 | v -0.170771 -0.106983 -0.213462 12.000000 1177 | v 0.104112 -0.170883 0.107702 13.000000 1178 | v -0.185449 -0.555385 -0.316790 14.000000 1179 | v 0.413780 -0.170883 0.336696 13.000000 1180 | v -0.217782 0.717982 0.197944 12.000000 1181 | v -0.205800 0.622254 -0.119925 12.000000 1182 | v -0.188051 0.717982 0.110249 12.000000 1183 | v 0.502299 -0.116176 0.234902 13.000000 1184 | v -0.151749 -0.309204 -0.399937 14.000000 1185 | v 0.502299 -0.621699 0.383165 14.000000 1186 | v -0.032562 -0.170883 0.274388 13.000000 1187 | v -0.208047 -0.424141 -0.362922 14.000000 1188 | v 0.502299 -0.115839 -0.220089 13.000000 1189 | v 0.473542 -0.110784 -0.212488 13.000000 1190 | v 0.389890 -0.170883 0.011600 13.000000 1191 | v -0.183090 -0.170883 -0.308047 13.000000 1192 | v -0.199360 0.717982 0.343511 12.000000 1193 | v -0.125612 0.715491 0.160986 12.000000 1194 | v 0.499997 -0.110784 0.079525 13.000000 1195 | v -0.229016 0.717982 -0.308253 12.000000 1196 | v 0.418367 -0.716903 0.305860 14.000000 1197 | v -0.238789 0.661646 0.180570 12.000000 1198 | v -0.131042 0.713994 -0.133611 12.000000 1199 | v 0.386520 -0.170883 -0.173433 13.000000 1200 | v -0.243526 0.249416 0.149566 12.000000 1201 | v -0.012398 -0.170883 0.297735 13.000000 1202 | v -0.168486 -0.435262 -0.337741 14.000000 1203 | v -0.149745 -0.438931 -0.400349 14.000000 1204 | v -0.178128 -0.078170 0.262611 12.000000 1205 | v -0.205707 0.331589 0.004130 12.000000 1206 | v -0.140253 -0.170883 
0.143294 13.000000 1207 | v -0.155399 0.622254 0.222115 12.000000 1208 | v 0.488651 -0.321430 -0.403887 14.000000 1209 | v 0.435947 -0.339572 0.306478 14.000000 1210 | v 0.499529 -0.170883 0.136423 13.000000 1211 | v -0.205594 -0.052482 0.189594 12.000000 1212 | v -0.234221 -0.170883 0.216629 13.000000 1213 | v 0.204651 -0.170883 0.282738 13.000000 1214 | v -0.268558 0.640883 0.197551 12.000000 1215 | v -0.214749 0.145526 -0.065948 12.000000 1216 | v -0.047221 -0.110784 0.082895 13.000000 1217 | v 0.419415 -0.669966 0.389325 14.000000 1218 | v -0.288591 0.712196 0.049083 12.000000 1219 | v 0.118696 -0.170883 0.112495 13.000000 1220 | v -0.221583 -0.110784 -0.290149 13.000000 1221 | v 0.437146 -0.110784 0.198169 13.000000 1222 | v 0.175275 -0.110784 -0.295728 13.000000 1223 | v 0.502299 -0.393473 -0.324485 14.000000 1224 | v -0.084179 0.667731 -0.363166 12.000000 1225 | v -0.137108 -0.171482 0.307021 14.000000 1226 | v -0.253561 -0.170883 -0.207976 13.000000 1227 | v 0.129406 -0.170883 0.155276 13.000000 1228 | v 0.366038 -0.110784 -0.183225 13.000000 1229 | v -0.110615 0.704876 -0.261765 12.000000 1230 | v -0.235288 -0.110784 -0.317839 13.000000 1231 | v -0.197431 0.626073 0.283992 12.000000 1232 | v -0.172924 0.002224 0.261263 12.000000 1233 | v 0.072639 -0.110784 -0.084333 13.000000 1234 | v -0.196083 0.504434 0.280173 12.000000 1235 | v -0.062536 -0.110784 -0.290167 13.000000 1236 | v -0.209994 0.310170 -0.181671 12.000000 1237 | v -0.125387 -0.519756 0.305898 14.000000 1238 | v -0.287542 -0.110784 -0.041403 13.000000 1239 | v 0.072396 -0.110784 -0.389171 13.000000 1240 | v 0.418367 -0.574987 -0.339220 14.000000 1241 | v -0.254722 0.407508 0.035303 12.000000 1242 | v -0.226020 -0.111215 -0.354029 13.000000 1243 | v -0.212578 0.083124 -0.182738 12.000000 1244 | v 0.059740 -0.110784 -0.016034 13.000000 1245 | v -0.099157 -0.170883 0.028787 13.000000 1246 | v 0.418367 -0.183895 0.380413 13.000000 1247 | v 0.480469 -0.711398 0.389006 14.000000 1248 | v -0.049487 -0.170883 0.295825 13.000000 1249 | v -0.186011 -0.739463 0.389137 14.000000 1250 | v 0.265012 -0.170883 0.387602 13.000000 1251 | v 0.473074 -0.110784 0.100700 13.000000 1252 | v -0.172287 0.230226 -0.290785 12.000000 1253 | v 0.052868 -0.170883 -0.029420 13.000000 1254 | v -0.207410 0.630342 -0.399749 12.000000 1255 | v 0.257036 -0.110784 -0.403045 13.000000 1256 | v -0.267621 0.622254 0.172931 12.000000 1257 | v -0.262866 0.707666 0.220224 12.000000 1258 | v 0.474010 -0.110784 -0.007852 13.000000 1259 | v -0.206493 0.134367 0.050281 12.000000 1260 | v 0.502299 -0.372542 -0.358373 14.000000 1261 | v 0.502299 -0.461211 -0.335438 14.000000 1262 | v -0.202355 -0.682304 0.345870 14.000000 1263 | v -0.208047 -0.237946 -0.352344 14.000000 1264 | v 0.029615 -0.170883 0.108208 13.000000 1265 | v 0.262091 -0.170883 -0.141549 13.000000 1266 | v -0.208047 -0.548177 -0.347832 14.000000 1267 | v -0.229971 0.717982 0.278001 12.000000 1268 | v -0.285108 0.622254 -0.100940 12.000000 1269 | v -0.136565 0.663724 -0.052299 12.000000 1270 | v -0.174253 0.454857 -0.195750 12.000000 1271 | v 0.393785 -0.170883 -0.276331 13.000000 1272 | v -0.137557 0.624444 -0.016577 12.000000 1273 | v 0.111189 -0.170883 -0.180960 13.000000 1274 | v -0.208515 0.622254 -0.035917 12.000000 1275 | v -0.159106 -0.170883 0.097461 13.000000 1276 | v -0.234576 0.372591 0.055804 12.000000 1277 | v 0.502299 -0.464413 -0.356894 14.000000 1278 | v -0.176462 0.640770 -0.228308 12.000000 1279 | v 0.201318 -0.170883 -0.139283 13.000000 1280 | v 0.502299 -0.144409 -0.387805 14.000000 
1281 | v -0.289321 -0.135928 -0.042451 13.000000 1282 | v -0.082869 -0.170883 0.180738 13.000000 1283 | v 0.418367 -0.564016 0.358152 14.000000 1284 | v -0.015319 -0.110784 0.146551 13.000000 1285 | v -0.192002 -0.110784 0.255179 13.000000 1286 | v -0.202355 -0.376548 0.333757 14.000000 1287 | v 0.502299 -0.139654 0.372811 13.000000 1288 | v 0.502299 -0.202823 0.353864 14.000000 1289 | v -0.182865 0.717982 0.157466 12.000000 1290 | v -0.255883 0.716222 0.026091 12.000000 1291 | v 0.304385 -0.170883 0.262742 13.000000 1292 | v -0.201232 0.646275 0.044701 12.000000 1293 | v -0.237853 0.717982 0.191822 12.000000 1294 | v -0.225889 0.524092 -0.227316 12.000000 1295 | v -0.187340 -0.757512 0.305130 14.000000 1296 | v -0.111814 0.708639 0.240800 12.000000 1297 | v -0.225009 -0.170883 -0.309657 13.000000 1298 | v 0.059178 -0.110784 0.376032 13.000000 1299 | v 0.010107 -0.170883 0.096338 13.000000 1300 | v 0.237471 -0.170883 -0.089201 13.000000 1301 | v 0.068090 -0.110784 -0.198577 13.000000 1302 | v -0.118423 -0.271197 0.310204 14.000000 1303 | v -0.139167 0.496552 0.281708 12.000000 1304 | v -0.194136 0.518738 0.139718 12.000000 1305 | v -0.141451 0.178758 0.305841 12.000000 1306 | v 0.433364 -0.723774 0.305242 14.000000 1307 | v 0.502299 -0.145439 -0.229694 13.000000 1308 | v -0.118423 -0.370033 0.333888 14.000000 1309 | v 0.329529 -0.170883 -0.121647 13.000000 1310 | v -0.219318 0.622254 -0.054471 12.000000 1311 | v -0.231674 0.717982 0.226384 12.000000 1312 | v -0.202355 -0.612975 0.356635 14.000000 1313 | v -0.286119 0.700982 0.080199 12.000000 1314 | v -0.198161 0.397922 0.292492 12.000000 1315 | v -0.188613 0.153164 -0.238082 12.000000 1316 | v -0.210013 0.513383 0.000816 12.000000 1317 | v -0.182023 0.159493 0.264222 12.000000 1318 | v -0.241354 0.487846 0.177219 12.000000 1319 | v -0.186703 -0.313679 0.306553 14.000000 1320 | v 0.450663 -0.335621 0.389680 14.000000 1321 | v -0.168730 0.717982 0.086303 12.000000 1322 | v 0.258927 -0.170883 0.146813 13.000000 1323 | v -0.120632 -0.170883 -0.307186 13.000000 1324 | v -0.163019 -0.042035 0.320782 12.000000 1325 | v 0.418367 -0.710331 0.377398 14.000000 1326 | v 0.231031 -0.170883 0.164356 13.000000 1327 | v -0.208047 -0.509477 -0.378294 14.000000 1328 | v 0.365757 -0.110784 -0.220277 13.000000 1329 | v -0.289920 -0.133775 -0.025507 13.000000 1330 | v -0.213476 0.658781 0.054531 12.000000 1331 | v 0.010387 -0.110784 -0.033783 13.000000 1332 | v 0.502299 -0.134992 0.222190 13.000000 1333 | v 0.491834 -0.240998 -0.318270 14.000000 1334 | v -0.203572 0.289482 -0.317408 12.000000 1335 | v 0.502299 -0.535445 0.339036 14.000000 1336 | v -0.250865 0.172617 -0.113671 12.000000 1337 | v -0.251314 0.334921 0.042885 12.000000 1338 | v -0.156729 0.348102 -0.346072 12.000000 1339 | v 0.301876 -0.170883 0.191972 13.000000 1340 | v -0.254815 -0.011274 -0.076900 12.000000 1341 | v -0.274567 0.717982 -0.064544 12.000000 1342 | v 0.418367 -0.654127 -0.390238 14.000000 1343 | v -0.199846 0.697930 -0.101258 12.000000 1344 | v -0.280390 -0.155905 0.133371 13.000000 1345 | v 0.500072 -0.110784 -0.366498 13.000000 1346 | v 0.197312 -0.170883 -0.019404 13.000000 1347 | v -0.289040 -0.129862 -0.050633 13.000000 1348 | v 0.052887 -0.170883 -0.217468 13.000000 1349 | v -0.222819 0.717982 0.317974 12.000000 1350 | v -0.190710 -0.110784 -0.294211 13.000000 1351 | v 0.157789 -0.110784 -0.072145 13.000000 1352 | v 0.006231 -0.170883 0.220018 13.000000 1353 | v -0.188576 0.672879 -0.347477 12.000000 1354 | v -0.161765 0.634143 -0.403887 12.000000 1355 | v 0.502299 -0.667756 
0.356560 14.000000 1356 | v 0.448379 -0.184887 0.390205 14.000000 1357 | v 0.418367 -0.206437 0.387621 13.000000 1358 | v 0.452348 -0.714282 0.305280 14.000000 1359 | v 0.067771 -0.110784 -0.268505 13.000000 1360 | v -0.177885 -0.731881 -0.325422 14.000000 1361 | v 0.337093 -0.110784 -0.218217 13.000000 1362 | v -0.245960 0.622254 -0.041777 12.000000 1363 | v -0.148547 0.569532 -0.304190 12.000000 1364 | v -0.241691 0.035195 0.145503 12.000000 1365 | v 0.502299 -0.137482 0.242223 13.000000 1366 | v -0.203759 0.222550 -0.323156 12.000000 1367 | v 0.179376 -0.170883 -0.260698 13.000000 1368 | v 0.258590 -0.170883 -0.205336 13.000000 1369 | v 0.159418 -0.170883 -0.095136 13.000000 1370 | v 0.193043 -0.110784 0.372774 13.000000 1371 | v -0.237609 0.347859 -0.121759 12.000000 1372 | v 0.090632 -0.170883 0.092050 13.000000 1373 | v -0.184026 0.612481 -0.292695 12.000000 1374 | v -0.107901 0.622254 0.369086 12.000000 1375 | v 0.477174 -0.521553 0.389006 14.000000 1376 | v -0.072366 -0.170883 0.353771 13.000000 1377 | v 0.230450 -0.110784 0.140485 13.000000 1378 | v -0.145027 -0.657440 0.389399 14.000000 1379 | v -0.088804 -0.170883 -0.231810 13.000000 1380 | v 0.502299 -0.137988 -0.249371 13.000000 1381 | v 0.260593 -0.110784 -0.207152 13.000000 1382 | v -0.207017 -0.410979 -0.316322 14.000000 1383 | v -0.210649 0.588310 0.190886 12.000000 1384 | v -0.118423 -0.312405 0.340908 14.000000 1385 | v -0.167812 -0.710350 0.389231 14.000000 1386 | v -0.174815 -0.242421 0.389343 14.000000 1387 | v -0.214581 0.099019 -0.237408 12.000000 1388 | v 0.400131 -0.170883 -0.113016 13.000000 1389 | v 0.261473 -0.110784 0.265551 13.000000 1390 | v 0.302981 -0.110784 -0.274328 13.000000 1391 | v -0.200109 0.717982 0.295151 12.000000 1392 | v -0.227537 0.214218 -0.196256 12.000000 1393 | v -0.165266 -0.659313 0.305448 14.000000 1394 | v -0.074107 -0.110784 0.203917 13.000000 1395 | v 0.502299 -0.571168 -0.336655 14.000000 1396 | v 0.139722 -0.110784 -0.169689 13.000000 1397 | v 0.006549 -0.170883 -0.038463 13.000000 1398 | v -0.206044 0.538546 -0.112379 12.000000 1399 | v 0.157377 -0.110784 0.299738 13.000000 1400 | v -0.202355 -0.484371 0.338063 14.000000 1401 | v -0.204508 0.047533 -0.110395 12.000000 1402 | v 0.425687 -0.779099 0.381405 14.000000 1403 | v 0.418367 -0.347229 0.333625 14.000000 1404 | v -0.254572 0.095705 0.035640 12.000000 1405 | v 0.502299 -0.156766 -0.387599 14.000000 1406 | v 0.394122 -0.170883 0.366333 13.000000 1407 | v -0.198255 -0.700652 0.305317 14.000000 1408 | v -0.257455 0.096211 -0.082742 12.000000 1409 | v -0.158975 -0.555928 -0.351539 14.000000 1410 | v -0.252587 0.241665 -0.073998 12.000000 1411 | v 0.310957 -0.110784 -0.178881 13.000000 1412 | v -0.211398 0.683888 0.190998 12.000000 1413 | v -0.172605 0.552232 0.261226 12.000000 1414 | v -0.017940 -0.170883 0.010814 13.000000 1415 | v -0.196458 -0.415060 -0.400274 14.000000 1416 | v 0.418367 -0.455220 -0.360395 14.000000 1417 | v -0.145046 0.335221 -0.330832 12.000000 1418 | v -0.128009 -0.384075 -0.400180 14.000000 1419 | v -0.234146 0.221070 0.184988 12.000000 1420 | v 0.230918 -0.170883 -0.266540 13.000000 1421 | v -0.259983 0.667356 0.231757 12.000000 1422 | v -0.005452 -0.170883 0.016524 13.000000 1423 | v 0.418367 -0.255995 -0.325122 14.000000 1424 | v 0.083442 -0.170883 0.294421 13.000000 1425 | v -0.063922 -0.170883 0.114686 13.000000 1426 | v -0.129076 -0.110784 0.126425 13.000000 1427 | v -0.197862 0.370101 -0.338977 12.000000 1428 | v -0.124302 0.622254 -0.232165 12.000000 1429 | v -0.149315 0.486535 -0.303179 12.000000 
1430 | v -0.208047 -0.319408 -0.331731 14.000000 1431 | v -0.126193 -0.728866 -0.398383 14.000000 1432 | v -0.113106 0.675557 0.389006 12.000000 1433 | v 0.502299 -0.202093 0.383259 14.000000 1434 | v -0.098334 0.716839 -0.313027 12.000000 1435 | v -0.212971 0.138261 -0.000513 12.000000 1436 | v -0.258747 0.004396 -0.097252 12.000000 1437 | v -0.258953 -0.084385 -0.089688 12.000000 1438 | v 0.257280 -0.170883 0.200603 13.000000 1439 | v -0.165060 -0.039995 -0.291946 12.000000 1440 | v -0.120407 -0.522826 0.389774 14.000000 1441 | v -0.250996 0.493088 -0.113484 12.000000 1442 | v 0.299162 -0.110784 -0.146847 13.000000 1443 | v -0.049150 -0.110784 -0.063645 13.000000 1444 | v 0.171344 -0.110784 0.106448 13.000000 1445 | v 0.330915 -0.110784 -0.164091 13.000000 1446 | v -0.125949 -0.450895 -0.400386 14.000000 1447 | v 0.418367 -0.539471 -0.384547 14.000000 1448 | v -0.142986 0.717982 -0.248173 12.000000 1449 | v 0.440647 -0.170883 -0.306868 13.000000 1450 | v -0.231674 0.263832 0.135393 12.000000 1451 | v -0.199903 0.700345 0.135262 12.000000 1452 | v 0.112630 -0.110784 0.137434 13.000000 1453 | v 0.169790 -0.110784 -0.249615 13.000000 1454 | v -0.208047 -0.760620 -0.331038 14.000000 1455 | v 0.444972 -0.741785 0.305186 14.000000 1456 | v -0.209601 0.457834 -0.181502 12.000000 1457 | v -0.126005 0.705756 -0.173040 12.000000 1458 | v 0.502299 -0.553306 -0.342459 14.000000 1459 | v 0.129106 -0.110784 -0.232016 13.000000 1460 | v -0.158283 -0.080978 0.319659 12.000000 1461 | v -0.118423 -0.284266 0.314809 14.000000 1462 | v 0.204576 -0.170883 -0.375111 13.000000 1463 | v 0.502299 -0.689325 0.374553 14.000000 1464 | v -0.197431 0.386108 0.185138 12.000000 1465 | v -0.205912 -0.742234 -0.401341 14.000000 1466 | v 0.502299 -0.539227 0.384906 14.000000 1467 | v 0.502299 -0.568584 -0.370468 14.000000 1468 | v 0.450045 -0.587100 -0.319561 14.000000 1469 | v -0.206811 0.464555 0.050505 12.000000 1470 | v 0.502299 -0.164873 0.312282 13.000000 1471 | v -0.173186 0.717345 0.261301 12.000000 1472 | v -0.232030 -0.106403 -0.122714 12.000000 1473 | v -0.203965 0.271471 -0.075740 12.000000 1474 | v 0.049966 -0.170883 -0.355733 13.000000 1475 | v 0.418367 -0.447656 -0.385427 14.000000 1476 | v -0.222594 0.219423 -0.063140 12.000000 1477 | v 0.329099 -0.110784 0.125245 13.000000 1478 | v 0.475077 -0.680974 0.389006 14.000000 1479 | v -0.166315 0.210623 -0.350472 12.000000 1480 | v 0.067453 -0.170883 0.078683 13.000000 1481 | v 0.133225 -0.170883 0.351973 13.000000 1482 | v -0.186254 0.229365 -0.183880 12.000000 1483 | v -0.179383 0.185105 -0.350678 12.000000 1484 | v -0.202355 -0.602434 0.342144 14.000000 1485 | v -0.092904 -0.110784 0.153235 13.000000 1486 | v -0.261705 -0.110784 0.153666 13.000000 1487 | v 0.128395 -0.170883 -0.145013 13.000000 1488 | v -0.171145 0.393785 -0.204681 12.000000 1489 | v 0.224010 -0.110784 0.062825 13.000000 1490 | v 0.120456 -0.170883 -0.002947 13.000000 1491 | v -0.134917 -0.110784 -0.023560 13.000000 1492 | v -0.229035 0.693437 -0.346615 12.000000 1493 | v 0.427260 -0.759852 0.305130 14.000000 1494 | v -0.221452 0.706074 -0.365282 12.000000 1495 | v 0.420052 -0.110784 0.364517 13.000000 1496 | v -0.146619 -0.425582 -0.368558 14.000000 1497 | v -0.125593 0.642399 0.161155 12.000000 1498 | v -0.062574 -0.110784 -0.199925 13.000000 1499 | v 0.475957 -0.779099 0.348191 14.000000 1500 | v 0.475377 -0.110784 0.122418 13.000000 1501 | v 0.090107 -0.110784 -0.361631 13.000000 1502 | v 0.502299 -0.155156 -0.043163 13.000000 1503 | v -0.172643 -0.170883 0.024556 13.000000 1504 | v 
0.373527 -0.110784 0.033524 13.000000 1505 | v -0.118423 -0.369527 0.318760 14.000000 1506 | v -0.246615 -0.134749 0.280229 13.000000 1507 | v -0.248768 -0.033854 0.046181 12.000000 1508 | v -0.203610 -0.017771 -0.317802 12.000000 1509 | v -0.215236 0.400225 -0.237145 12.000000 1510 | v 0.438550 -0.693462 -0.385352 14.000000 1511 | v 0.502299 -0.478642 0.358489 14.000000 1512 | v -0.085471 -0.110784 0.383577 13.000000 1513 | v 0.083030 -0.170883 -0.047843 13.000000 1514 | v 0.413518 -0.134692 -0.403101 13.000000 1515 | v 0.284858 -0.170883 -0.348750 13.000000 1516 | v 0.479402 -0.110784 -0.141961 13.000000 1517 | v 0.435255 -0.110784 0.160219 13.000000 1518 | v 0.418367 -0.411522 0.344054 14.000000 1519 | v -0.288628 -0.134786 0.049251 13.000000 1520 | v -0.107620 0.656872 -0.276481 12.000000 1521 | v 0.111264 -0.110784 -0.101988 13.000000 1522 | v -0.191346 -0.120295 -0.403569 13.000000 1523 | v -0.258710 -0.110784 0.030585 13.000000 1524 | v -0.143661 0.012016 -0.324467 12.000000 1525 | v -0.203872 0.035213 -0.319805 12.000000 1526 | v 0.054273 -0.170883 -0.339669 13.000000 1527 | v -0.231150 0.616600 -0.210840 12.000000 1528 | v -0.131585 -0.170883 0.255740 13.000000 1529 | v 0.074437 -0.151880 -0.402558 13.000000 1530 | v -0.118423 -0.640852 0.377792 14.000000 1531 | v -0.066674 -0.110784 0.347480 13.000000 1532 | v -0.173953 -0.450052 -0.400386 14.000000 1533 | v -0.230926 0.436565 -0.212563 12.000000 1534 | v -0.191758 0.399963 -0.181596 12.000000 1535 | v 0.433495 -0.655269 0.389044 14.000000 1536 | v 0.427766 -0.110784 -0.201198 13.000000 1537 | v -0.164143 0.310844 -0.292320 12.000000 1538 | v -0.179083 -0.182566 0.306984 14.000000 1539 | v 0.025010 -0.110784 -0.188317 13.000000 1540 | v -0.132933 -0.110784 -0.146978 13.000000 1541 | v 0.006924 -0.110784 0.165199 13.000000 1542 | v 0.488164 -0.112432 -0.403831 13.000000 1543 | v -0.218176 0.717982 0.119947 12.000000 1544 | v -0.209357 0.460904 -0.115693 12.000000 1545 | v 0.186340 -0.110784 -0.114195 13.000000 1546 | v -0.167887 0.717982 0.321606 12.000000 1547 | v -0.007174 -0.110784 0.347986 13.000000 1548 | v 0.293133 -0.170883 -0.001674 13.000000 1549 | v -0.166352 -0.110784 -0.035730 13.000000 1550 | v 0.305939 -0.138175 0.389006 13.000000 1551 | v 0.425294 -0.405811 0.390036 14.000000 1552 | v 0.001157 -0.170883 -0.177758 13.000000 1553 | v -0.102565 0.684862 -0.296383 12.000000 1554 | v -0.195709 0.594938 -0.180754 12.000000 1555 | v -0.147761 0.232716 -0.336861 12.000000 1556 | v -0.179626 0.717982 -0.096559 12.000000 1557 | v -0.206418 0.619090 -0.197529 12.000000 1558 | v -0.109193 -0.170883 0.261413 13.000000 1559 | v -0.187153 0.582338 0.267760 12.000000 1560 | v 0.241066 -0.110784 0.201576 13.000000 1561 | v -0.152441 0.717982 0.135692 12.000000 1562 | v -0.249536 -0.170883 -0.015940 13.000000 1563 | v -0.177585 0.045192 -0.291141 12.000000 1564 | v 0.362481 -0.110784 -0.083959 13.000000 1565 | v -0.140290 0.113641 0.278975 12.000000 1566 | v -0.156111 -0.170883 0.020568 13.000000 1567 | v -0.212933 0.634143 -0.118427 12.000000 1568 | v -0.192601 0.040830 0.180570 12.000000 1569 | v 0.260032 -0.170883 -0.041234 13.000000 1570 | v -0.202355 -0.544938 0.388931 14.000000 1571 | v 0.253666 -0.110784 0.310934 13.000000 1572 | v -0.237703 0.222887 -0.000700 12.000000 1573 | v -0.232742 0.194822 0.056366 12.000000 1574 | v -0.218363 0.547758 -0.235011 12.000000 1575 | v -0.208103 0.257504 -0.239935 12.000000 1576 | v -0.198274 -0.047427 0.290470 12.000000 1577 | v -0.191721 0.373976 0.179428 12.000000 1578 | v -0.183502 
0.077432 -0.349573 12.000000 1579 | v -0.162795 0.653857 0.389006 12.000000 1580 | v 0.501588 -0.170883 0.017741 13.000000 1581 | v -0.138025 0.093028 0.287306 12.000000 1582 | v -0.081371 -0.170883 -0.308496 13.000000 1583 | v 0.006287 -0.170883 0.049064 13.000000 1584 | v -0.222650 -0.097922 0.056927 12.000000 1585 | v -0.245248 0.070337 0.167633 12.000000 1586 | v 0.387438 -0.170883 -0.247312 13.000000 1587 | v 0.276826 -0.110784 -0.013057 13.000000 1588 | v 0.259938 -0.110784 -0.325347 13.000000 1589 | v -0.201251 0.588423 0.009054 12.000000 1590 | v 0.152565 -0.110784 -0.207339 13.000000 1591 | v 0.451075 -0.680750 0.305373 14.000000 1592 | v -0.181105 0.122085 0.263847 12.000000 1593 | v 0.472718 -0.353763 -0.403887 14.000000 1594 | v 0.196170 -0.170883 -0.094013 13.000000 1595 | v -0.241129 0.549349 0.177537 12.000000 1596 | v 0.418367 -0.676968 -0.393814 14.000000 1597 | v -0.080004 -0.170883 -0.356782 13.000000 1598 | v -0.036194 -0.170883 0.255085 13.000000 1599 | v 0.473841 -0.170883 -0.125391 13.000000 1600 | v 0.441377 -0.242833 -0.402801 14.000000 1601 | v -0.144278 0.683195 -0.313121 12.000000 1602 | v 0.075448 -0.170883 0.017610 13.000000 1603 | v 0.502299 -0.233940 0.352198 14.000000 1604 | v -0.040144 -0.170883 0.204572 13.000000 1605 | v 0.091437 -0.110784 0.298558 13.000000 1606 | v -0.199509 -0.315569 0.390485 14.000000 1607 | v -0.176032 0.622254 -0.024028 12.000000 1608 | v 0.494417 -0.110784 -0.266689 13.000000 1609 | v 0.421175 -0.672194 0.305411 14.000000 1610 | v -0.262304 0.717982 0.012817 12.000000 1611 | v -0.133813 -0.423242 0.306197 14.000000 1612 | v -0.218775 0.347784 -0.121141 12.000000 1613 | v -0.267116 0.622254 0.113244 12.000000 1614 | v -0.171782 0.567753 -0.201723 12.000000 1615 | v -0.185468 0.233596 0.162465 12.000000 1616 | v -0.205332 0.653970 0.049382 12.000000 1617 | v -0.279660 -0.170883 0.123223 13.000000 1618 | v 0.149195 -0.110784 0.019651 13.000000 1619 | v 0.190253 -0.110784 0.242747 13.000000 1620 | v -0.059335 -0.156897 -0.402389 13.000000 1621 | v -0.200951 0.650955 0.187666 12.000000 1622 | v -0.160323 -0.110784 0.367944 13.000000 1623 | v 0.411552 -0.110784 -0.108092 13.000000 1624 | v -0.195185 0.496945 0.026653 12.000000 1625 | v 0.262690 -0.110784 0.011825 13.000000 1626 | v -0.258934 0.511754 -0.095885 12.000000 1627 | v -0.149876 -0.110784 0.106804 13.000000 1628 | v -0.198124 0.257186 -0.240553 12.000000 1629 | v -0.166727 0.717982 -0.261110 12.000000 1630 | v 0.502299 -0.449940 -0.346334 14.000000 1631 | v -0.208047 -0.634618 -0.350659 14.000000 1632 | v -0.185374 0.622254 0.209459 12.000000 1633 | v -0.080697 0.708171 0.389006 12.000000 1634 | v 0.472419 -0.170883 -0.011035 13.000000 1635 | v 0.489250 -0.513428 0.305916 14.000000 1636 | v -0.144222 0.426848 -0.328717 12.000000 1637 | v -0.220535 -0.170883 0.281203 13.000000 1638 | v -0.192002 0.554535 0.142507 12.000000 1639 | v -0.138924 0.717982 -0.055576 12.000000 1640 | v 0.269430 -0.110784 -0.260998 13.000000 1641 | v -0.142050 0.260125 0.275230 12.000000 1642 | v 0.004565 -0.110784 -0.266895 13.000000 1643 | v -0.245772 0.680612 0.158590 12.000000 1644 | v 0.465136 -0.110784 -0.236921 13.000000 1645 | v -0.112900 -0.170883 -0.169576 13.000000 1646 | v -0.188220 0.169790 0.173605 12.000000 1647 | v -0.228173 0.313653 0.133670 12.000000 1648 | v -0.067180 -0.110784 -0.248735 13.000000 1649 | v -0.132390 0.670033 -0.117397 12.000000 1650 | v -0.143885 0.117648 -0.315985 12.000000 1651 | v -0.195578 0.269917 0.023732 12.000000 1652 | v 0.275515 -0.170883 0.220243 13.000000 
1653 | v 0.036374 -0.110784 0.092369 13.000000 1654 | v -0.201120 0.717982 0.201614 12.000000 1655 | v -0.020898 -0.110784 -0.060930 13.000000 1656 | v -0.168655 -0.110784 0.115210 13.000000 1657 | v -0.157103 0.717982 0.044159 12.000000 1658 | v -0.083412 -0.110784 -0.152801 13.000000 1659 | v -0.211847 0.622254 -0.319075 12.000000 1660 | v -0.236898 0.622254 -0.286535 12.000000 1661 | v -0.097248 -0.170883 -0.050895 13.000000 1662 | v 0.502299 -0.147424 -0.202696 13.000000 1663 | v -0.136733 -0.308268 0.390055 14.000000 1664 | v 0.186752 -0.110784 0.049794 13.000000 1665 | v -0.208047 -0.189811 -0.343264 14.000000 1666 | v -0.241242 0.233240 -0.120262 12.000000 1667 | v 0.089976 -0.110784 0.181974 13.000000 1668 | v -0.182903 -0.170883 -0.066510 13.000000 1669 | v -0.198330 0.694541 0.290789 12.000000 1670 | v -0.208309 0.339153 0.190586 12.000000 1671 | v -0.185543 -0.255527 -0.315836 14.000000 1672 | v 0.229683 -0.110784 0.086789 13.000000 1673 | v -0.059485 -0.170883 -0.230312 13.000000 1674 | v 0.223224 -0.110784 0.295020 13.000000 1675 | v -0.183165 -0.489145 -0.317184 14.000000 1676 | v 0.059665 -0.170883 0.173680 13.000000 1677 | v -0.202355 -0.706194 0.373542 14.000000 1678 | v -0.118423 -0.437284 0.363020 14.000000 1679 | v 0.017801 -0.170883 -0.388273 13.000000 1680 | v -0.168898 -0.483734 0.306010 14.000000 1681 | v -0.228978 0.619090 0.166940 12.000000 1682 | v -0.075642 -0.170883 0.240425 13.000000 1683 | v 0.117049 -0.110784 0.034367 13.000000 1684 | v -0.202374 0.512260 -0.311623 12.000000 1685 | v 0.227698 -0.170883 0.015476 13.000000 1686 | v -0.157328 0.393785 -0.346522 12.000000 1687 | v -0.141526 -0.698761 -0.401191 14.000000 1688 | v -0.118329 0.717982 -0.345941 12.000000 1689 | v -0.140328 0.622254 0.366315 12.000000 1690 | v 0.462402 -0.110784 -0.385352 13.000000 1691 | v -0.170040 -0.095825 0.260889 12.000000 1692 | v -0.284621 -0.143155 -0.108260 13.000000 1693 | v -0.191215 0.230844 0.143518 12.000000 1694 | v 0.478971 -0.314034 -0.403887 14.000000 1695 | v -0.143529 0.655299 0.273302 12.000000 1696 | v 0.240336 -0.110784 -0.276575 13.000000 1697 | v 0.502299 -0.461117 0.359163 14.000000 1698 | v -0.158301 0.622254 -0.172703 12.000000 1699 | v -0.118423 -0.540875 0.310691 14.000000 1700 | v -0.151973 0.717982 -0.207470 12.000000 1701 | v 0.018738 -0.110784 -0.053460 13.000000 1702 | v -0.062349 -0.170883 -0.182326 13.000000 1703 | v 0.447817 -0.170883 0.043522 13.000000 1704 | v -0.179551 -0.415322 -0.400274 14.000000 1705 | v 0.451300 -0.609193 -0.319636 14.000000 1706 | v -0.186142 0.086793 0.315072 12.000000 1707 | v 0.364091 -0.170883 0.278320 13.000000 1708 | v -0.172474 0.582675 0.320838 12.000000 1709 | v 0.455250 -0.693462 -0.335270 14.000000 1710 | v -0.242009 -0.170883 0.013810 13.000000 1711 | v 0.402303 -0.110784 -0.241508 13.000000 1712 | v 0.412207 -0.170883 -0.080439 13.000000 1713 | v 0.303917 -0.110784 0.361634 13.000000 1714 | v -0.118423 -0.408863 0.327971 14.000000 1715 | v -0.207129 0.634218 0.190231 12.000000 1716 | v 0.069925 -0.170883 -0.325665 13.000000 1717 | v 0.087168 -0.170883 0.177911 13.000000 1718 | v -0.158376 0.650506 -0.294717 12.000000 1719 | v -0.135254 0.717982 0.334917 12.000000 1720 | v -0.079424 0.673984 0.389006 12.000000 1721 | v 0.432278 -0.231075 0.306815 14.000000 1722 | v -0.253411 0.717982 -0.122396 12.000000 1723 | v -0.161728 -0.074182 -0.348731 12.000000 1724 | v -0.255396 0.054947 -0.107624 12.000000 1725 | v -0.278705 -0.170883 -0.148832 13.000000 1726 | v -0.241223 0.686079 -0.310818 12.000000 1727 | v 
0.247900 -0.170883 -0.100828 13.000000 1728 | v -0.185299 0.272389 -0.348825 12.000000 1729 | v 0.065731 -0.131435 0.389006 13.000000 1730 | v 0.158257 -0.170883 -0.288295 13.000000 1731 | v -0.234371 0.622254 -0.271482 12.000000 1732 | v 0.247095 -0.110784 -0.158118 13.000000 1733 | v -0.245435 0.481143 0.166303 12.000000 1734 | v -0.243619 0.717982 -0.049641 12.000000 1735 | v -0.236224 0.035101 0.183397 12.000000 1736 | v -0.083056 -0.170883 0.073160 13.000000 1737 | v 0.480881 -0.390759 0.389006 14.000000 1738 | v 0.499922 -0.489032 -0.403887 14.000000 1739 | v -0.020224 -0.110784 0.120583 13.000000 1740 | v 0.455662 -0.170883 0.049532 13.000000 1741 | v 0.272183 -0.170883 0.235202 13.000000 1742 | v -0.154351 -0.170883 0.235876 13.000000 1743 | v 0.007223 -0.170883 0.121538 13.000000 1744 | v -0.137894 0.574811 0.293784 12.000000 1745 | v -0.195259 -0.528743 -0.400648 14.000000 1746 | v 0.301670 -0.170883 0.156250 13.000000 1747 | v -0.202655 0.662413 -0.312297 12.000000 1748 | v -0.243320 0.533060 0.173231 12.000000 1749 | v 0.417618 -0.170883 0.111222 13.000000 1750 | v 0.476631 -0.329405 -0.318588 14.000000 1751 | v 0.440890 -0.170883 0.209665 13.000000 1752 | v 0.434056 -0.170883 0.009054 13.000000 1753 | v 0.462664 -0.370632 0.306366 14.000000 1754 | v -0.163525 -0.475403 -0.344874 14.000000 1755 | v 0.001382 -0.110784 0.106111 13.000000 1756 | v -0.255546 0.214761 0.023601 12.000000 1757 | v 0.056426 -0.170883 -0.270808 13.000000 1758 | v -0.195634 -0.010919 0.023246 12.000000 1759 | v -0.164293 -0.670939 0.305411 14.000000 1760 | v -0.091931 0.628507 0.323309 12.000000 1761 | v 0.278174 -0.110784 0.205957 13.000000 1762 | v 0.466690 -0.170883 -0.262477 13.000000 1763 | v -0.199303 0.711167 -0.240703 12.000000 1764 | v -0.232910 0.637943 -0.002554 12.000000 1765 | v -0.195578 0.228204 0.278937 12.000000 1766 | v -0.233210 0.315506 -0.062859 12.000000 1767 | v -0.208047 -0.596312 -0.354778 14.000000 1768 | v -0.091444 -0.110784 0.100401 13.000000 1769 | v 0.493668 -0.190560 0.389137 14.000000 1770 | v -0.096031 0.676568 0.307208 12.000000 1771 | v -0.059953 -0.170883 0.196503 13.000000 1772 | v 0.276807 -0.110784 -0.066566 13.000000 1773 | v -0.148697 -0.171407 0.390205 14.000000 1774 | v -0.237366 0.361620 -0.063570 12.000000 1775 | v 0.185854 -0.110784 0.178716 13.000000 1776 | v -0.137744 0.711522 -0.009575 12.000000 1777 | v -0.218925 0.444110 -0.234580 12.000000 1778 | v 0.202161 -0.170883 0.227938 13.000000 1779 | v -0.258485 0.622254 0.169131 12.000000 1780 | v -0.287186 -0.153864 0.071437 13.000000 1781 | v -0.096705 0.717982 0.338886 12.000000 1782 | v 0.031562 -0.110784 -0.036423 13.000000 1783 | v -0.173897 -0.110784 -0.265716 13.000000 1784 | v -0.143324 0.662563 -0.320179 12.000000 1785 | v 0.473130 -0.195802 -0.318101 14.000000 1786 | v -0.165416 -0.774006 -0.384210 14.000000 1787 | v 0.063784 -0.110784 -0.280226 13.000000 1788 | v 0.449203 -0.110784 0.066457 13.000000 1789 | v 0.261211 -0.135666 0.389006 13.000000 1790 | v -0.171239 -0.048607 -0.351127 12.000000 1791 | v -0.229278 0.337767 -0.062334 12.000000 1792 | v 0.418367 -0.701195 0.370621 14.000000 1793 | v -0.212372 0.369108 0.191111 12.000000 1794 | v -0.212709 0.604056 0.131124 12.000000 1795 | v -0.145458 0.193324 -0.310238 12.000000 1796 | v -0.262922 0.715379 -0.234880 12.000000 1797 | v -0.129263 0.717982 -0.335082 12.000000 1798 | v 0.502299 -0.135891 -0.042245 13.000000 1799 | v -0.245192 0.521565 0.049831 12.000000 1800 | v -0.105560 -0.113611 -0.403793 13.000000 1801 | v -0.245922 0.695964 
0.162559 12.000000 1802 | v -0.254010 0.578912 -0.109552 12.000000 1803 | v 0.285681 -0.110784 -0.345548 13.000000 1804 | v -0.247401 0.122254 -0.068869 12.000000 1805 | v 0.064589 -0.110784 0.034629 13.000000 1806 | v -0.137744 -0.273201 0.306684 14.000000 1807 | v 0.458714 -0.386883 -0.318813 14.000000 1808 | v -0.249929 0.360627 0.044664 12.000000 1809 | v -0.138063 0.017259 0.287025 12.000000 1810 | v -0.288123 0.668573 -0.076189 12.000000 1811 | v -0.197413 0.204501 -0.302505 12.000000 1812 | v -0.202355 -0.681873 0.322092 14.000000 1813 | v -0.254403 0.116506 -0.076376 12.000000 1814 | v -0.148154 -0.251146 -0.399749 14.000000 1815 | v 0.502299 -0.136209 -0.150966 13.000000 1816 | v 0.491778 -0.151355 0.389006 13.000000 1817 | v -0.185674 0.103419 0.164150 12.000000 1818 | v 0.502299 -0.629095 -0.346709 14.000000 1819 | v -0.163956 -0.493844 -0.344312 14.000000 1820 | v 0.055770 -0.110784 -0.322538 13.000000 1821 | v 0.272800 -0.170883 -0.055445 13.000000 1822 | v -0.202355 -0.287243 0.365379 14.000000 1823 | v 0.043002 -0.110784 -0.343077 13.000000 1824 | v -0.191833 -0.165903 0.389175 14.000000 1825 | v 0.017446 -0.170883 0.369722 13.000000 1826 | v -0.118423 -0.183202 0.309249 14.000000 1827 | v 0.327620 -0.170883 -0.397016 13.000000 1828 | v 0.502299 -0.676107 0.341096 14.000000 1829 | v 0.283116 -0.170883 0.153497 13.000000 1830 | v -0.057463 -0.170883 -0.258882 13.000000 1831 | v 0.472250 -0.403677 -0.318869 14.000000 1832 | v -0.083000 0.717982 0.384401 12.000000 1833 | v -0.202355 -0.617150 0.375058 14.000000 1834 | v -0.190598 -0.779099 0.380188 14.000000 1835 | v -0.211810 -0.020130 0.053838 12.000000 1836 | v 0.319344 -0.110784 0.121426 13.000000 1837 | v -0.158545 0.080147 -0.294642 12.000000 1838 | v -0.141320 -0.310758 0.306572 14.000000 1839 | v -0.164499 -0.779099 0.388725 14.000000 1840 | v 0.352539 -0.170883 -0.024234 13.000000 1841 | v -0.208047 -0.180431 -0.359309 14.000000 1842 | v -0.222594 0.639928 -0.189366 12.000000 1843 | v -0.231955 0.690890 0.324227 12.000000 1844 | v -0.144054 0.413892 -0.314637 12.000000 1845 | v -0.247289 0.525515 0.005665 12.000000 1846 | v 0.236048 -0.110784 -0.053891 13.000000 1847 | v -0.069520 -0.110784 0.165423 13.000000 1848 | v 0.221763 -0.110784 0.032307 13.000000 1849 | v -0.001127 -0.110784 0.035883 13.000000 1850 | v -0.078731 -0.110784 0.193844 13.000000 1851 | v -0.120988 0.657901 0.389006 12.000000 1852 | v -0.202355 -0.573770 0.332858 14.000000 1853 | v 0.070711 -0.170883 -0.114158 13.000000 1854 | v -0.194323 0.020722 -0.180922 12.000000 1855 | v -0.202187 0.398390 -0.311193 12.000000 1856 | v -0.146918 0.548151 0.269146 12.000000 1857 | v 0.475246 -0.527732 -0.319337 14.000000 1858 | v -0.245772 0.132120 0.158665 12.000000 1859 | v -0.118423 -0.625088 0.371164 14.000000 1860 | v -0.257249 0.520123 -0.082255 12.000000 1861 | v -0.004815 -0.170883 0.286651 13.000000 1862 | v -0.171126 0.676905 -0.216158 12.000000 1863 | v -0.223287 0.065113 -0.063046 12.000000 1864 | v -0.186647 0.153614 0.152468 12.000000 1865 | v 0.349019 -0.170883 0.272703 13.000000 1866 | v -0.246053 0.224871 0.161567 12.000000 1867 | v -0.173317 0.687726 0.261320 12.000000 1868 | v -0.122504 0.691471 0.183360 12.000000 1869 | v -0.174010 -0.170883 0.038692 13.000000 1870 | v -0.174122 -0.415416 0.306235 14.000000 1871 | v -0.194473 -0.110784 -0.079990 13.000000 1872 | v -0.006013 -0.170883 0.275698 13.000000 1873 | v 0.484832 -0.186947 -0.403719 14.000000 1874 | v -0.154407 -0.034228 0.263997 12.000000 1875 | v -0.202355 -0.756313 0.369348 14.000000 
1876 | v 0.461279 -0.170883 0.098697 13.000000 1877 | v 0.229776 -0.110784 -0.089164 13.000000 1878 | v -0.098596 0.687520 0.297117 12.000000 1879 | v 0.012353 -0.170883 0.277589 13.000000 1880 | v -0.248693 0.658519 -0.115562 12.000000 1881 | v -0.118423 -0.758747 0.318198 14.000000 1882 | v 0.356976 -0.110784 -0.395724 13.000000 1883 | v 0.452105 -0.110784 0.275492 13.000000 1884 | v 0.307306 -0.170883 0.169542 13.000000 1885 | v 0.033435 -0.170883 -0.212338 13.000000 1886 | v -0.138400 -0.071411 0.297641 12.000000 1887 | v -0.179065 -0.170883 0.265120 13.000000 1888 | v -0.234539 0.622254 -0.015940 12.000000 1889 | v -0.182566 0.189355 -0.234506 12.000000 1890 | v -0.138137 -0.110784 0.180383 13.000000 1891 | v 0.317921 -0.110784 -0.035767 13.000000 1892 | v 0.165596 -0.110784 0.244544 13.000000 1893 | v -0.233734 0.281095 -0.002348 12.000000 1894 | v -0.138699 0.036599 0.299214 12.000000 1895 | v -0.171688 -0.778836 0.305074 14.000000 1896 | v -0.039658 -0.170883 -0.174744 13.000000 1897 | v -0.249180 -0.059447 -0.115188 12.000000 1898 | v -0.168187 -0.255883 -0.399768 14.000000 1899 | v -0.118423 -0.478342 0.389643 14.000000 1900 | v -0.284434 0.717982 -0.082704 12.000000 1901 | v -0.148641 -0.655081 -0.366442 14.000000 1902 | v -0.117955 0.717982 -0.320011 12.000000 1903 | v 0.169172 -0.170883 -0.129267 13.000000 1904 | v 0.327357 -0.170883 0.024743 13.000000 1905 | v -0.180487 -0.061151 0.263585 12.000000 1906 | v -0.202355 -0.317067 0.378316 14.000000 1907 | v 0.366057 -0.110784 -0.048236 13.000000 1908 | v -0.202355 -0.358200 0.358695 14.000000 1909 | v 0.437033 -0.532581 -0.403775 14.000000 1910 | v -0.170864 0.664304 0.389006 12.000000 1911 | v 0.502299 -0.267191 0.370808 14.000000 1912 | v -0.118423 -0.559073 0.346769 14.000000 1913 | v -0.216285 -0.110784 -0.265079 13.000000 1914 | v -0.185224 -0.431311 0.390111 14.000000 1915 | v -0.226825 0.588385 0.189163 12.000000 1916 | v 0.418367 -0.463776 0.340815 14.000000 1917 | v 0.502299 -0.147611 0.090384 13.000000 1918 | v 0.221782 -0.170883 -0.353430 13.000000 1919 | v -0.192844 0.652940 -0.344294 12.000000 1920 | v 0.145432 -0.110784 -0.050034 13.000000 1921 | v -0.202355 -0.629450 0.343530 14.000000 1922 | v 0.406722 -0.170883 -0.380933 13.000000 1923 | v -0.196870 0.286243 0.017404 12.000000 1924 | v -0.135479 0.622254 -0.276388 12.000000 1925 | v -0.036044 -0.170883 -0.346110 13.000000 1926 | v -0.180824 -0.170883 0.282139 13.000000 1927 | v -0.196046 0.068502 0.280098 12.000000 1928 | v -0.215292 0.625680 0.130787 12.000000 1929 | v -0.177155 0.048600 0.262200 12.000000 1930 | v 0.283884 -0.170883 0.077185 13.000000 1931 | v -0.202524 0.316910 -0.311979 12.000000 1932 | v -0.234445 0.676586 -0.331525 12.000000 1933 | v 0.013477 -0.110784 -0.315929 13.000000 1934 | v -0.189062 0.149045 0.175627 12.000000 1935 | v 0.147211 -0.110784 -0.302131 13.000000 1936 | v 0.464256 -0.338018 0.389343 14.000000 1937 | v -0.163881 0.120513 -0.349630 12.000000 1938 | v -0.175919 0.280963 0.320351 12.000000 1939 | v -0.084404 -0.170883 -0.061848 13.000000 1940 | v -0.118423 -0.653378 0.323216 14.000000 1941 | v -0.132465 0.622254 0.337595 12.000000 1942 | v -0.229390 0.351322 -0.220258 12.000000 1943 | v -0.045705 -0.170883 -0.116760 13.000000 1944 | v -0.199659 0.064795 -0.084595 12.000000 1945 | v -0.255826 -0.023350 -0.078848 12.000000 1946 | v -0.135816 -0.169123 -0.401996 13.000000 1947 | v -0.208047 -0.250734 -0.331937 14.000000 1948 | v -0.235063 0.458882 -0.122321 12.000000 1949 | v 0.502299 -0.755096 0.307564 14.000000 1950 | v 
-0.150176 0.701618 0.389006 12.000000 1951 | v 0.363847 -0.170883 -0.247892 13.000000 1952 | v -0.118423 -0.221752 0.369273 14.000000 1953 | v -0.202355 -0.298813 0.324976 14.000000 1954 | v 0.235842 -0.170883 0.313611 13.000000 1955 | v -0.187789 0.439355 -0.294249 12.000000 1956 | v -0.250640 0.546185 -0.071471 12.000000 1957 | v -0.025990 -0.170883 0.380263 13.000000 1958 | v -0.219449 0.450888 -0.002704 12.000000 1959 | v 0.042346 -0.170883 -0.348450 13.000000 1960 | v -0.101853 0.635135 0.284292 12.000000 1961 | v 0.458938 -0.497869 0.305972 14.000000 1962 | v 0.463825 -0.278143 0.306665 14.000000 1963 | v 0.208564 -0.170883 0.215319 13.000000 1964 | v -0.109099 0.652828 0.254673 12.000000 1965 | v -0.095038 -0.110784 -0.052224 13.000000 1966 | v 0.463245 -0.347173 0.306459 14.000000 1967 | v -0.162608 0.298675 -0.349105 12.000000 1968 | v 0.494511 -0.479802 -0.403887 14.000000 1969 | v -0.170771 0.516454 -0.213387 12.000000 1970 | v -0.048701 -0.170883 -0.351333 13.000000 1971 | v -0.160398 0.324886 0.261600 12.000000 1972 | v -0.204621 0.061050 -0.180585 12.000000 1973 | v -0.169703 0.497787 -0.350921 12.000000 1974 | v -0.276084 -0.170883 0.045694 13.000000 1975 | v -0.167906 -0.603857 0.389568 14.000000 1976 | v -0.247514 0.258740 0.005965 12.000000 1977 | v -0.145233 0.424808 0.310953 12.000000 1978 | v -0.240830 -0.011387 0.053201 12.000000 1979 | v -0.245454 0.311612 0.166041 12.000000 1980 | v 0.476893 -0.170883 0.026335 13.000000 1981 | v 0.468843 -0.698854 0.389006 14.000000 1982 | v -0.235232 0.087599 0.055523 12.000000 1983 | v 0.502299 -0.660867 0.366783 14.000000 1984 | v 0.353101 -0.170883 0.233573 13.000000 1985 | v 0.501026 -0.379132 0.306347 14.000000 1986 | v -0.171519 -0.110784 0.341040 13.000000 1987 | v 0.467045 -0.565364 -0.319468 14.000000 1988 | v -0.222107 0.444653 -0.188842 12.000000 1989 | v -0.218363 -0.063042 0.056366 12.000000 1990 | v -0.149858 0.217513 0.266899 12.000000 1991 | v 0.202854 -0.170883 -0.339126 13.000000 1992 | v -0.239931 0.437164 0.143200 12.000000 1993 | v -0.191683 0.590014 0.309623 12.000000 1994 | v -0.138942 0.455774 0.299813 12.000000 1995 | v -0.159687 0.143223 0.261806 12.000000 1996 | v 0.469198 -0.721209 0.389006 14.000000 1997 | v -0.132483 0.689168 0.101337 12.000000 1998 | v -0.215274 0.288378 0.055280 12.000000 1999 | v 0.368584 -0.170883 -0.304471 13.000000 2000 | v 0.360402 -0.170883 -0.060069 13.000000 2001 | v 0.483109 -0.110784 -0.366873 13.000000 2002 | v 0.243537 -0.170883 -0.227541 13.000000 2003 | v 0.467569 -0.170883 0.052059 13.000000 2004 | v -0.170209 0.622254 -0.003827 12.000000 2005 | v -0.186142 0.656741 0.167595 12.000000 2006 | v -0.181798 0.717982 0.336322 12.000000 2007 | v 0.341568 -0.170883 -0.247237 13.000000 2008 | v 0.092448 -0.170883 -0.159822 13.000000 2009 | v -0.242515 0.579717 0.051910 12.000000 2010 | v -0.218850 0.501813 0.056422 12.000000 2011 | v -0.213008 0.021864 0.131087 12.000000 2012 | v 0.406291 -0.110784 0.212435 13.000000 2013 | v 0.464855 -0.740137 0.389006 14.000000 2014 | v -0.161671 0.622254 -0.402202 12.000000 2015 | v 0.213301 -0.110784 0.154209 13.000000 2016 | v 0.484551 -0.187003 -0.318064 14.000000 2017 | v 0.414604 -0.110784 -0.285131 13.000000 2018 | v 0.493219 -0.230139 -0.318213 14.000000 2019 | v -0.256220 0.031600 -0.105639 12.000000 2020 | v -0.257717 0.711148 -0.102045 12.000000 2021 | v -0.145683 0.622254 -0.143384 12.000000 2022 | v -0.153958 -0.141208 0.389999 13.000000 2023 | v -0.206006 0.649383 -0.240216 12.000000 2024 | v -0.197581 0.317790 0.285228 
12.000000 2025 | v -0.010544 -0.110784 0.247203 13.000000 2026 | v 0.418367 -0.326110 -0.387636 14.000000 2027 | v -0.135329 0.717982 0.132472 12.000000 2028 | v 0.102820 -0.110784 0.388239 13.000000 2029 | v -0.131472 0.658444 -0.128443 12.000000 2030 | v 0.429713 -0.676350 -0.319880 14.000000 2031 | v 0.123396 -0.170883 0.092949 13.000000 2032 | v -0.136752 0.663761 0.030735 12.000000 2033 | v 0.103494 -0.170883 -0.066154 13.000000 2034 | v -0.171014 -0.355280 -0.333903 14.000000 2035 | v 0.502299 -0.262360 0.332090 14.000000 2036 | v -0.184925 -0.055721 -0.184667 12.000000 2037 | v 0.078162 -0.170883 -0.182682 13.000000 2038 | v -0.153490 0.012503 -0.298480 12.000000 2039 | v -0.128252 -0.779099 0.376687 14.000000 2040 | v 0.208264 -0.170883 -0.159279 13.000000 2041 | v 0.472175 -0.491129 0.305991 14.000000 2042 | v 0.033584 -0.110784 0.311046 13.000000 2043 | v -0.287711 0.717982 0.026204 12.000000 2044 | v -0.203235 0.360646 -0.327125 12.000000 2045 | v -0.208047 -0.770823 -0.376927 14.000000 2046 | v -0.162065 -0.293496 -0.346353 14.000000 2047 | v -0.202355 -0.310833 0.307863 14.000000 2048 | v -0.118423 -0.577009 0.354763 14.000000 2049 | -------------------------------------------------------------------------------- /tools/obj_data/display.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/tools/obj_data/display.png -------------------------------------------------------------------------------- /tools/pics/display.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eriche2016/pointnet2.pytorch/3f49a4d90efabe1fabe00bea41e3b730ebc995e0/tools/pics/display.png -------------------------------------------------------------------------------- /tools/test.lua: -------------------------------------------------------------------------------- 1 | require 'nn' 2 | require 'cunn' 3 | require 'cudnn' 4 | 5 | -- currently support 12 views 6 | -- inputs will be Tensors of size: bz x 12 x C x H x W 7 | encoder = nn.Sequential() 8 | encoder:add(nn.SpatialConvolution(1, 96, 11, 11, 4, 4)) 9 | encoder:add(nn.ReLU(true)) 10 | 11 | mv_share_net = nn.ParallelTable() 12 | -- siamese style 13 | mv_share_net:add(encoder) 14 | p, pg = mv_share_net:getParameters() 15 | 16 | for k = 1, 11 do 17 | mv_share_net:add(encoder:clone('weight','bias', 'gradWeight','gradBias')) 18 | end 19 | 20 | -- p1, pg1 = mv_share_net:getParameters() -------------------------------------------------------------------------------- /tools/visualizations/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | g++ -std=c++11 render_balls_so.cpp -o render_balls_so.so -shared -fPIC -O2 -D_GLIBCXX_USE_CXX11_ABI=0 3 | 4 | -------------------------------------------------------------------------------- /tools/visualizations/render_balls_so.cpp: -------------------------------------------------------------------------------- 1 | /************************************************************************* 2 | > File Name: render_balls_so.cpp 3 | > Author: 4 | > Mail: 5 | > Created Time: 2017年07月14日 星期五 20时01分15秒 6 | ************************************************************************/ 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | using namespace std; 13 | 14 | struct PointInfo{ 15 | int x,y,z; 16 | float r,g,b; 17 | }; 18 | 19 | extern "C"{ 20 | 21 | void 
render_ball(int h,int w,unsigned char * show,int n,int * xyzs,float * c0,float * c1,float * c2,int r){ 22 | r=max(r,1); 23 | vector<int> depth(h*w,-2100000000); 24 | vector<PointInfo> pattern; 25 | for (int dx=-r;dx<=r;dx++) 26 | for (int dy=-r;dy<=r;dy++) 27 | if (dx*dx+dy*dy=h || y2<0 || y2>=w) && depth[x2*w+y2] -------------------------------------------------------------------------------- /tools/visualizations/show3d_balls.py: -------------------------------------------------------------------------------- 87 | if magnifyBlue>0: 88 | show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],1,axis=0)) 89 | if magnifyBlue>=2: 90 | show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],-1,axis=0)) 91 | show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],1,axis=1)) 92 | if magnifyBlue>=2: 93 | show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],-1,axis=1)) 94 | if showrot: 95 | cv2.putText(show,'xangle %d'%(int(xangle/np.pi*180)),(30,showsz-30),0,0.5,(0,0,255)) # red in BGR; cv2.cv.CV_RGB exists only in OpenCV 2 96 | cv2.putText(show,'yangle %d'%(int(yangle/np.pi*180)),(30,showsz-50),0,0.5,(0,0,255)) 97 | cv2.putText(show,'zoom %d%%'%(int(zoom*100)),(30,showsz-70),0,0.5,(0,0,255)) 98 | changed=True 99 | while True: 100 | if changed: 101 | render() 102 | changed=False 103 | cv2.imshow('show3d',show) 104 | if waittime==0: 105 | cmd=cv2.waitKey(10)%256 106 | else: 107 | cmd=cv2.waitKey(waittime)%256 108 | if cmd==ord('q'): 109 | break 110 | elif cmd==ord('Q'): 111 | sys.exit(0) 112 | 113 | if cmd==ord('t') or cmd == ord('p'): 114 | if cmd == ord('t'): 115 | if c_gt is None: 116 | c0=np.zeros((len(xyz),),dtype='float32')+255 117 | c1=np.zeros((len(xyz),),dtype='float32')+255 118 | c2=np.zeros((len(xyz),),dtype='float32')+255 119 | else: 120 | c0=c_gt[:,0] 121 | c1=c_gt[:,1] 122 | c2=c_gt[:,2] 123 | else: 124 | if c_pred is None: 125 | c0=np.zeros((len(xyz),),dtype='float32')+255 126 | c1=np.zeros((len(xyz),),dtype='float32')+255 127 | c2=np.zeros((len(xyz),),dtype='float32')+255 128 | else: 129 | c0=c_pred[:,0] 130 | c1=c_pred[:,1] 131 | c2=c_pred[:,2] 132 | if normalizecolor: 133 | c0/=(c0.max()+1e-14)/255.0 134 | c1/=(c1.max()+1e-14)/255.0 135 | c2/=(c2.max()+1e-14)/255.0 136 | c0=np.require(c0,'float32','C') 137 | c1=np.require(c1,'float32','C') 138 | c2=np.require(c2,'float32','C') 139 | changed = True 140 | 141 | 142 | 143 | if cmd==ord('n'): 144 | zoom*=1.1 145 | changed=True 146 | elif cmd==ord('m'): 147 | zoom/=1.1 148 | changed=True 149 | elif cmd==ord('r'): 150 | zoom=1.0 151 | changed=True 152 | elif cmd==ord('s'): 153 | cv2.imwrite('show3d.png',show) 154 | if waittime!=0: 155 | break 156 | return cmd 157 | if __name__=='__main__': 158 | np.random.seed(100) 159 | showpoints(np.random.randn(2500,3)) 160 | 161 | --------------------------------------------------------------------------------
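For quick reference, below is a minimal sketch of driving the viewer above with the kind of labeled point data shown earlier in this dump (one ```v x y z label``` record per point). It is only a sketch: it assumes ```showpoints``` accepts optional ```c_gt```/```c_pred``` per-point color arrays, as the ```t```/```p``` key handler suggests, that ```render_balls_so.so``` has already been built with ```build.sh```, and the input file name and label-to-color mapping are placeholders, not part of the repository.

```python
# Minimal sketch -- assumptions: showpoints() accepts optional c_gt/c_pred (N x 3 float
# arrays), and render_balls_so.so sits next to show3d_balls.py. The input path and the
# label-to-color mapping below are placeholders for illustration only.
import numpy as np
from show3d_balls import showpoints

# parse "v x y z label" records like the labeled OBJ data above
pts, labels = [], []
with open('0_labels_mask.obj') as f:  # placeholder path
    for line in f:
        if line.startswith('v '):
            _, x, y, z, lbl = line.split()
            pts.append([float(x), float(y), float(z)])
            labels.append(int(float(lbl)))
pts = np.array(pts, dtype='float32')
labels = np.array(labels)

# one (arbitrary) color per part label
palette = np.random.RandomState(0).randint(0, 255, (labels.max() + 1, 3)).astype('float32')
colors = palette[labels]

# 't' shows the c_gt colors, 'p' the c_pred colors, 'n'/'m' zoom, 's' saves show3d.png, 'q' quits
showpoints(pts, c_gt=colors, c_pred=colors)
```

If either color array is ```None```, the viewer falls back to plain white points, matching the fallback branches in the key handler above.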