├── .gitignore
├── 3DLoMatch.pkl
├── LICENSE
├── README.md
├── SC2_PCR.py
├── benchmark_utils.py
├── benchmark_utils_predator.py
├── benchmarks
│   └── 3DLoMatch
│       ├── 7-scenes-redkitchen
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       ├── sun3d-home_at-home_at_scan1_2013_jan_1
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       ├── sun3d-home_md-home_md_scan9_2012_sep_30
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       ├── sun3d-hotel_uc-scan3
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       ├── sun3d-hotel_umd-maryland_hotel1
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       ├── sun3d-hotel_umd-maryland_hotel3
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       ├── sun3d-mit_76_studyroom-76-1studyroom2
│       │   ├── gt.info
│       │   ├── gt.log
│       │   └── gt_overlap.log
│       └── sun3d-mit_lab_hj-lab_hj_tea_nov_2_2012_scan1_erika
│           ├── gt.info
│           ├── gt.log
│           └── gt_overlap.log
├── common.py
├── config.py
├── config_json
│   ├── config_3DLoMatch.json
│   ├── config_3DMatch.json
│   └── config_KITTI.json
├── dataset.py
├── environment.yml
├── evaluate_metric.py
├── figures
│   └── pipeline.png
├── test_3DLoMatch.py
├── test_3DMatch.py
├── test_KITTI.py
└── utils
    ├── SE3.py
    ├── libpmc.so
    ├── max_clique.py
    ├── pointcloud.py
    ├── sinkhorn.py
    └── timer.py

/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 | 
30 | # PyInstaller
31 | #  Usually these files are written by a python script from a template
32 | #  before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 | 
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 | 
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | 
54 | # Translations
55 | *.mo
56 | *.pot
57 | 
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 | 
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 | 
68 | # Scrapy stuff:
69 | .scrapy
70 | 
71 | # Sphinx documentation
72 | docs/_build/
73 | 
74 | # PyBuilder
75 | target/
76 | 
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 | 
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 | 
84 | # pyenv
85 | .python-version
86 | 
87 | # pipenv
88 | #   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | #   However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | #   having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | #   install all needed dependencies.
92 | #Pipfile.lock
93 | 
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 | 
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 | 
101 | # SageMath parsed files
102 | *.sage.py
103 | 
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 | 
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 | 
117 | # Rope project settings
118 | .ropeproject
119 | 
120 | # mkdocs documentation
121 | /site
122 | 
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 | 
128 | # Pyre type checker
129 | .pyre/
130 | 
--------------------------------------------------------------------------------
/3DLoMatch.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZhiChen902/SC2-PCR/19961eae0a4789442652bfd681589115dc6f0570/3DLoMatch.pkl
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2021 ZhiChen
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SC^2-PCR: A Second Order Spatial Compatibility for Efficient and Robust Point Cloud Registration (CVPR 2022)
2 | 
3 | PyTorch implementation of the paper:
4 | 
5 | [SC^2-PCR: A Second Order Spatial Compatibility for Efficient and Robust Point Cloud Registration](https://arxiv.org/abs/2203.14453).
6 | 
7 | Zhi Chen, [Kun Sun](https://scholar.google.com/citations?user=Ay6kCm4AAAAJ&hl=en), Fan Yang, [Wenbing Tao](https://scholar.google.co.uk/citations?user=jRDPE2AAAAAJ&hl=zh-CN&oi=ao).
8 | 
9 | ## Introduction
10 | 
11 | In this paper, we present a second order spatial compatibility (SC^2) measure based method for efficient and robust point cloud registration (PCR), called SC^2-PCR. Firstly, we propose a second order spatial compatibility (SC^2) measure to compute the similarity between correspondences. It considers the global compatibility instead of local consistency, allowing for more distinctive clustering between inliers and outliers at an early stage. Based on this measure, our registration pipeline employs a global spectral technique to find some reliable seeds from the initial correspondences. Then we design a two-stage strategy to expand each seed to a consensus set based on the SC^2 measure matrix. Finally, we feed each consensus set to a weighted SVD algorithm to generate a candidate rigid transformation and select the best model as the final result. Our method is guaranteed to find a certain number of outlier-free consensus sets using fewer samplings, making the model estimation more efficient and robust. In addition, the proposed SC^2 measure is general and can easily be plugged into deep learning based frameworks. Extensive experiments are carried out to investigate the performance of our method.
12 | 
13 | ![](figures/pipeline.png)
14 | 
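At its core, the SC^2 measure needs only a cross-distance consistency check followed by one matrix product. The following is a minimal, self-contained sketch of that computation (it mirrors the logic in `SC2_PCR.py`; the function name `sc2_measure` and the toy inputs are ours, not part of the repository):

```python
import torch

def sc2_measure(src_keypts, tgt_keypts, d_thre=0.1):
    # Pairwise distances within each point set: [bs, num_corr, num_corr]
    src_dist = torch.norm(src_keypts[:, :, None, :] - src_keypts[:, None, :, :], dim=-1)
    tgt_dist = torch.norm(tgt_keypts[:, :, None, :] - tgt_keypts[:, None, :, :], dim=-1)
    # First order (hard) compatibility: correspondences i and j are compatible
    # if they preserve the pairwise distance up to d_thre.
    hard_SC = (torch.abs(src_dist - tgt_dist) < d_thre).float()
    # Second order compatibility: entry (i, j) additionally counts how many
    # other correspondences are compatible with both i and j.
    return torch.matmul(hard_SC, hard_SC) * hard_SC

src, tgt = torch.rand(1, 100, 3), torch.rand(1, 100, 3)
print(sc2_measure(src, tgt).shape)  # torch.Size([1, 100, 100])
```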
15 | ## Requirements
16 | 
17 | If you are using conda, you may configure the SC2-PCR environment as:
18 | 
19 |     conda env create -f environment.yml
20 |     conda activate SC2_PCR
21 | 
22 | ## 3DMatch
23 | 
24 | ### Data preparation
25 | 
26 | Downsample and extract FPFH and FCGF descriptors for each frame of the 3DMatch test dataset. [Here](https://drive.google.com/file/d/1kRwuTHlNPr9siENcEMddCO23Oaq0cz-X/view?usp=sharing) we provide the processed test set with pre-computed FPFH/FCGF descriptors. The data should be organized as follows:
27 | 
28 | ```
29 | --data--3DMatch
30 |         ├── fragments
31 |         │   ├── 7-scene-redkitechen/
32 |         |   |   ├── cloud_bin_0.ply
33 |         |   |   ├── cloud_bin_0_fcgf.npz
34 |         |   |   ├── cloud_bin_0_fpfh.npz
35 |         │   |   └── ...
36 |         │   ├── sun3d-home_at-home_at_scan1_2013_jan_1/
37 |         │   └── ...
38 |         ├── gt_result
39 |         │   ├── 7-scene-redkitechen-evaluation/
40 |         |   |   ├── 3dmatch.log
41 |         |   |   ├── gt.info
42 |         |   |   ├── gt.log
43 |         │   |   └── ...
44 |         │   ├── sun3d-home_at-home_at_scan1_2013_jan_1-evaluation/
45 |         │   └── ...
46 | ```
47 | 
48 | ### Testing
49 | 
50 | Use the following command for testing:
51 | 
52 | ```bash
53 | python ./test_3DMatch.py --config_path config_json/config_3DMatch.json
54 | ```
55 | 
56 | The CUDA_DEVICE and basic parameters can be changed in the JSON file.
57 | 
58 | ## 3DLoMatch
59 | 
60 | ### Data preparation
61 | 
62 | FPFH and FCGF descriptors can be prepared in the same way as for 3DMatch. If you want to test the [Predator](https://github.com/prs-eth/OverlapPredator) descriptor, you should first follow the official instructions of Predator to extract the descriptors for the 3DMatch dataset and organize the data as follows:
63 | 
64 | ```
65 | --data--3DLoMatch
66 |         ├── 0.pth
67 |         ├── 1.pth
68 |         ├── ...
69 |         └── 1780.pth
70 | ```
71 | 
72 | ### Testing
73 | 
74 | Use the following command for testing:
75 | 
76 | ```bash
77 | python ./test_3DLoMatch.py --config_path config_json/config_3DLoMatch.json
78 | ```
79 | 
80 | ## KITTI odometry
81 | 
82 | ### Data preparation
83 | 
84 | Downsample and extract FPFH and FCGF descriptors for each frame of the KITTI test dataset. The raw point clouds can be downloaded from the [KITTI odometry website](http://www.cvlibs.net/datasets/kitti/eval_odometry.php). For your convenience, [here](https://drive.google.com/drive/folders/1sxkHYjWHhSUE3IcvmZ2p1ziw1LqJqqfc?usp=sharing) we provide the pre-computed FPFH and FCGF descriptors for the KITTI test set.
85 | 
86 | ```
87 | --data--KITTI
88 |         ├── fpfh_test
89 |         │   ├── pair_0.npz
90 |         |   ├── pair_1.npz
91 |         |   ├── ...
92 |         |   └── pair_554.npz
93 |         ├── fcgf_test
94 |         │   ├── pair_0.npz
95 |         |   ├── pair_1.npz
96 |         |   ├── ...
97 |         |   └── pair_554.npz
98 | ```
99 | 
100 | ### Testing
101 | 
102 | Use the following command for testing:
103 | 
104 | ```bash
105 | python ./test_KITTI.py --config_path config_json/config_KITTI.json
106 | ```
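The array keys stored inside the provided `.npz` files are not documented here, so a quick sanity check after downloading is to list them (the path below is illustrative):

```python
import numpy as np

# Print the array names and shapes stored in one pre-computed descriptor file.
data = np.load('data/KITTI/fcgf_test/pair_0.npz')
for key in data.files:
    print(key, data[key].shape)
```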
107 | 
108 | ## Results
109 | 
110 | ### 3DMatch
111 | 
112 | We evaluate SC^2-PCR on the standard 3DMatch benchmark:
113 | 
114 | | Benchmark      | RR(%) | RE(°) | TE(cm) | IP(%) | IR(%) | F1(%) |
115 | |:---------------|:-----:|:-----:|:------:|:-----:|:-----:|:-----:|
116 | | 3DMatch+FPFH   | 83.98 | 2.18  | 6.56   | 72.48 | 78.33 | 75.10 |
117 | | 3DMatch+FCGF   | 93.28 | 2.08  | 6.55   | 78.94 | 86.39 | 82.20 |
118 | 
119 | ### 3DLoMatch
120 | 
121 | We evaluate SC^2-PCR on the standard 3DLoMatch benchmark:
122 | 
123 | | Benchmark           | RR(%) | RE(°) | TE(cm) | IP(%) | IR(%) | F1(%) |
124 | |:--------------------|:-----:|:-----:|:------:|:-----:|:-----:|:-----:|
125 | | 3DLoMatch+FCGF      | 57.83 | 3.77  | 10.46  | 44.87 | 53.69 | 48.38 |
126 | | 3DLoMatch+Predator  | 69.46 | 3.46  | 9.58   | 56.98 | 67.47 | 61.08 |
127 | 
128 | ### KITTI odometry
129 | 
130 | We evaluate SC^2-PCR on the standard KITTI benchmark:
131 | 
132 | | Benchmark      | RR(%) | RE(°) | TE(cm) | IP(%) | IR(%) | F1(%) |
133 | |:---------------|:-----:|:-----:|:------:|:-----:|:-----:|:-----:|
134 | | KITTI+FPFH     | 99.64 | 0.32  | 7.23   | 93.63 | 95.89 | 94.63 |
135 | | KITTI+FCGF     | 98.20 | 0.33  | 20.95  | 82.01 | 91.03 | 85.90 |
136 | 
137 | ## Citation
138 | 
139 | ```bibtex
140 | @InProceedings{Chen_2022_CVPR,
141 |     author = {Chen, Zhi and Sun, Kun and Yang, Fan and Tao, Wenbing},
142 |     title = {SC2-PCR: A Second Order Spatial Compatibility for Efficient and Robust Point Cloud Registration},
143 |     booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
144 |     month = {June},
145 |     year = {2022},
146 |     pages = {13221-13231}
147 | }
148 | ```
149 | 
150 | ## Acknowledgements
151 | 
152 | - [PointDSC](https://github.com/XuyangBai/PointDSC)
153 | - [FCGF](https://github.com/chrischoy/FCGF)
154 | - [DGR](https://github.com/chrischoy/DeepGlobalRegistration)
155 | - [PREDATOR](https://github.com/prs-eth/OverlapPredator)
156 | 
--------------------------------------------------------------------------------
/SC2_PCR.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from common import knn, rigid_transform_3d
3 | from utils.SE3 import transform
4 | import numpy as np
5 | 
6 | 
7 | 
8 | class Matcher():
9 |     def __init__(self,
10 |                  inlier_threshold=0.10,
11 |                  num_node='all',
12 |                  use_mutual=True,
13 |                  d_thre=0.1,
14 |                  num_iterations=10,
15 |                  ratio=0.2,
16 |                  nms_radius=0.1,
17 |                  max_points=8000,
18 |                  k1=30,
19 |                  k2=20,
20 |                  select_scene=None,
21 |                  ):
22 |         self.inlier_threshold = inlier_threshold
23 |         self.num_node = num_node
24 |         self.use_mutual = use_mutual
25 |         self.d_thre = d_thre
26 |         self.num_iterations = num_iterations  # maximum number of power iteration steps
27 |         self.ratio = ratio  # the maximum ratio of seeds
28 |         self.max_points = max_points
29 |         self.nms_radius = nms_radius
30 |         self.k1 = k1
31 |         self.k2 = k2
32 | 
33 |     def pick_seeds(self, dists, scores, R, max_num):
34 |         """
35 |         Select seeding points using Non Maximum Suppression (here we only support bs=1).
36 |         Input:
37 |             - dists: [bs, num_corr, num_corr] src keypoints distance matrix
38 |             - scores: [bs, num_corr] initial confidence of each correspondence
39 |             - R: float, radius of NMS
40 |             - max_num: int, maximum number of returned seeds
41 |         Output:
42 |             - picked_seeds: [bs, num_seeds] the indices of the seeding correspondences
43 |         """
44 |         assert scores.shape[0] == 1
45 | 
46 |         # parallel Non Maximum Suppression (more efficient)
47 |         score_relation = scores.T >= scores  # [num_corr, num_corr], save the relation of leading_eig
48 |         # score_relation[dists[0] >= R] = 1  # mask out the non-neighborhood nodes
49 |         score_relation = score_relation.bool() | (dists[0] >= R).bool()
50 |         is_local_max = score_relation.min(-1)[0].float()
51 | 
52 |         score_local_max = scores * is_local_max
53 |         sorted_score = torch.argsort(score_local_max, dim=1, descending=True)
54 | 
55 |         # max_num = scores.shape[1]
56 | 
57 |         return_idx = sorted_score[:, 0: max_num].detach()
58 | 
59 |         return return_idx
60 | 
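    # Illustrative usage of pick_seeds (a sketch added for clarity; not part of
    # the original file). A correspondence survives NMS only if its confidence is
    # the maximum within radius R among the source keypoints:
    #     matcher = Matcher()
    #     dists = torch.cdist(src_keypts[0], src_keypts[0])[None]  # [1, N, N]
    #     seeds = matcher.pick_seeds(dists, scores, R=matcher.nms_radius, max_num=100)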
61 |     def cal_seed_trans(self, seeds, SC2_measure, src_keypts, tgt_keypts):
62 |         """
63 |         Calculate the transformation for each seeding correspondence.
64 |         Input:
65 |             - seeds: [bs, num_seeds] the indices of the seeding correspondences
66 |             - SC2_measure: [bs, num_corr, num_channels]
67 |             - src_keypts: [bs, num_corr, 3]
68 |             - tgt_keypts: [bs, num_corr, 3]
69 |         Output:
70 |             - final_trans: [bs, 4, 4] best transformation matrix (after post refinement) for each batch.
71 |         """
72 |         bs, num_corr, num_channels = SC2_measure.shape[0], SC2_measure.shape[1], SC2_measure.shape[2]
73 |         k1 = self.k1
74 |         k2 = self.k2
75 | 
76 |         if k1 > num_channels:
77 |             k1 = 4
78 |             k2 = 4
79 | 
80 |         #################################
81 |         # The first stage of consensus set sampling:
82 |         # find the k1 nearest neighbors around each seed
83 |         #################################
84 |         sorted_score = torch.argsort(SC2_measure, dim=2, descending=True)
85 |         knn_idx = sorted_score[:, :, 0: k1]
86 |         sorted_value, _ = torch.sort(SC2_measure, dim=2, descending=True)
87 |         idx_tmp = knn_idx.contiguous().view([bs, -1])
88 |         idx_tmp = idx_tmp[:, :, None]
89 |         idx_tmp = idx_tmp.expand(-1, -1, 3)
90 | 
91 |         #################################
92 |         # construct the local SC2 measure of each consensus subset obtained in the first stage
93 |         #################################
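        # Shape note (annotation, not in the original file): knn_idx is
        # [bs, num_seeds, k1]; idx_tmp flattens it to [bs, num_seeds*k1, 3] so
        # that gather(dim=1) below can pull the xyz of the k1 most compatible
        # correspondences of every seed out of the [bs, num_corr, 3] keypoints.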
94 |         src_knn = src_keypts.gather(dim=1, index=idx_tmp).view([bs, -1, k1, 3])  # [bs, num_seeds, k1, 3]
95 |         tgt_knn = tgt_keypts.gather(dim=1, index=idx_tmp).view([bs, -1, k1, 3])
96 |         src_dist = ((src_knn[:, :, :, None, :] - src_knn[:, :, None, :, :]) ** 2).sum(-1) ** 0.5
97 |         tgt_dist = ((tgt_knn[:, :, :, None, :] - tgt_knn[:, :, None, :, :]) ** 2).sum(-1) ** 0.5
98 |         cross_dist = torch.abs(src_dist - tgt_dist)
99 |         local_hard_SC_measure = (cross_dist < self.d_thre).float()
100 |         local_SC2_measure = torch.matmul(local_hard_SC_measure[:, :, :1, :], local_hard_SC_measure)
101 | 
102 |         #################################
103 |         # perform the second stage of consensus set sampling
104 |         #################################
105 |         sorted_score = torch.argsort(local_SC2_measure, dim=3, descending=True)
106 |         knn_idx_fine = sorted_score[:, :, :, 0: k2]
107 | 
108 |         #################################
109 |         # construct the soft SC2 matrix of the consensus set
110 |         #################################
111 |         num = knn_idx_fine.shape[1]
112 |         knn_idx_fine = knn_idx_fine.contiguous().view([bs, num, -1])[:, :, :, None]
113 |         knn_idx_fine = knn_idx_fine.expand(-1, -1, -1, 3)
114 |         src_knn_fine = src_knn.gather(dim=2, index=knn_idx_fine).view([bs, -1, k2, 3])  # [bs, num_seeds, k2, 3]
115 |         tgt_knn_fine = tgt_knn.gather(dim=2, index=knn_idx_fine).view([bs, -1, k2, 3])
116 | 
117 |         src_dist = ((src_knn_fine[:, :, :, None, :] - src_knn_fine[:, :, None, :, :]) ** 2).sum(-1) ** 0.5
118 |         tgt_dist = ((tgt_knn_fine[:, :, :, None, :] - tgt_knn_fine[:, :, None, :, :]) ** 2).sum(-1) ** 0.5
119 |         cross_dist = torch.abs(src_dist - tgt_dist)
120 |         local_hard_measure = (cross_dist < self.d_thre * 2).float()
121 |         local_SC2_measure = torch.matmul(local_hard_measure, local_hard_measure) / k2
122 |         local_SC_measure = torch.clamp(1 - cross_dist ** 2 / self.d_thre ** 2, min=0)
123 |         # local_SC2_measure = local_SC_measure * local_SC2_measure
124 |         local_SC2_measure = local_SC_measure
125 |         local_SC2_measure = local_SC2_measure.view([-1, k2, k2])
126 | 
127 | 
128 |         #################################
129 |         # power iteration to get the inlier probability
130 |         #################################
131 |         local_SC2_measure[:, torch.arange(local_SC2_measure.shape[1]), torch.arange(local_SC2_measure.shape[1])] = 0
132 |         total_weight = self.cal_leading_eigenvector(local_SC2_measure, method='power')
133 |         total_weight = total_weight.view([bs, -1, k2])
134 |         total_weight = total_weight / (torch.sum(total_weight, dim=-1, keepdim=True) + 1e-6)
135 | 
136 |         #################################
137 |         # calculate the transformation by weighted least squares for each subset in parallel
138 |         #################################
139 |         total_weight = total_weight.view([-1, k2])
140 |         src_knn = src_knn_fine
141 |         tgt_knn = tgt_knn_fine
142 |         src_knn, tgt_knn = src_knn.view([-1, k2, 3]), tgt_knn.view([-1, k2, 3])
143 | 
144 |         #################################
145 |         # compute the rigid transformation for each seed by the weighted SVD
146 |         #################################
147 |         seedwise_trans = rigid_transform_3d(src_knn, tgt_knn, total_weight)
148 |         seedwise_trans = seedwise_trans.view([bs, -1, 4, 4])
149 | 
150 |         #################################
151 |         # transform the source keypoints with each seed-wise hypothesis
152 |         #################################
153 |         pred_position = torch.einsum('bsnm,bmk->bsnk', seedwise_trans[:, :, :3, :3],
154 |                                      src_keypts.permute(0, 2, 1)) + seedwise_trans[:,
                                                                    :, :3, 3:4]  # [bs, num_seeds, 3, num_corr]
156 |         #################################
157 |         # calculate the inlier number for each hypothesis, and find the best transformation for each point cloud pair
158 |         #################################
159 |         pred_position = pred_position.permute(0, 1, 3, 2)
160 |         L2_dis = torch.norm(pred_position - tgt_keypts[:, None, :, :], dim=-1)  # [bs, num_seeds, num_corr]
161 |         seedwise_fitness = torch.sum((L2_dis < self.inlier_threshold).float(), dim=-1)  # [bs, num_seeds]
162 |         batch_best_guess = seedwise_fitness.argmax(dim=1)
163 |         best_guess_ratio = seedwise_fitness[0, batch_best_guess]
164 |         final_trans = seedwise_trans.gather(dim=1, index=batch_best_guess[:, None, None, None].expand(-1, -1, 4, 4)).squeeze(1)
165 | 
166 |         return final_trans
167 | 
168 |     def cal_leading_eigenvector(self, M, method='power'):
169 |         """
170 |         Calculate the leading eigenvector using the power iteration algorithm or torch.symeig
171 |         Input:
172 |             - M: [bs, num_corr, num_corr] the compatibility matrix
173 |             - method: select the method for calculating the leading eigenvector.
174 |         Output:
175 |             - solution: [bs, num_corr] leading eigenvector
176 |         """
177 |         if method == 'power':
178 |             # power iteration algorithm
179 |             leading_eig = torch.ones_like(M[:, :, 0:1])
180 |             leading_eig_last = leading_eig
181 |             for i in range(self.num_iterations):
182 |                 leading_eig = torch.bmm(M, leading_eig)
183 |                 leading_eig = leading_eig / (torch.norm(leading_eig, dim=1, keepdim=True) + 1e-6)
184 |                 if torch.allclose(leading_eig, leading_eig_last):
185 |                     break
186 |                 leading_eig_last = leading_eig
187 |             leading_eig = leading_eig.squeeze(-1)
188 |             return leading_eig
189 |         elif method == 'eig':  # may cause NaN during back-prop
190 |             e, v = torch.symeig(M, eigenvectors=True)
191 |             leading_eig = v[:, :, -1]
192 |             return leading_eig
193 |         else:
194 |             exit(-1)
195 | 
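    # Quick numeric check of the power iteration (a sketch, not part of the
    # original file): for the 3x3 all-ones-off-diagonal compatibility matrix the
    # dominant eigenvector is uniform, i.e. every correspondence is equally
    # confident:
    #     M = torch.tensor([[[0., 1., 1.], [1., 0., 1.], [1., 1., 0.]]])
    #     Matcher().cal_leading_eigenvector(M)  # tensor([[0.5774, 0.5774, 0.5774]])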
196 |     def cal_confidence(self, M, leading_eig, method='eig_value'):
197 |         """
198 |         Calculate the confidence of the spectral matching solution based on spectral analysis.
199 |         Input:
200 |             - M: [bs, num_corr, num_corr] the compatibility matrix
201 |             - leading_eig: [bs, num_corr] the leading eigenvector of matrix M
202 |         Output:
203 |             - confidence
204 |         """
205 |         if method == 'eig_value':
206 |             # max eigenvalue as the confidence (Rayleigh quotient)
207 |             max_eig_value = (leading_eig[:, None, :] @ M @ leading_eig[:, :, None]) / (
208 |                     leading_eig[:, None, :] @ leading_eig[:, :, None])
209 |             confidence = max_eig_value.squeeze(-1)
210 |             return confidence
211 |         elif method == 'eig_value_ratio':
212 |             # max eigenvalue / second max eigenvalue as the confidence
213 |             max_eig_value = (leading_eig[:, None, :] @ M @ leading_eig[:, :, None]) / (
214 |                     leading_eig[:, None, :] @ leading_eig[:, :, None])
215 |             # compute the second largest eigenvalue by power iteration on the deflated matrix
216 |             B = M - max_eig_value * leading_eig[:, :, None] @ leading_eig[:, None, :]
217 |             solution = torch.ones_like(B[:, :, 0:1])
218 |             for i in range(self.num_iterations):
219 |                 solution = torch.bmm(B, solution)
220 |                 solution = solution / (torch.norm(solution, dim=1, keepdim=True) + 1e-6)
221 |             solution = solution.squeeze(-1)
222 |             second_eig = solution
223 |             second_eig_value = (second_eig[:, None, :] @ B @ second_eig[:, :, None]) / (
224 |                     second_eig[:, None, :] @ second_eig[:, :, None])
225 |             confidence = max_eig_value / second_eig_value
226 |             return confidence
227 |         elif method == 'xMx':
228 |             # max xMx as the confidence (x is the binary solution)
229 |             # rank = torch.argsort(leading_eig, dim=1, descending=True)[:, 0:int(M.shape[1]*self.ratio)]
230 |             # binary_sol = torch.zeros_like(leading_eig)
231 |             # binary_sol[0, rank[0]] = 1
232 |             confidence = leading_eig[:, None, :] @ M @ leading_eig[:, :, None]
233 |             confidence = confidence.squeeze(-1) / M.shape[1]
234 |             return confidence
235 | 
236 |     def post_refinement(self, initial_trans, src_keypts, tgt_keypts, it_num, weights=None):
237 |         """
238 |         Perform post refinement using the initial transformation matrix; only used during testing.
239 |         Input
240 |             - initial_trans: [bs, 4, 4]
241 |             - src_keypts: [bs, num_corr, 3]
242 |             - tgt_keypts: [bs, num_corr, 3]
243 |             - weights: [bs, num_corr]
244 |         Output:
245 |             - final_trans: [bs, 4, 4]
246 |         """
247 |         assert initial_trans.shape[0] == 1
248 |         inlier_threshold = 1.2
249 | 
250 |         # inlier_threshold_list = [self.inlier_threshold] * it_num
251 | 
252 |         if self.inlier_threshold == 0.10:  # for 3DMatch
253 |             inlier_threshold_list = [0.10] * it_num
254 |         else:  # for KITTI
255 |             inlier_threshold_list = [1.2] * it_num
256 | 
257 |         previous_inlier_num = 0
258 |         for inlier_threshold in inlier_threshold_list:
259 |             warped_src_keypts = transform(src_keypts, initial_trans)
260 | 
261 |             L2_dis = torch.norm(warped_src_keypts - tgt_keypts, dim=-1)
262 |             pred_inlier = (L2_dis < inlier_threshold)[0]  # assume bs = 1
263 |             inlier_num = torch.sum(pred_inlier)
264 |             if abs(int(inlier_num - previous_inlier_num)) < 1:
265 |                 break
266 |             else:
267 |                 previous_inlier_num = inlier_num
268 |             initial_trans = rigid_transform_3d(
269 |                 A=src_keypts[:, pred_inlier, :],
270 |                 B=tgt_keypts[:, pred_inlier, :],
271 |                 ## https://link.springer.com/article/10.1007/s10589-014-9643-2
272 |                 # weights=None,
273 |                 weights=1 / (1 + (L2_dis / inlier_threshold) ** 2)[:, pred_inlier],
274 |                 # weights=((1-L2_dis/inlier_threshold)**2)[:, pred_inlier]
275 |             )
276 |         return initial_trans
277 | 
278 |     def match_pair(self, src_keypts, tgt_keypts, src_features, tgt_features):
279 |         N_src = src_features.shape[1]
280 |         N_tgt = tgt_features.shape[1]
281 |         # use all points or sample a subset of points
282 |         if self.num_node == 'all':
283 |             src_sel_ind = np.arange(N_src)
284 |             tgt_sel_ind = np.arange(N_tgt)
285 |         else:
286 |             src_sel_ind = np.random.choice(N_src, self.num_node)
287 |             tgt_sel_ind = np.random.choice(N_tgt, self.num_node)
288 |         src_desc = src_features[:, src_sel_ind, :]
289 |         tgt_desc = tgt_features[:, tgt_sel_ind, :]
290 |         src_keypts = src_keypts[:, src_sel_ind, :]
291 |         tgt_keypts = tgt_keypts[:, tgt_sel_ind, :]
292 | 
293 |         # match points in feature space
294 |         distance = torch.sqrt(2 - 2 * (src_desc[0] @ tgt_desc[0].T) + 1e-6)
295 |         distance = distance.unsqueeze(0)
296 |         source_idx = torch.argmin(distance[0], dim=1)
297 |         corr = torch.cat([torch.arange(source_idx.shape[0])[:, None].cuda(), source_idx[:, None]], dim=-1)
298 | 
299 |         # generate correspondences
300 |         src_keypts_corr = src_keypts[:, corr[:, 0]]
301 |         tgt_keypts_corr = tgt_keypts[:, corr[:, 1]]
302 | 
303 |         return src_keypts_corr, tgt_keypts_corr
304 | 
305 |     def SC2_PCR(self, src_keypts, tgt_keypts):
306 |         """
307 |         Input:
308 |             - src_keypts: [bs, num_corr, 3]
309 |             - tgt_keypts: [bs, num_corr, 3]
310 |         Output:
311 |             - pred_trans: [bs, 4, 4], the predicted transformation matrix
312 |               (the inlier/outlier labels are computed in `estimator`, not returned here).
313 |         """
314 |         bs, num_corr = src_keypts.shape[0], tgt_keypts.shape[1]
315 | 
316 |         #################################
317 |         # downsample points
318 |         #################################
319 |         if num_corr > self.max_points:
320 |             src_keypts = src_keypts[:, :self.max_points, :]
321 |             tgt_keypts = tgt_keypts[:, :self.max_points, :]
322 |             num_corr = self.max_points
323 | 
324 |         #################################
325 |         # compute cross dist
326 |         #################################
327 |         src_dist = torch.norm((src_keypts[:, :, None, :] - src_keypts[:, None, :, :]), dim=-1)
328 |         target_dist = torch.norm((tgt_keypts[:, :, None, :] - tgt_keypts[:, None, :, :]), dim=-1)
329 |         cross_dist = torch.abs(src_dist - target_dist)
330 | 
331 |         #################################
332 |         # compute first order measure
333 |         #################################
334 |         SC_dist_thre = self.d_thre
335 |         SC_measure = torch.clamp(1.0 - cross_dist ** 2 / SC_dist_thre ** 2, min=0)
336 |         hard_SC_measure = (cross_dist < SC_dist_thre).float()
337 | 
338 |         #################################
339 |         # select reliable seed correspondences
340 |         #################################
341 |         confidence = self.cal_leading_eigenvector(SC_measure, method='power')
342 |         seeds = self.pick_seeds(src_dist, confidence, R=self.nms_radius, max_num=int(num_corr * self.ratio))
343 | 
344 |         #################################
345 |         # compute second order measure
346 |         #################################
347 |         SC2_dist_thre = self.d_thre / 2
348 |         hard_SC_measure_tight = (cross_dist < SC2_dist_thre).float()
349 |         seed_hard_SC_measure = hard_SC_measure.gather(dim=1,
350 |                                index=seeds[:, :, None].expand(-1, -1, num_corr))
351 |         seed_hard_SC_measure_tight = hard_SC_measure_tight.gather(dim=1,
352 |                                index=seeds[:, :, None].expand(-1, -1, num_corr))
353 |         SC2_measure = torch.matmul(seed_hard_SC_measure_tight, hard_SC_measure_tight) * seed_hard_SC_measure
354 | 
355 |         #################################
356 |         # compute the seed-wise transformations and select the best one
357 |         #################################
358 |         final_trans = self.cal_seed_trans(seeds, SC2_measure, src_keypts, tgt_keypts)
359 | 
360 |         #################################
361 |         # refine the result by recomputing the transformation over the whole set
362 |         #################################
363 |         final_trans = self.post_refinement(final_trans, src_keypts, tgt_keypts, 20)
364 | 
365 |         return final_trans
366 | 
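    # Illustrative end-to-end usage (a sketch added for clarity; not part of the
    # original file). Given matched correspondence coordinates on the GPU:
    #     matcher = Matcher(inlier_threshold=0.10, d_thre=0.1)
    #     pred_trans = matcher.SC2_PCR(src_keypts_corr, tgt_keypts_corr)  # [1, 4, 4]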
367 |     def estimator(self, src_keypts, tgt_keypts, src_features, tgt_features):
368 |         """
369 |         Input:
370 |             - src_keypts: [bs, num_corr, 3]
371 |             - tgt_keypts: [bs, num_corr, 3]
372 |             - src_features: [bs, num_corr, C]
373 |             - tgt_features: [bs, num_corr, C]
374 |         Output:
375 |             - pred_trans: [bs, 4, 4], the predicted transformation matrix
376 |             - pred_labels: [bs, num_corr], the predicted inlier/outlier label (0,1)
377 |             - src_keypts_corr: [bs, num_corr, 3], the source points in the matched correspondences
378 |             - tgt_keypts_corr: [bs, num_corr, 3], the target points in the matched correspondences
379 |         """
380 |         #################################
381 |         # generate coarse correspondences
382 |         #################################
383 |         src_keypts_corr, tgt_keypts_corr = self.match_pair(src_keypts, tgt_keypts, src_features, tgt_features)
384 | 
385 |         #################################
386 |         # use the proposed SC2-PCR to estimate the rigid transformation
387 |         #################################
388 |         pred_trans = self.SC2_PCR(src_keypts_corr, tgt_keypts_corr)
389 | 
390 |         frag1_warp = transform(src_keypts_corr, pred_trans)
391 |         distance = torch.sum((frag1_warp - tgt_keypts_corr) ** 2, dim=-1) ** 0.5
392 |         pred_labels = (distance < self.inlier_threshold).float()
393 | 
394 |         return pred_trans, pred_labels, src_keypts_corr, tgt_keypts_corr
395 | 
--------------------------------------------------------------------------------
/benchmark_utils.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | import random
4 | import math
5 | import open3d as o3d
6 | from utils.pointcloud import make_point_cloud
7 | 
8 | 
9 | def exact_auc(errors, thresholds):
10 |     """
11 |     Calculate the exact area under curve, borrowed from https://github.com/magicleap/SuperGluePretrainedNetwork
12 |     """
13 |     sort_idx = np.argsort(errors)
14 |     errors = np.array(errors.copy())[sort_idx]
15 |     recall = (np.arange(len(errors)) + 1) / len(errors)
16 |     errors = np.r_[0., errors]
17 |     recall = np.r_[0., recall]
18 |     aucs = []
19 |     for t in thresholds:
20 |         last_index = np.searchsorted(errors, t)
21 |         r = np.r_[recall[:last_index], recall[last_index - 1]]
22 |         e = np.r_[errors[:last_index], t]
23 |         aucs.append(np.trapz(r, x=e) / t)
24 |     return aucs
25 | 
26 | 
27 | def set_seed(seed=51):
28 |     """
29 |     Set the random seed to make the results reproducible.
30 |     """
31 |     torch.manual_seed(seed)
32 |     torch.cuda.manual_seed(seed)
33 |     torch.cuda.manual_seed_all(seed)  # if you are using multi-GPU
34 |     np.random.seed(seed)  # Numpy module
35 |     random.seed(seed)  # Python random module
36 |     torch.backends.cudnn.benchmark = False
37 |     torch.backends.cudnn.deterministic = True
38 | 
39 | 
40 | def icp_refine(src_keypts, tgt_keypts, pred_trans):
41 |     """
42 |     ICP algorithm to refine the initial transformation
43 |     Input:
44 |         - src_keypts [1, num_corr, 3] FloatTensor
45 |         - tgt_keypts [1, num_corr, 3] FloatTensor
46 |         - pred_trans [1, 4, 4] FloatTensor, initial transformation
47 |     """
48 |     src_pcd = make_point_cloud(src_keypts.detach().cpu().numpy()[0])
49 |     tgt_pcd = make_point_cloud(tgt_keypts.detach().cpu().numpy()[0])
50 |     initial_trans = pred_trans[0].detach().cpu().numpy()
51 |     # change the convention of the transformation because open3d uses left multiplication
52 |     refined_T = o3d.registration.registration_icp(
53 |         src_pcd, tgt_pcd, 0.10, initial_trans,
54 |         o3d.registration.TransformationEstimationPointToPoint()).transformation
55 |     refined_T = torch.from_numpy(refined_T[None, :, :]).to(pred_trans.device).float()
56 |     return refined_T
57 | 
58 | 
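# Compatibility note (annotation, not in the original file): recent Open3D
# releases moved the registration API to o3d.pipelines.registration, so with a
# newer Open3D the call in icp_refine above would read, e.g.:
#     refined_T = o3d.pipelines.registration.registration_icp(
#         src_pcd, tgt_pcd, 0.10, initial_trans,
#         o3d.pipelines.registration.TransformationEstimationPointToPoint()).transformation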
59 | def is_rotation_matrix(R):
60 |     """
61 |     Checks if a matrix is a valid rotation matrix.
62 |     Input:
63 |         - R: [3, 3] rotation matrix
64 |     Output:
65 |         - True/False
66 |     """
67 |     Rt = np.transpose(R)
68 |     shouldBeIdentity = np.dot(Rt, R)
69 |     I = np.identity(3, dtype=R.dtype)
70 |     n = np.linalg.norm(I - shouldBeIdentity)
71 |     return n < 1e-3
72 | 
73 | 
74 | def rot_to_euler(R):
75 |     """
76 |     Convert a rotation matrix to Euler angles (in degrees)
77 |     Input:
78 |         - R: [3, 3] rotation matrix
79 |     Output:
80 |         - alpha: [3], the rotation angles (in degrees) around the x, y, z axes.
81 |     """
82 |     assert (is_rotation_matrix(R))
83 |     sy = math.sqrt(R[0, 0] * R[0, 0] + R[1, 0] * R[1, 0])
84 |     singular = sy < 1e-6
85 | 
86 |     if not singular:
87 |         x = math.atan2(R[2, 1], R[2, 2])
88 |         y = math.atan2(-R[2, 0], sy)
89 |         z = math.atan2(R[1, 0], R[0, 0])
90 |     else:
91 |         x = math.atan2(-R[1, 2], R[1, 1])
92 |         y = math.atan2(-R[2, 0], sy)
93 |         z = 0
94 | 
95 |     return np.array([x * 180 / np.pi, y * 180 / np.pi, z * 180 / np.pi])
--------------------------------------------------------------------------------
/benchmark_utils_predator.py:
--------------------------------------------------------------------------------
1 | """
2 | Script for benchmarking the 3DMatch test dataset.
3 | 
4 | Author: Zan Gojcic, Shengyu Huang
5 | Last modified: 30.11.2020
6 | """
7 | 
8 | import numpy as np
9 | import os,sys,glob,torch,math
10 | from collections import defaultdict
11 | import nibabel.quaternions as nq
12 | 
13 | 
14 | def rotation_error(R1, R2):
15 |     """
16 |     Torch batch implementation of the rotation error between the estimated and the ground truth rotation matrices.
17 |     The rotation error is defined as r_e = \arccos(\frac{Trace(\mathbf{R}_{ij}^{T}\mathbf{R}_{ij}^{\mathrm{GT}}) - 1}{2})
18 | 
19 |     Args:
20 |         R1 (torch tensor): Estimated rotation matrices [b,3,3]
21 |         R2 (torch tensor): Ground truth rotation matrices [b,3,3]
22 | 
23 |     Returns:
24 |         ae (torch tensor): Rotation error in angular degrees [b,1]
25 | 
26 |     """
27 |     R_ = torch.matmul(R1.transpose(1,2), R2)
28 |     e = torch.stack([(torch.trace(R_[_, :, :]) - 1) / 2 for _ in range(R_.shape[0])], dim=0).unsqueeze(1)
29 | 
30 |     # Clamp the errors to the valid range (otherwise torch.acos() is nan)
31 |     e = torch.clamp(e, -1, 1, out=None)
32 | 
33 |     ae = torch.acos(e)
34 |     pi = torch.Tensor([math.pi])
35 |     ae = 180. * ae / pi.to(ae.device).type(ae.dtype)
36 | 
37 |     return ae
38 | 
39 | 
40 | def translation_error(t1, t2):
41 |     """
42 |     Torch batch implementation of the translation error between the estimated and the ground truth translation vectors.
43 |     The translation error is defined as t_e = ||t_1 - t_2||_2
44 | 
45 |     Args:
46 |         t1 (torch tensor): Estimated translation vectors [b,3,1]
47 |         t2 (torch tensor): Ground truth translation vectors [b,3,1]
48 | 
49 |     Returns:
50 |         te (torch tensor): translation error in meters [b,1]
51 | 
52 |     """
53 |     return torch.norm(t1-t2, dim=(1, 2))
54 | 
55 | 
56 | def computeTransformationErr(trans, info):
57 |     """
58 |     Compute the transformation error as an approximation of the RMSE of corresponding points.
59 |     More information at http://redwood-data.org/indoor/registration.html
60 | 
61 |     Args:
62 |         trans (numpy array): transformation matrix [4,4]
63 |         info (numpy array): covariance matrix of the gt transformation parameters [6,6]
64 | 
65 |     Returns:
66 |         p (float): transformation error
67 |     """
68 | 
69 |     t = trans[:3, 3]
70 |     r = trans[:3, :3]
71 |     q = nq.mat2quat(r)
72 |     er = np.concatenate([t, q[1:]], axis=0)
73 |     p = er.reshape(1, 6) @ info @ er.reshape(6, 1) / info[0, 0]
74 | 
75 |     return p.item()
76 | 
77 | 
78 | def read_trajectory(filename, dim=4):
79 |     """
80 |     Function that reads a trajectory saved in the 3DMatch/Redwood format into a numpy array.
81 |     The format specification can be found at http://redwood-data.org/indoor/fileformat.html
82 | 
83 |     Args:
84 |         filename (str): path to the '.txt' file containing the trajectory data
85 |         dim (int): dimension of the transformation matrix (4x4 for 3D data)
86 | 
87 |     Returns:
88 |         final_keys (dict): indices of pairs with more than 30% overlap (only these are included in the gt file)
89 |         traj (numpy array): gt pairwise transformation matrices for n pairs [n, dim, dim]
90 |     """
91 | 
92 |     with open(filename) as f:
93 |         lines = f.readlines()
94 | 
95 |     # Extract the point cloud pairs
96 |     keys = lines[0::(dim+1)]
97 |     temp_keys = []
98 |     for i in range(len(keys)):
99 |         temp_keys.append(keys[i].split('\t')[0:3])
100 | 
101 |     final_keys = []
102 |     for i in range(len(temp_keys)):
103 |         final_keys.append([temp_keys[i][0].strip(), temp_keys[i][1].strip(), temp_keys[i][2].strip()])
104 | 
105 | 
106 |     traj = []
107 |     for i in range(len(lines)):
108 |         if i % (dim + 1) != 0:
109 |             traj.append(lines[i].split('\t')[0:dim])
110 | 
111 |     traj = np.asarray(traj, dtype=np.float64).reshape(-1, dim, dim)
112 | 
113 |     final_keys = np.asarray(final_keys)
114 | 
115 |     return final_keys, traj
116 | 
117 | 
118 | def read_trajectory_info(filename, dim=6):
119 |     """
120 |     Function that reads the trajectory information saved in the 3DMatch/Redwood format into a numpy array.
121 |     The information file contains the variance-covariance matrix of the transformation parameters.
122 |     The format specification can be found at http://redwood-data.org/indoor/fileformat.html
123 | 
124 |     Args:
125 |         filename (str): path to the '.txt' file containing the trajectory information data
126 |         dim (int): dimension of the information matrix (6x6 for 3D data)
127 | 
128 |     Returns:
129 |         n_frame (int): number of fragments in the scene
130 |         cov_matrix (numpy array): covariance matrices of the transformations for n pairs [n, dim, dim]
131 |     """
132 | 
133 |     with open(filename) as fid:
134 |         contents = fid.readlines()
135 |     n_pairs = len(contents) // 7
136 |     assert (len(contents) == 7 * n_pairs)
137 |     info_list = []
138 |     n_frame = 0
139 | 
140 |     for i in range(n_pairs):
141 |         frame_idx0, frame_idx1, n_frame = [int(item) for item in contents[i * 7].strip().split()]
142 |         info_matrix = np.concatenate(
143 |             [np.fromstring(item, sep='\t').reshape(1, -1) for item in contents[i * 7 + 1:i * 7 + 7]], axis=0)
144 |         info_list.append(info_matrix)
145 | 
146 |     cov_matrix = np.asarray(info_list, dtype=np.float64).reshape(-1, dim, dim)
147 | 
148 |     return n_frame, cov_matrix
149 | 
150 | 
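# For reference (annotation, not in the original file): with dim=4, each gt.log
# record read by read_trajectory spans five tab-separated lines -- one header
# "id0 id1 num_fragments" followed by a 4x4 transformation, e.g.:
#     0    1    55
#     r11  r12  r13  t1
#     r21  r22  r23  t2
#     r31  r32  r33  t3
#     0    0    0    1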
151 | def extract_corresponding_trajectors(est_pairs,gt_pairs, gt_traj):
152 |     """
153 |     Extract only those transformation matrices from the ground truth trajectory that are also in the estimated trajectory.
154 | 
155 |     Args:
156 |         est_pairs (numpy array): indices of point cloud pairs with enough estimated overlap [m, 3]
157 |         gt_pairs (numpy array): indices of gt overlapping point cloud pairs [n,3]
158 |         gt_traj (numpy array): 3d array of the gt transformation parameters [n,4,4]
159 | 
160 |     Returns:
161 |         ext_traj (numpy array): gt transformation parameters for the point cloud pairs from est_pairs [m,4,4]
162 |     """
163 |     ext_traj = np.zeros((len(est_pairs), 4, 4))
164 | 
165 |     for est_idx, pair in enumerate(est_pairs):
166 |         pair[2] = gt_pairs[0][2]
167 |         gt_idx = np.where((gt_pairs == pair).all(axis=1))[0]
168 | 
169 |         ext_traj[est_idx,:,:] = gt_traj[gt_idx,:,:]
170 | 
171 |     return ext_traj
172 | 
173 | 
174 | def evaluate_registration(num_fragment, result, result_pairs, gt_pairs, gt, gt_info, err2=0.2):
175 |     """
176 |     Evaluates the performance of the registration algorithm according to the evaluation protocol defined
177 |     by the 3DMatch/Redwood datasets. The evaluation protocol can be found at http://redwood-data.org/indoor/registration.html
178 | 
179 |     Args:
180 |         num_fragment (int): number of fragments in the scene
181 |         result (numpy array): estimated transformation matrices [n,4,4]
182 |         result_pairs (numpy array): indices of the point cloud pairs for which the transformation matrix was estimated (m,3)
183 |         gt_pairs (numpy array): indices of the ground truth overlapping point cloud pairs (n,3)
184 |         gt (numpy array): ground truth transformation matrices [n,4,4]
185 |         gt_info (numpy array): covariance matrices of the ground truth transformation parameters [n,6,6]
186 |         err2 (float): threshold for the RMSE of the gt correspondences (default: 0.2m)
187 | 
188 |     Returns:
189 |         precision (float): mean registration precision over the scene (less decisive, since it can easily be increased; see the papers)
190 |         recall (float): mean registration recall over the scene (the deciding parameter for the performance of the algorithm)
191 |     """
192 | 
193 |     err2 = err2 ** 2
194 |     gt_mask = np.zeros((num_fragment, num_fragment), dtype=int)
195 |     flags=[]
196 | 
197 |     for idx in range(gt_pairs.shape[0]):
198 |         i = int(gt_pairs[idx,0])
199 |         j = int(gt_pairs[idx,1])
200 | 
201 |         # Only non-consecutive pairs are tested
202 |         if j - i > 1:
203 |             gt_mask[i, j] = idx
204 | 
205 |     n_gt = np.sum(gt_mask > 0)
206 | 
207 |     good = 0
208 |     n_res = 0
209 |     for idx in range(result_pairs.shape[0]):
210 |         i = int(result_pairs[idx,0])
211 |         j = int(result_pairs[idx,1])
212 |         pose = result[idx,:,:]
213 | 
214 |         if gt_mask[i, j] > 0:
215 |             n_res += 1
216 |             gt_idx = gt_mask[i, j]
217 |             p = computeTransformationErr(np.linalg.inv(gt[gt_idx,:,:]) @ pose, gt_info[gt_idx,:,:])
218 |             if p <= err2:
219 |                 good += 1
220 |                 flags.append(0)
221 |             else:
222 |                 flags.append(1)
223 |         else:
224 |             flags.append(2)
225 |     if n_res == 0:
226 |         n_res += 1e6
227 |     precision = good * 1.0 / n_res
228 |     recall = good * 1.0 / n_gt
229 | 
230 |     return precision, recall, flags
231 | 
232 | 
--------------------------------------------------------------------------------
/benchmarks/3DLoMatch/sun3d-hotel_uc-scan3/gt_overlap.log:
--------------------------------------------------------------------------------
1 | 0,1,0.7974 2 | 0,2,0.4627 3 | 0,3,0.6540 4 | 0,4,0.1338 5 | 0,5,0.0021 6 | 0,6,0.0000 7 | 0,7,0.0000 8 | 0,8,0.0221 9 | 0,9,0.2912 10 | 0,10,0.6083 11 | 0,11,0.2422 12 | 0,12,0.0000 13 | 0,13,0.0000 14 | 0,14,0.0000 15 | 0,15,0.0000 16 | 0,16,0.0000 17 | 0,17,0.0038 18 | 0,18,0.2632 19 | 0,19,0.4746 20 | 0,20,0.2308 21 | 0,21,0.0221 22 |
0,22,0.0000 23 | 0,23,0.0000 24 | 0,24,0.0000 25 | 0,25,0.0000 26 | 0,26,0.0000 27 | 0,27,0.0000 28 | 0,28,0.0000 29 | 0,29,0.0000 30 | 0,30,0.0000 31 | 0,31,0.0000 32 | 0,32,0.0000 33 | 0,33,0.0000 34 | 0,34,0.1454 35 | 0,35,0.4682 36 | 0,36,0.3344 37 | 0,37,0.0024 38 | 0,38,0.0000 39 | 0,39,0.0000 40 | 0,40,0.0000 41 | 0,41,0.0030 42 | 0,42,0.0097 43 | 0,43,0.2649 44 | 0,44,0.4026 45 | 0,45,0.4957 46 | 0,46,0.2291 47 | 0,47,0.0000 48 | 0,48,0.0000 49 | 0,49,0.0000 50 | 0,50,0.0000 51 | 0,51,0.0000 52 | 0,52,0.0000 53 | 0,53,0.1352 54 | 0,54,0.3522 55 | 1,2,0.5948 56 | 1,3,0.7167 57 | 1,4,0.0745 58 | 1,5,0.0000 59 | 1,6,0.0000 60 | 1,7,0.0000 61 | 1,8,0.0000 62 | 1,9,0.1986 63 | 1,10,0.5836 64 | 1,11,0.2832 65 | 1,12,0.0002 66 | 1,13,0.0000 67 | 1,14,0.0000 68 | 1,15,0.0000 69 | 1,16,0.0000 70 | 1,17,0.0460 71 | 1,18,0.2064 72 | 1,19,0.3215 73 | 1,20,0.1931 74 | 1,21,0.0672 75 | 1,22,0.0009 76 | 1,23,0.0000 77 | 1,24,0.0000 78 | 1,25,0.0000 79 | 1,26,0.0000 80 | 1,27,0.0000 81 | 1,28,0.0000 82 | 1,29,0.0000 83 | 1,30,0.0000 84 | 1,31,0.0000 85 | 1,32,0.0000 86 | 1,33,0.0000 87 | 1,34,0.1956 88 | 1,35,0.5315 89 | 1,36,0.2821 90 | 1,37,0.0000 91 | 1,38,0.0000 92 | 1,39,0.0000 93 | 1,40,0.0000 94 | 1,41,0.0000 95 | 1,42,0.0199 96 | 1,43,0.2656 97 | 1,44,0.3829 98 | 1,45,0.5444 99 | 1,46,0.2539 100 | 1,47,0.0236 101 | 1,48,0.0000 102 | 1,49,0.0000 103 | 1,50,0.0000 104 | 1,51,0.0000 105 | 1,52,0.0104 106 | 1,53,0.1771 107 | 1,54,0.3991 108 | 2,3,0.6481 109 | 2,4,0.0013 110 | 2,5,0.0000 111 | 2,6,0.0000 112 | 2,7,0.0000 113 | 2,8,0.0049 114 | 2,9,0.0794 115 | 2,10,0.4688 116 | 2,11,0.4497 117 | 2,12,0.0136 118 | 2,13,0.0000 119 | 2,14,0.0000 120 | 2,15,0.0000 121 | 2,16,0.0053 122 | 2,17,0.0845 123 | 2,18,0.2366 124 | 2,19,0.2156 125 | 2,20,0.1430 126 | 2,21,0.1180 127 | 2,22,0.0158 128 | 2,23,0.0000 129 | 2,24,0.0000 130 | 2,25,0.0000 131 | 2,26,0.0000 132 | 2,27,0.0000 133 | 2,28,0.0000 134 | 2,29,0.0000 135 | 2,30,0.0000 136 | 2,31,0.0000 137 | 2,32,0.0000 138 | 2,33,0.0377 139 | 2,34,0.3889 140 | 2,35,0.6959 141 | 2,36,0.1577 142 | 2,37,0.0000 143 | 2,38,0.0000 144 | 2,39,0.0000 145 | 2,40,0.0000 146 | 2,41,0.0000 147 | 2,42,0.0343 148 | 2,43,0.2743 149 | 2,44,0.4171 150 | 2,45,0.5934 151 | 2,46,0.4256 152 | 2,47,0.1239 153 | 2,48,0.0017 154 | 2,49,0.0000 155 | 2,50,0.0000 156 | 2,51,0.0000 157 | 2,52,0.0724 158 | 2,53,0.3387 159 | 2,54,0.6613 160 | 3,4,0.0534 161 | 3,5,0.0000 162 | 3,6,0.0000 163 | 3,7,0.0000 164 | 3,8,0.0095 165 | 3,9,0.0950 166 | 3,10,0.3998 167 | 3,11,0.2957 168 | 3,12,0.0000 169 | 3,13,0.0000 170 | 3,14,0.0000 171 | 3,15,0.0000 172 | 3,16,0.0000 173 | 3,17,0.0122 174 | 3,18,0.2435 175 | 3,19,0.3992 176 | 3,20,0.1595 177 | 3,21,0.0394 178 | 3,22,0.0000 179 | 3,23,0.0000 180 | 3,24,0.0000 181 | 3,25,0.0000 182 | 3,26,0.0000 183 | 3,27,0.0000 184 | 3,28,0.0000 185 | 3,29,0.0000 186 | 3,30,0.0000 187 | 3,31,0.0000 188 | 3,32,0.0000 189 | 3,33,0.0000 190 | 3,34,0.2017 191 | 3,35,0.5756 192 | 3,36,0.1979 193 | 3,37,0.0000 194 | 3,38,0.0000 195 | 3,39,0.0000 196 | 3,40,0.0000 197 | 3,41,0.0005 198 | 3,42,0.0000 199 | 3,43,0.1813 200 | 3,44,0.2831 201 | 3,45,0.4006 202 | 3,46,0.2590 203 | 3,47,0.0146 204 | 3,48,0.0000 205 | 3,49,0.0000 206 | 3,50,0.0000 207 | 3,51,0.0000 208 | 3,52,0.0008 209 | 3,53,0.1747 210 | 3,54,0.3686 211 | 4,5,0.4245 212 | 4,6,0.3461 213 | 4,7,0.2249 214 | 4,8,0.2461 215 | 4,9,0.6154 216 | 4,10,0.2996 217 | 4,11,0.0000 218 | 4,12,0.0000 219 | 4,13,0.0000 220 | 4,14,0.0000 221 | 4,15,0.0000 222 | 4,16,0.0000 223 | 4,17,0.0000 224 | 4,18,0.3016 225 | 
4,19,0.2514 226 | 4,20,0.4484 227 | 4,21,0.0160 228 | 4,22,0.0000 229 | 4,23,0.0000 230 | 4,24,0.0000 231 | 4,25,0.0000 232 | 4,26,0.0000 233 | 4,27,0.0000 234 | 4,28,0.0000 235 | 4,29,0.0000 236 | 4,30,0.0000 237 | 4,31,0.0000 238 | 4,32,0.0000 239 | 4,33,0.0000 240 | 4,34,0.0000 241 | 4,35,0.0000 242 | 4,36,0.5052 243 | 4,37,0.3313 244 | 4,38,0.2090 245 | 4,39,0.2173 246 | 4,40,0.1416 247 | 4,41,0.1388 248 | 4,42,0.0592 249 | 4,43,0.1081 250 | 4,44,0.1697 251 | 4,45,0.1502 252 | 4,46,0.0000 253 | 4,47,0.0000 254 | 4,48,0.0000 255 | 4,49,0.0000 256 | 4,50,0.0000 257 | 4,51,0.0000 258 | 4,52,0.0000 259 | 4,53,0.0000 260 | 4,54,0.0000 261 | 5,6,0.8468 262 | 5,7,0.5897 263 | 5,8,0.4769 264 | 5,9,0.3912 265 | 5,10,0.0470 266 | 5,11,0.0000 267 | 5,12,0.0000 268 | 5,13,0.0000 269 | 5,14,0.0000 270 | 5,15,0.0000 271 | 5,16,0.0000 272 | 5,17,0.0000 273 | 5,18,0.0255 274 | 5,19,0.0003 275 | 5,20,0.0639 276 | 5,21,0.0120 277 | 5,22,0.0000 278 | 5,23,0.0000 279 | 5,24,0.0000 280 | 5,25,0.0000 281 | 5,26,0.0000 282 | 5,27,0.0000 283 | 5,28,0.0000 284 | 5,29,0.0000 285 | 5,30,0.0000 286 | 5,31,0.0000 287 | 5,32,0.0000 288 | 5,33,0.0000 289 | 5,34,0.0000 290 | 5,35,0.0000 291 | 5,36,0.2001 292 | 5,37,0.7478 293 | 5,38,0.5662 294 | 5,39,0.4664 295 | 5,40,0.3593 296 | 5,41,0.3817 297 | 5,42,0.1226 298 | 5,43,0.1333 299 | 5,44,0.1313 300 | 5,45,0.0491 301 | 5,46,0.0000 302 | 5,47,0.0000 303 | 5,48,0.0000 304 | 5,49,0.0000 305 | 5,50,0.0000 306 | 5,51,0.0000 307 | 5,52,0.0000 308 | 5,53,0.0000 309 | 5,54,0.0000 310 | 6,7,0.7782 311 | 6,8,0.5645 312 | 6,9,0.4541 313 | 6,10,0.0328 314 | 6,11,0.0000 315 | 6,12,0.0000 316 | 6,13,0.0000 317 | 6,14,0.0000 318 | 6,15,0.0000 319 | 6,16,0.0000 320 | 6,17,0.0000 321 | 6,18,0.0343 322 | 6,19,0.0000 323 | 6,20,0.0440 324 | 6,21,0.0198 325 | 6,22,0.0000 326 | 6,23,0.0000 327 | 6,24,0.0000 328 | 6,25,0.0000 329 | 6,26,0.0000 330 | 6,27,0.0000 331 | 6,28,0.0000 332 | 6,29,0.0000 333 | 6,30,0.0000 334 | 6,31,0.0000 335 | 6,32,0.0000 336 | 6,33,0.0000 337 | 6,34,0.0000 338 | 6,35,0.0000 339 | 6,36,0.2087 340 | 6,37,0.7015 341 | 6,38,0.5432 342 | 6,39,0.4756 343 | 6,40,0.4228 344 | 6,41,0.4376 345 | 6,42,0.1733 346 | 6,43,0.1657 347 | 6,44,0.1401 348 | 6,45,0.0305 349 | 6,46,0.0000 350 | 6,47,0.0000 351 | 6,48,0.0000 352 | 6,49,0.0000 353 | 6,50,0.0000 354 | 6,51,0.0000 355 | 6,52,0.0000 356 | 6,53,0.0000 357 | 6,54,0.0000 358 | 7,8,0.6586 359 | 7,9,0.3342 360 | 7,10,0.0069 361 | 7,11,0.0000 362 | 7,12,0.0000 363 | 7,13,0.0000 364 | 7,14,0.0000 365 | 7,15,0.0000 366 | 7,16,0.0000 367 | 7,17,0.0000 368 | 7,18,0.0095 369 | 7,19,0.0000 370 | 7,20,0.0145 371 | 7,21,0.0060 372 | 7,22,0.0000 373 | 7,23,0.0000 374 | 7,24,0.0000 375 | 7,25,0.0000 376 | 7,26,0.0000 377 | 7,27,0.0000 378 | 7,28,0.0000 379 | 7,29,0.0000 380 | 7,30,0.0000 381 | 7,31,0.0000 382 | 7,32,0.0000 383 | 7,33,0.0000 384 | 7,34,0.0000 385 | 7,35,0.0000 386 | 7,36,0.1581 387 | 7,37,0.6742 388 | 7,38,0.6361 389 | 7,39,0.4744 390 | 7,40,0.4834 391 | 7,41,0.5094 392 | 7,42,0.2086 393 | 7,43,0.1140 394 | 7,44,0.0743 395 | 7,45,0.0188 396 | 7,46,0.0000 397 | 7,47,0.0000 398 | 7,48,0.0000 399 | 7,49,0.0000 400 | 7,50,0.0000 401 | 7,51,0.0000 402 | 7,52,0.0000 403 | 7,53,0.0000 404 | 7,54,0.0000 405 | 8,9,0.5670 406 | 8,10,0.1409 407 | 8,11,0.0000 408 | 8,12,0.0000 409 | 8,13,0.0000 410 | 8,14,0.0000 411 | 8,15,0.0000 412 | 8,16,0.0000 413 | 8,17,0.0004 414 | 8,18,0.1260 415 | 8,19,0.0361 416 | 8,20,0.1477 417 | 8,21,0.0417 418 | 8,22,0.0000 419 | 8,23,0.0000 420 | 8,24,0.0000 421 | 8,25,0.0000 422 | 8,26,0.0000 423 | 
8,27,0.0000 424 | 8,28,0.0000 425 | 8,29,0.0000 426 | 8,30,0.0000 427 | 8,31,0.0000 428 | 8,32,0.0000 429 | 8,33,0.0000 430 | 8,34,0.0000 431 | 8,35,0.0000 432 | 8,36,0.2936 433 | 8,37,0.5987 434 | 8,38,0.5411 435 | 8,39,0.5677 436 | 8,40,0.6010 437 | 8,41,0.6566 438 | 8,42,0.3435 439 | 8,43,0.2534 440 | 8,44,0.1904 441 | 8,45,0.0790 442 | 8,46,0.0000 443 | 8,47,0.0000 444 | 8,48,0.0000 445 | 8,49,0.0000 446 | 8,50,0.0000 447 | 8,51,0.0000 448 | 8,52,0.0000 449 | 8,53,0.0000 450 | 8,54,0.0000 451 | 9,10,0.5925 452 | 9,11,0.0000 453 | 9,12,0.0000 454 | 9,13,0.0000 455 | 9,14,0.0000 456 | 9,15,0.0000 457 | 9,16,0.0000 458 | 9,17,0.0000 459 | 9,18,0.1786 460 | 9,19,0.2260 461 | 9,20,0.2885 462 | 9,21,0.0229 463 | 9,22,0.0000 464 | 9,23,0.0000 465 | 9,24,0.0000 466 | 9,25,0.0000 467 | 9,26,0.0000 468 | 9,27,0.0000 469 | 9,28,0.0000 470 | 9,29,0.0000 471 | 9,30,0.0000 472 | 9,31,0.0000 473 | 9,32,0.0000 474 | 9,33,0.0000 475 | 9,34,0.0000 476 | 9,35,0.0074 477 | 9,36,0.5568 478 | 9,37,0.2426 479 | 9,38,0.1840 480 | 9,39,0.2289 481 | 9,40,0.1780 482 | 9,41,0.1876 483 | 9,42,0.2038 484 | 9,43,0.3772 485 | 9,44,0.4017 486 | 9,45,0.3210 487 | 9,46,0.0095 488 | 9,47,0.0000 489 | 9,48,0.0000 490 | 9,49,0.0000 491 | 9,50,0.0000 492 | 9,51,0.0000 493 | 9,52,0.0000 494 | 9,53,0.0051 495 | 9,54,0.0678 496 | 10,11,0.1919 497 | 10,12,0.0000 498 | 10,13,0.0000 499 | 10,14,0.0000 500 | 10,15,0.0000 501 | 10,16,0.0000 502 | 10,17,0.0492 503 | 10,18,0.4287 504 | 10,19,0.4381 505 | 10,20,0.3788 506 | 10,21,0.0906 507 | 10,22,0.0000 508 | 10,23,0.0000 509 | 10,24,0.0000 510 | 10,25,0.0000 511 | 10,26,0.0000 512 | 10,27,0.0000 513 | 10,28,0.0000 514 | 10,29,0.0000 515 | 10,30,0.0000 516 | 10,31,0.0000 517 | 10,32,0.0000 518 | 10,33,0.0000 519 | 10,34,0.1906 520 | 10,35,0.3363 521 | 10,36,0.5490 522 | 10,37,0.0349 523 | 10,38,0.0012 524 | 10,39,0.0180 525 | 10,40,0.0179 526 | 10,41,0.0291 527 | 10,42,0.0571 528 | 10,43,0.3916 529 | 10,44,0.5841 530 | 10,45,0.6626 531 | 10,46,0.1832 532 | 10,47,0.0071 533 | 10,48,0.0000 534 | 10,49,0.0000 535 | 10,50,0.0000 536 | 10,51,0.0000 537 | 10,52,0.0024 538 | 10,53,0.1371 539 | 10,54,0.3171 540 | 11,12,0.0919 541 | 11,13,0.0000 542 | 11,14,0.0000 543 | 11,15,0.0000 544 | 11,16,0.0053 545 | 11,17,0.0906 546 | 11,18,0.1677 547 | 11,19,0.1012 548 | 11,20,0.1184 549 | 11,21,0.1299 550 | 11,22,0.0307 551 | 11,23,0.0000 552 | 11,24,0.0000 553 | 11,25,0.0000 554 | 11,26,0.0000 555 | 11,27,0.0000 556 | 11,28,0.0000 557 | 11,29,0.0000 558 | 11,30,0.0000 559 | 11,31,0.0088 560 | 11,32,0.0133 561 | 11,33,0.1978 562 | 11,34,0.6366 563 | 11,35,0.6840 564 | 11,36,0.0674 565 | 11,37,0.0000 566 | 11,38,0.0000 567 | 11,39,0.0000 568 | 11,40,0.0000 569 | 11,41,0.0000 570 | 11,42,0.0000 571 | 11,43,0.1227 572 | 11,44,0.1383 573 | 11,45,0.2712 574 | 11,46,0.5265 575 | 11,47,0.3153 576 | 11,48,0.0246 577 | 11,49,0.0000 578 | 11,50,0.0000 579 | 11,51,0.0000 580 | 11,52,0.1434 581 | 11,53,0.4163 582 | 11,54,0.5177 583 | 12,13,0.4615 584 | 12,14,0.0606 585 | 12,15,0.0468 586 | 12,16,0.0958 587 | 12,17,0.0395 588 | 12,18,0.0000 589 | 12,19,0.0000 590 | 12,20,0.0000 591 | 12,21,0.0253 592 | 12,22,0.1188 593 | 12,23,0.0000 594 | 12,24,0.0554 595 | 12,25,0.1894 596 | 12,26,0.0754 597 | 12,27,0.0012 598 | 12,28,0.0000 599 | 12,29,0.0031 600 | 12,30,0.1267 601 | 12,31,0.3068 602 | 12,32,0.5036 603 | 12,33,0.6026 604 | 12,34,0.2828 605 | 12,35,0.0275 606 | 12,36,0.0000 607 | 12,37,0.0000 608 | 12,38,0.0000 609 | 12,39,0.0000 610 | 12,40,0.0000 611 | 12,41,0.0000 612 | 12,42,0.0000 613 | 12,43,0.0000 614 
| 12,44,0.0000 615 | 12,45,0.0000 616 | 12,46,0.1504 617 | 12,47,0.6687 618 | 12,48,0.5981 619 | 12,49,0.0582 620 | 12,50,0.0000 621 | 12,51,0.0361 622 | 12,52,0.5519 623 | 12,53,0.3538 624 | 12,54,0.0265 625 | 13,14,0.4749 626 | 13,15,0.2952 627 | 13,16,0.2460 628 | 13,17,0.0158 629 | 13,18,0.0000 630 | 13,19,0.0000 631 | 13,20,0.0000 632 | 13,21,0.0000 633 | 13,22,0.0931 634 | 13,23,0.1087 635 | 13,24,0.3934 636 | 13,25,0.4812 637 | 13,26,0.4162 638 | 13,27,0.1815 639 | 13,28,0.0530 640 | 13,29,0.0332 641 | 13,30,0.1132 642 | 13,31,0.1271 643 | 13,32,0.3152 644 | 13,33,0.2453 645 | 13,34,0.0012 646 | 13,35,0.0000 647 | 13,36,0.0000 648 | 13,37,0.0000 649 | 13,38,0.0000 650 | 13,39,0.0000 651 | 13,40,0.0000 652 | 13,41,0.0000 653 | 13,42,0.0000 654 | 13,43,0.0000 655 | 13,44,0.0000 656 | 13,45,0.0000 657 | 13,46,0.0026 658 | 13,47,0.2495 659 | 13,48,0.5299 660 | 13,49,0.3233 661 | 13,50,0.0566 662 | 13,51,0.2918 663 | 13,52,0.3947 664 | 13,53,0.0824 665 | 13,54,0.0000 666 | 14,15,0.5807 667 | 14,16,0.3687 668 | 14,17,0.0232 669 | 14,18,0.0000 670 | 14,19,0.0000 671 | 14,20,0.0000 672 | 14,21,0.0000 673 | 14,22,0.1523 674 | 14,23,0.4454 675 | 14,24,0.7291 676 | 14,25,0.4178 677 | 14,26,0.3838 678 | 14,27,0.2465 679 | 14,28,0.1651 680 | 14,29,0.0000 681 | 14,30,0.0005 682 | 14,31,0.0108 683 | 14,32,0.0581 684 | 14,33,0.0310 685 | 14,34,0.0000 686 | 14,35,0.0000 687 | 14,36,0.0000 688 | 14,37,0.0000 689 | 14,38,0.0000 690 | 14,39,0.0000 691 | 14,40,0.0000 692 | 14,41,0.0000 693 | 14,42,0.0000 694 | 14,43,0.0000 695 | 14,44,0.0000 696 | 14,45,0.0000 697 | 14,46,0.0000 698 | 14,47,0.0274 699 | 14,48,0.2402 700 | 14,49,0.4638 701 | 14,50,0.2631 702 | 14,51,0.4174 703 | 14,52,0.1451 704 | 14,53,0.0000 705 | 14,54,0.0000 706 | 15,16,0.5795 707 | 15,17,0.1227 708 | 15,18,0.0000 709 | 15,19,0.0000 710 | 15,20,0.0000 711 | 15,21,0.0266 712 | 15,22,0.3410 713 | 15,23,0.6912 714 | 15,24,0.6655 715 | 15,25,0.3042 716 | 15,26,0.1510 717 | 15,27,0.0173 718 | 15,28,0.0000 719 | 15,29,0.0000 720 | 15,30,0.0000 721 | 15,31,0.0000 722 | 15,32,0.0398 723 | 15,33,0.0204 724 | 15,34,0.0000 725 | 15,35,0.0000 726 | 15,36,0.0000 727 | 15,37,0.0000 728 | 15,38,0.0000 729 | 15,39,0.0000 730 | 15,40,0.0000 731 | 15,41,0.0000 732 | 15,42,0.0000 733 | 15,43,0.0000 734 | 15,44,0.0000 735 | 15,45,0.0000 736 | 15,46,0.0006 737 | 15,47,0.0540 738 | 15,48,0.2749 739 | 15,49,0.4682 740 | 15,50,0.2787 741 | 15,51,0.4260 742 | 15,52,0.1663 743 | 15,53,0.0001 744 | 15,54,0.0000 745 | 16,17,0.3407 746 | 16,18,0.0002 747 | 16,19,0.0000 748 | 16,20,0.0012 749 | 16,21,0.1898 750 | 16,22,0.6238 751 | 16,23,0.6092 752 | 16,24,0.5168 753 | 16,25,0.2654 754 | 16,26,0.0610 755 | 16,27,0.0000 756 | 16,28,0.0000 757 | 16,29,0.0000 758 | 16,30,0.0000 759 | 16,31,0.0013 760 | 16,32,0.0730 761 | 16,33,0.0589 762 | 16,34,0.0159 763 | 16,35,0.0026 764 | 16,36,0.0000 765 | 16,37,0.0000 766 | 16,38,0.0000 767 | 16,39,0.0000 768 | 16,40,0.0000 769 | 16,41,0.0000 770 | 16,42,0.0000 771 | 16,43,0.0000 772 | 16,44,0.0000 773 | 16,45,0.0001 774 | 16,46,0.0172 775 | 16,47,0.1110 776 | 16,48,0.3174 777 | 16,49,0.4453 778 | 16,50,0.2733 779 | 16,51,0.4227 780 | 16,52,0.2068 781 | 16,53,0.0250 782 | 16,54,0.0029 783 | 17,18,0.1011 784 | 17,19,0.0000 785 | 17,20,0.0611 786 | 17,21,0.5402 787 | 17,22,0.5339 788 | 17,23,0.1183 789 | 17,24,0.0218 790 | 17,25,0.0204 791 | 17,26,0.0000 792 | 17,27,0.0000 793 | 17,28,0.0000 794 | 17,29,0.0000 795 | 17,30,0.0000 796 | 17,31,0.0000 797 | 17,32,0.0060 798 | 17,33,0.0333 799 | 17,34,0.1006 800 | 17,35,0.0729 801 | 
17,36,0.0217 802 | 17,37,0.0000 803 | 17,38,0.0000 804 | 17,39,0.0000 805 | 17,40,0.0000 806 | 17,41,0.0000 807 | 17,42,0.0126 808 | 17,43,0.0188 809 | 17,44,0.0137 810 | 17,45,0.0438 811 | 17,46,0.0982 812 | 17,47,0.1095 813 | 17,48,0.0778 814 | 17,49,0.0277 815 | 17,50,0.0087 816 | 17,51,0.0175 817 | 17,52,0.0718 818 | 17,53,0.1015 819 | 17,54,0.0746 820 | 18,19,0.4131 821 | 18,20,0.7789 822 | 18,21,0.2041 823 | 18,22,0.0354 824 | 18,23,0.0000 825 | 18,24,0.0000 826 | 18,25,0.0000 827 | 18,26,0.0000 828 | 18,27,0.0000 829 | 18,28,0.0000 830 | 18,29,0.0000 831 | 18,30,0.0000 832 | 18,31,0.0000 833 | 18,32,0.0000 834 | 18,33,0.0000 835 | 18,34,0.1883 836 | 18,35,0.2289 837 | 18,36,0.5541 838 | 18,37,0.0612 839 | 18,38,0.0007 840 | 18,39,0.0252 841 | 18,40,0.0211 842 | 18,41,0.0472 843 | 18,42,0.0483 844 | 18,43,0.1999 845 | 18,44,0.2645 846 | 18,45,0.3291 847 | 18,46,0.1570 848 | 18,47,0.0160 849 | 18,48,0.0000 850 | 18,49,0.0000 851 | 18,50,0.0000 852 | 18,51,0.0000 853 | 18,52,0.0107 854 | 18,53,0.1445 855 | 18,54,0.1998 856 | 19,20,0.5392 857 | 19,21,0.0000 858 | 19,22,0.0000 859 | 19,23,0.0000 860 | 19,24,0.0000 861 | 19,25,0.0000 862 | 19,26,0.0000 863 | 19,27,0.0000 864 | 19,28,0.0000 865 | 19,29,0.0000 866 | 19,30,0.0000 867 | 19,31,0.0000 868 | 19,32,0.0000 869 | 19,33,0.0000 870 | 19,34,0.0746 871 | 19,35,0.2504 872 | 19,36,0.4269 873 | 19,37,0.0031 874 | 19,38,0.0000 875 | 19,39,0.0000 876 | 19,40,0.0000 877 | 19,41,0.0010 878 | 19,42,0.0000 879 | 19,43,0.0791 880 | 19,44,0.1953 881 | 19,45,0.2443 882 | 19,46,0.0711 883 | 19,47,0.0000 884 | 19,48,0.0000 885 | 19,49,0.0000 886 | 19,50,0.0000 887 | 19,51,0.0000 888 | 19,52,0.0000 889 | 19,53,0.0230 890 | 19,54,0.1374 891 | 20,21,0.1837 892 | 20,22,0.0089 893 | 20,23,0.0000 894 | 20,24,0.0000 895 | 20,25,0.0000 896 | 20,26,0.0000 897 | 20,27,0.0000 898 | 20,28,0.0000 899 | 20,29,0.0000 900 | 20,30,0.0000 901 | 20,31,0.0000 902 | 20,32,0.0000 903 | 20,33,0.0000 904 | 20,34,0.1175 905 | 20,35,0.1324 906 | 20,36,0.6331 907 | 20,37,0.1040 908 | 20,38,0.0035 909 | 20,39,0.0348 910 | 20,40,0.0262 911 | 20,41,0.0569 912 | 20,42,0.0566 913 | 20,43,0.2080 914 | 20,44,0.2366 915 | 20,45,0.2709 916 | 20,46,0.1170 917 | 20,47,0.0118 918 | 20,48,0.0000 919 | 20,49,0.0000 920 | 20,50,0.0000 921 | 20,51,0.0000 922 | 20,52,0.0074 923 | 20,53,0.1127 924 | 20,54,0.1291 925 | 21,22,0.5278 926 | 21,23,0.0239 927 | 21,24,0.0000 928 | 21,25,0.0000 929 | 21,26,0.0000 930 | 21,27,0.0000 931 | 21,28,0.0000 932 | 21,29,0.0000 933 | 21,30,0.0000 934 | 21,31,0.0000 935 | 21,32,0.0107 936 | 21,33,0.0619 937 | 21,34,0.2008 938 | 21,35,0.1533 939 | 21,36,0.1765 940 | 21,37,0.0741 941 | 21,38,0.0020 942 | 21,39,0.0096 943 | 21,40,0.0082 944 | 21,41,0.0300 945 | 21,42,0.0463 946 | 21,43,0.1055 947 | 21,44,0.0883 948 | 21,45,0.1398 949 | 21,46,0.2037 950 | 21,47,0.1330 951 | 21,48,0.0518 952 | 21,49,0.0000 953 | 21,50,0.0000 954 | 21,51,0.0000 955 | 21,52,0.0925 956 | 21,53,0.1979 957 | 21,54,0.1486 958 | 22,23,0.3581 959 | 22,24,0.1963 960 | 22,25,0.0953 961 | 22,26,0.0000 962 | 22,27,0.0000 963 | 22,28,0.0000 964 | 22,29,0.0000 965 | 22,30,0.0000 966 | 22,31,0.0081 967 | 22,32,0.0682 968 | 22,33,0.1025 969 | 22,34,0.0716 970 | 22,35,0.0159 971 | 22,36,0.0000 972 | 22,37,0.0000 973 | 22,38,0.0000 974 | 22,39,0.0000 975 | 22,40,0.0000 976 | 22,41,0.0000 977 | 22,42,0.0000 978 | 22,43,0.0001 979 | 22,44,0.0000 980 | 22,45,0.0003 981 | 22,46,0.0625 982 | 22,47,0.1812 983 | 22,48,0.2499 984 | 22,49,0.1507 985 | 22,50,0.0409 986 | 22,51,0.1381 987 | 22,52,0.1808 988 | 
22,53,0.0904 989 | 22,54,0.0237 990 | 23,24,0.4911 991 | 23,25,0.1508 992 | 23,26,0.0132 993 | 23,27,0.0000 994 | 23,28,0.0000 995 | 23,29,0.0000 996 | 23,30,0.0000 997 | 23,31,0.0000 998 | 23,32,0.0078 999 | 23,33,0.0020 1000 | 23,34,0.0000 1001 | 23,35,0.0000 1002 | 23,36,0.0000 1003 | 23,37,0.0000 1004 | 23,38,0.0000 1005 | 23,39,0.0000 1006 | 23,40,0.0000 1007 | 23,41,0.0000 1008 | 23,42,0.0000 1009 | 23,43,0.0000 1010 | 23,44,0.0000 1011 | 23,45,0.0000 1012 | 23,46,0.0000 1013 | 23,47,0.0174 1014 | 23,48,0.1382 1015 | 23,49,0.3482 1016 | 23,50,0.2782 1017 | 23,51,0.3260 1018 | 23,52,0.0770 1019 | 23,53,0.0000 1020 | 23,54,0.0000 1021 | 24,25,0.3473 1022 | 24,26,0.2009 1023 | 24,27,0.0658 1024 | 24,28,0.0202 1025 | 24,29,0.0000 1026 | 24,30,0.0000 1027 | 24,31,0.0042 1028 | 24,32,0.0489 1029 | 24,33,0.0292 1030 | 24,34,0.0000 1031 | 24,35,0.0000 1032 | 24,36,0.0000 1033 | 24,37,0.0000 1034 | 24,38,0.0000 1035 | 24,39,0.0000 1036 | 24,40,0.0000 1037 | 24,41,0.0000 1038 | 24,42,0.0000 1039 | 24,43,0.0000 1040 | 24,44,0.0000 1041 | 24,45,0.0000 1042 | 24,46,0.0000 1043 | 24,47,0.0357 1044 | 24,48,0.2309 1045 | 24,49,0.5091 1046 | 24,50,0.2938 1047 | 24,51,0.4590 1048 | 24,52,0.1466 1049 | 24,53,0.0000 1050 | 24,54,0.0000 1051 | 25,26,0.5118 1052 | 25,27,0.2717 1053 | 25,28,0.1412 1054 | 25,29,0.0039 1055 | 25,30,0.0710 1056 | 25,31,0.1085 1057 | 25,32,0.2925 1058 | 25,33,0.1966 1059 | 25,34,0.0000 1060 | 25,35,0.0000 1061 | 25,36,0.0000 1062 | 25,37,0.0000 1063 | 25,38,0.0000 1064 | 25,39,0.0000 1065 | 25,40,0.0000 1066 | 25,41,0.0000 1067 | 25,42,0.0000 1068 | 25,43,0.0000 1069 | 25,44,0.0000 1070 | 25,45,0.0000 1071 | 25,46,0.0000 1072 | 25,47,0.1532 1073 | 25,48,0.4371 1074 | 25,49,0.4844 1075 | 25,50,0.1706 1076 | 25,51,0.4205 1077 | 25,52,0.3137 1078 | 25,53,0.0015 1079 | 25,54,0.0000 1080 | 26,27,0.7494 1081 | 26,28,0.5605 1082 | 26,29,0.2736 1083 | 26,30,0.0674 1084 | 26,31,0.0253 1085 | 26,32,0.0971 1086 | 26,33,0.0356 1087 | 26,34,0.0000 1088 | 26,35,0.0000 1089 | 26,36,0.0000 1090 | 26,37,0.0000 1091 | 26,38,0.0000 1092 | 26,39,0.0000 1093 | 26,40,0.0000 1094 | 26,41,0.0000 1095 | 26,42,0.0000 1096 | 26,43,0.0000 1097 | 26,44,0.0000 1098 | 26,45,0.0000 1099 | 26,46,0.0000 1100 | 26,47,0.0003 1101 | 26,48,0.0749 1102 | 26,49,0.1098 1103 | 26,50,0.0398 1104 | 26,51,0.0947 1105 | 26,52,0.0430 1106 | 26,53,0.0000 1107 | 26,54,0.0000 1108 | 27,28,0.6933 1109 | 27,29,0.3615 1110 | 27,30,0.0840 1111 | 27,31,0.0002 1112 | 27,32,0.0740 1113 | 27,33,0.0054 1114 | 27,34,0.0000 1115 | 27,35,0.0000 1116 | 27,36,0.0000 1117 | 27,37,0.0000 1118 | 27,38,0.0000 1119 | 27,39,0.0000 1120 | 27,40,0.0000 1121 | 27,41,0.0000 1122 | 27,42,0.0000 1123 | 27,43,0.0000 1124 | 27,44,0.0000 1125 | 27,45,0.0000 1126 | 27,46,0.0000 1127 | 27,47,0.0000 1128 | 27,48,0.0126 1129 | 27,49,0.0223 1130 | 27,50,0.0000 1131 | 27,51,0.0225 1132 | 27,52,0.0058 1133 | 27,53,0.0000 1134 | 27,54,0.0000 1135 | 28,29,0.3685 1136 | 28,30,0.0262 1137 | 28,31,0.0000 1138 | 28,32,0.0074 1139 | 28,33,0.0000 1140 | 28,34,0.0000 1141 | 28,35,0.0000 1142 | 28,36,0.0000 1143 | 28,37,0.0000 1144 | 28,38,0.0000 1145 | 28,39,0.0000 1146 | 28,40,0.0000 1147 | 28,41,0.0000 1148 | 28,42,0.0000 1149 | 28,43,0.0000 1150 | 28,44,0.0000 1151 | 28,45,0.0000 1152 | 28,46,0.0000 1153 | 28,47,0.0000 1154 | 28,48,0.0000 1155 | 28,49,0.0000 1156 | 28,50,0.0000 1157 | 28,51,0.0000 1158 | 28,52,0.0000 1159 | 28,53,0.0000 1160 | 28,54,0.0000 1161 | 29,30,0.2973 1162 | 29,31,0.0000 1163 | 29,32,0.0697 1164 | 29,33,0.0016 1165 | 29,34,0.0000 1166 | 
29,35,0.0000 1167 | 29,36,0.0000 1168 | 29,37,0.0000 1169 | 29,38,0.0000 1170 | 29,39,0.0000 1171 | 29,40,0.0000 1172 | 29,41,0.0000 1173 | 29,42,0.0000 1174 | 29,43,0.0000 1175 | 29,44,0.0000 1176 | 29,45,0.0000 1177 | 29,46,0.0000 1178 | 29,47,0.0000 1179 | 29,48,0.0000 1180 | 29,49,0.0000 1181 | 29,50,0.0000 1182 | 29,51,0.0000 1183 | 29,52,0.0000 1184 | 29,53,0.0000 1185 | 29,54,0.0000 1186 | 30,31,0.2387 1187 | 30,32,0.2846 1188 | 30,33,0.2267 1189 | 30,34,0.0000 1190 | 30,35,0.0000 1191 | 30,36,0.0000 1192 | 30,37,0.0000 1193 | 30,38,0.0000 1194 | 30,39,0.0000 1195 | 30,40,0.0000 1196 | 30,41,0.0000 1197 | 30,42,0.0000 1198 | 30,43,0.0000 1199 | 30,44,0.0000 1200 | 30,45,0.0000 1201 | 30,46,0.0000 1202 | 30,47,0.0931 1203 | 30,48,0.1172 1204 | 30,49,0.0000 1205 | 30,50,0.0000 1206 | 30,51,0.0000 1207 | 30,52,0.0618 1208 | 30,53,0.0039 1209 | 30,54,0.0000 1210 | 31,32,0.8562 1211 | 31,33,0.8547 1212 | 31,34,0.2035 1213 | 31,35,0.0060 1214 | 31,36,0.0000 1215 | 31,37,0.0000 1216 | 31,38,0.0000 1217 | 31,39,0.0000 1218 | 31,40,0.0000 1219 | 31,41,0.0000 1220 | 31,42,0.0000 1221 | 31,43,0.0000 1222 | 31,44,0.0000 1223 | 31,45,0.0000 1224 | 31,46,0.0957 1225 | 31,47,0.5405 1226 | 31,48,0.6223 1227 | 31,49,0.0038 1228 | 31,50,0.0000 1229 | 31,51,0.0000 1230 | 31,52,0.4414 1231 | 31,53,0.2923 1232 | 31,54,0.0013 1233 | 32,33,0.7167 1234 | 32,34,0.1572 1235 | 32,35,0.0004 1236 | 32,36,0.0000 1237 | 32,37,0.0000 1238 | 32,38,0.0000 1239 | 32,39,0.0000 1240 | 32,40,0.0000 1241 | 32,41,0.0000 1242 | 32,42,0.0000 1243 | 32,43,0.0000 1244 | 32,44,0.0000 1245 | 32,45,0.0000 1246 | 32,46,0.0909 1247 | 32,47,0.4880 1248 | 32,48,0.6109 1249 | 32,49,0.0975 1250 | 32,50,0.0000 1251 | 32,51,0.0572 1252 | 32,52,0.4723 1253 | 32,53,0.2686 1254 | 32,54,0.0023 1255 | 33,34,0.3792 1256 | 33,35,0.1076 1257 | 33,36,0.0000 1258 | 33,37,0.0000 1259 | 33,38,0.0000 1260 | 33,39,0.0000 1261 | 33,40,0.0000 1262 | 33,41,0.0000 1263 | 33,42,0.0000 1264 | 33,43,0.0000 1265 | 33,44,0.0000 1266 | 33,45,0.0000 1267 | 33,46,0.1940 1268 | 33,47,0.6142 1269 | 33,48,0.4067 1270 | 33,49,0.0285 1271 | 33,50,0.0000 1272 | 33,51,0.0163 1273 | 33,52,0.4170 1274 | 33,53,0.3263 1275 | 33,54,0.0728 1276 | 34,35,0.4918 1277 | 34,36,0.0773 1278 | 34,37,0.0000 1279 | 34,38,0.0000 1280 | 34,39,0.0000 1281 | 34,40,0.0000 1282 | 34,41,0.0000 1283 | 34,42,0.0082 1284 | 34,43,0.1436 1285 | 34,44,0.1450 1286 | 34,45,0.2458 1287 | 34,46,0.4938 1288 | 34,47,0.3615 1289 | 34,48,0.0862 1290 | 34,49,0.0000 1291 | 34,50,0.0000 1292 | 34,51,0.0000 1293 | 34,52,0.2021 1294 | 34,53,0.5202 1295 | 34,54,0.4283 1296 | 35,36,0.1174 1297 | 35,37,0.0000 1298 | 35,38,0.0000 1299 | 35,39,0.0000 1300 | 35,40,0.0000 1301 | 35,41,0.0000 1302 | 35,42,0.0034 1303 | 35,43,0.1051 1304 | 35,44,0.1802 1305 | 35,45,0.2696 1306 | 35,46,0.2856 1307 | 35,47,0.0660 1308 | 35,48,0.0000 1309 | 35,49,0.0000 1310 | 35,50,0.0000 1311 | 35,51,0.0000 1312 | 35,52,0.0322 1313 | 35,53,0.2032 1314 | 35,54,0.3769 1315 | 36,37,0.1977 1316 | 36,38,0.0610 1317 | 36,39,0.0859 1318 | 36,40,0.0606 1319 | 36,41,0.0677 1320 | 36,42,0.0759 1321 | 36,43,0.2177 1322 | 36,44,0.2356 1323 | 36,45,0.2408 1324 | 36,46,0.0508 1325 | 36,47,0.0000 1326 | 36,48,0.0000 1327 | 36,49,0.0000 1328 | 36,50,0.0000 1329 | 36,51,0.0000 1330 | 36,52,0.0000 1331 | 36,53,0.0431 1332 | 36,54,0.0720 1333 | 37,38,0.6031 1334 | 37,39,0.4313 1335 | 37,40,0.3682 1336 | 37,41,0.4354 1337 | 37,42,0.1677 1338 | 37,43,0.1655 1339 | 37,44,0.1244 1340 | 37,45,0.0616 1341 | 37,46,0.0000 1342 | 37,47,0.0000 1343 | 37,48,0.0000 
1344 | 37,49,0.0000 1345 | 37,50,0.0000 1346 | 37,51,0.0000 1347 | 37,52,0.0000 1348 | 37,53,0.0000 1349 | 37,54,0.0000 1350 | 38,39,0.6832 1351 | 38,40,0.6312 1352 | 38,41,0.6748 1353 | 38,42,0.2598 1354 | 38,43,0.0925 1355 | 38,44,0.0463 1356 | 38,45,0.0115 1357 | 38,46,0.0000 1358 | 38,47,0.0000 1359 | 38,48,0.0000 1360 | 38,49,0.0000 1361 | 38,50,0.0000 1362 | 38,51,0.0000 1363 | 38,52,0.0000 1364 | 38,53,0.0000 1365 | 38,54,0.0000 1366 | 39,40,0.7871 1367 | 39,41,0.7114 1368 | 39,42,0.4466 1369 | 39,43,0.3822 1370 | 39,44,0.2173 1371 | 39,45,0.0538 1372 | 39,46,0.0000 1373 | 39,47,0.0000 1374 | 39,48,0.0000 1375 | 39,49,0.0000 1376 | 39,50,0.0000 1377 | 39,51,0.0000 1378 | 39,52,0.0000 1379 | 39,53,0.0000 1380 | 39,54,0.0000 1381 | 40,41,0.8652 1382 | 40,42,0.4895 1383 | 40,43,0.2982 1384 | 40,44,0.1856 1385 | 40,45,0.0393 1386 | 40,46,0.0000 1387 | 40,47,0.0000 1388 | 40,48,0.0000 1389 | 40,49,0.0000 1390 | 40,50,0.0000 1391 | 40,51,0.0000 1392 | 40,52,0.0000 1393 | 40,53,0.0000 1394 | 40,54,0.0000 1395 | 41,42,0.5345 1396 | 41,43,0.3678 1397 | 41,44,0.2287 1398 | 41,45,0.0717 1399 | 41,46,0.0000 1400 | 41,47,0.0000 1401 | 41,48,0.0000 1402 | 41,49,0.0000 1403 | 41,50,0.0000 1404 | 41,51,0.0000 1405 | 41,52,0.0000 1406 | 41,53,0.0000 1407 | 41,54,0.0000 1408 | 42,43,0.7297 1409 | 42,44,0.4166 1410 | 42,45,0.2754 1411 | 42,46,0.0536 1412 | 42,47,0.0000 1413 | 42,48,0.0000 1414 | 42,49,0.0000 1415 | 42,50,0.0000 1416 | 42,51,0.0000 1417 | 42,52,0.0000 1418 | 42,53,0.0926 1419 | 42,54,0.1467 1420 | 43,44,0.7781 1421 | 43,45,0.6598 1422 | 43,46,0.1883 1423 | 43,47,0.0000 1424 | 43,48,0.0000 1425 | 43,49,0.0000 1426 | 43,50,0.0000 1427 | 43,51,0.0000 1428 | 43,52,0.0000 1429 | 43,53,0.1926 1430 | 43,54,0.3557 1431 | 44,45,0.8212 1432 | 44,46,0.1749 1433 | 44,47,0.0000 1434 | 44,48,0.0000 1435 | 44,49,0.0000 1436 | 44,50,0.0000 1437 | 44,51,0.0000 1438 | 44,52,0.0000 1439 | 44,53,0.1370 1440 | 44,54,0.4209 1441 | 45,46,0.3013 1442 | 45,47,0.0175 1443 | 45,48,0.0000 1444 | 45,49,0.0000 1445 | 45,50,0.0000 1446 | 45,51,0.0000 1447 | 45,52,0.0079 1448 | 45,53,0.2549 1449 | 45,54,0.5114 1450 | 46,47,0.4020 1451 | 46,48,0.1097 1452 | 46,49,0.0000 1453 | 46,50,0.0000 1454 | 46,51,0.0000 1455 | 46,52,0.3236 1456 | 46,53,0.8073 1457 | 46,54,0.6459 1458 | 47,48,0.4859 1459 | 47,49,0.0275 1460 | 47,50,0.0000 1461 | 47,51,0.0248 1462 | 47,52,0.6143 1463 | 47,53,0.4969 1464 | 47,54,0.1425 1465 | 48,49,0.3715 1466 | 48,50,0.0870 1467 | 48,51,0.3476 1468 | 48,52,0.6786 1469 | 48,53,0.2304 1470 | 48,54,0.0083 1471 | 49,50,0.5697 1472 | 49,51,0.7624 1473 | 49,52,0.1486 1474 | 49,53,0.0000 1475 | 49,54,0.0000 1476 | 50,51,0.7021 1477 | 50,52,0.0300 1478 | 50,53,0.0000 1479 | 50,54,0.0000 1480 | 51,52,0.1839 1481 | 51,53,0.0000 1482 | 51,54,0.0000 1483 | 52,53,0.4048 1484 | 52,54,0.0833 1485 | 53,54,0.3770 1486 | -------------------------------------------------------------------------------- /benchmarks/3DLoMatch/sun3d-hotel_umd-maryland_hotel1/gt_overlap.log: -------------------------------------------------------------------------------- 1 | 0,1,0.4164 2 | 0,2,0.0966 3 | 0,3,0.0000 4 | 0,4,0.0000 5 | 0,5,0.0000 6 | 0,6,0.0000 7 | 0,7,0.0000 8 | 0,8,0.1374 9 | 0,9,0.2265 10 | 0,10,0.2259 11 | 0,11,0.0000 12 | 0,12,0.0000 13 | 0,13,0.0558 14 | 0,14,0.6789 15 | 0,15,0.1939 16 | 0,16,0.0161 17 | 0,17,0.0000 18 | 0,18,0.0000 19 | 0,19,0.0000 20 | 0,20,0.0000 21 | 0,21,0.0000 22 | 0,22,0.0000 23 | 0,23,0.2292 24 | 0,24,0.6450 25 | 0,25,0.4599 26 | 0,26,0.3713 27 | 0,27,0.0413 28 | 0,28,0.0000 29 | 0,29,0.1054 
30 | 0,30,0.0000 31 | 0,31,0.0000 32 | 0,32,0.0322 33 | 0,33,0.0000 34 | 0,34,0.0000 35 | 0,35,0.0000 36 | 0,36,0.0000 37 | 0,37,0.0000 38 | 0,38,0.0000 39 | 0,39,0.0000 40 | 0,40,0.0000 41 | 0,41,0.0000 42 | 0,42,0.0000 43 | 0,43,0.0000 44 | 0,44,0.0000 45 | 0,45,0.0000 46 | 0,46,0.0000 47 | 0,47,0.0000 48 | 0,48,0.0000 49 | 0,49,0.0000 50 | 0,50,0.0000 51 | 0,51,0.0000 52 | 0,52,0.0000 53 | 0,53,0.0000 54 | 0,54,0.0000 55 | 0,55,0.0000 56 | 0,56,0.0000 57 | 1,2,0.3852 58 | 1,3,0.0716 59 | 1,4,0.0008 60 | 1,5,0.0000 61 | 1,6,0.0000 62 | 1,7,0.0000 63 | 1,8,0.0000 64 | 1,9,0.0348 65 | 1,10,0.0538 66 | 1,11,0.0000 67 | 1,12,0.0347 68 | 1,13,0.0551 69 | 1,14,0.4752 70 | 1,15,0.6679 71 | 1,16,0.3431 72 | 1,17,0.0253 73 | 1,18,0.0000 74 | 1,19,0.0000 75 | 1,20,0.0000 76 | 1,21,0.0000 77 | 1,22,0.0000 78 | 1,23,0.3793 79 | 1,24,0.3298 80 | 1,25,0.4264 81 | 1,26,0.1708 82 | 1,27,0.0843 83 | 1,28,0.0055 84 | 1,29,0.0600 85 | 1,30,0.0000 86 | 1,31,0.0000 87 | 1,32,0.0158 88 | 1,33,0.0000 89 | 1,34,0.0000 90 | 1,35,0.0000 91 | 1,36,0.0000 92 | 1,37,0.0000 93 | 1,38,0.0000 94 | 1,39,0.0000 95 | 1,40,0.0000 96 | 1,41,0.0000 97 | 1,42,0.0000 98 | 1,43,0.0000 99 | 1,44,0.0000 100 | 1,45,0.0000 101 | 1,46,0.0000 102 | 1,47,0.0000 103 | 1,48,0.0000 104 | 1,49,0.0000 105 | 1,50,0.0000 106 | 1,51,0.0000 107 | 1,52,0.0000 108 | 1,53,0.0000 109 | 1,54,0.0000 110 | 1,55,0.0000 111 | 1,56,0.0000 112 | 2,3,0.1603 113 | 2,4,0.0594 114 | 2,5,0.0000 115 | 2,6,0.0000 116 | 2,7,0.0000 117 | 2,8,0.0000 118 | 2,9,0.0000 119 | 2,10,0.0000 120 | 2,11,0.0011 121 | 2,12,0.1148 122 | 2,13,0.0002 123 | 2,14,0.0933 124 | 2,15,0.4015 125 | 2,16,0.4839 126 | 2,17,0.0865 127 | 2,18,0.0000 128 | 2,19,0.0000 129 | 2,20,0.0000 130 | 2,21,0.0000 131 | 2,22,0.0000 132 | 2,23,0.2584 133 | 2,24,0.1081 134 | 2,25,0.1072 135 | 2,26,0.0017 136 | 2,27,0.0206 137 | 2,28,0.0090 138 | 2,29,0.0162 139 | 2,30,0.0000 140 | 2,31,0.0000 141 | 2,32,0.0000 142 | 2,33,0.0000 143 | 2,34,0.0000 144 | 2,35,0.0000 145 | 2,36,0.0000 146 | 2,37,0.0000 147 | 2,38,0.0000 148 | 2,39,0.0000 149 | 2,40,0.0000 150 | 2,41,0.0000 151 | 2,42,0.0000 152 | 2,43,0.0000 153 | 2,44,0.0000 154 | 2,45,0.0000 155 | 2,46,0.0000 156 | 2,47,0.0000 157 | 2,48,0.0000 158 | 2,49,0.0000 159 | 2,50,0.0000 160 | 2,51,0.0000 161 | 2,52,0.0000 162 | 2,53,0.0000 163 | 2,54,0.0000 164 | 2,55,0.0000 165 | 2,56,0.0000 166 | 3,4,0.4932 167 | 3,5,0.0881 168 | 3,6,0.0000 169 | 3,7,0.0000 170 | 3,8,0.0000 171 | 3,9,0.0000 172 | 3,10,0.0000 173 | 3,11,0.1192 174 | 3,12,0.5607 175 | 3,13,0.0415 176 | 3,14,0.0000 177 | 3,15,0.1420 178 | 3,16,0.3818 179 | 3,17,0.6915 180 | 3,18,0.0000 181 | 3,19,0.0000 182 | 3,20,0.0000 183 | 3,21,0.0000 184 | 3,22,0.0169 185 | 3,23,0.1155 186 | 3,24,0.0000 187 | 3,25,0.0000 188 | 3,26,0.0000 189 | 3,27,0.1533 190 | 3,28,0.1269 191 | 3,29,0.0853 192 | 3,30,0.0000 193 | 3,31,0.0000 194 | 3,32,0.0563 195 | 3,33,0.0000 196 | 3,34,0.0000 197 | 3,35,0.0000 198 | 3,36,0.0000 199 | 3,37,0.0000 200 | 3,38,0.0000 201 | 3,39,0.0000 202 | 3,40,0.0000 203 | 3,41,0.0000 204 | 3,42,0.0000 205 | 3,43,0.0000 206 | 3,44,0.0000 207 | 3,45,0.0000 208 | 3,46,0.0000 209 | 3,47,0.0000 210 | 3,48,0.0000 211 | 3,49,0.0000 212 | 3,50,0.0000 213 | 3,51,0.0000 214 | 3,52,0.0000 215 | 3,53,0.0000 216 | 3,54,0.0000 217 | 3,55,0.0000 218 | 3,56,0.0000 219 | 4,5,0.0698 220 | 4,6,0.0000 221 | 4,7,0.0000 222 | 4,8,0.0000 223 | 4,9,0.0000 224 | 4,10,0.0000 225 | 4,11,0.1537 226 | 4,12,0.2770 227 | 4,13,0.0465 228 | 4,14,0.0000 229 | 4,15,0.0001 230 | 4,16,0.1852 231 | 4,17,0.3837 232 | 
4,18,0.0117 233 | 4,19,0.0000 234 | 4,20,0.0000 235 | 4,21,0.0000 236 | 4,22,0.0320 237 | 4,23,0.0405 238 | 4,24,0.0000 239 | 4,25,0.0000 240 | 4,26,0.0000 241 | 4,27,0.0471 242 | 4,28,0.1474 243 | 4,29,0.0160 244 | 4,30,0.0000 245 | 4,31,0.0000 246 | 4,32,0.0134 247 | 4,33,0.0000 248 | 4,34,0.0000 249 | 4,35,0.0000 250 | 4,36,0.0000 251 | 4,37,0.0000 252 | 4,38,0.0000 253 | 4,39,0.0000 254 | 4,40,0.0000 255 | 4,41,0.0000 256 | 4,42,0.0000 257 | 4,43,0.0000 258 | 4,44,0.0000 259 | 4,45,0.0000 260 | 4,46,0.0000 261 | 4,47,0.0000 262 | 4,48,0.0000 263 | 4,49,0.0000 264 | 4,50,0.0000 265 | 4,51,0.0000 266 | 4,52,0.0000 267 | 4,53,0.0000 268 | 4,54,0.0000 269 | 4,55,0.0000 270 | 4,56,0.0000 271 | 5,6,0.5433 272 | 5,7,0.1001 273 | 5,8,0.0000 274 | 5,9,0.0000 275 | 5,10,0.0000 276 | 5,11,0.4509 277 | 5,12,0.0178 278 | 5,13,0.4033 279 | 5,14,0.0000 280 | 5,15,0.0000 281 | 5,16,0.0000 282 | 5,17,0.4198 283 | 5,18,0.7203 284 | 5,19,0.0000 285 | 5,20,0.0000 286 | 5,21,0.0000 287 | 5,22,0.7150 288 | 5,23,0.0000 289 | 5,24,0.0000 290 | 5,25,0.0000 291 | 5,26,0.0000 292 | 5,27,0.2527 293 | 5,28,0.3494 294 | 5,29,0.0216 295 | 5,30,0.0000 296 | 5,31,0.0000 297 | 5,32,0.3884 298 | 5,33,0.2817 299 | 5,34,0.1124 300 | 5,35,0.1774 301 | 5,36,0.0000 302 | 5,37,0.0000 303 | 5,38,0.0000 304 | 5,39,0.0000 305 | 5,40,0.0000 306 | 5,41,0.0000 307 | 5,42,0.0000 308 | 5,43,0.0000 309 | 5,44,0.0000 310 | 5,45,0.0000 311 | 5,46,0.0000 312 | 5,47,0.0000 313 | 5,48,0.0000 314 | 5,49,0.0000 315 | 5,50,0.0000 316 | 5,51,0.0000 317 | 5,52,0.0000 318 | 5,53,0.0000 319 | 5,54,0.0000 320 | 5,55,0.0000 321 | 5,56,0.0000 322 | 6,7,0.1436 323 | 6,8,0.0054 324 | 6,9,0.0000 325 | 6,10,0.0000 326 | 6,11,0.0810 327 | 6,12,0.0000 328 | 6,13,0.0707 329 | 6,14,0.0000 330 | 6,15,0.0000 331 | 6,16,0.0000 332 | 6,17,0.0255 333 | 6,18,0.5955 334 | 6,19,0.1631 335 | 6,20,0.0000 336 | 6,21,0.0187 337 | 6,22,0.2615 338 | 6,23,0.0000 339 | 6,24,0.0000 340 | 6,25,0.0000 341 | 6,26,0.0000 342 | 6,27,0.0132 343 | 6,28,0.0294 344 | 6,29,0.0119 345 | 6,30,0.0449 346 | 6,31,0.0312 347 | 6,32,0.0899 348 | 6,33,0.1980 349 | 6,34,0.1972 350 | 6,35,0.1968 351 | 6,36,0.1282 352 | 6,37,0.1337 353 | 6,38,0.0266 354 | 6,39,0.0462 355 | 6,40,0.0380 356 | 6,41,0.0000 357 | 6,42,0.0000 358 | 6,43,0.0000 359 | 6,44,0.0000 360 | 6,45,0.0000 361 | 6,46,0.0053 362 | 6,47,0.0000 363 | 6,48,0.0000 364 | 6,49,0.0000 365 | 6,50,0.0000 366 | 6,51,0.0000 367 | 6,52,0.0000 368 | 6,53,0.0000 369 | 6,54,0.0000 370 | 6,55,0.0000 371 | 6,56,0.0896 372 | 7,8,0.2835 373 | 7,9,0.1527 374 | 7,10,0.1714 375 | 7,11,0.1062 376 | 7,12,0.0000 377 | 7,13,0.1685 378 | 7,14,0.0000 379 | 7,15,0.0000 380 | 7,16,0.0000 381 | 7,17,0.0044 382 | 7,18,0.2419 383 | 7,19,0.3308 384 | 7,20,0.1004 385 | 7,21,0.2393 386 | 7,22,0.1408 387 | 7,23,0.0000 388 | 7,24,0.0000 389 | 7,25,0.0001 390 | 7,26,0.0698 391 | 7,27,0.0507 392 | 7,28,0.0041 393 | 7,29,0.1395 394 | 7,30,0.2508 395 | 7,31,0.2217 396 | 7,32,0.1818 397 | 7,33,0.0578 398 | 7,34,0.0000 399 | 7,35,0.0324 400 | 7,36,0.1625 401 | 7,37,0.1250 402 | 7,38,0.0000 403 | 7,39,0.0000 404 | 7,40,0.0000 405 | 7,41,0.0000 406 | 7,42,0.0043 407 | 7,43,0.0000 408 | 7,44,0.0060 409 | 7,45,0.0133 410 | 7,46,0.0154 411 | 7,47,0.0083 412 | 7,48,0.0000 413 | 7,49,0.0000 414 | 7,50,0.0000 415 | 7,51,0.0000 416 | 7,52,0.0000 417 | 7,53,0.0000 418 | 7,54,0.0000 419 | 7,55,0.0045 420 | 7,56,0.0087 421 | 8,9,0.3109 422 | 8,10,0.2747 423 | 8,11,0.0044 424 | 8,12,0.0000 425 | 8,13,0.0266 426 | 8,14,0.0616 427 | 8,15,0.0000 428 | 8,16,0.0000 429 | 8,17,0.0000 430 | 
8,18,0.0333 431 | 8,19,0.1629 432 | 8,20,0.1319 433 | 8,21,0.2138 434 | 8,22,0.0046 435 | 8,23,0.0104 436 | 8,24,0.0474 437 | 8,25,0.0690 438 | 8,26,0.1222 439 | 8,27,0.0048 440 | 8,28,0.0000 441 | 8,29,0.0629 442 | 8,30,0.0968 443 | 8,31,0.2321 444 | 8,32,0.0248 445 | 8,33,0.0000 446 | 8,34,0.0000 447 | 8,35,0.0000 448 | 8,36,0.0000 449 | 8,37,0.0000 450 | 8,38,0.0000 451 | 8,39,0.0000 452 | 8,40,0.0000 453 | 8,41,0.0000 454 | 8,42,0.0000 455 | 8,43,0.0000 456 | 8,44,0.0122 457 | 8,45,0.0041 458 | 8,46,0.0000 459 | 8,47,0.0027 460 | 8,48,0.0125 461 | 8,49,0.0000 462 | 8,50,0.0000 463 | 8,51,0.0000 464 | 8,52,0.0000 465 | 8,53,0.0000 466 | 8,54,0.0000 467 | 8,55,0.0000 468 | 8,56,0.0000 469 | 9,10,0.6309 470 | 9,11,0.0032 471 | 9,12,0.0000 472 | 9,13,0.0802 473 | 9,14,0.2758 474 | 9,15,0.0000 475 | 9,16,0.0000 476 | 9,17,0.0000 477 | 9,18,0.0175 478 | 9,19,0.1367 479 | 9,20,0.2621 480 | 9,21,0.2182 481 | 9,22,0.0072 482 | 9,23,0.0908 483 | 9,24,0.2001 484 | 9,25,0.2843 485 | 9,26,0.4017 486 | 9,27,0.0224 487 | 9,28,0.0000 488 | 9,29,0.1702 489 | 9,30,0.0656 490 | 9,31,0.2148 491 | 9,32,0.0670 492 | 9,33,0.0000 493 | 9,34,0.0000 494 | 9,35,0.0000 495 | 9,36,0.0000 496 | 9,37,0.0000 497 | 9,38,0.0000 498 | 9,39,0.0000 499 | 9,40,0.0000 500 | 9,41,0.0000 501 | 9,42,0.0000 502 | 9,43,0.0000 503 | 9,44,0.0000 504 | 9,45,0.0000 505 | 9,46,0.0000 506 | 9,47,0.0000 507 | 9,48,0.0000 508 | 9,49,0.0000 509 | 9,50,0.0000 510 | 9,51,0.0000 511 | 9,52,0.0000 512 | 9,53,0.0000 513 | 9,54,0.0000 514 | 9,55,0.0000 515 | 9,56,0.0000 516 | 10,11,0.0437 517 | 10,12,0.0000 518 | 10,13,0.1708 519 | 10,14,0.2425 520 | 10,15,0.0000 521 | 10,16,0.0000 522 | 10,17,0.0011 523 | 10,18,0.0528 524 | 10,19,0.1114 525 | 10,20,0.1732 526 | 10,21,0.1294 527 | 10,22,0.0364 528 | 10,23,0.1074 529 | 10,24,0.1715 530 | 10,25,0.2390 531 | 10,26,0.3404 532 | 10,27,0.0546 533 | 10,28,0.0000 534 | 10,29,0.2253 535 | 10,30,0.0062 536 | 10,31,0.0725 537 | 10,32,0.0926 538 | 10,33,0.0000 539 | 10,34,0.0000 540 | 10,35,0.0000 541 | 10,36,0.0000 542 | 10,37,0.0000 543 | 10,38,0.0000 544 | 10,39,0.0000 545 | 10,40,0.0000 546 | 10,41,0.0000 547 | 10,42,0.0000 548 | 10,43,0.0000 549 | 10,44,0.0000 550 | 10,45,0.0000 551 | 10,46,0.0000 552 | 10,47,0.0000 553 | 10,48,0.0000 554 | 10,49,0.0000 555 | 10,50,0.0000 556 | 10,51,0.0000 557 | 10,52,0.0000 558 | 10,53,0.0000 559 | 10,54,0.0000 560 | 10,55,0.0000 561 | 10,56,0.0000 562 | 11,12,0.0704 563 | 11,13,0.3811 564 | 11,14,0.0000 565 | 11,15,0.0000 566 | 11,16,0.0223 567 | 11,17,0.2971 568 | 11,18,0.2596 569 | 11,19,0.0118 570 | 11,20,0.0000 571 | 11,21,0.0109 572 | 11,22,0.3533 573 | 11,23,0.0023 574 | 11,24,0.0000 575 | 11,25,0.0009 576 | 11,26,0.0056 577 | 11,27,0.2038 578 | 11,28,0.2997 579 | 11,29,0.0570 580 | 11,30,0.0049 581 | 11,31,0.0055 582 | 11,32,0.2270 583 | 11,33,0.0266 584 | 11,34,0.0209 585 | 11,35,0.0000 586 | 11,36,0.0000 587 | 11,37,0.0000 588 | 11,38,0.0000 589 | 11,39,0.0000 590 | 11,40,0.0000 591 | 11,41,0.0000 592 | 11,42,0.0000 593 | 11,43,0.0000 594 | 11,44,0.0000 595 | 11,45,0.0000 596 | 11,46,0.0000 597 | 11,47,0.0000 598 | 11,48,0.0000 599 | 11,49,0.0000 600 | 11,50,0.0000 601 | 11,51,0.0000 602 | 11,52,0.0000 603 | 11,53,0.0000 604 | 11,54,0.0000 605 | 11,55,0.0000 606 | 11,56,0.0000 607 | 12,13,0.0217 608 | 12,14,0.0000 609 | 12,15,0.1130 610 | 12,16,0.3225 611 | 12,17,0.6196 612 | 12,18,0.0000 613 | 12,19,0.0000 614 | 12,20,0.0000 615 | 12,21,0.0000 616 | 12,22,0.0146 617 | 12,23,0.1270 618 | 12,24,0.0000 619 | 12,25,0.0000 620 | 12,26,0.0078 621 | 
12,27,0.1557 622 | 12,28,0.1721 623 | 12,29,0.1195 624 | 12,30,0.0000 625 | 12,31,0.0000 626 | 12,32,0.0791 627 | 12,33,0.0000 628 | 12,34,0.0000 629 | 12,35,0.0000 630 | 12,36,0.0000 631 | 12,37,0.0000 632 | 12,38,0.0000 633 | 12,39,0.0000 634 | 12,40,0.0000 635 | 12,41,0.0000 636 | 12,42,0.0000 637 | 12,43,0.0000 638 | 12,44,0.0000 639 | 12,45,0.0000 640 | 12,46,0.0000 641 | 12,47,0.0000 642 | 12,48,0.0000 643 | 12,49,0.0000 644 | 12,50,0.0000 645 | 12,51,0.0000 646 | 12,52,0.0000 647 | 12,53,0.0000 648 | 12,54,0.0000 649 | 12,55,0.0000 650 | 12,56,0.0000 651 | 13,14,0.0520 652 | 13,15,0.0004 653 | 13,16,0.0055 654 | 13,17,0.3341 655 | 13,18,0.4207 656 | 13,19,0.0609 657 | 13,20,0.0000 658 | 13,21,0.0547 659 | 13,22,0.5138 660 | 13,23,0.0869 661 | 13,24,0.0384 662 | 13,25,0.1037 663 | 13,26,0.1596 664 | 13,27,0.4282 665 | 13,28,0.3401 666 | 13,29,0.3101 667 | 13,30,0.0097 668 | 13,31,0.0065 669 | 13,32,0.4575 670 | 13,33,0.0589 671 | 13,34,0.0068 672 | 13,35,0.0018 673 | 13,36,0.0000 674 | 13,37,0.0000 675 | 13,38,0.0000 676 | 13,39,0.0000 677 | 13,40,0.0000 678 | 13,41,0.0000 679 | 13,42,0.0000 680 | 13,43,0.0000 681 | 13,44,0.0000 682 | 13,45,0.0000 683 | 13,46,0.0000 684 | 13,47,0.0000 685 | 13,48,0.0000 686 | 13,49,0.0000 687 | 13,50,0.0000 688 | 13,51,0.0000 689 | 13,52,0.0000 690 | 13,53,0.0000 691 | 13,54,0.0000 692 | 13,55,0.0000 693 | 13,56,0.0000 694 | 14,15,0.2828 695 | 14,16,0.0890 696 | 14,17,0.0000 697 | 14,18,0.0000 698 | 14,19,0.0000 699 | 14,20,0.0004 700 | 14,21,0.0000 701 | 14,22,0.0000 702 | 14,23,0.3026 703 | 14,24,0.5108 704 | 14,25,0.5678 705 | 14,26,0.4203 706 | 14,27,0.0573 707 | 14,28,0.0000 708 | 14,29,0.1185 709 | 14,30,0.0000 710 | 14,31,0.0000 711 | 14,32,0.0407 712 | 14,33,0.0000 713 | 14,34,0.0000 714 | 14,35,0.0000 715 | 14,36,0.0000 716 | 14,37,0.0000 717 | 14,38,0.0000 718 | 14,39,0.0000 719 | 14,40,0.0000 720 | 14,41,0.0000 721 | 14,42,0.0000 722 | 14,43,0.0000 723 | 14,44,0.0000 724 | 14,45,0.0000 725 | 14,46,0.0000 726 | 14,47,0.0000 727 | 14,48,0.0000 728 | 14,49,0.0000 729 | 14,50,0.0000 730 | 14,51,0.0000 731 | 14,52,0.0000 732 | 14,53,0.0000 733 | 14,54,0.0000 734 | 14,55,0.0000 735 | 14,56,0.0000 736 | 15,16,0.5006 737 | 15,17,0.0740 738 | 15,18,0.0000 739 | 15,19,0.0000 740 | 15,20,0.0000 741 | 15,21,0.0000 742 | 15,22,0.0000 743 | 15,23,0.3247 744 | 15,24,0.1923 745 | 15,25,0.2209 746 | 15,26,0.0129 747 | 15,27,0.0302 748 | 15,28,0.0000 749 | 15,29,0.0060 750 | 15,30,0.0000 751 | 15,31,0.0000 752 | 15,32,0.0000 753 | 15,33,0.0000 754 | 15,34,0.0000 755 | 15,35,0.0000 756 | 15,36,0.0000 757 | 15,37,0.0000 758 | 15,38,0.0000 759 | 15,39,0.0000 760 | 15,40,0.0000 761 | 15,41,0.0000 762 | 15,42,0.0000 763 | 15,43,0.0000 764 | 15,44,0.0000 765 | 15,45,0.0000 766 | 15,46,0.0000 767 | 15,47,0.0000 768 | 15,48,0.0000 769 | 15,49,0.0000 770 | 15,50,0.0000 771 | 15,51,0.0000 772 | 15,52,0.0000 773 | 15,53,0.0000 774 | 15,54,0.0000 775 | 15,55,0.0000 776 | 15,56,0.0000 777 | 16,17,0.3016 778 | 16,18,0.0000 779 | 16,19,0.0000 780 | 16,20,0.0000 781 | 16,21,0.0000 782 | 16,22,0.0000 783 | 16,23,0.3288 784 | 16,24,0.0227 785 | 16,25,0.0827 786 | 16,26,0.0036 787 | 16,27,0.0635 788 | 16,28,0.0439 789 | 16,29,0.0518 790 | 16,30,0.0000 791 | 16,31,0.0000 792 | 16,32,0.0049 793 | 16,33,0.0000 794 | 16,34,0.0000 795 | 16,35,0.0000 796 | 16,36,0.0000 797 | 16,37,0.0000 798 | 16,38,0.0000 799 | 16,39,0.0000 800 | 16,40,0.0000 801 | 16,41,0.0000 802 | 16,42,0.0000 803 | 16,43,0.0000 804 | 16,44,0.0000 805 | 16,45,0.0000 806 | 16,46,0.0000 807 | 16,47,0.0000 808 | 
16,48,0.0000 809 | 16,49,0.0000 810 | 16,50,0.0000 811 | 16,51,0.0000 812 | 16,52,0.0000 813 | 16,53,0.0000 814 | 16,54,0.0000 815 | 16,55,0.0000 816 | 16,56,0.0000 817 | 17,18,0.1271 818 | 17,19,0.0000 819 | 17,20,0.0000 820 | 17,21,0.0000 821 | 17,22,0.2039 822 | 17,23,0.0993 823 | 17,24,0.0000 824 | 17,25,0.0011 825 | 17,26,0.0088 826 | 17,27,0.2888 827 | 17,28,0.3155 828 | 17,29,0.1461 829 | 17,30,0.0000 830 | 17,31,0.0000 831 | 17,32,0.1997 832 | 17,33,0.0044 833 | 17,34,0.0000 834 | 17,35,0.0000 835 | 17,36,0.0000 836 | 17,37,0.0000 837 | 17,38,0.0000 838 | 17,39,0.0000 839 | 17,40,0.0000 840 | 17,41,0.0000 841 | 17,42,0.0000 842 | 17,43,0.0000 843 | 17,44,0.0000 844 | 17,45,0.0000 845 | 17,46,0.0000 846 | 17,47,0.0000 847 | 17,48,0.0000 848 | 17,49,0.0000 849 | 17,50,0.0000 850 | 17,51,0.0000 851 | 17,52,0.0000 852 | 17,53,0.0000 853 | 17,54,0.0000 854 | 17,55,0.0000 855 | 17,56,0.0000 856 | 18,19,0.1677 857 | 18,20,0.0000 858 | 18,21,0.0468 859 | 18,22,0.5037 860 | 18,23,0.0000 861 | 18,24,0.0000 862 | 18,25,0.0000 863 | 18,26,0.0012 864 | 18,27,0.1330 865 | 18,28,0.1792 866 | 18,29,0.0570 867 | 18,30,0.0462 868 | 18,31,0.0399 869 | 18,32,0.2109 870 | 18,33,0.2616 871 | 18,34,0.0878 872 | 18,35,0.2230 873 | 18,36,0.1139 874 | 18,37,0.0941 875 | 18,38,0.0152 876 | 18,39,0.0188 877 | 18,40,0.0069 878 | 18,41,0.0002 879 | 18,42,0.0000 880 | 18,43,0.0000 881 | 18,44,0.0000 882 | 18,45,0.0000 883 | 18,46,0.0071 884 | 18,47,0.0000 885 | 18,48,0.0000 886 | 18,49,0.0000 887 | 18,50,0.0000 888 | 18,51,0.0000 889 | 18,52,0.0000 890 | 18,53,0.0000 891 | 18,54,0.0000 892 | 18,55,0.0000 893 | 18,56,0.0409 894 | 19,20,0.1833 895 | 19,21,0.5141 896 | 19,22,0.0999 897 | 19,23,0.0000 898 | 19,24,0.0000 899 | 19,25,0.0000 900 | 19,26,0.0009 901 | 19,27,0.0000 902 | 19,28,0.0000 903 | 19,29,0.0621 904 | 19,30,0.2916 905 | 19,31,0.4179 906 | 19,32,0.0185 907 | 19,33,0.0468 908 | 19,34,0.0118 909 | 19,35,0.1044 910 | 19,36,0.1373 911 | 19,37,0.1379 912 | 19,38,0.0001 913 | 19,39,0.0000 914 | 19,40,0.0007 915 | 19,41,0.0000 916 | 19,42,0.0000 917 | 19,43,0.0000 918 | 19,44,0.0024 919 | 19,45,0.0048 920 | 19,46,0.0046 921 | 19,47,0.0037 922 | 19,48,0.0000 923 | 19,49,0.0000 924 | 19,50,0.0000 925 | 19,51,0.0000 926 | 19,52,0.0000 927 | 19,53,0.0000 928 | 19,54,0.0000 929 | 19,55,0.0000 930 | 19,56,0.0360 931 | 20,21,0.7000 932 | 20,22,0.0000 933 | 20,23,0.0000 934 | 20,24,0.0000 935 | 20,25,0.0000 936 | 20,26,0.0765 937 | 20,27,0.0000 938 | 20,28,0.0000 939 | 20,29,0.0116 940 | 20,30,0.4184 941 | 20,31,0.6360 942 | 20,32,0.0000 943 | 20,33,0.0000 944 | 20,34,0.0000 945 | 20,35,0.0000 946 | 20,36,0.0000 947 | 20,37,0.0000 948 | 20,38,0.0000 949 | 20,39,0.0000 950 | 20,40,0.0000 951 | 20,41,0.0000 952 | 20,42,0.0000 953 | 20,43,0.0000 954 | 20,44,0.0082 955 | 20,45,0.0154 956 | 20,46,0.0000 957 | 20,47,0.0117 958 | 20,48,0.0000 959 | 20,49,0.0000 960 | 20,50,0.0000 961 | 20,51,0.0000 962 | 20,52,0.0000 963 | 20,53,0.0000 964 | 20,54,0.0000 965 | 20,55,0.0000 966 | 20,56,0.0000 967 | 21,22,0.0146 968 | 21,23,0.0000 969 | 21,24,0.0000 970 | 21,25,0.0000 971 | 21,26,0.0000 972 | 21,27,0.0000 973 | 21,28,0.0000 974 | 21,29,0.0915 975 | 21,30,0.3808 976 | 21,31,0.6851 977 | 21,32,0.0313 978 | 21,33,0.0000 979 | 21,34,0.0000 980 | 21,35,0.0000 981 | 21,36,0.0000 982 | 21,37,0.0000 983 | 21,38,0.0000 984 | 21,39,0.0000 985 | 21,40,0.0000 986 | 21,41,0.0000 987 | 21,42,0.0000 988 | 21,43,0.0000 989 | 21,44,0.0030 990 | 21,45,0.0098 991 | 21,46,0.0000 992 | 21,47,0.0078 993 | 21,48,0.0000 994 | 21,49,0.0000 995 | 
21,50,0.0000 996 | 21,51,0.0000 997 | 21,52,0.0000 998 | 21,53,0.0000 999 | 21,54,0.0000 1000 | 21,55,0.0000 1001 | 21,56,0.0000 1002 | 22,23,0.0000 1003 | 22,24,0.0000 1004 | 22,25,0.0031 1005 | 22,26,0.0077 1006 | 22,27,0.2417 1007 | 22,28,0.3661 1008 | 22,29,0.0967 1009 | 22,30,0.0000 1010 | 22,31,0.0003 1011 | 22,32,0.3371 1012 | 22,33,0.2856 1013 | 22,34,0.0447 1014 | 22,35,0.2005 1015 | 22,36,0.0235 1016 | 22,37,0.0116 1017 | 22,38,0.0000 1018 | 22,39,0.0000 1019 | 22,40,0.0000 1020 | 22,41,0.0000 1021 | 22,42,0.0000 1022 | 22,43,0.0000 1023 | 22,44,0.0000 1024 | 22,45,0.0000 1025 | 22,46,0.0000 1026 | 22,47,0.0000 1027 | 22,48,0.0000 1028 | 22,49,0.0000 1029 | 22,50,0.0000 1030 | 22,51,0.0000 1031 | 22,52,0.0000 1032 | 22,53,0.0000 1033 | 22,54,0.0000 1034 | 22,55,0.0000 1035 | 22,56,0.0000 1036 | 23,24,0.2479 1037 | 23,25,0.2609 1038 | 23,26,0.1521 1039 | 23,27,0.0882 1040 | 23,28,0.0188 1041 | 23,29,0.1160 1042 | 23,30,0.0000 1043 | 23,31,0.0000 1044 | 23,32,0.0439 1045 | 23,33,0.0000 1046 | 23,34,0.0000 1047 | 23,35,0.0000 1048 | 23,36,0.0000 1049 | 23,37,0.0000 1050 | 23,38,0.0000 1051 | 23,39,0.0000 1052 | 23,40,0.0000 1053 | 23,41,0.0000 1054 | 23,42,0.0000 1055 | 23,43,0.0000 1056 | 23,44,0.0000 1057 | 23,45,0.0000 1058 | 23,46,0.0000 1059 | 23,47,0.0000 1060 | 23,48,0.0000 1061 | 23,49,0.0000 1062 | 23,50,0.0000 1063 | 23,51,0.0000 1064 | 23,52,0.0000 1065 | 23,53,0.0000 1066 | 23,54,0.0000 1067 | 23,55,0.0000 1068 | 23,56,0.0000 1069 | 24,25,0.6146 1070 | 24,26,0.3155 1071 | 24,27,0.0348 1072 | 24,28,0.0000 1073 | 24,29,0.0814 1074 | 24,30,0.0000 1075 | 24,31,0.0000 1076 | 24,32,0.0264 1077 | 24,33,0.0000 1078 | 24,34,0.0000 1079 | 24,35,0.0000 1080 | 24,36,0.0000 1081 | 24,37,0.0000 1082 | 24,38,0.0000 1083 | 24,39,0.0000 1084 | 24,40,0.0000 1085 | 24,41,0.0000 1086 | 24,42,0.0000 1087 | 24,43,0.0000 1088 | 24,44,0.0000 1089 | 24,45,0.0000 1090 | 24,46,0.0000 1091 | 24,47,0.0000 1092 | 24,48,0.0000 1093 | 24,49,0.0000 1094 | 24,50,0.0000 1095 | 24,51,0.0000 1096 | 24,52,0.0000 1097 | 24,53,0.0000 1098 | 24,54,0.0000 1099 | 24,55,0.0000 1100 | 24,56,0.0000 1101 | 25,26,0.6137 1102 | 25,27,0.1143 1103 | 25,28,0.0000 1104 | 25,29,0.2310 1105 | 25,30,0.0000 1106 | 25,31,0.0000 1107 | 25,32,0.1041 1108 | 25,33,0.0000 1109 | 25,34,0.0000 1110 | 25,35,0.0000 1111 | 25,36,0.0000 1112 | 25,37,0.0000 1113 | 25,38,0.0000 1114 | 25,39,0.0000 1115 | 25,40,0.0000 1116 | 25,41,0.0000 1117 | 25,42,0.0000 1118 | 25,43,0.0000 1119 | 25,44,0.0000 1120 | 25,45,0.0000 1121 | 25,46,0.0000 1122 | 25,47,0.0000 1123 | 25,48,0.0000 1124 | 25,49,0.0000 1125 | 25,50,0.0000 1126 | 25,51,0.0000 1127 | 25,52,0.0000 1128 | 25,53,0.0000 1129 | 25,54,0.0000 1130 | 25,55,0.0000 1131 | 25,56,0.0000 1132 | 26,27,0.2295 1133 | 26,28,0.0424 1134 | 26,29,0.4471 1135 | 26,30,0.0000 1136 | 26,31,0.0063 1137 | 26,32,0.2472 1138 | 26,33,0.0000 1139 | 26,34,0.0000 1140 | 26,35,0.0000 1141 | 26,36,0.0000 1142 | 26,37,0.0000 1143 | 26,38,0.0000 1144 | 26,39,0.0000 1145 | 26,40,0.0000 1146 | 26,41,0.0000 1147 | 26,42,0.0000 1148 | 26,43,0.0000 1149 | 26,44,0.0000 1150 | 26,45,0.0000 1151 | 26,46,0.0000 1152 | 26,47,0.0000 1153 | 26,48,0.0000 1154 | 26,49,0.0000 1155 | 26,50,0.0000 1156 | 26,51,0.0000 1157 | 26,52,0.0000 1158 | 26,53,0.0000 1159 | 26,54,0.0000 1160 | 26,55,0.0000 1161 | 26,56,0.0000 1162 | 27,28,0.5788 1163 | 27,29,0.5745 1164 | 27,30,0.0000 1165 | 27,31,0.0000 1166 | 27,32,0.5753 1167 | 27,33,0.0317 1168 | 27,34,0.0000 1169 | 27,35,0.0000 1170 | 27,36,0.0000 1171 | 27,37,0.0000 1172 | 27,38,0.0000 1173 
| 27,39,0.0000 1174 | 27,40,0.0000 1175 | 27,41,0.0000 1176 | 27,42,0.0000 1177 | 27,43,0.0000 1178 | 27,44,0.0000 1179 | 27,45,0.0000 1180 | 27,46,0.0000 1181 | 27,47,0.0000 1182 | 27,48,0.0000 1183 | 27,49,0.0000 1184 | 27,50,0.0000 1185 | 27,51,0.0000 1186 | 27,52,0.0000 1187 | 27,53,0.0000 1188 | 27,54,0.0000 1189 | 27,55,0.0000 1190 | 27,56,0.0000 1191 | 28,29,0.3295 1192 | 28,30,0.0000 1193 | 28,31,0.0000 1194 | 28,32,0.4979 1195 | 28,33,0.1063 1196 | 28,34,0.0000 1197 | 28,35,0.0293 1198 | 28,36,0.0000 1199 | 28,37,0.0000 1200 | 28,38,0.0000 1201 | 28,39,0.0000 1202 | 28,40,0.0000 1203 | 28,41,0.0000 1204 | 28,42,0.0000 1205 | 28,43,0.0000 1206 | 28,44,0.0000 1207 | 28,45,0.0000 1208 | 28,46,0.0000 1209 | 28,47,0.0000 1210 | 28,48,0.0000 1211 | 28,49,0.0000 1212 | 28,50,0.0000 1213 | 28,51,0.0000 1214 | 28,52,0.0000 1215 | 28,53,0.0000 1216 | 28,54,0.0000 1217 | 28,55,0.0000 1218 | 28,56,0.0000 1219 | 29,30,0.0331 1220 | 29,31,0.0575 1221 | 29,32,0.5734 1222 | 29,33,0.0000 1223 | 29,34,0.0000 1224 | 29,35,0.0000 1225 | 29,36,0.0000 1226 | 29,37,0.0000 1227 | 29,38,0.0000 1228 | 29,39,0.0000 1229 | 29,40,0.0000 1230 | 29,41,0.0000 1231 | 29,42,0.0000 1232 | 29,43,0.0000 1233 | 29,44,0.0000 1234 | 29,45,0.0000 1235 | 29,46,0.0000 1236 | 29,47,0.0000 1237 | 29,48,0.0000 1238 | 29,49,0.0000 1239 | 29,50,0.0000 1240 | 29,51,0.0000 1241 | 29,52,0.0000 1242 | 29,53,0.0000 1243 | 29,54,0.0000 1244 | 29,55,0.0000 1245 | 29,56,0.0000 1246 | 30,31,0.7857 1247 | 30,32,0.0395 1248 | 30,33,0.0000 1249 | 30,34,0.0000 1250 | 30,35,0.0000 1251 | 30,36,0.0352 1252 | 30,37,0.0258 1253 | 30,38,0.0000 1254 | 30,39,0.0000 1255 | 30,40,0.0000 1256 | 30,41,0.0000 1257 | 30,42,0.0058 1258 | 30,43,0.0000 1259 | 30,44,0.0072 1260 | 30,45,0.0207 1261 | 30,46,0.0000 1262 | 30,47,0.0152 1263 | 30,48,0.0000 1264 | 30,49,0.0000 1265 | 30,50,0.0000 1266 | 30,51,0.0000 1267 | 30,52,0.0000 1268 | 30,53,0.0000 1269 | 30,54,0.0000 1270 | 30,55,0.0005 1271 | 30,56,0.0000 1272 | 31,32,0.0312 1273 | 31,33,0.0000 1274 | 31,34,0.0000 1275 | 31,35,0.0000 1276 | 31,36,0.0000 1277 | 31,37,0.0000 1278 | 31,38,0.0000 1279 | 31,39,0.0000 1280 | 31,40,0.0000 1281 | 31,41,0.0000 1282 | 31,42,0.0000 1283 | 31,43,0.0000 1284 | 31,44,0.0055 1285 | 31,45,0.0216 1286 | 31,46,0.0000 1287 | 31,47,0.0159 1288 | 31,48,0.0000 1289 | 31,49,0.0000 1290 | 31,50,0.0000 1291 | 31,51,0.0000 1292 | 31,52,0.0000 1293 | 31,53,0.0000 1294 | 31,54,0.0000 1295 | 31,55,0.0000 1296 | 31,56,0.0000 1297 | 32,33,0.1656 1298 | 32,34,0.0000 1299 | 32,35,0.0589 1300 | 32,36,0.0000 1301 | 32,37,0.0000 1302 | 32,38,0.0000 1303 | 32,39,0.0000 1304 | 32,40,0.0000 1305 | 32,41,0.0000 1306 | 32,42,0.0000 1307 | 32,43,0.0000 1308 | 32,44,0.0000 1309 | 32,45,0.0000 1310 | 32,46,0.0000 1311 | 32,47,0.0000 1312 | 32,48,0.0000 1313 | 32,49,0.0000 1314 | 32,50,0.0000 1315 | 32,51,0.0000 1316 | 32,52,0.0000 1317 | 32,53,0.0000 1318 | 32,54,0.0000 1319 | 32,55,0.0000 1320 | 32,56,0.0000 1321 | 33,34,0.0953 1322 | 33,35,0.6766 1323 | 33,36,0.0337 1324 | 33,37,0.0172 1325 | 33,38,0.0000 1326 | 33,39,0.0000 1327 | 33,40,0.0000 1328 | 33,41,0.0000 1329 | 33,42,0.0000 1330 | 33,43,0.0000 1331 | 33,44,0.0000 1332 | 33,45,0.0000 1333 | 33,46,0.0000 1334 | 33,47,0.0000 1335 | 33,48,0.0000 1336 | 33,49,0.0000 1337 | 33,50,0.0000 1338 | 33,51,0.0000 1339 | 33,52,0.0000 1340 | 33,53,0.0000 1341 | 33,54,0.0000 1342 | 33,55,0.0000 1343 | 33,56,0.0000 1344 | 34,35,0.0715 1345 | 34,36,0.0005 1346 | 34,37,0.0074 1347 | 34,38,0.0000 1348 | 34,39,0.0000 1349 | 34,40,0.0000 1350 | 34,41,0.0000 
1351 | 34,42,0.0000 1352 | 34,43,0.0000 1353 | 34,44,0.0000 1354 | 34,45,0.0000 1355 | 34,46,0.0000 1356 | 34,47,0.0000 1357 | 34,48,0.0000 1358 | 34,49,0.0000 1359 | 34,50,0.0000 1360 | 34,51,0.0000 1361 | 34,52,0.0000 1362 | 34,53,0.0000 1363 | 34,54,0.0000 1364 | 34,55,0.0000 1365 | 34,56,0.0000 1366 | 35,36,0.1933 1367 | 35,37,0.1410 1368 | 35,38,0.0000 1369 | 35,39,0.0105 1370 | 35,40,0.0000 1371 | 35,41,0.0000 1372 | 35,42,0.0000 1373 | 35,43,0.0000 1374 | 35,44,0.0000 1375 | 35,45,0.0000 1376 | 35,46,0.0075 1377 | 35,47,0.0000 1378 | 35,48,0.0000 1379 | 35,49,0.0000 1380 | 35,50,0.0000 1381 | 35,51,0.0000 1382 | 35,52,0.0000 1383 | 35,53,0.0000 1384 | 35,54,0.0000 1385 | 35,55,0.0000 1386 | 35,56,0.0099 1387 | 36,37,0.9268 1388 | 36,38,0.2252 1389 | 36,39,0.1381 1390 | 36,40,0.0463 1391 | 36,41,0.3462 1392 | 36,42,0.1149 1393 | 36,43,0.0000 1394 | 36,44,0.0000 1395 | 36,45,0.0000 1396 | 36,46,0.0208 1397 | 36,47,0.0000 1398 | 36,48,0.0000 1399 | 36,49,0.0000 1400 | 36,50,0.0151 1401 | 36,51,0.0000 1402 | 36,52,0.0000 1403 | 36,53,0.1036 1404 | 36,54,0.0758 1405 | 36,55,0.2942 1406 | 36,56,0.1529 1407 | 37,38,0.2001 1408 | 37,39,0.1456 1409 | 37,40,0.0744 1410 | 37,41,0.2864 1411 | 37,42,0.1023 1412 | 37,43,0.0000 1413 | 37,44,0.0000 1414 | 37,45,0.0000 1415 | 37,46,0.0129 1416 | 37,47,0.0000 1417 | 37,48,0.0000 1418 | 37,49,0.0083 1419 | 37,50,0.0156 1420 | 37,51,0.0063 1421 | 37,52,0.0000 1422 | 37,53,0.0765 1423 | 37,54,0.0653 1424 | 37,55,0.2092 1425 | 37,56,0.1533 1426 | 38,39,0.7336 1427 | 38,40,0.4500 1428 | 38,41,0.7052 1429 | 38,42,0.0000 1430 | 38,43,0.0000 1431 | 38,44,0.0000 1432 | 38,45,0.0000 1433 | 38,46,0.0000 1434 | 38,47,0.0000 1435 | 38,48,0.0000 1436 | 38,49,0.0000 1437 | 38,50,0.0000 1438 | 38,51,0.0000 1439 | 38,52,0.0000 1440 | 38,53,0.0532 1441 | 38,54,0.0656 1442 | 38,55,0.2010 1443 | 38,56,0.6778 1444 | 39,40,0.7405 1445 | 39,41,0.3294 1446 | 39,42,0.0000 1447 | 39,43,0.0000 1448 | 39,44,0.0000 1449 | 39,45,0.0000 1450 | 39,46,0.0000 1451 | 39,47,0.0000 1452 | 39,48,0.0000 1453 | 39,49,0.0000 1454 | 39,50,0.0000 1455 | 39,51,0.0000 1456 | 39,52,0.0000 1457 | 39,53,0.0007 1458 | 39,54,0.0123 1459 | 39,55,0.0585 1460 | 39,56,0.8692 1461 | 40,41,0.2377 1462 | 40,42,0.0000 1463 | 40,43,0.0000 1464 | 40,44,0.0000 1465 | 40,45,0.0000 1466 | 40,46,0.0000 1467 | 40,47,0.0000 1468 | 40,48,0.0000 1469 | 40,49,0.0000 1470 | 40,50,0.0000 1471 | 40,51,0.0000 1472 | 40,52,0.0000 1473 | 40,53,0.0049 1474 | 40,54,0.0060 1475 | 40,55,0.0201 1476 | 40,56,0.7968 1477 | 41,42,0.1106 1478 | 41,43,0.0000 1479 | 41,44,0.0000 1480 | 41,45,0.0000 1481 | 41,46,0.0000 1482 | 41,47,0.0000 1483 | 41,48,0.0000 1484 | 41,49,0.0000 1485 | 41,50,0.0105 1486 | 41,51,0.0000 1487 | 41,52,0.0000 1488 | 41,53,0.1075 1489 | 41,54,0.0839 1490 | 41,55,0.2627 1491 | 41,56,0.4159 1492 | 42,43,0.6018 1493 | 42,44,0.0251 1494 | 42,45,0.0266 1495 | 42,46,0.0028 1496 | 42,47,0.0189 1497 | 42,48,0.0053 1498 | 42,49,0.0642 1499 | 42,50,0.0132 1500 | 42,51,0.0060 1501 | 42,52,0.0000 1502 | 42,53,0.1699 1503 | 42,54,0.0213 1504 | 42,55,0.2864 1505 | 42,56,0.0234 1506 | 43,44,0.3421 1507 | 43,45,0.1134 1508 | 43,46,0.0035 1509 | 43,47,0.0914 1510 | 43,48,0.1894 1511 | 43,49,0.1920 1512 | 43,50,0.0121 1513 | 43,51,0.0706 1514 | 43,52,0.0420 1515 | 43,53,0.1082 1516 | 43,54,0.0064 1517 | 43,55,0.1888 1518 | 43,56,0.0004 1519 | 44,45,0.1405 1520 | 44,46,0.0000 1521 | 44,47,0.1365 1522 | 44,48,0.6500 1523 | 44,49,0.0928 1524 | 44,50,0.0000 1525 | 44,51,0.0360 1526 | 44,52,0.1109 1527 | 44,53,0.0000 1528 | 
44,54,0.0000 1529 | 44,55,0.0013 1530 | 44,56,0.0000 1531 | 45,46,0.1742 1532 | 45,47,0.6005 1533 | 45,48,0.0854 1534 | 45,49,0.0000 1535 | 45,50,0.0000 1536 | 45,51,0.0000 1537 | 45,52,0.0000 1538 | 45,53,0.0000 1539 | 45,54,0.0000 1540 | 45,55,0.0385 1541 | 45,56,0.0734 1542 | 46,47,0.4487 1543 | 46,48,0.0000 1544 | 46,49,0.0000 1545 | 46,50,0.0000 1546 | 46,51,0.0000 1547 | 46,52,0.0000 1548 | 46,53,0.0000 1549 | 46,54,0.0000 1550 | 46,55,0.0658 1551 | 46,56,0.5766 1552 | 47,48,0.1739 1553 | 47,49,0.0000 1554 | 47,50,0.0000 1555 | 47,51,0.0000 1556 | 47,52,0.0000 1557 | 47,53,0.0000 1558 | 47,54,0.0000 1559 | 47,55,0.0173 1560 | 47,56,0.1386 1561 | 48,49,0.1001 1562 | 48,50,0.0000 1563 | 48,51,0.0415 1564 | 48,52,0.0617 1565 | 48,53,0.0000 1566 | 48,54,0.0000 1567 | 48,55,0.0000 1568 | 48,56,0.0000 1569 | 49,50,0.4584 1570 | 49,51,0.7472 1571 | 49,52,0.3620 1572 | 49,53,0.4390 1573 | 49,54,0.1820 1574 | 49,55,0.3128 1575 | 49,56,0.0000 1576 | 50,51,0.7048 1577 | 50,52,0.3648 1578 | 50,53,0.3808 1579 | 50,54,0.5410 1580 | 50,55,0.2180 1581 | 50,56,0.0000 1582 | 51,52,0.5193 1583 | 51,53,0.3090 1584 | 51,54,0.3374 1585 | 51,55,0.1798 1586 | 51,56,0.0000 1587 | 52,53,0.1072 1588 | 52,54,0.1001 1589 | 52,55,0.0196 1590 | 52,56,0.0000 1591 | 53,54,0.3400 1592 | 53,55,0.6997 1593 | 53,56,0.0069 1594 | 54,55,0.1975 1595 | 54,56,0.0313 1596 | 55,56,0.0771 1597 | -------------------------------------------------------------------------------- /benchmarks/3DLoMatch/sun3d-hotel_umd-maryland_hotel3/gt.log: -------------------------------------------------------------------------------- 1 | 0 11 37 2 | 0.845403263000 0.184488299000 -0.501254531000 -0.871081512000 3 | 0.024190222100 0.924268732000 0.380979024000 -0.009370982560 4 | 0.533578586000 -0.334206210000 0.776917536000 0.938034250000 5 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 6 | 0 27 37 7 | -0.099880360900 0.107629987000 0.989160283000 -3.699847660000 8 | 0.209464001000 0.974129376000 -0.084843011100 -0.269254424000 9 | -0.972700799000 0.198718435000 -0.119841896000 2.552552320000 10 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 11 | 1 11 37 12 | 0.687424464000 0.293490134000 -0.664312434000 -1.057783560000 13 | 0.177002143000 0.819422857000 0.545178102000 0.196681838000 14 | 0.704355890000 -0.492352814000 0.511341016000 0.800738200000 15 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 16 | 1 12 37 17 | 0.960556540000 0.033808444700 -0.276018756000 -1.116873040000 18 | 0.051059258000 0.954264797000 0.294571850000 0.062695909400 19 | 0.273353584000 -0.297047636000 0.914897341000 0.439388872000 20 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 21 | 1 13 37 22 | 0.980321934000 -0.065946368600 -0.186059221000 -0.938714246000 23 | 0.120942954000 0.945578266000 0.302083796000 0.059428536200 24 | 0.156012316000 -0.318643613000 0.934945573000 0.250832924000 25 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 26 | 3 5 37 27 | 0.786899934000 -0.222962581000 0.575392042000 -0.207416877000 28 | 0.242832724000 0.969095423000 0.043425343700 0.167282662000 29 | -0.567293402000 0.105553648000 0.816723997000 -0.210308429000 30 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 31 | 5 7 37 32 | 0.300628685000 -0.554189304000 0.776205921000 -0.086675535300 33 | 0.558343551000 0.762078161000 0.327854712000 0.134648212000 34 | -0.773222281000 0.334826462000 0.538530074000 -0.252140139000 35 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 36 | 8 14 37 37 | 
0.117748647000 -0.433278662000 0.893534912000 -0.209235553000 38 | 0.476566249000 0.814064577000 0.331942162000 -0.291461592000 39 | -0.871216951000 0.386743511000 0.302341474000 0.626277413000 40 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 41 | 8 16 37 42 | 0.950610966000 -0.179520622000 0.253201750000 0.676507520000 43 | 0.167604260000 0.983502913000 0.068063225900 -0.541325023000 44 | -0.261242096000 -0.022262909000 0.965015793000 1.037638530000 45 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 46 | 9 10 37 47 | 0.893077339000 -0.316428340000 0.319825053000 -0.070129513800 48 | 0.073032202300 0.803405537000 0.590936024000 0.148324383000 49 | -0.443937771000 -0.504391875000 0.740612461000 0.069353142900 50 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 51 | 9 15 37 52 | 0.982745740000 -0.184769463000 -0.008449330860 0.020193657300 53 | 0.135890443000 0.690276953000 0.710672648000 0.265155160000 54 | -0.125478546000 -0.699556997000 0.703471595000 0.827538672000 55 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 56 | 9 16 37 57 | 0.990146484000 -0.137984138000 -0.023876500800 0.382550566000 58 | 0.126773073000 0.810825172000 0.571396077000 0.263974039000 59 | -0.059484090500 -0.568792105000 0.820326296000 1.321505760000 60 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 61 | 10 11 37 62 | 0.695141522000 -0.287322221000 0.658957648000 -0.130291160000 63 | 0.373691104000 0.927499166000 0.010205571500 -0.107840833000 64 | -0.614112392000 0.239152916000 0.752111269000 0.183584131000 65 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 66 | 10 13 37 67 | 0.137529068000 -0.072126710100 0.987867633000 -0.701771146000 68 | 0.583263901000 0.811988544000 -0.021914668500 -0.056298260500 69 | -0.800554215000 0.579201211000 0.153740341000 0.105055871000 70 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 71 | 10 17 37 72 | 0.919018165000 -0.266374687000 0.290602552000 -0.388821215000 73 | 0.239327340000 0.962776783000 0.125650212000 -0.799771285000 74 | -0.313254770000 -0.045924302600 0.948557796000 1.477576670000 75 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 76 | 11 14 37 77 | 0.997738807000 0.037394764300 -0.055837884000 -0.376269301000 78 | -0.034646277700 0.998177901000 0.049404074000 0.073328536900 79 | 0.057582028800 -0.047358142000 0.997215727000 -0.123981903000 80 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 81 | 11 27 37 82 | -0.598385550000 0.220587355000 0.770243661000 -1.536266450000 83 | 0.500256170000 0.853798454000 0.144122774000 -1.301657120000 84 | -0.625840378000 0.471559045000 -0.621250869000 2.573265460000 85 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 86 | 12 27 37 87 | -0.126653412000 0.129764883000 0.983423399000 -2.618656180000 88 | 0.263046960000 0.960305940000 -0.092836538700 -0.282281827000 89 | -0.956433135000 0.246928607000 -0.155759365000 2.041682210000 90 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 91 | 13 14 37 92 | 0.763523454000 0.363491833000 -0.533767313000 -0.231895465000 93 | -0.110955598000 0.888094266000 0.446070600000 0.018700847400 94 | 0.636177241000 -0.281359986000 0.718410961000 0.243345383000 95 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 96 | 13 27 37 97 | -0.010353163100 0.203910751000 0.978935549000 -2.955711350000 98 | 0.269616491000 0.943298786000 -0.193635561000 -0.047851651300 99 | -0.962911646000 0.261932849000 -0.064743213800 2.000541510000 100 | 0.000000000000 
0.000000000000 0.000000000000 1.000000000000 101 | 14 16 37 102 | 0.419406739000 0.466962039000 -0.778488050000 -0.373166394000 103 | -0.376472867000 0.869805176000 0.318913324000 -0.428087450000 104 | 0.826054718000 0.159325899000 0.540601152000 0.832873728000 105 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 106 | 14 17 37 107 | 0.944073440000 0.157615497000 -0.289626828000 -0.790289721000 108 | -0.099993162800 0.973845011000 0.204029721000 -0.406338900000 109 | 0.314210187000 -0.163658341000 0.935140966000 0.948692905000 110 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 111 | 16 18 37 112 | 0.850318420000 -0.006851779670 0.526222024000 0.290033208000 113 | 0.159786267000 0.956074000000 -0.245748924000 -0.426238032000 114 | -0.501423986000 0.293047810000 0.814062106000 0.678094603000 115 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 116 | 16 19 37 117 | 0.920721200000 -0.044625998900 0.387659552000 0.702164046000 118 | 0.130538705000 0.971428765000 -0.198212019000 -0.634013671000 119 | -0.367740259000 0.233102638000 0.900239719000 0.988360063000 120 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 121 | 18 20 37 122 | 0.852124234000 -0.451147306000 0.265240563000 0.213394517000 123 | 0.315924168000 0.847495634000 0.426548722000 -0.156902796000 124 | -0.417226840000 -0.279675898000 0.864698311000 0.986096496000 125 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 126 | 20 33 37 127 | 0.882788665000 0.268495606000 -0.385479580000 1.597134590000 128 | -0.402159256000 0.856049341000 -0.324729569000 -1.023819270000 129 | 0.242802872000 0.441690069000 0.863687818000 0.435585482000 130 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 131 | 20 34 37 132 | 0.651537223000 0.623233873000 -0.432526968000 1.581782020000 133 | -0.513200443000 0.781989954000 0.353718857000 -1.087905800000 134 | 0.558682861000 -0.008488190970 0.829338753000 0.702154763000 135 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 136 | 20 36 37 137 | 0.518779275000 0.416317761000 -0.746691410000 1.637919680000 138 | -0.580734412000 0.812582858000 0.049575103800 -1.080250410000 139 | 0.627389488000 0.407909118000 0.663320593000 0.562255304000 140 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 141 | 21 24 37 142 | 0.808370152000 -0.251347304000 0.532317386000 0.033445992500 143 | 0.409120833000 0.890063263000 -0.201018191000 -0.568421744000 144 | -0.423270885000 0.380278973000 0.822331727000 0.529359291000 145 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 146 | 21 29 37 147 | 0.987946988000 0.076211790300 0.134734232000 0.169137975000 148 | -0.039748512200 0.966120537000 -0.255014388000 -0.758321319000 149 | -0.149604640000 0.246585026000 0.957504867000 0.696616197000 150 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 151 | 21 30 37 152 | 0.964268739000 0.117264811000 -0.237559576000 0.205118293000 153 | -0.248675248000 0.709851919000 -0.658993124000 -0.751181964000 154 | 0.091355966500 0.694522948000 0.713648748000 0.561698898000 155 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 156 | 21 31 37 157 | 0.854683749000 0.470327192000 -0.219790200000 0.018616262900 158 | -0.394874125000 0.863802835000 0.312919112000 -0.920789744000 159 | 0.337031518000 -0.180658158000 0.923998663000 1.082939540000 160 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 161 | 22 24 37 162 | 0.956851005000 -0.170849879000 0.235046319000 -0.003905728690 163 | 0.142971302000 
0.981011020000 0.131049733000 -0.239647568000 164 | -0.252973191000 -0.091789623000 0.963109733000 0.692698979000 165 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 166 | 22 27 37 167 | 0.683148716000 0.198197372000 0.702869298000 -0.113615059000 168 | 0.243822157000 0.845332701000 -0.475352638000 -0.256764922000 169 | -0.688373638000 0.496112912000 0.529163987000 0.537108309000 170 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 171 | 22 30 37 172 | 0.834848443000 0.054641094100 -0.547761557000 0.113666979000 173 | -0.245783531000 0.927369106000 -0.282092011000 -0.402023094000 174 | 0.492563785000 0.370137894000 0.787643193000 0.846742307000 175 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 176 | 22 31 37 177 | 0.635599845000 0.658804758000 -0.402478790000 -0.238747597000 178 | -0.268168167000 0.677274111000 0.685114020000 -0.325067103000 179 | 0.723947442000 -0.327526466000 0.607148839000 1.299648070000 180 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 181 | 23 24 37 182 | 0.920352141000 -0.322193908000 0.221683430000 0.081217407800 183 | 0.106490788000 0.751871263000 0.650653108000 0.155310318000 184 | -0.376314182000 -0.575223154000 0.726296856000 0.425493088000 185 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 186 | 23 27 37 187 | 0.629849780000 0.069466983100 0.773604309000 -0.026087500600 188 | -0.074640964200 0.996795738000 -0.028739062200 0.044528858400 189 | -0.773122660000 -0.039641730500 0.633017410000 0.312574133000 190 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 191 | 24 25 37 192 | 0.643985512000 -0.428192104000 0.633982645000 -0.004962098240 193 | 0.402769705000 0.894311187000 0.194896053000 0.015277046200 194 | -0.650432256000 0.129839966000 0.748385424000 0.049697552900 195 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 196 | 24 30 37 197 | 0.639079792000 0.091236711900 -0.763709936000 0.050315510700 198 | -0.428962711000 0.866451164000 -0.255449393000 -0.193519461000 199 | 0.638410653000 0.490857423000 0.592868528000 0.154716249000 200 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 201 | 25 26 37 202 | 0.939511140000 0.029746120900 0.341220238000 -0.030827191200 203 | 0.152026044000 0.856497748000 -0.493253411000 -0.015655466000 204 | -0.306927350000 0.515291511000 0.800169064000 -0.124219342000 205 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 206 | 25 28 37 207 | 0.845971223000 0.384264506000 -0.369692128000 0.042603958900 208 | -0.329276767000 0.921791795000 0.204638544000 0.010910844000 209 | 0.419414493000 -0.051388426500 0.906339512000 0.006589727580 210 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 211 | 27 28 37 212 | 0.976070739000 0.104173795000 -0.190880047000 -0.014279237800 213 | 0.094759338600 0.586300552000 0.804533489000 0.168072539000 214 | 0.195723103000 -0.803367475000 0.562399077000 0.151402232000 215 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 216 | 28 31 37 217 | 0.085434798900 0.865464834000 -0.493630594000 -0.547522738000 218 | -0.631036296000 0.430406001000 0.645400778000 -0.146927389000 219 | 0.771034849000 0.256359760000 0.582911526000 0.330278357000 220 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 221 | 29 31 37 222 | 0.809656311000 0.457349365000 -0.367813560000 -0.200044954000 223 | -0.233252710000 0.825832985000 0.513410894000 -0.073173770200 224 | 0.538562781000 -0.329893487000 0.775319932000 0.391057276000 225 | 0.000000000000 0.000000000000 
0.000000000000 1.000000000000 226 | 30 32 37 227 | 0.537301247000 0.452937214000 -0.711445632000 -0.177871655000 228 | 0.019664258200 0.836596328000 0.547465191000 0.096025153400 229 | 0.843161323000 -0.308144960000 0.440598229000 0.896229369000 230 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 231 | 30 33 37 232 | 0.138052001000 0.331464260000 -0.933313547000 -0.043420106700 233 | 0.084593241400 0.934945125000 0.344556234000 0.045656795000 234 | 0.986806209000 -0.126520045000 0.101031771000 1.057321820000 235 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 236 | 31 35 37 237 | -0.129680421000 0.290638277000 -0.948004117000 0.145127952000 238 | -0.731563082000 0.617334100000 0.289335802000 -0.670544024000 239 | 0.669327723000 0.731044825000 0.132565959000 0.213944494000 240 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 241 | 31 36 37 242 | -0.109815164000 0.445330431000 -0.888608016000 0.142986349000 243 | -0.719172374000 0.581515233000 0.380304110000 -0.576168801000 244 | 0.686099422000 0.680824440000 0.256411619000 0.232083126000 245 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 246 | -------------------------------------------------------------------------------- /benchmarks/3DLoMatch/sun3d-hotel_umd-maryland_hotel3/gt_overlap.log: -------------------------------------------------------------------------------- 1 | 0,1,0.5325 2 | 0,2,0.0683 3 | 0,3,0.0000 4 | 0,4,0.0000 5 | 0,5,0.0000 6 | 0,6,0.0000 7 | 0,7,0.0000 8 | 0,8,0.0000 9 | 0,9,0.0000 10 | 0,10,0.0269 11 | 0,11,0.1804 12 | 0,12,0.3909 13 | 0,13,0.3089 14 | 0,14,0.0004 15 | 0,15,0.0000 16 | 0,16,0.0000 17 | 0,17,0.0000 18 | 0,18,0.0000 19 | 0,19,0.0000 20 | 0,20,0.0000 21 | 0,21,0.0000 22 | 0,22,0.0263 23 | 0,23,0.0105 24 | 0,24,0.0098 25 | 0,25,0.0002 26 | 0,26,0.0000 27 | 0,27,0.1104 28 | 0,28,0.0002 29 | 0,29,0.0000 30 | 0,30,0.0000 31 | 0,31,0.0000 32 | 0,32,0.0000 33 | 0,33,0.0000 34 | 0,34,0.0000 35 | 0,35,0.0000 36 | 0,36,0.0000 37 | 1,2,0.3132 38 | 1,3,0.0537 39 | 1,4,0.0000 40 | 1,5,0.0000 41 | 1,6,0.0000 42 | 1,7,0.0000 43 | 1,8,0.0000 44 | 1,9,0.0000 45 | 1,10,0.0000 46 | 1,11,0.1110 47 | 1,12,0.2444 48 | 1,13,0.1772 49 | 1,14,0.0000 50 | 1,15,0.0000 51 | 1,16,0.0000 52 | 1,17,0.0000 53 | 1,18,0.0000 54 | 1,19,0.0000 55 | 1,20,0.0000 56 | 1,21,0.0000 57 | 1,22,0.0000 58 | 1,23,0.0000 59 | 1,24,0.0000 60 | 1,25,0.0000 61 | 1,26,0.0000 62 | 1,27,0.0436 63 | 1,28,0.0000 64 | 1,29,0.0000 65 | 1,30,0.0000 66 | 1,31,0.0000 67 | 1,32,0.0000 68 | 1,33,0.0000 69 | 1,34,0.0000 70 | 1,35,0.0000 71 | 1,36,0.0000 72 | 2,3,0.4890 73 | 2,4,0.0686 74 | 2,5,0.0600 75 | 2,6,0.0000 76 | 2,7,0.0000 77 | 2,8,0.0000 78 | 2,9,0.0000 79 | 2,10,0.0000 80 | 2,11,0.0000 81 | 2,12,0.0000 82 | 2,13,0.0000 83 | 2,14,0.0000 84 | 2,15,0.0000 85 | 2,16,0.0000 86 | 2,17,0.0000 87 | 2,18,0.0000 88 | 2,19,0.0000 89 | 2,20,0.0000 90 | 2,21,0.0000 91 | 2,22,0.0000 92 | 2,23,0.0000 93 | 2,24,0.0000 94 | 2,25,0.0000 95 | 2,26,0.0000 96 | 2,27,0.0000 97 | 2,28,0.0000 98 | 2,29,0.0000 99 | 2,30,0.0000 100 | 2,31,0.0000 101 | 2,32,0.0000 102 | 2,33,0.0000 103 | 2,34,0.0000 104 | 2,35,0.0000 105 | 2,36,0.0000 106 | 3,4,0.5921 107 | 3,5,0.2586 108 | 3,6,0.0192 109 | 3,7,0.0000 110 | 3,8,0.0000 111 | 3,9,0.0000 112 | 3,10,0.0000 113 | 3,11,0.0000 114 | 3,12,0.0000 115 | 3,13,0.0000 116 | 3,14,0.0000 117 | 3,15,0.0000 118 | 3,16,0.0000 119 | 3,17,0.0000 120 | 3,18,0.0000 121 | 3,19,0.0000 122 | 3,20,0.0000 123 | 3,21,0.0000 124 | 3,22,0.0000 125 | 3,23,0.0000 126 | 3,24,0.0000 127 | 3,25,0.0000 128 | 
3,26,0.0000 129 | 3,27,0.0000 130 | 3,28,0.0000 131 | 3,29,0.0000 132 | 3,30,0.0000 133 | 3,31,0.0000 134 | 3,32,0.0000 135 | 3,33,0.0000 136 | 3,34,0.0000 137 | 3,35,0.0000 138 | 3,36,0.0000 139 | 4,5,0.3325 140 | 4,6,0.0784 141 | 4,7,0.0000 142 | 4,8,0.0000 143 | 4,9,0.0000 144 | 4,10,0.0000 145 | 4,11,0.0000 146 | 4,12,0.0000 147 | 4,13,0.0000 148 | 4,14,0.0000 149 | 4,15,0.0000 150 | 4,16,0.0000 151 | 4,17,0.0000 152 | 4,18,0.0000 153 | 4,19,0.0000 154 | 4,20,0.0000 155 | 4,21,0.0000 156 | 4,22,0.0000 157 | 4,23,0.0000 158 | 4,24,0.0000 159 | 4,25,0.0000 160 | 4,26,0.0000 161 | 4,27,0.0000 162 | 4,28,0.0000 163 | 4,29,0.0000 164 | 4,30,0.0000 165 | 4,31,0.0000 166 | 4,32,0.0000 167 | 4,33,0.0000 168 | 4,34,0.0000 169 | 4,35,0.0000 170 | 4,36,0.0000 171 | 5,6,0.4993 172 | 5,7,0.1245 173 | 5,8,0.0000 174 | 5,9,0.0000 175 | 5,10,0.0000 176 | 5,11,0.0000 177 | 5,12,0.0000 178 | 5,13,0.0000 179 | 5,14,0.0000 180 | 5,15,0.0000 181 | 5,16,0.0000 182 | 5,17,0.0000 183 | 5,18,0.0000 184 | 5,19,0.0000 185 | 5,20,0.0000 186 | 5,21,0.0000 187 | 5,22,0.0000 188 | 5,23,0.0000 189 | 5,24,0.0000 190 | 5,25,0.0000 191 | 5,26,0.0000 192 | 5,27,0.0000 193 | 5,28,0.0000 194 | 5,29,0.0000 195 | 5,30,0.0000 196 | 5,31,0.0000 197 | 5,32,0.0000 198 | 5,33,0.0000 199 | 5,34,0.0000 200 | 5,35,0.0000 201 | 5,36,0.0000 202 | 6,7,0.4663 203 | 6,8,0.0432 204 | 6,9,0.0000 205 | 6,10,0.0000 206 | 6,11,0.0000 207 | 6,12,0.0000 208 | 6,13,0.0000 209 | 6,14,0.0000 210 | 6,15,0.0000 211 | 6,16,0.0000 212 | 6,17,0.0000 213 | 6,18,0.0000 214 | 6,19,0.0000 215 | 6,20,0.0000 216 | 6,21,0.0000 217 | 6,22,0.0000 218 | 6,23,0.0000 219 | 6,24,0.0000 220 | 6,25,0.0000 221 | 6,26,0.0000 222 | 6,27,0.0000 223 | 6,28,0.0000 224 | 6,29,0.0000 225 | 6,30,0.0000 226 | 6,31,0.0000 227 | 6,32,0.0000 228 | 6,33,0.0000 229 | 6,34,0.0000 230 | 6,35,0.0000 231 | 6,36,0.0000 232 | 7,8,0.3918 233 | 7,9,0.0272 234 | 7,10,0.0796 235 | 7,11,0.0000 236 | 7,12,0.0000 237 | 7,13,0.0000 238 | 7,14,0.0605 239 | 7,15,0.0651 240 | 7,16,0.0000 241 | 7,17,0.0000 242 | 7,18,0.0000 243 | 7,19,0.0000 244 | 7,20,0.0000 245 | 7,21,0.0000 246 | 7,22,0.0000 247 | 7,23,0.0000 248 | 7,24,0.0000 249 | 7,25,0.0000 250 | 7,26,0.0000 251 | 7,27,0.0000 252 | 7,28,0.0000 253 | 7,29,0.0000 254 | 7,30,0.0000 255 | 7,31,0.0000 256 | 7,32,0.0000 257 | 7,33,0.0000 258 | 7,34,0.0000 259 | 7,35,0.0000 260 | 7,36,0.0000 261 | 8,9,0.4565 262 | 8,10,0.3677 263 | 8,11,0.0000 264 | 8,12,0.0000 265 | 8,13,0.0000 266 | 8,14,0.1760 267 | 8,15,0.4993 268 | 8,16,0.2940 269 | 8,17,0.0577 270 | 8,18,0.0006 271 | 8,19,0.0000 272 | 8,20,0.0000 273 | 8,21,0.0000 274 | 8,22,0.0000 275 | 8,23,0.0000 276 | 8,24,0.0000 277 | 8,25,0.0000 278 | 8,26,0.0000 279 | 8,27,0.0000 280 | 8,28,0.0000 281 | 8,29,0.0000 282 | 8,30,0.0000 283 | 8,31,0.0000 284 | 8,32,0.0000 285 | 8,33,0.0000 286 | 8,34,0.0000 287 | 8,35,0.0000 288 | 8,36,0.0000 289 | 9,10,0.2301 290 | 9,11,0.0546 291 | 9,12,0.0000 292 | 9,13,0.0274 293 | 9,14,0.0741 294 | 9,15,0.2451 295 | 9,16,0.1903 296 | 9,17,0.0907 297 | 9,18,0.0404 298 | 9,19,0.0087 299 | 9,20,0.0000 300 | 9,21,0.0000 301 | 9,22,0.0000 302 | 9,23,0.0000 303 | 9,24,0.0000 304 | 9,25,0.0000 305 | 9,26,0.0000 306 | 9,27,0.0000 307 | 9,28,0.0000 308 | 9,29,0.0000 309 | 9,30,0.0000 310 | 9,31,0.0000 311 | 9,32,0.0000 312 | 9,33,0.0000 313 | 9,34,0.0000 314 | 9,35,0.0000 315 | 9,36,0.0000 316 | 10,11,0.2857 317 | 10,12,0.0900 318 | 10,13,0.1489 319 | 10,14,0.5227 320 | 10,15,0.5015 321 | 10,16,0.4256 322 | 10,17,0.2176 323 | 10,18,0.0341 324 | 10,19,0.0106 325 | 10,20,0.0000 
326 | 10,21,0.0000 327 | 10,22,0.0000 328 | 10,23,0.0005 329 | 10,24,0.0018 330 | 10,25,0.0010 331 | 10,26,0.0000 332 | 10,27,0.0011 333 | 10,28,0.0007 334 | 10,29,0.0000 335 | 10,30,0.0000 336 | 10,31,0.0000 337 | 10,32,0.0000 338 | 10,33,0.0000 339 | 10,34,0.0000 340 | 10,35,0.0000 341 | 10,36,0.0000 342 | 11,12,0.6637 343 | 11,13,0.7383 344 | 11,14,0.2992 345 | 11,15,0.0000 346 | 11,16,0.0219 347 | 11,17,0.0121 348 | 11,18,0.0000 349 | 11,19,0.0000 350 | 11,20,0.0000 351 | 11,21,0.0006 352 | 11,22,0.0436 353 | 11,23,0.0238 354 | 11,24,0.0194 355 | 11,25,0.0047 356 | 11,26,0.0028 357 | 11,27,0.1267 358 | 11,28,0.0028 359 | 11,29,0.0000 360 | 11,30,0.0000 361 | 11,31,0.0000 362 | 11,32,0.0000 363 | 11,33,0.0000 364 | 11,34,0.0000 365 | 11,35,0.0000 366 | 11,36,0.0000 367 | 12,13,0.5171 368 | 12,14,0.0847 369 | 12,15,0.0000 370 | 12,16,0.0000 371 | 12,17,0.0000 372 | 12,18,0.0000 373 | 12,19,0.0000 374 | 12,20,0.0000 375 | 12,21,0.0008 376 | 12,22,0.0525 377 | 12,23,0.0422 378 | 12,24,0.0159 379 | 12,25,0.0030 380 | 12,26,0.0073 381 | 12,27,0.1169 382 | 12,28,0.0028 383 | 12,29,0.0093 384 | 12,30,0.0000 385 | 12,31,0.0000 386 | 12,32,0.0000 387 | 12,33,0.0000 388 | 12,34,0.0000 389 | 12,35,0.0000 390 | 12,36,0.0000 391 | 13,14,0.1723 392 | 13,15,0.0003 393 | 13,16,0.0000 394 | 13,17,0.0000 395 | 13,18,0.0000 396 | 13,19,0.0000 397 | 13,20,0.0000 398 | 13,21,0.0017 399 | 13,22,0.0478 400 | 13,23,0.0371 401 | 13,24,0.0210 402 | 13,25,0.0049 403 | 13,26,0.0000 404 | 13,27,0.1237 405 | 13,28,0.0030 406 | 13,29,0.0050 407 | 13,30,0.0000 408 | 13,31,0.0000 409 | 13,32,0.0000 410 | 13,33,0.0000 411 | 13,34,0.0000 412 | 13,35,0.0000 413 | 13,36,0.0000 414 | 14,15,0.3409 415 | 14,16,0.2692 416 | 14,17,0.2338 417 | 14,18,0.0061 418 | 14,19,0.0075 419 | 14,20,0.0079 420 | 14,21,0.0065 421 | 14,22,0.0011 422 | 14,23,0.0043 423 | 14,24,0.0063 424 | 14,25,0.0023 425 | 14,26,0.0000 426 | 14,27,0.0045 427 | 14,28,0.0027 428 | 14,29,0.0000 429 | 14,30,0.0000 430 | 14,31,0.0000 431 | 14,32,0.0000 432 | 14,33,0.0000 433 | 14,34,0.0000 434 | 14,35,0.0000 435 | 14,36,0.0002 436 | 15,16,0.6412 437 | 15,17,0.3784 438 | 15,18,0.0479 439 | 15,19,0.0572 440 | 15,20,0.0059 441 | 15,21,0.0000 442 | 15,22,0.0000 443 | 15,23,0.0000 444 | 15,24,0.0000 445 | 15,25,0.0000 446 | 15,26,0.0000 447 | 15,27,0.0000 448 | 15,28,0.0000 449 | 15,29,0.0000 450 | 15,30,0.0000 451 | 15,31,0.0000 452 | 15,32,0.0000 453 | 15,33,0.0000 454 | 15,34,0.0000 455 | 15,35,0.0000 456 | 15,36,0.0000 457 | 16,17,0.6605 458 | 16,18,0.1551 459 | 16,19,0.1564 460 | 16,20,0.0070 461 | 16,21,0.0037 462 | 16,22,0.0000 463 | 16,23,0.0000 464 | 16,24,0.0000 465 | 16,25,0.0000 466 | 16,26,0.0000 467 | 16,27,0.0000 468 | 16,28,0.0000 469 | 16,29,0.0000 470 | 16,30,0.0000 471 | 16,31,0.0000 472 | 16,32,0.0000 473 | 16,33,0.0000 474 | 16,34,0.0000 475 | 16,35,0.0003 476 | 16,36,0.0065 477 | 17,18,0.4449 478 | 17,19,0.5912 479 | 17,20,0.3402 480 | 17,21,0.0151 481 | 17,22,0.0000 482 | 17,23,0.0000 483 | 17,24,0.0000 484 | 17,25,0.0000 485 | 17,26,0.0000 486 | 17,27,0.0000 487 | 17,28,0.0000 488 | 17,29,0.0000 489 | 17,30,0.0000 490 | 17,31,0.0000 491 | 17,32,0.0000 492 | 17,33,0.0144 493 | 17,34,0.0610 494 | 17,35,0.0111 495 | 17,36,0.0755 496 | 18,19,0.5922 497 | 18,20,0.2836 498 | 18,21,0.0132 499 | 18,22,0.0000 500 | 18,23,0.0000 501 | 18,24,0.0000 502 | 18,25,0.0000 503 | 18,26,0.0000 504 | 18,27,0.0000 505 | 18,28,0.0000 506 | 18,29,0.0000 507 | 18,30,0.0000 508 | 18,31,0.0000 509 | 18,32,0.0000 510 | 18,33,0.0000 511 | 18,34,0.0055 512 | 18,35,0.0089 
513 | 18,36,0.0382 514 | 19,20,0.4955 515 | 19,21,0.0496 516 | 19,22,0.0000 517 | 19,23,0.0000 518 | 19,24,0.0000 519 | 19,25,0.0000 520 | 19,26,0.0000 521 | 19,27,0.0000 522 | 19,28,0.0000 523 | 19,29,0.0000 524 | 19,30,0.0000 525 | 19,31,0.0000 526 | 19,32,0.0000 527 | 19,33,0.0274 528 | 19,34,0.0596 529 | 19,35,0.0140 530 | 19,36,0.0950 531 | 20,21,0.3810 532 | 20,22,0.0238 533 | 20,23,0.0040 534 | 20,24,0.0045 535 | 20,25,0.0010 536 | 20,26,0.0000 537 | 20,27,0.0045 538 | 20,28,0.0062 539 | 20,29,0.0000 540 | 20,30,0.0053 541 | 20,31,0.0576 542 | 20,32,0.0000 543 | 20,33,0.1121 544 | 20,34,0.1527 545 | 20,35,0.0143 546 | 20,36,0.1041 547 | 21,22,0.5275 548 | 21,23,0.3011 549 | 21,24,0.1952 550 | 21,25,0.0320 551 | 21,26,0.0016 552 | 21,27,0.0899 553 | 21,28,0.3441 554 | 21,29,0.2048 555 | 21,30,0.2257 556 | 21,31,0.2364 557 | 21,32,0.0316 558 | 21,33,0.0744 559 | 21,34,0.0670 560 | 21,35,0.0050 561 | 21,36,0.0172 562 | 22,23,0.7330 563 | 22,24,0.2303 564 | 22,25,0.0266 565 | 22,26,0.0267 566 | 22,27,0.1673 567 | 22,28,0.3191 568 | 22,29,0.4485 569 | 22,30,0.2956 570 | 22,31,0.1013 571 | 22,32,0.0161 572 | 22,33,0.0044 573 | 22,34,0.0085 574 | 22,35,0.0046 575 | 22,36,0.0075 576 | 23,24,0.2493 577 | 23,25,0.0259 578 | 23,26,0.0426 579 | 23,27,0.2199 580 | 23,28,0.3316 581 | 23,29,0.5661 582 | 23,30,0.3621 583 | 23,31,0.0670 584 | 23,32,0.0031 585 | 23,33,0.0000 586 | 23,34,0.0000 587 | 23,35,0.0000 588 | 23,36,0.0000 589 | 24,25,0.2683 590 | 24,26,0.0284 591 | 24,27,0.4720 592 | 24,28,0.6725 593 | 24,29,0.3175 594 | 24,30,0.1250 595 | 24,31,0.0131 596 | 24,32,0.0000 597 | 24,33,0.0000 598 | 24,34,0.0000 599 | 24,35,0.0000 600 | 24,36,0.0000 601 | 25,26,0.2499 602 | 25,27,0.4611 603 | 25,28,0.2101 604 | 25,29,0.0000 605 | 25,30,0.0000 606 | 25,31,0.0000 607 | 25,32,0.0000 608 | 25,33,0.0000 609 | 25,34,0.0000 610 | 25,35,0.0000 611 | 25,36,0.0000 612 | 26,27,0.7114 613 | 26,28,0.0033 614 | 26,29,0.0000 615 | 26,30,0.0000 616 | 26,31,0.0000 617 | 26,32,0.0000 618 | 26,33,0.0000 619 | 26,34,0.0000 620 | 26,35,0.0000 621 | 26,36,0.0000 622 | 27,28,0.1769 623 | 27,29,0.0335 624 | 27,30,0.0000 625 | 27,31,0.0000 626 | 27,32,0.0000 627 | 27,33,0.0000 628 | 27,34,0.0000 629 | 27,35,0.0000 630 | 27,36,0.0000 631 | 28,29,0.5445 632 | 28,30,0.3390 633 | 28,31,0.1712 634 | 28,32,0.0000 635 | 28,33,0.0000 636 | 28,34,0.0000 637 | 28,35,0.0000 638 | 28,36,0.0000 639 | 29,30,0.6915 640 | 29,31,0.1352 641 | 29,32,0.0507 642 | 29,33,0.0016 643 | 29,34,0.0000 644 | 29,35,0.0000 645 | 29,36,0.0000 646 | 30,31,0.4260 647 | 30,32,0.2778 648 | 30,33,0.1563 649 | 30,34,0.0450 650 | 30,35,0.0000 651 | 30,36,0.0080 652 | 31,32,0.4631 653 | 31,33,0.5335 654 | 31,34,0.3379 655 | 31,35,0.1129 656 | 31,36,0.2214 657 | 32,33,0.6403 658 | 32,34,0.3848 659 | 32,35,0.3697 660 | 32,36,0.3271 661 | 33,34,0.7574 662 | 33,35,0.3338 663 | 33,36,0.4225 664 | 34,35,0.4932 665 | 34,36,0.5212 666 | 35,36,0.5585 667 | -------------------------------------------------------------------------------- /benchmarks/3DLoMatch/sun3d-mit_lab_hj-lab_hj_tea_nov_2_2012_scan1_erika/gt.log: -------------------------------------------------------------------------------- 1 | 0 2 38 2 | 0.996946231000 -0.019570181900 -0.075589841400 -0.095817690400 3 | 0.026283052000 0.995698657000 0.088856477700 0.080418857600 4 | 0.073525232200 -0.090571688200 0.993173500000 1.341256520000 5 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 6 | 0 6 38 7 | 0.519691840000 0.125593738000 -0.845070222000 1.149590460000 8 | 0.048207401300 
0.983249840000 0.175775830000 0.029224642400 9 | 0.852992382000 -0.132087058000 0.504933954000 3.198937190000 10 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 11 | 0 7 38 12 | 0.220710514000 0.056682327600 -0.973690389000 1.556456220000 13 | 0.071009047400 0.994727100000 0.074003871600 -0.037995660900 14 | 0.972751335000 -0.085472481000 0.215523482000 3.286677370000 15 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 16 | 0 11 38 17 | -0.596122016000 0.159558973000 -0.786879560000 1.375078480000 18 | 0.116952871000 0.986857033000 0.111508988000 -0.000126804152 19 | 0.794331718000 -0.025554374700 -0.606949188000 3.308136500000 20 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 21 | 0 24 38 22 | 0.861977209000 -0.130708338000 0.489804663000 0.052763822600 23 | 0.002686385430 0.967352566000 0.253418153000 -0.003558432930 24 | -0.506939728000 -0.217123816000 0.834188937000 -0.151604650000 25 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 26 | 0 26 38 27 | 0.998627197000 0.022677394100 -0.047198282700 1.069849370000 28 | -0.010284866900 0.968749637000 0.247822248000 -0.014263247400 29 | 0.051343090700 -0.246995132000 0.967656567000 0.210630866000 30 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 31 | 1 5 38 32 | 0.814718889000 0.052602775800 -0.577465624000 0.950337820000 33 | 0.044335633200 0.987310559000 0.152487595000 -0.107001647000 34 | 0.578159185000 -0.149835670000 0.802046786000 2.659989350000 35 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 36 | 1 7 38 37 | 0.262531317000 0.044533909600 -0.963895061000 1.765367070000 38 | 0.023644191300 0.998337486000 0.052565603100 -0.196117084000 39 | 0.964632254000 -0.036589931000 0.261043080000 2.734406030000 40 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 41 | 1 26 38 42 | 0.999969385000 0.003701139700 -0.006817427600 1.144220470000 43 | -0.002279249970 0.980272182000 0.197630557000 -0.020698862000 44 | 0.007414273690 -0.197608436000 0.980252219000 -0.312360120000 45 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 46 | 2 6 38 47 | 0.582089403000 0.141341197000 -0.800745305000 1.376848380000 48 | -0.039427607200 0.988523957000 0.145825033000 -0.243600352000 49 | 0.812167359000 -0.053310844900 0.580982636000 1.746305440000 50 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 51 | 2 7 38 52 | 0.293425314000 0.076369148600 -0.952926827000 1.787156660000 53 | -0.021719673000 0.997078488000 0.073220588400 -0.326440684000 54 | 0.955734304000 -0.000785822802 0.294227908000 1.796718760000 55 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 56 | 2 9 38 57 | -0.229498881000 0.085194930200 -0.969573257000 2.455308470000 58 | 0.007182179590 0.996282865000 0.085842581000 -0.330824649000 59 | 0.973283326000 0.012737434600 -0.229257016000 1.770838140000 60 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 61 | 2 28 38 62 | 0.968234089000 0.072580940400 -0.239279386000 1.470634750000 63 | -0.039191157800 0.989165603000 0.141455804000 -0.123450979000 64 | 0.246953948000 -0.127583195000 0.960591549000 -0.403159300000 65 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 66 | 2 30 38 67 | 0.969696953000 -0.028225472800 0.242674637000 1.696994950000 68 | 0.030557953200 0.999514051000 -0.005849604120 -0.214081404000 69 | -0.242393421000 0.013089369500 0.970089653000 -0.048568424400 70 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 71 | 3 5 38 72 | 0.849607756000 0.094295586900 
-0.518918458000 0.748300316000 73 | -0.034008950500 0.991633188000 0.124515030000 -0.195723539000 74 | 0.526317982000 -0.088139940400 0.845705781000 0.987769024000 75 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 76 | 3 6 38 77 | 0.605500476000 0.156918334000 -0.780220708000 1.150527060000 78 | -0.073089273100 0.987188807000 0.141822146000 -0.237203398000 79 | 0.792479617000 -0.028847640800 0.609213771000 0.968328905000 80 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 81 | 3 7 38 82 | 0.321643105000 0.093716497700 -0.942211829000 1.560657470000 83 | -0.054854832600 0.995261093000 0.080268381500 -0.328289793000 84 | 0.945268885000 0.025867938000 0.325261879000 1.003729760000 85 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 86 | 3 8 38 87 | 0.160516935000 0.038139157400 -0.986294320000 1.869075070000 88 | -0.062959266200 0.997612028000 0.028331061900 -0.331726846000 89 | 0.985022220000 0.057549004200 0.162536483000 0.902329279000 90 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 91 | 3 30 38 92 | 0.962345249000 -0.010354449200 0.271632610000 1.416955700000 93 | 0.021638385300 0.999018561000 -0.038577411500 -0.159892330000 94 | -0.270969524000 0.043003037500 0.961626253000 -0.833884583000 95 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 96 | 4 7 38 97 | 0.509008691000 0.099115171600 -0.855036671000 1.170012720000 98 | -0.112008308000 0.992527997000 0.048375204700 -0.248322308000 99 | 0.853441563000 0.071148443300 0.516309813000 0.232881529000 100 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 101 | 4 8 38 102 | 0.359481734000 0.051244036100 -0.931743224000 1.450984900000 103 | -0.124496834000 0.992197233000 0.006537152970 -0.241695413000 104 | 0.924809894000 0.113649166000 0.363058294000 0.070320018200 105 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 106 | 5 9 38 107 | 0.346351040000 0.074219622100 -0.935163378000 1.245747230000 108 | -0.122871308000 0.991867480000 0.033214028900 -0.003852787620 109 | 0.930025324000 0.103399877000 0.352653693000 -0.811717338000 110 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 111 | 6 9 38 112 | 0.656597812000 0.020654209300 -0.753959266000 0.651125088000 113 | -0.077225439700 0.996212929000 -0.039961155200 0.064899736500 114 | 0.750278983000 0.084463303700 0.655705873000 -0.862039495000 115 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 116 | 6 10 38 117 | 0.526127400000 0.036122468400 -0.849639127000 0.823522412000 118 | -0.087828243600 0.996062824000 -0.012040196900 0.051963153600 119 | 0.845858551000 0.080957079000 0.527227634000 -0.738971602000 120 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 121 | 8 9 38 122 | 0.934006176000 -0.007553989070 -0.357179059000 0.112391548000 123 | 0.030974711600 0.997724793000 0.059898237000 0.005607376550 124 | 0.355914203000 -0.067008748600 0.932115086000 -0.344946682000 125 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 126 | 8 10 38 127 | 0.864103925000 0.004471499660 -0.503293814000 0.322701845000 128 | 0.035066489300 0.996996455000 0.069060993800 0.010149051300 129 | 0.502090033000 -0.077325746900 0.861352309000 -0.316968194000 130 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 131 | 9 11 38 132 | 0.944485242000 0.013927757900 -0.328256286000 0.257112772000 133 | 0.021323495000 0.994394478000 0.103546249000 -0.027757763300 134 | 0.327858877000 -0.104798153000 0.938896935000 0.809666435000 135 | 0.000000000000 0.000000000000 
0.000000000000 1.000000000000 136 | 9 27 38 137 | 0.090373993900 -0.167252611000 0.981762641000 -2.304867320000 138 | 0.047757025700 0.985388888000 0.163475617000 0.129878864000 139 | -0.994761298000 0.032111507900 0.097039687100 1.624465980000 140 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 141 | 9 28 38 142 | 0.017866470600 -0.133727732000 0.990856239000 -1.888442000000 143 | 0.046589329200 0.990047257000 0.132779595000 0.095022460800 144 | -0.998754084000 0.043788781500 0.023918911200 1.470919780000 145 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 146 | 10 27 38 147 | -0.071681142300 -0.164980870000 0.983688974000 -2.208908800000 148 | 0.062312751900 0.983556869000 0.169499590000 0.086521023300 149 | -0.995478838000 0.073447164500 -0.060222302300 2.058314700000 150 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 151 | 10 28 38 152 | -0.143874989000 -0.130034980000 0.981015768000 -1.822564650000 153 | 0.060654501600 0.988304355000 0.139896255000 0.056949706000 154 | -0.987735621000 0.079629898300 -0.134304005000 1.839076260000 155 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 156 | 13 16 38 157 | 0.900450288000 -0.006787986960 -0.434905482000 0.172282022000 158 | 0.055341010300 0.993540770000 0.099073601400 -0.042362808300 159 | 0.431424265000 -0.113280236000 0.895008869000 0.817518128000 160 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 161 | 13 17 38 162 | 0.769412119000 0.069864736900 -0.634920409000 0.116689970000 163 | -0.032191287700 0.996980113000 0.070691832200 -0.071479768200 164 | 0.637941411000 -0.033952253300 0.769334853000 1.342425390000 165 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 166 | 13 18 38 167 | 0.603170563000 0.126098036000 -0.787582081000 -0.058873191000 168 | -0.020552794600 0.989553375000 0.142693752000 -0.097519596300 169 | 0.797348122000 -0.069881515700 0.599460017000 1.919965510000 170 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 171 | 14 18 38 172 | 0.820728887000 0.138669522000 -0.554234096000 0.251723289000 173 | -0.077836265200 0.988191029000 0.131981621000 -0.133464978000 174 | 0.565991163000 -0.065181469000 0.821830044000 1.294450400000 175 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 176 | 15 18 38 177 | 0.871037140000 0.103339033000 -0.480226057000 0.217027540000 178 | -0.067824074000 0.993557699000 0.090781159900 -0.130929890000 179 | 0.486513321000 -0.046502637200 0.872435480000 1.150704340000 180 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 181 | 17 20 38 182 | 0.521997795000 0.082443470200 -0.848954672000 0.433228935000 183 | -0.032353354800 0.996514068000 0.076882037900 -0.134886988000 184 | 0.852333524000 -0.012665849200 0.522845548000 1.278222790000 185 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 186 | 19 21 38 187 | 0.722288673000 0.127891467000 -0.679662841000 0.464172358000 188 | -0.086888807100 0.991746121000 0.094277586400 -0.079481237100 189 | 0.686110681000 -0.009039126980 0.727440830000 -0.116174227000 190 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 191 | 20 22 38 192 | 0.735131857000 0.105863054000 -0.669606466000 0.213627111000 193 | -0.049705663600 0.993489923000 0.102497958000 0.053084747600 194 | 0.676098703000 -0.042067575300 0.735608813000 -0.373958600000 195 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 196 | 21 23 38 197 | 0.894209699000 0.125200067000 -0.429786880000 0.017996690200 198 | -0.071106388700 0.987628006000 
0.139762206000 0.109338259000 199 | 0.441967685000 -0.094415709200 0.892049159000 -0.097771863600 200 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 201 | 21 24 38 202 | 0.736013185000 0.178802635000 -0.652929408000 0.094337471800 203 | -0.069453080600 0.979342979000 0.189900144000 0.018631550400 204 | 0.673395599000 -0.094421504400 0.733227316000 0.483354290000 205 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 206 | 21 25 38 207 | 0.588958187000 0.221060653000 -0.777343568000 0.161931421000 208 | -0.090115512900 0.973826798000 0.208661704000 -0.060596982300 209 | 0.803123514000 -0.052842590300 0.593465000000 0.996887964000 210 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 211 | 22 25 38 212 | 0.831062684000 0.153594677000 -0.534551036000 0.490177831000 213 | -0.072623145100 0.982850423000 0.169499311000 -0.127101230000 214 | 0.551416212000 -0.102043177000 0.827966220000 0.984775031000 215 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 216 | 22 26 38 217 | 0.560758179000 0.215766663000 -0.799372489000 0.571895677000 218 | -0.104842305000 0.976179558000 0.189942828000 -0.172313223000 219 | 0.821314167000 -0.022702848300 0.570024402000 1.549443960000 220 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 221 | 23 26 38 222 | 0.649911770000 0.175678039000 -0.739425862000 0.768608235000 223 | -0.165451352000 0.982286897000 0.087956481100 -0.355080154000 224 | 0.741780855000 0.065175060400 0.667467616000 1.435346190000 225 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 226 | 23 27 38 227 | 0.352583093000 0.211712037000 -0.911516246000 0.692480123000 228 | -0.184274591000 0.970706215000 0.154181133000 -0.489268237000 229 | 0.917455838000 0.113606612000 0.381270250000 1.925259700000 230 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 231 | 24 26 38 232 | 0.834739728000 0.147361855000 -0.530559817000 0.693046127000 233 | -0.151626300000 0.987788412000 0.035798610900 -0.221947725000 234 | 0.529357464000 0.050565087400 0.846891414000 0.797634642000 235 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 236 | 24 27 38 237 | 0.598099733000 0.195746039000 -0.777147908000 0.759843466000 238 | -0.189099109000 0.976801039000 0.100502244000 -0.371487920000 239 | 0.778792190000 0.086847758200 0.621241876000 1.284454180000 240 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 241 | 24 33 38 242 | 0.687481618000 -0.122790593000 0.715744704000 -0.756087059000 243 | 0.229291246000 0.971884943000 -0.053504023700 -0.742554039000 244 | -0.689053068000 0.200897623000 0.696309408000 2.767715620000 245 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 246 | 24 34 38 247 | 0.492579434000 -0.050674574100 0.868790568000 -1.153479820000 248 | 0.278930607000 0.954829167000 -0.102451732000 -0.749883118000 249 | -0.824355140000 0.292799519000 0.484463113000 2.908230830000 250 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 251 | 24 35 38 252 | 0.494748620000 0.060825821500 0.866904599000 -1.170300150000 253 | 0.272533670000 0.936364475000 -0.221235276000 -0.719324571000 254 | -0.825197829000 0.345717463000 0.446687445000 2.872192750000 255 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 256 | 24 36 38 257 | 0.604182762000 0.085681294800 0.792225820000 -1.159813840000 258 | 0.251431740000 0.922912609000 -0.291566204000 -0.746477378000 259 | -0.756137877000 0.375351175000 0.536065202000 2.819012760000 260 | 0.000000000000 0.000000000000 0.000000000000 
1.000000000000 261 | 24 37 38 262 | 0.543823595000 -0.041907100800 0.838151728000 -1.182471010000 263 | 0.199846743000 0.976484496000 -0.080843743800 -0.726130081000 264 | -0.815055919000 0.211467649000 0.539411943000 2.859989790000 265 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 266 | 25 27 38 267 | 0.743386472000 0.150784878000 -0.651643337000 0.547991383000 268 | -0.151747336000 0.986873857000 0.055243093400 -0.233908596000 269 | 0.651419343000 0.057817624000 0.756511729000 0.880506530000 270 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 271 | 25 30 38 272 | 0.986200182000 -0.026622456300 -0.163407574000 0.485024664000 273 | -0.001613201930 0.985393473000 -0.170282813000 -0.507811271000 274 | 0.165552550000 0.168196243000 0.971751331000 1.685125810000 275 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 276 | 25 31 38 277 | 0.989629064000 -0.050844038400 0.134346490000 0.327331125000 278 | 0.073024375600 0.983465125000 -0.165714411000 -0.654604787000 279 | -0.123701454000 0.173805241000 0.976979092000 2.319959410000 280 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 281 | 25 32 38 282 | 0.745036697000 -0.097381245000 0.659878007000 -0.058905718500 283 | 0.206745594000 0.974278571000 -0.089647401000 -0.696130402000 284 | -0.634176209000 0.203217280000 0.746005509000 2.614538970000 285 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 286 | 25 33 38 287 | 0.531640438000 -0.139918822000 0.835332572000 -0.638974063000 288 | 0.268506720000 0.963229026000 -0.009546881150 -0.691751982000 289 | -0.803282091000 0.229367788000 0.549661110000 2.617541920000 290 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 291 | 25 34 38 292 | 0.312279366000 -0.050997135800 0.948620087000 -1.001904440000 293 | 0.306214404000 0.950663068000 -0.049695234500 -0.722447382000 294 | -0.899284154000 0.306000626000 0.312486597000 2.829812190000 295 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 296 | 25 35 38 297 | 0.314624622000 0.069327834500 0.946680624000 -1.026940710000 298 | 0.299956284000 0.938961455000 -0.168450182000 -0.693010134000 299 | -0.900577203000 0.336961282000 0.274624002000 2.797841530000 300 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 301 | 25 36 38 302 | 0.436100495000 0.100038567000 0.894320139000 -1.025018180000 303 | 0.285529016000 0.927066703000 -0.242933593000 -0.719589268000 304 | -0.853398205000 0.361297845000 0.375729616000 2.743387920000 305 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 306 | 25 37 38 307 | 0.368916659000 -0.058899258900 0.927593869000 -1.040759480000 308 | 0.230336164000 0.972651301000 -0.029846594500 -0.700550105000 309 | -0.900468877000 0.224669455000 0.372394370000 2.788066720000 310 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 311 | 28 29 38 312 | 0.976539009000 -0.004547195950 0.215292558000 0.157161873000 313 | 0.065038697200 0.959314977000 -0.274748636000 -0.093471687900 314 | -0.205283563000 0.282304334000 0.937104514000 -0.040461493000 315 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 316 | 28 30 38 317 | 0.877836117000 -0.063270074200 0.474763370000 0.310289660000 318 | 0.131533426000 0.984972142000 -0.111941905000 -0.118460314000 319 | -0.460546484000 0.160713528000 0.872965268000 0.273633548000 320 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 321 | 29 31 38 322 | 0.835143993000 -0.050052967500 0.547747817000 0.221311344000 323 | -0.023200177200 0.991758187000 0.125998153000 
0.122439506000 324 | -0.549540502000 -0.117934785000 0.827100238000 0.987626367000 325 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 326 | 30 32 38 327 | 0.629430088000 -0.063966415300 0.774419965000 -0.382251302000 328 | 0.077224427600 0.996821679000 0.019569549700 -0.014763313200 329 | -0.773211006000 0.047486862700 0.632370406000 1.024108680000 330 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 331 | 30 33 38 332 | 0.390883066000 -0.101569763000 0.914818124000 -0.953823948000 333 | 0.115321731000 0.991464431000 0.060804533300 0.005499482220 334 | -0.913186772000 0.081731124900 0.399262167000 1.121067780000 335 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 336 | 30 34 38 337 | 0.158595022000 -0.001168128650 0.987341711000 -1.276553690000 338 | 0.142171526000 0.989604018000 -0.021665452200 0.020617896500 339 | -0.977053132000 0.143808741000 0.157111908000 1.391873830000 340 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 341 | 30 35 38 342 | 0.160703943000 0.122640772000 0.979352391000 -1.306584820000 343 | 0.135724885000 0.980077365000 -0.145002636000 0.044914383300 344 | -0.977627235000 0.156225625000 0.140857656000 1.359884670000 345 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 346 | 30 36 38 347 | 0.288337277000 0.156975880000 0.944573225000 -1.313660950000 348 | 0.126209616000 0.971632055000 -0.199998546000 0.009513355360 349 | -0.949174064000 0.176881905000 0.260346558000 1.311181040000 350 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 351 | 30 37 38 352 | 0.214376997000 -0.022461401000 0.976491624000 -1.321818970000 353 | 0.065694406600 0.997801849000 0.008529355760 0.036208347600 354 | -0.974538503000 0.062321988900 0.215383380000 1.353927970000 355 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 356 | 31 35 38 357 | 0.444666803000 0.095493380100 0.890590813000 -1.402145110000 358 | 0.122474118000 0.978479638000 -0.166067137000 0.114145690000 359 | -0.887284000000 0.182920048000 0.423401813000 0.291301642000 360 | 0.000000000000 0.000000000000 0.000000000000 1.000000000000 361 | -------------------------------------------------------------------------------- /benchmarks/3DLoMatch/sun3d-mit_lab_hj-lab_hj_tea_nov_2_2012_scan1_erika/gt_overlap.log: -------------------------------------------------------------------------------- 1 | 0,1,0.6983 2 | 0,2,0.1538 3 | 0,3,0.0141 4 | 0,4,0.0006 5 | 0,5,0.0877 6 | 0,6,0.2780 7 | 0,7,0.2465 8 | 0,8,0.3627 9 | 0,9,0.4328 10 | 0,10,0.4738 11 | 0,11,0.1940 12 | 0,12,0.0399 13 | 0,13,0.0220 14 | 0,14,0.0052 15 | 0,15,0.0048 16 | 0,16,0.0051 17 | 0,17,0.0000 18 | 0,18,0.0000 19 | 0,19,0.0000 20 | 0,20,0.0000 21 | 0,21,0.0310 22 | 0,22,0.0380 23 | 0,23,0.0526 24 | 0,24,0.1042 25 | 0,25,0.0453 26 | 0,26,0.1419 27 | 0,27,0.3931 28 | 0,28,0.3217 29 | 0,29,0.0000 30 | 0,30,0.0000 31 | 0,31,0.0000 32 | 0,32,0.0000 33 | 0,33,0.0000 34 | 0,34,0.0000 35 | 0,35,0.0000 36 | 0,36,0.0000 37 | 0,37,0.0000 38 | 1,2,0.4768 39 | 1,3,0.0820 40 | 1,4,0.0484 41 | 1,5,0.2321 42 | 1,6,0.4124 43 | 1,7,0.2067 44 | 1,8,0.3769 45 | 1,9,0.4220 46 | 1,10,0.3575 47 | 1,11,0.0899 48 | 1,12,0.0046 49 | 1,13,0.0031 50 | 1,14,0.0000 51 | 1,15,0.0000 52 | 1,16,0.0000 53 | 1,17,0.0000 54 | 1,18,0.0000 55 | 1,19,0.0000 56 | 1,20,0.0000 57 | 1,21,0.0024 58 | 1,22,0.0040 59 | 1,23,0.0078 60 | 1,24,0.0374 61 | 1,25,0.0125 62 | 1,26,0.1152 63 | 1,27,0.3743 64 | 1,28,0.3775 65 | 1,29,0.0282 66 | 1,30,0.0001 67 | 1,31,0.0000 68 | 1,32,0.0000 69 | 1,33,0.0000 70 | 1,34,0.0000 71 | 
1,35,0.0000 72 | 1,36,0.0000 73 | 1,37,0.0000 74 | 2,3,0.5100 75 | 2,4,0.3651 76 | 2,5,0.3231 77 | 2,6,0.2628 78 | 2,7,0.2614 79 | 2,8,0.3450 80 | 2,9,0.1202 81 | 2,10,0.0596 82 | 2,11,0.0054 83 | 2,12,0.0000 84 | 2,13,0.0000 85 | 2,14,0.0000 86 | 2,15,0.0000 87 | 2,16,0.0000 88 | 2,17,0.0000 89 | 2,18,0.0000 90 | 2,19,0.0000 91 | 2,20,0.0000 92 | 2,21,0.0000 93 | 2,22,0.0000 94 | 2,23,0.0000 95 | 2,24,0.0004 96 | 2,25,0.0000 97 | 2,26,0.0177 98 | 2,27,0.0735 99 | 2,28,0.1629 100 | 2,29,0.0674 101 | 2,30,0.1724 102 | 2,31,0.0000 103 | 2,32,0.0000 104 | 2,33,0.0000 105 | 2,34,0.0000 106 | 2,35,0.0000 107 | 2,36,0.0000 108 | 2,37,0.0000 109 | 3,4,0.6330 110 | 3,5,0.2891 111 | 3,6,0.2800 112 | 3,7,0.2325 113 | 3,8,0.1925 114 | 3,9,0.0529 115 | 3,10,0.0140 116 | 3,11,0.0000 117 | 3,12,0.0000 118 | 3,13,0.0000 119 | 3,14,0.0000 120 | 3,15,0.0000 121 | 3,16,0.0000 122 | 3,17,0.0000 123 | 3,18,0.0000 124 | 3,19,0.0000 125 | 3,20,0.0000 126 | 3,21,0.0000 127 | 3,22,0.0000 128 | 3,23,0.0000 129 | 3,24,0.0000 130 | 3,25,0.0000 131 | 3,26,0.0010 132 | 3,27,0.0085 133 | 3,28,0.0998 134 | 3,29,0.0682 135 | 3,30,0.1489 136 | 3,31,0.0000 137 | 3,32,0.0000 138 | 3,33,0.0000 139 | 3,34,0.0000 140 | 3,35,0.0000 141 | 3,36,0.0000 142 | 3,37,0.0000 143 | 4,5,0.6392 144 | 4,6,0.4032 145 | 4,7,0.2834 146 | 4,8,0.2068 147 | 4,9,0.0331 148 | 4,10,0.0013 149 | 4,11,0.0000 150 | 4,12,0.0000 151 | 4,13,0.0000 152 | 4,14,0.0000 153 | 4,15,0.0000 154 | 4,16,0.0000 155 | 4,17,0.0000 156 | 4,18,0.0000 157 | 4,19,0.0000 158 | 4,20,0.0000 159 | 4,21,0.0000 160 | 4,22,0.0000 161 | 4,23,0.0000 162 | 4,24,0.0000 163 | 4,25,0.0000 164 | 4,26,0.0000 165 | 4,27,0.0059 166 | 4,28,0.0347 167 | 4,29,0.0355 168 | 4,30,0.0499 169 | 4,31,0.0000 170 | 4,32,0.0000 171 | 4,33,0.0000 172 | 4,34,0.0000 173 | 4,35,0.0000 174 | 4,36,0.0000 175 | 4,37,0.0000 176 | 5,6,0.7890 177 | 5,7,0.5244 178 | 5,8,0.4097 179 | 5,9,0.1090 180 | 5,10,0.0152 181 | 5,11,0.0000 182 | 5,12,0.0000 183 | 5,13,0.0000 184 | 5,14,0.0000 185 | 5,15,0.0000 186 | 5,16,0.0000 187 | 5,17,0.0000 188 | 5,18,0.0000 189 | 5,19,0.0000 190 | 5,20,0.0000 191 | 5,21,0.0000 192 | 5,22,0.0000 193 | 5,23,0.0000 194 | 5,24,0.0000 195 | 5,25,0.0000 196 | 5,26,0.0042 197 | 5,27,0.0189 198 | 5,28,0.0493 199 | 5,29,0.0144 200 | 5,30,0.0000 201 | 5,31,0.0000 202 | 5,32,0.0000 203 | 5,33,0.0000 204 | 5,34,0.0000 205 | 5,35,0.0000 206 | 5,36,0.0000 207 | 5,37,0.0000 208 | 6,7,0.6664 209 | 6,8,0.5129 210 | 6,9,0.2499 211 | 6,10,0.1559 212 | 6,11,0.0097 213 | 6,12,0.0000 214 | 6,13,0.0000 215 | 6,14,0.0000 216 | 6,15,0.0000 217 | 6,16,0.0000 218 | 6,17,0.0000 219 | 6,18,0.0000 220 | 6,19,0.0000 221 | 6,20,0.0000 222 | 6,21,0.0000 223 | 6,22,0.0000 224 | 6,23,0.0000 225 | 6,24,0.0000 226 | 6,25,0.0000 227 | 6,26,0.0065 228 | 6,27,0.0414 229 | 6,28,0.0385 230 | 6,29,0.0104 231 | 6,30,0.0000 232 | 6,31,0.0000 233 | 6,32,0.0000 234 | 6,33,0.0000 235 | 6,34,0.0000 236 | 6,35,0.0000 237 | 6,36,0.0000 238 | 6,37,0.0000 239 | 7,8,0.6659 240 | 7,9,0.0968 241 | 7,10,0.0807 242 | 7,11,0.0869 243 | 7,12,0.0000 244 | 7,13,0.0000 245 | 7,14,0.0000 246 | 7,15,0.0000 247 | 7,16,0.0000 248 | 7,17,0.0000 249 | 7,18,0.0000 250 | 7,19,0.0000 251 | 7,20,0.0000 252 | 7,21,0.0000 253 | 7,22,0.0000 254 | 7,23,0.0000 255 | 7,24,0.0000 256 | 7,25,0.0000 257 | 7,26,0.0072 258 | 7,27,0.0267 259 | 7,28,0.0386 260 | 7,29,0.0095 261 | 7,30,0.0000 262 | 7,31,0.0000 263 | 7,32,0.0000 264 | 7,33,0.0000 265 | 7,34,0.0000 266 | 7,35,0.0000 267 | 7,36,0.0000 268 | 7,37,0.0000 269 | 8,9,0.2425 270 | 8,10,0.1401 271 | 
8,11,0.0713 272 | 8,12,0.0019 273 | 8,13,0.0000 274 | 8,14,0.0000 275 | 8,15,0.0000 276 | 8,16,0.0000 277 | 8,17,0.0000 278 | 8,18,0.0000 279 | 8,19,0.0000 280 | 8,20,0.0000 281 | 8,21,0.0000 282 | 8,22,0.0000 283 | 8,23,0.0001 284 | 8,24,0.0002 285 | 8,25,0.0002 286 | 8,26,0.0073 287 | 8,27,0.0336 288 | 8,28,0.0468 289 | 8,29,0.0047 290 | 8,30,0.0000 291 | 8,31,0.0000 292 | 8,32,0.0000 293 | 8,33,0.0000 294 | 8,34,0.0000 295 | 8,35,0.0000 296 | 8,36,0.0000 297 | 8,37,0.0000 298 | 9,10,0.6296 299 | 9,11,0.2832 300 | 9,12,0.0666 301 | 9,13,0.0009 302 | 9,14,0.0000 303 | 9,15,0.0000 304 | 9,16,0.0000 305 | 9,17,0.0000 306 | 9,18,0.0000 307 | 9,19,0.0000 308 | 9,20,0.0000 309 | 9,21,0.0086 310 | 9,22,0.0424 311 | 9,23,0.0457 312 | 9,24,0.0366 313 | 9,25,0.0265 314 | 9,26,0.0507 315 | 9,27,0.1673 316 | 9,28,0.1784 317 | 9,29,0.0022 318 | 9,30,0.0000 319 | 9,31,0.0000 320 | 9,32,0.0000 321 | 9,33,0.0000 322 | 9,34,0.0000 323 | 9,35,0.0000 324 | 9,36,0.0000 325 | 9,37,0.0000 326 | 10,11,0.3034 327 | 10,12,0.0421 328 | 10,13,0.0019 329 | 10,14,0.0000 330 | 10,15,0.0000 331 | 10,16,0.0000 332 | 10,17,0.0000 333 | 10,18,0.0000 334 | 10,19,0.0000 335 | 10,20,0.0000 336 | 10,21,0.0157 337 | 10,22,0.0722 338 | 10,23,0.0828 339 | 10,24,0.0801 340 | 10,25,0.0607 341 | 10,26,0.0783 342 | 10,27,0.2330 343 | 10,28,0.1576 344 | 10,29,0.0000 345 | 10,30,0.0000 346 | 10,31,0.0000 347 | 10,32,0.0000 348 | 10,33,0.0000 349 | 10,34,0.0000 350 | 10,35,0.0000 351 | 10,36,0.0000 352 | 10,37,0.0000 353 | 11,12,0.3842 354 | 11,13,0.0509 355 | 11,14,0.0018 356 | 11,15,0.0016 357 | 11,16,0.0019 358 | 11,17,0.0000 359 | 11,18,0.0000 360 | 11,19,0.0000 361 | 11,20,0.0000 362 | 11,21,0.0215 363 | 11,22,0.0286 364 | 11,23,0.0292 365 | 11,24,0.0260 366 | 11,25,0.0240 367 | 11,26,0.0232 368 | 11,27,0.0368 369 | 11,28,0.0086 370 | 11,29,0.0000 371 | 11,30,0.0000 372 | 11,31,0.0000 373 | 11,32,0.0000 374 | 11,33,0.0000 375 | 11,34,0.0000 376 | 11,35,0.0000 377 | 11,36,0.0000 378 | 11,37,0.0000 379 | 12,13,0.3239 380 | 12,14,0.0287 381 | 12,15,0.0381 382 | 12,16,0.0433 383 | 12,17,0.0000 384 | 12,18,0.0000 385 | 12,19,0.0000 386 | 12,20,0.0000 387 | 12,21,0.0321 388 | 12,22,0.0359 389 | 12,23,0.0366 390 | 12,24,0.0366 391 | 12,25,0.0299 392 | 12,26,0.0223 393 | 12,27,0.0142 394 | 12,28,0.0000 395 | 12,29,0.0000 396 | 12,30,0.0000 397 | 12,31,0.0000 398 | 12,32,0.0000 399 | 12,33,0.0000 400 | 12,34,0.0000 401 | 12,35,0.0000 402 | 12,36,0.0000 403 | 12,37,0.0000 404 | 13,14,0.3967 405 | 13,15,0.3393 406 | 13,16,0.2876 407 | 13,17,0.1754 408 | 13,18,0.1115 409 | 13,19,0.0028 410 | 13,20,0.0000 411 | 13,21,0.0105 412 | 13,22,0.0115 413 | 13,23,0.0115 414 | 13,24,0.0114 415 | 13,25,0.0079 416 | 13,26,0.0049 417 | 13,27,0.0022 418 | 13,28,0.0000 419 | 13,29,0.0000 420 | 13,30,0.0000 421 | 13,31,0.0000 422 | 13,32,0.0000 423 | 13,33,0.0000 424 | 13,34,0.0000 425 | 13,35,0.0000 426 | 13,36,0.0000 427 | 13,37,0.0000 428 | 14,15,0.8881 429 | 14,16,0.7457 430 | 14,17,0.4629 431 | 14,18,0.2293 432 | 14,19,0.0353 433 | 14,20,0.0000 434 | 14,21,0.0044 435 | 14,22,0.0044 436 | 14,23,0.0043 437 | 14,24,0.0040 438 | 14,25,0.0030 439 | 14,26,0.0007 440 | 14,27,0.0000 441 | 14,28,0.0000 442 | 14,29,0.0000 443 | 14,30,0.0000 444 | 14,31,0.0000 445 | 14,32,0.0000 446 | 14,33,0.0000 447 | 14,34,0.0000 448 | 14,35,0.0000 449 | 14,36,0.0000 450 | 14,37,0.0000 451 | 15,16,0.8778 452 | 15,17,0.5385 453 | 15,18,0.2538 454 | 15,19,0.0490 455 | 15,20,0.0071 456 | 15,21,0.0036 457 | 15,22,0.0036 458 | 15,23,0.0036 459 | 15,24,0.0033 460 | 15,25,0.0025 461 | 
15,26,0.0002 462 | 15,27,0.0000 463 | 15,28,0.0000 464 | 15,29,0.0000 465 | 15,30,0.0000 466 | 15,31,0.0000 467 | 15,32,0.0000 468 | 15,33,0.0000 469 | 15,34,0.0000 470 | 15,35,0.0000 471 | 15,36,0.0000 472 | 15,37,0.0000 473 | 16,17,0.6637 474 | 16,18,0.3079 475 | 16,19,0.0749 476 | 16,20,0.0431 477 | 16,21,0.0103 478 | 16,22,0.0044 479 | 16,23,0.0044 480 | 16,24,0.0042 481 | 16,25,0.0032 482 | 16,26,0.0003 483 | 16,27,0.0000 484 | 16,28,0.0000 485 | 16,29,0.0000 486 | 16,30,0.0000 487 | 16,31,0.0000 488 | 16,32,0.0000 489 | 16,33,0.0000 490 | 16,34,0.0000 491 | 16,35,0.0000 492 | 16,36,0.0000 493 | 16,37,0.0000 494 | 17,18,0.5182 495 | 17,19,0.3154 496 | 17,20,0.2819 497 | 17,21,0.0338 498 | 17,22,0.0000 499 | 17,23,0.0000 500 | 17,24,0.0000 501 | 17,25,0.0000 502 | 17,26,0.0000 503 | 17,27,0.0000 504 | 17,28,0.0000 505 | 17,29,0.0000 506 | 17,30,0.0000 507 | 17,31,0.0000 508 | 17,32,0.0000 509 | 17,33,0.0000 510 | 17,34,0.0000 511 | 17,35,0.0000 512 | 17,36,0.0000 513 | 17,37,0.0000 514 | 18,19,0.6182 515 | 18,20,0.3568 516 | 18,21,0.0808 517 | 18,22,0.0000 518 | 18,23,0.0000 519 | 18,24,0.0000 520 | 18,25,0.0000 521 | 18,26,0.0000 522 | 18,27,0.0000 523 | 18,28,0.0000 524 | 18,29,0.0000 525 | 18,30,0.0000 526 | 18,31,0.0000 527 | 18,32,0.0000 528 | 18,33,0.0000 529 | 18,34,0.0000 530 | 18,35,0.0000 531 | 18,36,0.0000 532 | 18,37,0.0000 533 | 19,20,0.6984 534 | 19,21,0.2884 535 | 19,22,0.0655 536 | 19,23,0.0052 537 | 19,24,0.0000 538 | 19,25,0.0000 539 | 19,26,0.0000 540 | 19,27,0.0000 541 | 19,28,0.0000 542 | 19,29,0.0000 543 | 19,30,0.0000 544 | 19,31,0.0000 545 | 19,32,0.0000 546 | 19,33,0.0000 547 | 19,34,0.0000 548 | 19,35,0.0000 549 | 19,36,0.0000 550 | 19,37,0.0000 551 | 20,21,0.4386 552 | 20,22,0.1434 553 | 20,23,0.0487 554 | 20,24,0.0160 555 | 20,25,0.0084 556 | 20,26,0.0003 557 | 20,27,0.0000 558 | 20,28,0.0000 559 | 20,29,0.0000 560 | 20,30,0.0000 561 | 20,31,0.0000 562 | 20,32,0.0000 563 | 20,33,0.0000 564 | 20,34,0.0000 565 | 20,35,0.0000 566 | 20,36,0.0000 567 | 20,37,0.0000 568 | 21,22,0.4197 569 | 21,23,0.2469 570 | 21,24,0.1464 571 | 21,25,0.1165 572 | 21,26,0.0489 573 | 21,27,0.0157 574 | 21,28,0.0004 575 | 21,29,0.0000 576 | 21,30,0.0000 577 | 21,31,0.0000 578 | 21,32,0.0000 579 | 21,33,0.0000 580 | 21,34,0.0004 581 | 21,35,0.0041 582 | 21,36,0.0000 583 | 21,37,0.0000 584 | 22,23,0.6861 585 | 22,24,0.3625 586 | 22,25,0.2918 587 | 22,26,0.1578 588 | 22,27,0.0992 589 | 22,28,0.0066 590 | 22,29,0.0000 591 | 22,30,0.0000 592 | 22,31,0.0000 593 | 22,32,0.0000 594 | 22,33,0.0000 595 | 22,34,0.0000 596 | 22,35,0.0000 597 | 22,36,0.0000 598 | 22,37,0.0000 599 | 23,24,0.5630 600 | 23,25,0.4609 601 | 23,26,0.2676 602 | 23,27,0.1814 603 | 23,28,0.0407 604 | 23,29,0.0000 605 | 23,30,0.0000 606 | 23,31,0.0000 607 | 23,32,0.0000 608 | 23,33,0.0000 609 | 23,34,0.0000 610 | 23,35,0.0000 611 | 23,36,0.0000 612 | 23,37,0.0000 613 | 24,25,0.6586 614 | 24,26,0.2640 615 | 24,27,0.2054 616 | 24,28,0.0963 617 | 24,29,0.0000 618 | 24,30,0.0000 619 | 24,31,0.0000 620 | 24,32,0.0516 621 | 24,33,0.1919 622 | 24,34,0.1384 623 | 24,35,0.1420 624 | 24,36,0.1246 625 | 24,37,0.1587 626 | 25,26,0.3551 627 | 25,27,0.1786 628 | 25,28,0.0466 629 | 25,29,0.0033 630 | 25,30,0.1161 631 | 25,31,0.1050 632 | 25,32,0.1914 633 | 25,33,0.2382 634 | 25,34,0.2224 635 | 25,35,0.1893 636 | 25,36,0.2075 637 | 25,37,0.2094 638 | 26,27,0.7045 639 | 26,28,0.3344 640 | 26,29,0.0000 641 | 26,30,0.0000 642 | 26,31,0.0000 643 | 26,32,0.0000 644 | 26,33,0.0000 645 | 26,34,0.0000 646 | 26,35,0.0000 647 | 26,36,0.0000 648 | 
26,37,0.0000 649 | 27,28,0.5784 650 | 27,29,0.0180 651 | 27,30,0.0014 652 | 27,31,0.0000 653 | 27,32,0.0000 654 | 27,33,0.0000 655 | 27,34,0.0000 656 | 27,35,0.0000 657 | 27,36,0.0000 658 | 27,37,0.0000 659 | 28,29,0.1223 660 | 28,30,0.1538 661 | 28,31,0.0012 662 | 28,32,0.0000 663 | 28,33,0.0000 664 | 28,34,0.0000 665 | 28,35,0.0000 666 | 28,36,0.0000 667 | 28,37,0.0000 668 | 29,30,0.5859 669 | 29,31,0.2812 670 | 29,32,0.0460 671 | 29,33,0.0339 672 | 29,34,0.0343 673 | 29,35,0.0393 674 | 29,36,0.0506 675 | 29,37,0.0315 676 | 30,31,0.3801 677 | 30,32,0.1849 678 | 30,33,0.1707 679 | 30,34,0.1292 680 | 30,35,0.1212 681 | 30,36,0.1598 682 | 30,37,0.1377 683 | 31,32,0.4794 684 | 31,33,0.3929 685 | 31,34,0.3085 686 | 31,35,0.2958 687 | 31,36,0.3909 688 | 31,37,0.3200 689 | 32,33,0.9021 690 | 32,34,0.5685 691 | 32,35,0.5605 692 | 32,36,0.9270 693 | 32,37,0.7007 694 | 33,34,0.7599 695 | 33,35,0.6514 696 | 33,36,0.8516 697 | 33,37,0.8799 698 | 34,35,0.8392 699 | 34,36,0.8554 700 | 34,37,0.8323 701 | 35,36,0.8529 702 | 35,37,0.6675 703 | 36,37,0.6923 704 | -------------------------------------------------------------------------------- /common.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | from utils.SE3 import * 5 | 6 | 7 | def rigid_transform_3d(A, B, weights=None, weight_threshold=0): 8 | """ 9 | Input: 10 | - A: [bs, num_corr, 3], source point cloud 11 | - B: [bs, num_corr, 3], target point cloud 12 | - weights: [bs, num_corr] weight for each correspondence 13 | - weight_threshold: float, weights below this threshold are set to zero 14 | Output: 15 | - trans: [bs, 4, 4], estimated rigid transformation (R and t composed by integrate_trans) 16 | """ 17 | bs = A.shape[0] 18 | if weights is None: 19 | weights = torch.ones_like(A[:, :, 0]) 20 | weights[weights < weight_threshold] = 0 21 | # weights = weights / (torch.sum(weights, dim=-1, keepdim=True) + 1e-6) 22 | 23 | # find mean of point cloud 24 | centroid_A = torch.sum(A * weights[:, :, None], dim=1, keepdim=True) / (torch.sum(weights, dim=1, keepdim=True)[:, :, None] + 1e-6) 25 | centroid_B = torch.sum(B * weights[:, :, None], dim=1, keepdim=True) / (torch.sum(weights, dim=1, keepdim=True)[:, :, None] + 1e-6) 26 | 27 | # subtract mean 28 | Am = A - centroid_A 29 | Bm = B - centroid_B 30 | 31 | # construct weight covariance matrix 32 | Weight = torch.diag_embed(weights) 33 | H = Am.permute(0, 2, 1) @ Weight @ Bm 34 | 35 | # find rotation (note: torch.svd returns V rather than V^T, so 'Vt' below actually holds V) 36 | U, S, Vt = torch.svd(H.cpu()) 37 | U, S, Vt = U.to(weights.device), S.to(weights.device), Vt.to(weights.device) 38 | delta_UV = torch.det(Vt @ U.permute(0, 2, 1)) 39 | eye = torch.eye(3)[None, :, :].repeat(bs, 1, 1).to(A.device) 40 | eye[:, -1, -1] = delta_UV # correct a possible reflection so that det(R) = +1 41 | R = Vt @ eye @ U.permute(0, 2, 1) 42 | t = centroid_B.permute(0,2,1) - R @ centroid_A.permute(0,2,1) 43 | # warp_A = transform(A, integrate_trans(R,t)) 44 | # RMSE = torch.sum( (warp_A - B) ** 2, dim=-1).mean() 45 | return integrate_trans(R, t) 46 | 47 |
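# --- Added usage sketch (not part of the original common.py) -----------------
# Minimal CPU-only sanity check of rigid_transform_3d on synthetic data. It
# assumes integrate_trans (from utils.SE3, not shown here) packs R and t into a
# [bs, 4, 4] homogeneous matrix, which matches how the result is consumed
# elsewhere in the repository:
#
#     A = torch.rand(1, 100, 3)                # [bs, num_corr, 3] source points
#     t_gt = torch.tensor([0.1, 0.0, 0.0])     # ground-truth translation (10 cm)
#     B = A + t_gt                             # target = purely translated source
#     trans = rigid_transform_3d(A, B)         # estimated [1, 4, 4] transform
#     assert torch.allclose(trans[0, :3, :3], torch.eye(3), atol=1e-5)
#     assert torch.allclose(trans[0, :3, 3], t_gt, atol=1e-5)
# ------------------------------------------------------------------------------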
48 | def knn(x, k, ignore_self=False, normalized=True): 49 | """ find feature space knn neighbors of x 50 | Input: 51 | - x: [bs, num_corr, num_channels], input features 52 | - k: int, number of nearest neighbors 53 | - ignore_self: True/False, whether to exclude each point from its own neighbor set. 54 | - normalized: True/False, whether the features x are L2-normalized. 55 | Output: 56 | - idx: [bs, num_corr, k], the indices of knn neighbors 57 | """ 58 | inner = 2 * torch.matmul(x, x.transpose(2, 1)) 59 | if normalized: 60 | pairwise_distance = 2 - inner 61 | else: 62 | xx = torch.sum(x ** 2, dim=-1, keepdim=True) 63 | pairwise_distance = xx - inner + xx.transpose(2, 1) 64 | 65 | if ignore_self is False: 66 | idx = pairwise_distance.topk(k=k, dim=-1, largest=False)[1] # (batch_size, num_points, k) 67 | else: 68 | idx = pairwise_distance.topk(k=k + 1, dim=-1, largest=False)[1][:, :, 1:] 69 | return idx 70 | 71 | 72 | class EdgeConv(nn.Module): 73 | def __init__(self, in_dim, out_dim, k, idx=None): 74 | super(EdgeConv, self).__init__() 75 | self.in_dim = in_dim 76 | self.out_dim = out_dim 77 | self.k = k 78 | self.idx = idx 79 | self.conv = nn.Conv2d(in_dim * 2, out_dim, kernel_size=1, bias=False) 80 | 81 | def forward(self, x): 82 | # x: [bs, in_dim, N] 83 | bs = x.shape[0] 84 | num_corr = x.shape[2] 85 | device = x.device 86 | 87 | # if self.idx is None: 88 | self.idx = knn(x.permute(0,2,1), self.k, normalized=False) 89 | 90 | idx_base = torch.arange(0, bs, device=device).view(-1, 1, 1) * num_corr 91 | idx = self.idx + idx_base 92 | idx = idx.view(-1) 93 | 94 | x = x.transpose(2, 1).contiguous() 95 | features = x.view(bs * num_corr, -1)[idx, :] 96 | features = features.view(bs, num_corr, self.k, self.in_dim) 97 | x = x.view(bs, num_corr, 1, self.in_dim).repeat(1, 1, self.k, 1) 98 | 99 | features = torch.cat([features - x, x], dim=3).permute(0, 3, 1, 2).contiguous() 100 | 101 | output = self.conv(features) 102 | output = output.max(dim=-1, keepdim=False)[0] 103 | return output 104 | 105 | 106 | class ContextNormalization(nn.Module): 107 | def __init__(self): 108 | super(ContextNormalization, self).__init__() 109 | 110 | def forward(self, x): 111 | var_eps = 1e-3 112 | mean = torch.mean(x, 2, keepdim=True) 113 | variance = torch.var(x, 2, keepdim=True) 114 | x = (x - mean) / torch.sqrt(variance + var_eps) 115 | return x 116 | 117 | 118 | class PointCN(nn.Module): 119 | def __init__(self, in_dim=6, num_layers=6, num_channels=128, act_pos='post'): 120 | super(PointCN, self).__init__() 121 | assert act_pos == 'pre' or act_pos == 'post' 122 | 123 | modules = [nn.Conv1d(in_dim, num_channels, kernel_size=1, bias=True)] 124 | for i in range(num_layers): 125 | if act_pos == 'pre': 126 | modules.append(ContextNormalization()) 127 | modules.append(nn.BatchNorm1d(num_channels)) 128 | modules.append(nn.ReLU(inplace=True)) 129 | modules.append(nn.Conv1d(num_channels, num_channels, kernel_size=1, bias=True)) 130 | else: 131 | modules.append(nn.Conv1d(num_channels, num_channels, kernel_size=1, bias=True)) 132 | modules.append(ContextNormalization()) 133 | modules.append(nn.BatchNorm1d(num_channels)) 134 | modules.append(nn.ReLU(inplace=True)) 135 | self.encoder = nn.Sequential(*modules) 136 | 137 | def forward(self, x): 138 | features = self.encoder(x) 139 | return features 140 | --------------------------------------------------------------------------------
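A quick shape check for the building blocks above (added for illustration; it runs on CPU and only exercises the tensor conventions documented in the docstrings, so the random inputs stand in for real correspondence features):

    import torch
    from common import knn, EdgeConv, PointCN

    feats = torch.rand(2, 1000, 32)                  # [bs, num_corr, num_channels]
    idx = knn(feats, k=10, normalized=False)         # [2, 1000, 10] neighbor indices

    corr = torch.rand(2, 6, 1000)                    # [bs, in_dim, num_corr] input
    print(PointCN(in_dim=6)(corr).shape)             # torch.Size([2, 128, 1000])
    print(EdgeConv(in_dim=6, out_dim=64, k=10)(corr).shape)   # torch.Size([2, 64, 1000])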
/config.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import time 3 | import os 4 | 5 | arg_lists = [] 6 | parser = argparse.ArgumentParser() 7 | 8 | 9 | def add_argument_group(name): 10 | arg = parser.add_argument_group(name) 11 | arg_lists.append(arg) 12 | return arg 13 | 14 | 15 | def str2bool(v): 16 | return v.lower() in ('true', '1') 17 | 18 | 19 | dataset = '3DMatch' 20 | experiment_id = f"PointDSC_{dataset}_{time.strftime('%m%d%H%M')}" 21 | # snapshot configurations 22 | snapshot_arg = add_argument_group('Snapshot') 23 | snapshot_arg.add_argument('--snapshot_dir', type=str, default=f'snapshot/{experiment_id}') 24 | snapshot_arg.add_argument('--tboard_dir', type=str, default=f'tensorboard/{experiment_id}') 25 | snapshot_arg.add_argument('--snapshot_interval', type=int, default=1) 26 | snapshot_arg.add_argument('--save_dir', type=str, default=os.path.join(f'snapshot/{experiment_id}', 'models/')) 27 | 28 | # Network configurations 29 | net_arg = add_argument_group('Network') 30 | net_arg.add_argument('--in_dim', type=int, default=6) 31 | net_arg.add_argument('--num_layers', type=int, default=12) 32 | net_arg.add_argument('--num_channels', type=int, default=128) 33 | net_arg.add_argument('--num_iterations', type=int, default=10, help='number of iterations of the power iteration algorithm') 34 | net_arg.add_argument('--ratio', type=float, default=0.1, help='max ratio of seeding points') 35 | net_arg.add_argument('--k', type=int, default=40, help='size of local neighborhood') 36 | 37 | # Loss configurations 38 | loss_arg = add_argument_group('Loss') 39 | loss_arg.add_argument('--evaluate_interval', type=int, default=1) 40 | loss_arg.add_argument('--balanced', type=str2bool, default=False) 41 | loss_arg.add_argument('--weight_classification', type=float, default=1.0) 42 | loss_arg.add_argument('--weight_spectralmatching', type=float, default=1.0) 43 | loss_arg.add_argument('--weight_transformation', type=float, default=0.0) 44 | loss_arg.add_argument('--transformation_loss_start_epoch', type=int, default=0) 45 | 46 | # Optimizer configurations 47 | opt_arg = add_argument_group('Optimizer') 48 | opt_arg.add_argument('--optimizer', type=str, default='ADAM', choices=['SGD', 'ADAM']) 49 | opt_arg.add_argument('--max_epoch', type=int, default=50) 50 | opt_arg.add_argument('--training_max_iter', type=int, default=3500) 51 | opt_arg.add_argument('--val_max_iter', type=int, default=1000) 52 | opt_arg.add_argument('--lr', type=float, default=1e-4) 53 | opt_arg.add_argument('--weight_decay', type=float, default=1e-6) 54 | opt_arg.add_argument('--momentum', type=float, default=0.9) 55 | opt_arg.add_argument('--scheduler', type=str, default='ExpLR') 56 | opt_arg.add_argument('--scheduler_gamma', type=float, default=0.99) 57 | opt_arg.add_argument('--scheduler_interval', type=int, default=1) 58 | 59 | # Dataset and dataloader configurations 60 | data_arg = add_argument_group('Data') 61 | if dataset == '3DMatch': 62 | data_arg.add_argument('--root', type=str, default='/data/3DMatch') 63 | data_arg.add_argument('--descriptor', type=str, default='fcgf', choices=['d3feat', 'fpfh', 'fcgf']) 64 | data_arg.add_argument('--inlier_threshold', type=float, default=0.10) 65 | net_arg.add_argument('--sigma_d', type=float, default=0.10) 66 | data_arg.add_argument('--downsample', type=float, default=0.03) 67 | data_arg.add_argument('--re_thre', type=float, default=15, help='rotation error threshold (deg)') 68 | data_arg.add_argument('--te_thre', type=float, default=30, help='translation error threshold (cm)') 69 | else: 70 | data_arg.add_argument('--root', type=str, default='/data/KITTI') 71 | data_arg.add_argument('--descriptor', type=str, default='fcgf', choices=['fcgf', 'fpfh']) 72 | data_arg.add_argument('--inlier_threshold', type=float, default=1.2) 73 | net_arg.add_argument('--sigma_d', type=float, default=1.2) 74 | data_arg.add_argument('--downsample', type=float, default=0.30) 75 | data_arg.add_argument('--re_thre', type=float, default=5, help='rotation error threshold (deg)')
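# NOTE (added): a pair is counted as successfully registered only when the
# rotation error is below --re_thre (degrees) AND the translation error is
# below --te_thre (centimeters), e.g. 15 deg / 30 cm on 3DMatch versus a
# stricter 5 deg but looser 60 cm on KITTI.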
76 | data_arg.add_argument('--te_thre', type=float, default=60, help='translation error threshold (cm)') 77 | 78 | data_arg.add_argument('--num_node', type=int, default=1000) 79 | data_arg.add_argument('--use_mutual', type=str2bool, default=False) 80 | data_arg.add_argument('--augment_axis', type=int, default=3) 81 | data_arg.add_argument('--augment_rotation', type=float, default=1.0, help='rotation angle = num * 2pi') 82 | data_arg.add_argument('--augment_translation', type=float, default=0.5, help='translation = num (m)') 83 | data_arg.add_argument('--batch_size', type=int, default=16) 84 | data_arg.add_argument('--num_workers', type=int, default=16) 85 | 86 | # Other configurations 87 | misc_arg = add_argument_group('Misc') 88 | misc_arg.add_argument('--gpu_mode', type=str2bool, default=True) 89 | misc_arg.add_argument('--verbose', type=str2bool, default=True) 90 | misc_arg.add_argument('--pretrain', type=str, default='') 91 | misc_arg.add_argument('--weights_fixed', type=str2bool, default=False) 92 | 93 | 94 | def get_config(): 95 | args = parser.parse_args() 96 | return args 97 | -------------------------------------------------------------------------------- /config_json/config_3DLoMatch.json: -------------------------------------------------------------------------------- 1 | { 2 | "CUDA_Devices": "2", 3 | "num_iterations": 10, 4 | "dataset": "3DLoMatch", 5 | "ratio": 0.2, 6 | "k1": 30, 7 | "k2": 20, 8 | "data_path": "data/3DLoMatch", 9 | "descriptor": "predator", 10 | "inlier_threshold": 0.1, 11 | "sigma_d": 0.1, 12 | "d_thre": 0.1, 13 | "downsample": 0.05, 14 | "re_thre": 15, 15 | "te_thre": 30, 16 | "num_node": 5000, 17 | "use_mutual": false, 18 | "max_points": 8000, 19 | "nms_radius": 0.1 20 | } 21 | -------------------------------------------------------------------------------- /config_json/config_3DMatch.json: -------------------------------------------------------------------------------- 1 | { 2 | "CUDA_Devices": "2", 3 | "num_iterations": 10, 4 | "dataset": "3DMatch", 5 | "ratio": 0.2, 6 | "k1": 30, 7 | "k2": 20, 8 | "data_path": "data/3DMatch", 9 | "descriptor": "fpfh", 10 | "inlier_threshold": 0.1, 11 | "sigma_d": 0.1, 12 | "d_thre": 0.1, 13 | "downsample": 0.05, 14 | "re_thre": 15, 15 | "te_thre": 30, 16 | "num_node": "all", 17 | "use_mutual": false, 18 | "max_points": 8000, 19 | "nms_radius": 0.1 20 | } 21 | -------------------------------------------------------------------------------- /config_json/config_KITTI.json: -------------------------------------------------------------------------------- 1 | { 2 | "CUDA_Devices": "1", 3 | "num_iterations": 20, 4 | "dataset": "KITTI", 5 | "ratio": 0.2, 6 | "k1": 30, 7 | "k2": 20, 8 | "data_path": "data/KITTI/fpfh_test", 9 | "descriptor": "fpfh", 10 | "inlier_threshold": 0.6, 11 | "d_thre": 0.1, 12 | "downsample": 0.3, 13 | "re_thre": 5, 14 | "te_thre": 60, 15 | "num_node": 8000, 16 | "use_mutual": false, 17 | "max_points": 8000, 18 | "nms_radius": 0.6 19 | } 20 | -------------------------------------------------------------------------------- /dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | import torch.utils.data as data 4 | from utils.SE3 import * 5 | import torch 6 | 7 | 8 | class ThreeDLoader(data.Dataset): 9 | def __init__(self, 10 | root, 11 | descriptor='fcgf', 12 | inlier_threshold=0.10, 13 | num_node=5000, 14 | downsample=0.03, 15 | use_mutual=False, 16 | select_scene=None, 17 | ): 18 | self.root = root 19 | self.descriptor = descriptor 20 |
assert descriptor in ['fcgf', 'fpfh'] 21 | self.inlier_threshold = inlier_threshold 22 | self.num_node = num_node 23 | self.use_mutual = use_mutual 24 | self.sigma_spat = 0.1 25 | self.num_iterations = 10 # maximum iteration of power iteration algorithm 26 | self.ratio = 0.1 # the maximum ratio of seeds. 27 | self.nms_radius = 0.1 28 | 29 | # containers 30 | self.gt_trans = {} 31 | 32 | self.scene_list = [ 33 | '7-scenes-redkitchen', 34 | 'sun3d-home_at-home_at_scan1_2013_jan_1', 35 | 'sun3d-home_md-home_md_scan9_2012_sep_30', 36 | 'sun3d-hotel_uc-scan3', 37 | 'sun3d-hotel_umd-maryland_hotel1', 38 | 'sun3d-hotel_umd-maryland_hotel3', 39 | 'sun3d-mit_76_studyroom-76-1studyroom2', 40 | 'sun3d-mit_lab_hj-lab_hj_tea_nov_2_2012_scan1_erika' 41 | ] 42 | if select_scene in self.scene_list: 43 | self.scene_list = [select_scene] 44 | 45 | # load ground truth transformation 46 | for scene in self.scene_list: 47 | scene_path = f'{self.root}/fragments/{scene}' 48 | gt_path = f'{self.root}/gt_result/{scene}-evaluation' 49 | for k, v in self.__loadlog__(gt_path).items(): 50 | self.gt_trans[f'{scene}@{k}'] = v 51 | 52 | def get_data(self, index): 53 | key = list(self.gt_trans.keys())[index] 54 | scene = key.split('@')[0] 55 | src_id = key.split('@')[1].split('_')[0] 56 | tgt_id = key.split('@')[1].split('_')[1] 57 | 58 | # load point coordinates and pre-computed per-point local descriptors 59 | if self.descriptor == 'fcgf': 60 | src_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{src_id}_fcgf.npz") 61 | tgt_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{tgt_id}_fcgf.npz") 62 | src_keypts = src_data['xyz'] 63 | tgt_keypts = tgt_data['xyz'] 64 | src_features = src_data['feature'] 65 | tgt_features = tgt_data['feature'] 66 | elif self.descriptor == 'fpfh': 67 | src_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{src_id}_fpfh.npz") 68 | tgt_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{tgt_id}_fpfh.npz") 69 | src_keypts = src_data['xyz'] 70 | tgt_keypts = tgt_data['xyz'] 71 | src_features = src_data['feature'] 72 | tgt_features = tgt_data['feature'] 73 | src_features = src_features / (np.linalg.norm(src_features, axis=1, keepdims=True) + 1e-6) 74 | tgt_features = tgt_features / (np.linalg.norm(tgt_features, axis=1, keepdims=True) + 1e-6) 75 | 76 | # compute ground truth transformation 77 | gt_trans = np.linalg.inv(self.gt_trans[key]) # the given ground truth trans is target-> source 78 | 79 | return torch.from_numpy(src_keypts.astype(np.float32)).cuda()[None], \ 80 | torch.from_numpy(tgt_keypts.astype(np.float32)).cuda()[None], \ 81 | torch.from_numpy(src_features.astype(np.float32)).cuda()[None], \ 82 | torch.from_numpy(tgt_features.astype(np.float32)).cuda()[None], \ 83 | torch.from_numpy(gt_trans.astype(np.float32)).cuda()[None], \ 84 | 85 | 86 | def __len__(self): 87 | return self.gt_trans.keys().__len__() 88 | 89 | def __loadlog__(self, gtpath): 90 | with open(os.path.join(gtpath, 'gt.log')) as f: 91 | content = f.readlines() 92 | result = {} 93 | i = 0 94 | while i < len(content): 95 | line = content[i].replace("\n", "").split("\t")[0:3] 96 | trans = np.zeros([4, 4]) 97 | trans[0] = np.fromstring(content[i+1], dtype=float, sep=' \t') 98 | trans[1] = np.fromstring(content[i+2], dtype=float, sep=' \t') 99 | trans[2] = np.fromstring(content[i+3], dtype=float, sep=' \t') 100 | trans[3] = np.fromstring(content[i+4], dtype=float, sep=' \t') 101 | i = i + 5 102 | result[f'{int(line[0])}_{int(line[1])}'] = trans 103 | return result 104 | 105 | class 
ThreeDLoMatchLoader(data.Dataset): 106 | def __init__(self, 107 | root, 108 | descriptor='fcgf', 109 | inlier_threshold=0.10, 110 | num_node=5000, 111 | use_mutual=True, 112 | downsample=0.03, 113 | ): 114 | self.root = root 115 | self.descriptor = descriptor 116 | assert descriptor in ['fcgf', 'fpfh', 'predator'] 117 | self.inlier_threshold = inlier_threshold 118 | self.num_node = num_node 119 | self.use_mutual = use_mutual 120 | self.downsample = downsample 121 | 122 | with open('3DLoMatch.pkl', 'rb') as f: 123 | self.infos = pickle.load(f) 124 | 125 | def get_data(self, index): 126 | 127 | gt_trans = integrate_trans(self.infos['rot'][index], self.infos['trans'][index]) 128 | scene = self.infos['src'][index].split('/')[1] 129 | src_id = self.infos['src'][index].split('/')[-1].split('_')[-1].replace('.pth', '') 130 | tgt_id = self.infos['tgt'][index].split('/')[-1].split('_')[-1].replace('.pth', '') 131 | 132 | # load point coordinates and pre-computed per-point local descriptors 133 | if self.descriptor == 'fcgf': 134 | src_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{src_id}_fcgf.npz") 135 | tgt_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{tgt_id}_fcgf.npz") 136 | src_keypts = src_data['xyz'] 137 | tgt_keypts = tgt_data['xyz'] 138 | src_features = src_data['feature'] 139 | tgt_features = tgt_data['feature'] 140 | 141 | src_keypts = torch.from_numpy(src_keypts.astype(np.float32)).cuda() 142 | tgt_keypts = torch.from_numpy(tgt_keypts.astype(np.float32)).cuda() 143 | src_features = torch.from_numpy(src_features.astype(np.float32)).cuda() 144 | tgt_features = torch.from_numpy(tgt_features.astype(np.float32)).cuda() 145 | gt_trans = torch.from_numpy(gt_trans.astype(np.float32)).cuda() 146 | 147 | elif self.descriptor == 'fpfh': 148 | src_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{src_id}_fpfh.npz") 149 | tgt_data = np.load(f"{self.root}/fragments/{scene}/cloud_bin_{tgt_id}_fpfh.npz") 150 | src_keypts = src_data['xyz'] 151 | tgt_keypts = tgt_data['xyz'] 152 | src_features = src_data['feature'] 153 | tgt_features = tgt_data['feature'] 154 | src_features = src_features / (np.linalg.norm(src_features, axis=1, keepdims=True) + 1e-6) 155 | tgt_features = tgt_features / (np.linalg.norm(tgt_features, axis=1, keepdims=True) + 1e-6) 156 | 157 | src_keypts = torch.from_numpy(src_keypts.astype(np.float32)).cuda() 158 | tgt_keypts = torch.from_numpy(tgt_keypts.astype(np.float32)).cuda() 159 | src_features = torch.from_numpy(src_features.astype(np.float32)).cuda() 160 | tgt_features = torch.from_numpy(tgt_features.astype(np.float32)).cuda() 161 | gt_trans = torch.from_numpy(gt_trans.astype(np.float32)).cuda() 162 | elif self.descriptor == "predator": 163 | data_dict = torch.load( 164 | f'{self.root}/{index}.pth') 165 | len_src = data_dict['len_src'] 166 | src_keypts = data_dict['pcd'][:len_src, :].cuda() 167 | tgt_keypts = data_dict['pcd'][len_src:, :].cuda() 168 | src_features = data_dict['feats'][:len_src].cuda() 169 | tgt_features = data_dict['feats'][len_src:].cuda() 170 | saliency, overlap = data_dict['saliency'], data_dict['overlaps'] 171 | src_overlap, src_saliency = overlap[:len_src], saliency[:len_src] 172 | tgt_overlap, tgt_saliency = overlap[len_src:], saliency[len_src:] 173 | src_scores = src_overlap * src_saliency 174 | tgt_scores = tgt_overlap * tgt_saliency 175 | if (src_keypts.size(0) > self.num_node): 176 | idx = np.arange(src_keypts.size(0)) 177 | probs = (src_scores / src_scores.sum()).numpy().flatten() 178 | idx = np.random.choice(idx, 
size=self.num_node, replace=False, p=probs) 179 | src_keypts, src_features = src_keypts[idx], src_features[idx] 180 | if (tgt_keypts.size(0) > self.num_node): 181 | idx = np.arange(tgt_keypts.size(0)) 182 | probs = (tgt_scores / tgt_scores.sum()).numpy().flatten() 183 | idx = np.random.choice(idx, size=self.num_node, replace=False, p=probs) 184 | tgt_keypts, tgt_features = tgt_keypts[idx], tgt_features[idx] 185 | gt_trans = integrate_trans(data_dict['rot'], data_dict['trans']).cuda() 186 | 187 | return src_keypts[None], tgt_keypts[None], src_features[None], tgt_features[None], gt_trans[None] 188 | 189 | 190 | def __len__(self): 191 | return len(self.infos['rot']) 192 | 193 | class KITTILoader(data.Dataset): 194 | def __init__(self, 195 | root, 196 | descriptor='fcgf', 197 | inlier_threshold=0.60, 198 | num_node=5000, 199 | use_mutual=True, 200 | downsample=0.30 201 | ): 202 | self.root = root 203 | self.descriptor = descriptor 204 | assert descriptor in ['fcgf', 'fpfh'] 205 | self.inlier_threshold = inlier_threshold 206 | self.num_node = num_node 207 | self.use_mutual = use_mutual 208 | self.downsample = downsample 209 | 210 | # containers 211 | self.ids_list = [] 212 | 213 | for filename in os.listdir(f"{self.root}/"): 214 | self.ids_list.append(os.path.join(f"{self.root}/", filename)) 215 | 216 | # self.ids_list = sorted(self.ids_list, key=lambda x: int(x.split('_')[-1].split('.')[0])) 217 | 218 | def get_data(self, index): 219 | # load meta data 220 | filename = self.ids_list[index] 221 | data = np.load(filename) 222 | src_keypts = data['xyz0'] 223 | tgt_keypts = data['xyz1'] 224 | src_features = data['features0'] 225 | tgt_features = data['features1'] 226 | if self.descriptor == 'fpfh': 227 | src_features = src_features / (np.linalg.norm(src_features, axis=1, keepdims=True) + 1e-6) 228 | tgt_features = tgt_features / (np.linalg.norm(tgt_features, axis=1, keepdims=True) + 1e-6) 229 | 230 | # compute ground truth transformation 231 | gt_trans = data['gt_trans'] 232 | 233 | return torch.from_numpy(src_keypts.astype(np.float32)).cuda()[None], \ 234 | torch.from_numpy(tgt_keypts.astype(np.float32)).cuda()[None], \ 235 | torch.from_numpy(src_features.astype(np.float32)).cuda()[None], \ 236 | torch.from_numpy(tgt_features.astype(np.float32)).cuda()[None], \ 237 | torch.from_numpy(gt_trans.astype(np.float32)).cuda()[None] 238 | 239 | def __len__(self): 240 | return len(self.ids_list) 241 | 242 | 243 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: SC2_PCR 2 | channels: 3 | - pytorch 4 | - open3d-admin 5 | - conda-forge 6 | - defaults 7 | dependencies: 8 | - _libgcc_mutex=0.1=main 9 | - absl-py=0.9.0=py37hc8dfbb8_1 10 | - argon2-cffi=20.1.0=py37h7b6447c_1 11 | - attrs=19.3.0=py_0 12 | - backcall=0.2.0=py_0 13 | - blas=1.0=mkl 14 | - bleach=3.1.5=py_0 15 | - blinker=1.4=py_1 16 | - brotlipy=0.7.0=py37h8f50634_1000 17 | - c-ares=1.16.1=h516909a_0 18 | - ca-certificates=2020.6.20=hecda079_0 19 | - cachetools=4.1.1=py_0 20 | - certifi=2020.6.20=py37hc8dfbb8_0 21 | - cffi=1.14.1=py37he30daa8_0 22 | - chardet=3.0.4=py37hc8dfbb8_1006 23 | - click=7.1.2=pyh9f0ad1d_0 24 | - cryptography=3.0=py37hb09aad4_0 25 | - cudatoolkit=10.1.243=h6bb024c_0 26 | - decorator=4.4.2=py_0 27 | - defusedxml=0.6.0=py_0 28 | - entrypoints=0.3=py37_0 29 | - freetype=2.10.2=h5ab3b9f_0 30 | - google-auth=1.20.1=py_0 31 | - google-auth-oauthlib=0.4.1=py_2 32 | - 
grpcio=1.31.0=py37hb0870dc_0 33 | - idna=2.10=pyh9f0ad1d_0 34 | - importlib-metadata=1.7.0=py37_0 35 | - importlib_metadata=1.7.0=0 36 | - intel-openmp=2020.1=217 37 | - ipykernel=5.3.4=py37h5ca1d4c_0 38 | - ipython=7.17.0=py37h39e3cac_0 39 | - ipython_genutils=0.2.0=py37_0 40 | - ipywidgets=7.5.1=py_0 41 | - jedi=0.17.2=py37_0 42 | - jinja2=2.11.2=py_0 43 | - joblib=0.16.0=py_0 44 | - jpeg=9b=h024ee3a_2 45 | - jsonschema=3.2.0=py37_1 46 | - jupyter_client=6.1.6=py_0 47 | - jupyter_core=4.6.3=py37_0 48 | - lcms2=2.11=h396b838_0 49 | - ld_impl_linux-64=2.33.1=h53a641e_7 50 | - libedit=3.1.20191231=h14c3975_1 51 | - libffi=3.3=he6710b0_2 52 | - libgcc-ng=9.1.0=hdf63c60_0 53 | - libgfortran-ng=7.3.0=hdf63c60_0 54 | - libpng=1.6.37=hbc83047_0 55 | - libprotobuf=3.12.4=h8b12597_0 56 | - libsodium=1.0.18=h7b6447c_0 57 | - libstdcxx-ng=9.1.0=hdf63c60_0 58 | - libtiff=4.1.0=h2733197_1 59 | - lz4-c=1.9.2=he6710b0_1 60 | - markdown=3.2.2=py_0 61 | - markupsafe=1.1.1=py37h14c3975_1 62 | - mistune=0.8.4=py37h14c3975_1001 63 | - mkl=2020.1=217 64 | - mkl-service=2.3.0=py37he904b0f_0 65 | - mkl_fft=1.1.0=py37h23d657b_0 66 | - mkl_random=1.1.1=py37h0573a6f_0 67 | - nbconvert=5.6.1=py37_1 68 | - nbformat=5.0.7=py_0 69 | - ncurses=6.2=he6710b0_1 70 | - ninja=1.10.0=py37hfd86e86_0 71 | - notebook=6.1.1=py37_0 72 | - numpy=1.19.1=py37hbc911f0_0 73 | - numpy-base=1.19.1=py37hfa32c7d_0 74 | - oauthlib=3.0.1=py_0 75 | - olefile=0.46=py37_0 76 | - open3d=0.9.0.0=py37_0 77 | - openssl=1.1.1g=h516909a_1 78 | - packaging=20.4=py_0 79 | - pandoc=2.10=0 80 | - pandocfilters=1.4.2=py37_1 81 | - parso=0.7.0=py_0 82 | - pexpect=4.8.0=py37_1 83 | - pickleshare=0.7.5=py37_1001 84 | - pillow=7.2.0=py37hb39fc2d_0 85 | - pip=20.2.1=py37_0 86 | - prometheus_client=0.8.0=py_0 87 | - prompt-toolkit=3.0.5=py_0 88 | - protobuf=3.12.4=py37h3340039_0 89 | - ptyprocess=0.6.0=py37_0 90 | - pyasn1=0.4.8=py_0 91 | - pyasn1-modules=0.2.7=py_0 92 | - pycparser=2.20=py_2 93 | - pygments=2.6.1=py_0 94 | - pyjwt=1.7.1=py_0 95 | - pyopenssl=19.1.0=py_1 96 | - pyparsing=2.4.7=py_0 97 | - pyrsistent=0.16.0=py37h7b6447c_0 98 | - pysocks=1.7.1=py37hc8dfbb8_1 99 | - python=3.7.7=hcff3b4d_5 100 | - python-dateutil=2.8.1=py_0 101 | - python_abi=3.7=1_cp37m 102 | - pytorch=1.6.0=py3.7_cuda10.1.243_cudnn7.6.3_0 103 | - pyzmq=19.0.1=py37he6710b0_1 104 | - readline=8.0=h7b6447c_0 105 | - requests=2.24.0=pyh9f0ad1d_0 106 | - requests-oauthlib=1.3.0=pyh9f0ad1d_0 107 | - rsa=4.6=pyh9f0ad1d_0 108 | - scikit-learn=0.23.1=py37h423224d_0 109 | - scipy=1.5.0=py37h0b6359f_0 110 | - send2trash=1.5.0=py37_0 111 | - setuptools=49.3.1=py37_0 112 | - six=1.15.0=py_0 113 | - sqlite=3.32.3=h62c20be_0 114 | - tensorboard=2.3.0=py_0 115 | - tensorboard-plugin-wit=1.6.0=pyh9f0ad1d_0 116 | - tensorboardx=2.1=py_0 117 | - terminado=0.8.3=py37_0 118 | - testpath=0.4.4=py_0 119 | - threadpoolctl=2.1.0=pyh5ca1d4c_0 120 | - tk=8.6.10=hbc83047_0 121 | - torchvision=0.7.0=py37_cu101 122 | - tornado=6.0.4=py37h7b6447c_1 123 | - traitlets=4.3.3=py37_0 124 | - urllib3=1.25.10=py_0 125 | - wcwidth=0.2.5=py_0 126 | - webencodings=0.5.1=py37_1 127 | - werkzeug=1.0.1=pyh9f0ad1d_0 128 | - wheel=0.34.2=py37_0 129 | - widgetsnbextension=3.5.1=py37_0 130 | - xz=5.2.5=h7b6447c_0 131 | - zeromq=4.3.2=he6710b0_2 132 | - zipp=3.1.0=py_0 133 | - zlib=1.2.11=h7b6447c_3 134 | - zstd=1.4.5=h9ceee32_0 135 | - pip: 136 | - easydict==1.9 137 | - tqdm==4.48.2 138 | prefix: ~/anaconda3/envs/SC2_PCR 139 | 140 | -------------------------------------------------------------------------------- 
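Note: the loaders defined in dataset.py above all return batched CUDA tensors plus a 4x4 ground-truth transform. Below is a minimal smoke-test sketch of exercising one of them (this assumes the fragments/gt_result layout that ThreeDLoader expects and a CUDA-capable GPU; the data root is a placeholder):

from dataset import ThreeDLoader
from utils.SE3 import transform

loader = ThreeDLoader(root='/path/to/3DMatch', descriptor='fpfh',
                      inlier_threshold=0.10, num_node=5000, use_mutual=True,
                      select_scene='7-scenes-redkitchen')
src_keypts, tgt_keypts, src_feats, tgt_feats, gt_trans = loader.get_data(0)
# gt_trans maps the source fragment into the target frame;
# transform() from utils/SE3.py applies it batch-wise.
src_in_tgt = transform(src_keypts, gt_trans)   # [1, num_pts, 3]
print(len(loader), src_keypts.shape, src_feats.shape, gt_trans.shape)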
/evaluate_metric.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | import numpy as np 5 | from sklearn.metrics import recall_score, precision_score, f1_score 6 | from utils.SE3 import * 7 | import warnings 8 | 9 | warnings.filterwarnings('ignore') 10 | 11 | 12 | class TransformationLoss(nn.Module): 13 | def __init__(self, re_thre=15, te_thre=30): 14 | super(TransformationLoss, self).__init__() 15 | self.re_thre = re_thre # rotation error threshold (deg) 16 | self.te_thre = te_thre # translation error threshold (cm) 17 | 18 | 19 | def forward(self, trans, gt_trans, src_keypts, tgt_keypts, probs): 20 | """ 21 | Transformation Loss 22 | Inputs: 23 | - trans: [bs, 4, 4] SE3 transformation matrices 24 | - gt_trans: [bs, 4, 4] ground truth SE3 transformation matrices 25 | - src_keypts: [bs, num_corr, 3] 26 | - tgt_keypts: [bs, num_corr, 3] 27 | - probs: [bs, num_corr] predicted inlier probability 28 | Outputs: 29 | - loss transformation loss 30 | - recall registration recall (re < re_thre & te < te_thre) 31 | - RE rotation error 32 | - TE translation error 33 | - RMSE RMSE under the predicted transformation 34 | """ 35 | bs = trans.shape[0] 36 | R, t = decompose_trans(trans) 37 | gt_R, gt_t = decompose_trans(gt_trans) 38 | 39 | recall = 0 40 | RE = torch.tensor(0.0).to(trans.device) 41 | TE = torch.tensor(0.0).to(trans.device) 42 | RMSE = torch.tensor(0.0).to(trans.device) 43 | loss = torch.tensor(0.0).to(trans.device) 44 | for i in range(bs): 45 | re = torch.acos(torch.clamp((torch.trace(R[i].T @ gt_R[i]) - 1) / 2.0, min=-1, max=1)) 46 | te = torch.sqrt(torch.sum((t[i] - gt_t[i]) ** 2)) 47 | warp_src_keypts = transform(src_keypts[i], trans[i]) 48 | rmse = torch.norm(warp_src_keypts - tgt_keypts[i], dim=-1).mean() 49 | re = re * 180 / np.pi 50 | te = te * 100 51 | if te < self.te_thre and re < self.re_thre: 52 | recall += 1 53 | RE += re 54 | TE += te 55 | RMSE += rmse 56 | 57 | pred_inliers = torch.where(probs[i] > 0)[0] 58 | if len(pred_inliers) < 1: 59 | loss += torch.tensor(0.0).to(trans.device) 60 | else: 61 | warp_src_keypts = transform(src_keypts[i], trans[i]) 62 | loss += ((warp_src_keypts - tgt_keypts[i])**2).sum(-1).mean() 63 | 64 | return loss / bs, recall * 100.0 / bs, RE / bs, TE / bs, RMSE / bs 65 | 66 | class ClassificationLoss(nn.Module): 67 | def __init__(self, balanced=True): 68 | super(ClassificationLoss, self).__init__() 69 | self.balanced = balanced 70 | 71 | def forward(self, pred, gt, weight=None): 72 | """ 73 | Classification Loss for the inlier confidence 74 | Inputs: 75 | - pred: [bs, num_corr] predicted logits/labels for the putative correspondences 76 | - gt: [bs, num_corr] ground truth labels 77 | Outputs: (dict) 78 | - loss (weighted) BCE loss for inlier confidence 79 | - precision: inlier precision (# kept inliers / # kept matches) 80 | - recall: inlier recall (# kept inliers / # all inliers) 81 | - f1: (precision * recall * 2) / (precision + recall) 82 | - logit_true: average logit for inliers 83 | - logit_false: average logit for outliers 84 | """ 85 | num_pos = torch.relu(torch.sum(gt) - 1) + 1 86 | num_neg = torch.relu(torch.sum(1 - gt) - 1) + 1 87 | if weight is not None: 88 | loss = nn.BCEWithLogitsLoss(reduction='none')(pred, gt.float()) 89 | loss = torch.mean(loss * weight) 90 | elif self.balanced is False: 91 | loss = nn.BCEWithLogitsLoss(reduction='mean')(pred, gt.float())
92 | else: 93 | loss = nn.BCEWithLogitsLoss(pos_weight=num_neg * 1.0 / num_pos, reduction='mean')(pred, gt.float()) 94 | 95 | # compute precision, recall, f1 96 | pred_labels = pred > 0 97 | gt, pred_labels, pred = gt.detach().cpu().numpy(), pred_labels.detach().cpu().numpy(), pred.detach().cpu().numpy() 98 | precision = precision_score(gt[0], pred_labels[0]) 99 | recall = recall_score(gt[0], pred_labels[0]) 100 | f1 = f1_score(gt[0], pred_labels[0]) 101 | mean_logit_true = np.sum(pred * gt) / max(1, np.sum(gt)) 102 | mean_logit_false = np.sum(pred * (1 - gt)) / max(1, np.sum(1 - gt)) 103 | 104 | eval_stats = { 105 | "loss": loss, 106 | "precision": float(precision), 107 | "recall": float(recall), 108 | "f1": float(f1), 109 | "logit_true": float(mean_logit_true), 110 | "logit_false": float(mean_logit_false) 111 | } 112 | return eval_stats 113 | 114 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /figures/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ZhiChen902/SC2-PCR/19961eae0a4789442652bfd681589115dc6f0570/figures/pipeline.png -------------------------------------------------------------------------------- /test_3DLoMatch.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | sys.path.append('.') 4 | import argparse 5 | import logging 6 | from tqdm import tqdm 7 | from easydict import EasyDict as edict 8 | from evaluate_metric import TransformationLoss, ClassificationLoss 9 | from dataset import ThreeDLoMatchLoader 10 | from benchmark_utils import set_seed, icp_refine 11 | from benchmark_utils_predator import * 12 | from utils.timer import Timer 13 | from SC2_PCR import Matcher 14 | set_seed() 15 | from utils.SE3 import * 16 | from collections import defaultdict 17 | 18 | def eval_3DLoMatch_scene(loader, matcher, trans_evaluator, cls_evaluator, scene_ind, config): 19 | num_pair = loader.__len__() 20 | final_poses = np.zeros([num_pair, 4, 4]) 21 | 22 | # 0.success, 1.RE, 2.TE, 3.input inlier number, 4.input inlier ratio, 5. output inlier number 23 | # 6. output inlier precision, 7. output inlier recall, 8. output inlier F1 score 9. model_time, 10. data_time 11. scene_ind 24 | stats = np.zeros([num_pair, 12]) 25 | 26 | data_timer, model_timer = Timer(), Timer() 27 | with torch.no_grad(): 28 | error_pair = [] 29 | for i in tqdm(range(num_pair)): 30 | ################################# 31 | # 1. load data 32 | ################################# 33 | data_timer.tic() 34 | src_keypts, tgt_keypts, src_features, tgt_features, gt_trans = loader.get_data(i) 35 | data_time = data_timer.toc() 36 | 37 | ################################# 38 | # 2. match descriptor and compute rigid transformation 39 | ################################# 40 | model_timer.tic() 41 | pred_trans, pred_labels, src_keypts_corr, tgt_keypts_corr = matcher.estimator(src_keypts, tgt_keypts, 42 | src_features, tgt_features) 43 | model_time = model_timer.toc() 44 | 45 | ################################# 46 | # 3. generate the ground-truth classification result 47 | ################################# 48 | frag1_warp = transform(src_keypts_corr, gt_trans) 49 | distance = torch.sum((frag1_warp - tgt_keypts_corr) ** 2, dim=-1) ** 0.5 50 | gt_labels = (distance < config.inlier_threshold).float() 51 | 52 | ################################# 53 | # 4. 
evaluate result 54 | ################################# 55 | loss, recall, Re, Te, rmse = trans_evaluator(pred_trans, gt_trans, src_keypts_corr, tgt_keypts_corr, 56 | pred_labels) 57 | class_stats = cls_evaluator(pred_labels, gt_labels) 58 | 59 | ################################# 60 | # record the evaluation results. 61 | ################################# 62 | # save statistics 63 | stats[i, 0] = float(recall / 100.0) # success 64 | stats[i, 1] = float(Re) # Re (deg) 65 | stats[i, 2] = float(Te) # Te (cm) 66 | stats[i, 3] = int(torch.sum(gt_labels)) # input inlier number 67 | stats[i, 4] = float(torch.mean(gt_labels.float())) # input inlier ratio 68 | stats[i, 5] = int(torch.sum(gt_labels[pred_labels > 0])) # output inlier number 69 | stats[i, 6] = float(class_stats['precision']) # output inlier precision 70 | stats[i, 7] = float(class_stats['recall']) # output inlier recall 71 | stats[i, 8] = float(class_stats['f1']) # output inlier f1 score 72 | stats[i, 9] = model_time 73 | stats[i, 10] = data_time 74 | stats[i, 11] = scene_ind 75 | final_poses[i] = pred_trans[0].detach().cpu().numpy() 76 | print(error_pair) 77 | 78 | return stats, final_poses 79 | 80 | 81 | def eval_3DLoMatch(config): 82 | loader = ThreeDLoMatchLoader(root=config.data_path, 83 | descriptor=config.descriptor, 84 | inlier_threshold=config.inlier_threshold, 85 | num_node=config.num_node, 86 | use_mutual=config.use_mutual, 87 | ) 88 | matcher = Matcher(inlier_threshold=config.inlier_threshold, 89 | num_node=config.num_node, 90 | use_mutual=config.use_mutual, 91 | d_thre=config.d_thre, 92 | num_iterations=config.num_iterations, 93 | ratio=config.ratio, 94 | nms_radius=config.nms_radius, 95 | max_points=config.max_points, 96 | k1=config.k1, 97 | k2=config.k2, ) 98 | trans_evaluator = TransformationLoss(re_thre=config.re_thre, te_thre=config.te_thre) 99 | cls_evaluator = ClassificationLoss() 100 | 101 | allpair_stats, allpair_poses = eval_3DLoMatch_scene(loader, matcher, trans_evaluator, cls_evaluator, 0, config) 102 | 103 | allpair_average = allpair_stats.mean(0) 104 | allpair_status_ndarray = np.array(allpair_stats, dtype=float) 105 | 106 | benchmark_predator(allpair_poses, gt_folder='benchmarks/3DLoMatch') 107 | 108 | # benchmarking using the registration recall defined in DGR 109 | allpair_average = allpair_stats.mean(0) 110 | correct_pair_average = allpair_stats[allpair_stats[:, 0] == 1].mean(0) 111 | logging.info(f"*" * 40) 112 | logging.info(f"All {allpair_stats.shape[0]} pairs, Mean Reg Recall={allpair_average[0] * 100:.2f}%, Mean Re={correct_pair_average[1]:.2f}, Mean Te={correct_pair_average[2]:.2f}") 113 | logging.info(f"\tInput: Mean Inlier Num={allpair_average[3]:.2f}(ratio={allpair_average[4] * 100:.2f}%)") 114 | logging.info(f"\tOutput: Mean Inlier Num={allpair_average[5]:.2f}(precision={allpair_average[6] * 100:.2f}%, recall={allpair_average[7] * 100:.2f}%, f1={allpair_average[8] * 100:.2f}%)") 115 | logging.info(f"\tMean model time: {allpair_average[9]:.2f}s, Mean data time: {allpair_average[10]:.2f}s") 116 | 117 | # all_stats_npy = np.concatenate([v for k, v in all_stats.items()], axis=0) 118 | 119 | return allpair_stats 120 | 121 | 122 | def benchmark_predator(pred_poses, gt_folder): 123 | scenes = sorted(os.listdir(gt_folder)) 124 | scene_names = [os.path.join(gt_folder,ele) for ele in scenes] 125 | 126 | re_per_scene = defaultdict(list) 127 | te_per_scene = defaultdict(list) 128 | re_all, te_all, precision, recall = [], [], [], [] 129 | n_valids= [] 130 | 131 | short_names=['Kitchen','Home 1','Home 
2','Hotel 1','Hotel 2','Hotel 3','Study','MIT Lab'] 132 | logging.info(("Scene\t¦ prec.\t¦ rec.\t¦ re\t¦ te\t¦ samples\t¦")) 133 | 134 | start_ind = 0 135 | for idx,scene in enumerate(scene_names): 136 | # ground truth info 137 | gt_pairs, gt_traj = read_trajectory(os.path.join(scene, "gt.log")) 138 | n_valid=0 139 | for ele in gt_pairs: 140 | diff=abs(int(ele[0])-int(ele[1])) 141 | n_valid+=diff>1 142 | n_valids.append(n_valid) 143 | 144 | n_fragments, gt_traj_cov = read_trajectory_info(os.path.join(scene,"gt.info")) 145 | 146 | # estimated info 147 | # est_pairs, est_traj = read_trajectory(os.path.join(est_folder,scenes[idx],'est.log')) 148 | est_traj = pred_poses[start_ind:start_ind + len(gt_pairs)] 149 | start_ind = start_ind + len(gt_pairs) 150 | 151 | temp_precision, temp_recall,c_flag = evaluate_registration(n_fragments, est_traj, gt_pairs, gt_pairs, gt_traj, gt_traj_cov) 152 | 153 | # Filter out the estimated rotation matrices 154 | ext_gt_traj = extract_corresponding_trajectors(gt_pairs,gt_pairs, gt_traj) 155 | 156 | re = rotation_error(torch.from_numpy(ext_gt_traj[:,0:3,0:3]), torch.from_numpy(est_traj[:,0:3,0:3])).cpu().numpy()[np.array(c_flag)==0] 157 | te = translation_error(torch.from_numpy(ext_gt_traj[:,0:3,3:4]), torch.from_numpy(est_traj[:,0:3,3:4])).cpu().numpy()[np.array(c_flag)==0] 158 | 159 | re_per_scene['mean'].append(np.mean(re)) 160 | re_per_scene['median'].append(np.median(re)) 161 | re_per_scene['min'].append(np.min(re)) 162 | re_per_scene['max'].append(np.max(re)) 163 | 164 | te_per_scene['mean'].append(np.mean(te)) 165 | te_per_scene['median'].append(np.median(te)) 166 | te_per_scene['min'].append(np.min(te)) 167 | te_per_scene['max'].append(np.max(te)) 168 | 169 | 170 | re_all.extend(re.reshape(-1).tolist()) 171 | te_all.extend(te.reshape(-1).tolist()) 172 | 173 | precision.append(temp_precision) 174 | recall.append(temp_recall) 175 | 176 | logging.info("{}\t¦ {:.3f}\t¦ {:.3f}\t¦ {:.3f}\t¦ {:.3f}\t¦ {:3d}¦".format(short_names[idx], temp_precision, temp_recall, np.median(re), np.median(te), n_valid)) 177 | # np.save(f'{est_folder}/{scenes[idx]}/flag.npy',c_flag) 178 | 179 | weighted_precision = (np.array(n_valids) * np.array(precision)).sum() / np.sum(n_valids) 180 | 181 | logging.info("Mean precision: {:.3f}: +- {:.3f}".format(np.mean(precision),np.std(precision))) 182 | logging.info("Weighted precision: {:.3f}".format(weighted_precision)) 183 | 184 | logging.info("Mean median RRE: {:.3f}: +- {:.3f}".format(np.mean(re_per_scene['median']), np.std(re_per_scene['median']))) 185 | logging.info("Mean median RTE: {:.3F}: +- {:.3f}".format(np.mean(te_per_scene['median']),np.std(te_per_scene['median']))) 186 | 187 | 188 | if __name__ == '__main__': 189 | from config import str2bool 190 | 191 | parser = argparse.ArgumentParser() 192 | parser.add_argument('--config_path', default='', type=str, help='snapshot dir') 193 | parser.add_argument('--solver', default='SVD', type=str, choices=['SVD', 'RANSAC']) 194 | parser.add_argument('--use_icp', default=False, type=str2bool) 195 | parser.add_argument('--save_npy', default=False, type=str2bool) 196 | args = parser.parse_args() 197 | 198 | config_path = args.config_path 199 | config = json.load(open(config_path, 'r')) 200 | config = edict(config) 201 | 202 | import os 203 | os.environ['CUDA_VISIBLE_DEVICES'] = config.CUDA_Devices 204 | if not os.path.exists("./logs"): 205 | os.makedirs("./logs") 206 | 207 | log_filename = f'logs/3DLoMatch-{config.descriptor}.log' 208 | logging.basicConfig(level=logging.INFO, 209 | 
filename=log_filename, 210 | filemode='a', 211 | format="") 212 | logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) 213 | 214 | # evaluate on the test set 215 | stats = eval_3DLoMatch(config) 216 | if args.save_npy: 217 | save_path = log_filename.replace('.log', '.npy') 218 | np.save(save_path, stats) 219 | print(f"Save the stats in {save_path}") 220 | -------------------------------------------------------------------------------- /test_3DMatch.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | sys.path.append('.') 4 | import argparse 5 | import logging 6 | import torch 7 | import numpy as np 8 | from tqdm import tqdm 9 | from easydict import EasyDict as edict 10 | from evaluate_metric import TransformationLoss, ClassificationLoss 11 | from dataset import ThreeDLoader 12 | from benchmark_utils import set_seed, icp_refine 13 | from utils.timer import Timer 14 | from SC2_PCR import Matcher 15 | set_seed() 16 | from utils.SE3 import transform 17 | 18 | def eval_3DMatch_scene(loader, matcher, trans_evaluator, cls_evaluator, scene, scene_ind, config, use_icp): 19 | """ 20 | Evaluate our model on 3DMatch testset [scene] 21 | """ 22 | num_pair = loader.__len__() 23 | 24 | # 0.success, 1.RE, 2.TE, 3.input inlier number, 4.input inlier ratio, 5. output inlier number 25 | # 6. output inlier precision, 7. output inlier recall, 8. output inlier F1 score 9. model_time, 10. data_time 11. scene_ind 26 | stats = np.zeros([num_pair, 12]) 27 | 28 | data_timer, model_timer = Timer(), Timer() 29 | with torch.no_grad(): 30 | error_pair = [] 31 | for i in tqdm(range(num_pair)): 32 | ################################# 33 | # 1. load data 34 | ################################# 35 | data_timer.tic() 36 | src_keypts, tgt_keypts, src_features, tgt_features, gt_trans = loader.get_data(i) 37 | data_time = data_timer.toc() 38 | 39 | ################################# 40 | # 2. match descriptor and compute rigid transformation 41 | ################################# 42 | model_timer.tic() 43 | pred_trans, pred_labels, src_keypts_corr, tgt_keypts_corr = matcher.estimator(src_keypts, tgt_keypts, src_features, tgt_features) 44 | model_time = model_timer.toc() 45 | 46 | ################################# 47 | # 3. generate the ground-truth classification result 48 | ################################# 49 | frag1_warp = transform(src_keypts_corr, gt_trans) 50 | distance = torch.sum((frag1_warp - tgt_keypts_corr) ** 2, dim = -1) ** 0.5 51 | gt_labels = (distance < config.inlier_threshold).float() 52 | 53 | ################################# 54 | # 4. evaluate result 55 | ################################# 56 | loss, recall, Re, Te, rmse = trans_evaluator(pred_trans, gt_trans, src_keypts_corr, tgt_keypts_corr, pred_labels) 57 | class_stats = cls_evaluator(pred_labels, gt_labels) 58 | 59 | ################################# 60 | # 5. 
save the result 61 | ################################# 62 | stats[i, 0] = float(recall / 100.0) # success 63 | stats[i, 1] = float(Re) # Re (deg) 64 | stats[i, 2] = float(Te) # Te (cm) 65 | stats[i, 3] = int(torch.sum(gt_labels)) # input inlier number 66 | stats[i, 4] = float(torch.mean(gt_labels.float())) # input inlier ratio 67 | stats[i, 5] = int(torch.sum(gt_labels[pred_labels > 0])) # output inlier number 68 | stats[i, 6] = float(class_stats['precision']) # output inlier precision 69 | stats[i, 7] = float(class_stats['recall']) # output inlier recall 70 | stats[i, 8] = float(class_stats['f1']) # output inlier f1 score 71 | stats[i, 9] = model_time 72 | stats[i, 10] = data_time 73 | stats[i, 11] = scene_ind 74 | print(error_pair) 75 | 76 | return stats 77 | 78 | 79 | def eval_3DMatch(config, use_icp): 80 | """ 81 | Collect the evaluation results on each scene of the 3DMatch testset and write the result to a .log file. 82 | """ 83 | scene_list = [ 84 | '7-scenes-redkitchen', 85 | 'sun3d-home_at-home_at_scan1_2013_jan_1', 86 | 'sun3d-home_md-home_md_scan9_2012_sep_30', 87 | 'sun3d-hotel_uc-scan3', 88 | 'sun3d-hotel_umd-maryland_hotel1', 89 | 'sun3d-hotel_umd-maryland_hotel3', 90 | 'sun3d-mit_76_studyroom-76-1studyroom2', 91 | 'sun3d-mit_lab_hj-lab_hj_tea_nov_2_2012_scan1_erika' 92 | ] 93 | all_stats = {} 94 | for scene_ind, scene in enumerate(scene_list): 95 | loader = ThreeDLoader(root=config.data_path, 96 | descriptor=config.descriptor, 97 | inlier_threshold=config.inlier_threshold, 98 | num_node=config.num_node, 99 | use_mutual=config.use_mutual, 100 | select_scene=scene, 101 | ) 102 | matcher = Matcher(inlier_threshold=config.inlier_threshold, 103 | num_node=config.num_node, 104 | use_mutual=config.use_mutual, 105 | d_thre=config.d_thre, 106 | num_iterations=config.num_iterations, 107 | ratio=config.ratio, 108 | nms_radius=config.nms_radius, 109 | max_points=config.max_points, 110 | k1=config.k1, 111 | k2=config.k2,) 112 | trans_evaluator = TransformationLoss(re_thre=config.re_thre, te_thre=config.te_thre) 113 | cls_evaluator = ClassificationLoss() 114 | 115 | scene_stats = eval_3DMatch_scene(loader, matcher, trans_evaluator, cls_evaluator, scene, scene_ind, config, use_icp) 116 | all_stats[scene] = scene_stats 117 | logging.info(f"Max memory allocated: {torch.cuda.max_memory_allocated() / 1024 ** 3:.2f}GB") 118 | 119 | # result for each scene 120 | scene_vals = np.zeros([len(scene_list), 12]) 121 | scene_ind = 0 122 | for scene, stats in all_stats.items(): 123 | correct_pair = np.where(stats[:, 0] == 1) 124 | scene_vals[scene_ind] = stats.mean(0) 125 | # for Re and Te, we only average over the successfully matched pairs. 
126 | scene_vals[scene_ind, 1] = stats[correct_pair].mean(0)[1] 127 | scene_vals[scene_ind, 2] = stats[correct_pair].mean(0)[2] 128 | logging.info(f"Scene {scene_ind}th:" 129 | f" Reg Recall={scene_vals[scene_ind, 0] * 100:.2f}% " 130 | f" Mean RE={scene_vals[scene_ind, 1]:.2f} " 131 | f" Mean TE={scene_vals[scene_ind, 2]:.2f} " 132 | f" Mean Precision={scene_vals[scene_ind, 6] * 100:.2f}% " 133 | f" Mean Recall={scene_vals[scene_ind, 7] * 100:.2f}% " 134 | f" Mean F1={scene_vals[scene_ind, 8] * 100:.2f}%" 135 | ) 136 | scene_ind += 1 137 | 138 | # scene level average 139 | average = scene_vals.mean(0) 140 | logging.info(f"All {len(scene_list)} scenes, Mean Reg Recall={average[0] * 100:.2f}%, Mean Re={average[1]:.2f}, Mean Te={average[2]:.2f}") 141 | logging.info(f"\tInput: Mean Inlier Num={average[3]:.2f}(ratio={average[4] * 100:.2f}%)") 142 | logging.info(f"\tOutput: Mean Inlier Num={average[5]:.2f}(precision={average[6] * 100:.2f}%, recall={average[7] * 100:.2f}%, f1={average[8] * 100:.2f}%)") 143 | logging.info(f"\tMean model time: {average[9]:.2f}s, Mean data time: {average[10]:.2f}s") 144 | 145 | # pair level average 146 | stats_list = [stats for _, stats in all_stats.items()] 147 | allpair_stats = np.concatenate(stats_list, axis=0) 148 | allpair_average = allpair_stats.mean(0) 149 | 150 | correct_pair_average = allpair_stats[allpair_stats[:, 0] == 1].mean(0) 151 | logging.info(f"*" * 40) 152 | logging.info(f"All {allpair_stats.shape[0]} pairs, Mean Reg Recall={allpair_average[0] * 100:.2f}%, Mean Re={correct_pair_average[1]:.2f}, Mean Te={correct_pair_average[2]:.2f}") 153 | logging.info(f"\tInput: Mean Inlier Num={allpair_average[3]:.2f}(ratio={allpair_average[4] * 100:.2f}%)") 154 | logging.info(f"\tOutput: Mean Inlier Num={allpair_average[5]:.2f}(precision={allpair_average[6] * 100:.2f}%, recall={allpair_average[7] * 100:.2f}%, f1={allpair_average[8] * 100:.2f}%)") 155 | logging.info(f"\tMean model time: {allpair_average[9]:.2f}s, Mean data time: {allpair_average[10]:.2f}s") 156 | 157 | all_stats_npy = np.concatenate([v for k, v in all_stats.items()], axis=0) 158 | return all_stats_npy 159 | 160 | 161 | if __name__ == '__main__': 162 | from config import str2bool 163 | 164 | parser = argparse.ArgumentParser() 165 | parser.add_argument('--config_path', default='', type=str, help='snapshot dir') 166 | parser.add_argument('--solver', default='SVD', type=str, choices=['SVD', 'RANSAC']) 167 | parser.add_argument('--use_icp', default=False, type=str2bool) 168 | parser.add_argument('--save_npy', default=False, type=str2bool) 169 | args = parser.parse_args() 170 | 171 | config_path = args.config_path 172 | config = json.load(open(config_path, 'r')) 173 | config = edict(config) 174 | 175 | import os 176 | os.environ['CUDA_VISIBLE_DEVICES'] = config.CUDA_Devices 177 | if not os.path.exists("./logs"): 178 | os.makedirs("./logs") 179 | log_filename = f'logs/3DMatch-{config.descriptor}.log' 180 | logging.basicConfig(level=logging.INFO, 181 | filename=log_filename, 182 | filemode='a', 183 | format="") 184 | logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) 185 | 186 | # evaluate on the test set 187 | stats = eval_3DMatch(config, args.use_icp) 188 | if args.save_npy: 189 | save_path = log_filename.replace('.log', '.npy') 190 | np.save(save_path, stats) 191 | print(f"Save the stats in {save_path}") 192 | -------------------------------------------------------------------------------- /test_KITTI.py: -------------------------------------------------------------------------------- 
1 | import json 2 | import sys 3 | sys.path.append('.') 4 | import argparse 5 | import logging 6 | from tqdm import tqdm 7 | from easydict import EasyDict as edict 8 | from evaluate_metric import TransformationLoss, ClassificationLoss 9 | from dataset import KITTILoader 10 | from benchmark_utils import set_seed, icp_refine 11 | from benchmark_utils_predator import * 12 | from utils.timer import Timer 13 | from SC2_PCR import Matcher 14 | set_seed() 15 | from utils.SE3 import * 16 | 17 | 18 | def eval_KITTI_per_pair(loader, matcher, trans_evaluator, cls_evaluator, config): 19 | """ 20 | Evaluate our model on KITTI testset. 21 | """ 22 | num_pair = loader.__len__() 23 | # 0.success, 1.RE, 2.TE, 3.input inlier number, 4.input inlier ratio, 5. output inlier number 24 | # 6. output inlier precision, 7. output inlier recall, 8. output inlier F1 score 9. model_time, 10. data_time 11. scene_ind 25 | stats = np.zeros([num_pair, 12]) 26 | 27 | data_timer, model_timer = Timer(), Timer() 28 | with torch.no_grad(): 29 | for i in tqdm(range(num_pair)): 30 | ################################# 31 | # 1. load data 32 | ################################# 33 | data_timer.tic() 34 | src_keypts, tgt_keypts, src_features, tgt_features, gt_trans = loader.get_data(i) 35 | data_time = data_timer.toc() 36 | 37 | ################################# 38 | # 2. match descriptor and compute rigid transformation 39 | ################################# 40 | model_timer.tic() 41 | pred_trans, pred_labels, src_keypts_corr, tgt_keypts_corr = matcher.estimator(src_keypts, tgt_keypts, 42 | src_features, 43 | tgt_features) 44 | model_time = model_timer.toc() 45 | 46 | ################################# 47 | # 3. generate the ground-truth classification result 48 | ################################# 49 | frag1_warp = transform(src_keypts_corr, gt_trans) 50 | distance = torch.sum((frag1_warp - tgt_keypts_corr) ** 2, dim=-1) ** 0.5 51 | gt_labels = (distance < config.inlier_threshold).float() 52 | 53 | ################################# 54 | # 4. 
evaluate result 55 | ################################# 56 | loss, recall, Re, Te, rmse = trans_evaluator(pred_trans, gt_trans, src_keypts_corr, tgt_keypts_corr, 57 | pred_labels) 58 | class_stats = cls_evaluator(pred_labels, gt_labels) 59 | 60 | # save statistics 61 | stats[i, 0] = float(recall / 100.0) # success 62 | stats[i, 1] = float(Re) # Re (deg) 63 | stats[i, 2] = float(Te) # Te (cm) 64 | stats[i, 3] = int(torch.sum(gt_labels)) # input inlier number 65 | stats[i, 4] = float(torch.mean(gt_labels.float())) # input inlier ratio 66 | stats[i, 5] = int(torch.sum(gt_labels[pred_labels > 0])) # output inlier number 67 | stats[i, 6] = float(class_stats['precision']) # output inlier precision 68 | stats[i, 7] = float(class_stats['recall']) # output inlier recall 69 | stats[i, 8] = float(class_stats['f1']) # output inlier f1 score 70 | stats[i, 9] = model_time 71 | stats[i, 10] = data_time 72 | stats[i, 11] = -1 73 | 74 | if recall == 0: 75 | from benchmark_utils import rot_to_euler 76 | R_gt, t_gt = gt_trans[0][:3, :3], gt_trans[0][:3, -1] 77 | euler = rot_to_euler(R_gt.detach().cpu().numpy()) 78 | 79 | input_ir = float(torch.mean(gt_labels.float())) 80 | input_i = int(torch.sum(gt_labels)) 81 | output_i = int(torch.sum(gt_labels[pred_labels > 0])) 82 | logging.info(f"Pair {i}, GT Rot: {euler[0]:.2f}, {euler[1]:.2f}, {euler[2]:.2f}, Trans: {t_gt[0]:.2f}, {t_gt[1]:.2f}, {t_gt[2]:.2f}, RE: {float(Re):.2f}, TE: {float(Te):.2f}") 83 | logging.info((f"\tInput Inlier Ratio: {input_ir*100:.2f}%(#={input_i}), Output: IP={float(class_stats['precision'])*100:.2f}%(#={output_i}) IR={float(class_stats['recall'])*100:.2f}%")) 84 | 85 | return stats 86 | 87 | def eval_KITTI(config): 88 | loader = KITTILoader(root=config.data_path, 89 | descriptor=config.descriptor, 90 | inlier_threshold=config.inlier_threshold, 91 | num_node=config.num_node, 92 | use_mutual=config.use_mutual, 93 | ) 94 | matcher = Matcher(inlier_threshold=config.inlier_threshold, 95 | num_node=config.num_node, 96 | use_mutual=config.use_mutual, 97 | d_thre=config.d_thre, 98 | num_iterations=config.num_iterations, 99 | ratio=config.ratio, 100 | nms_radius=config.nms_radius, 101 | max_points=config.max_points, 102 | k1=config.k1, 103 | k2=config.k2, ) 104 | trans_evaluator = TransformationLoss(re_thre=config.re_thre, te_thre=config.te_thre) 105 | cls_evaluator = ClassificationLoss() 106 | 107 | stats = eval_KITTI_per_pair(loader, matcher, trans_evaluator, cls_evaluator, config) 108 | logging.info(f"Max memory allocated: {torch.cuda.max_memory_allocated() / 1024 ** 3:.2f}GB") 109 | 110 | # pair level average 111 | allpair_stats = stats 112 | allpair_average = allpair_stats.mean(0) 113 | correct_pair_average = allpair_stats[allpair_stats[:, 0] == 1].mean(0) 114 | logging.info(f"*"*40) 115 | logging.info(f"All {allpair_stats.shape[0]} pairs, Mean Success Rate={allpair_average[0]*100:.2f}%, Mean Re={correct_pair_average[1]:.2f}, Mean Te={correct_pair_average[2]:.2f}") 116 | logging.info(f"\tInput: Mean Inlier Num={allpair_average[3]:.2f}(ratio={allpair_average[4]*100:.2f}%)") 117 | logging.info(f"\tOutput: Mean Inlier Num={allpair_average[5]:.2f}(precision={allpair_average[6]*100:.2f}%, recall={allpair_average[7]*100:.2f}%, f1={allpair_average[8]*100:.2f}%)") 118 | logging.info(f"\tMean model time: {allpair_average[9]:.2f}s, Mean data time: {allpair_average[10]:.2f}s") 119 | 120 | return allpair_stats 121 | 122 | if __name__ == '__main__': 123 | from config import str2bool 124 | parser = argparse.ArgumentParser() 125 | 
parser.add_argument('--config_path', default='', type=str, help='snapshot dir') 126 | parser.add_argument('--solver', default='SVD', type=str, choices=['SVD', 'RANSAC']) 127 | parser.add_argument('--use_icp', default=False, type=str2bool) 128 | parser.add_argument('--save_npz', default=False, type=str2bool) 129 | args = parser.parse_args() 130 | 131 | config_path = args.config_path 132 | config = json.load(open(config_path, 'r')) 133 | config = edict(config) 134 | 135 | import os 136 | os.environ['CUDA_VISIBLE_DEVICES'] = config.CUDA_Devices 137 | if not os.path.exists("./logs"): 138 | os.makedirs("./logs") 139 | log_filename = f'logs/KITTI-{config.descriptor}.log' 140 | logging.basicConfig(level=logging.INFO, 141 | filename=log_filename, 142 | filemode='a', 143 | format="") 144 | logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) 145 | 146 | stats = eval_KITTI(config) 147 | 148 | if args.save_npz: 149 | save_path = log_filename.replace('.log', '.npy') 150 | np.save(save_path, stats) 151 | print(f"Save the stats in {save_path}") 152 | -------------------------------------------------------------------------------- /utils/SE3.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import random 3 | import numpy as np 4 | 5 | def rotation_matrix(num_axis, augment_rotation): 6 | """ 7 | Sample a rotation matrix about [num_axis] axes, with each angle drawn from [0, 2*pi*augment_rotation] 8 | Input 9 | - num_axis: number of axes to rotate about (0, 1, or 3) 10 | - augment_rotation: scale of the sampled rotation angles 11 | Output 12 | - R: [3, 3] rotation matrix 13 | """ 14 | assert num_axis == 1 or num_axis == 3 or num_axis == 0 15 | if num_axis == 0: 16 | return np.eye(3) 17 | angles = np.random.rand(3) * 2 * np.pi * augment_rotation 18 | Rx = np.array([[1, 0, 0], 19 | [0, np.cos(angles[0]), -np.sin(angles[0])], 20 | [0, np.sin(angles[0]), np.cos(angles[0])]]) 21 | Ry = np.array([[np.cos(angles[1]), 0, np.sin(angles[1])], 22 | [0, 1, 0], 23 | [-np.sin(angles[1]), 0, np.cos(angles[1])]]) 24 | Rz = np.array([[np.cos(angles[2]), -np.sin(angles[2]), 0], 25 | [np.sin(angles[2]), np.cos(angles[2]), 0], 26 | [0, 0, 1]]) 27 | # R = Rx @ Ry @ Rz 28 | if num_axis == 1: 29 | return random.choice([Rx, Ry, Rz]) 30 | return Rx @ Ry @ Rz 31 | 32 | def translation_matrix(augment_translation): 33 | """ 34 | Sample a translation along 3 axes, each component within [0, augment_translation] meters 35 | Input 36 | - augment_translation: maximum translation in meters 37 | Output 38 | - t: [3, 1] translation matrix 39 | """ 40 | T = np.random.rand(3) * augment_translation 41 | return T.reshape(3, 1) 42 | 43 | def transform(pts, trans): 44 | """ 45 | Apply an SE3 transformation; supports torch.Tensor and np.ndarray. Equation: trans_pts = R @ pts + t 46 | Input 47 | - pts: [num_pts, 3] or [bs, num_pts, 3], pts to be transformed 48 | - trans: [4, 4] or [bs, 4, 4], SE3 transformation matrix 49 | Output 50 | - pts: [num_pts, 3] or [bs, num_pts, 3] transformed pts 51 | """ 52 | if len(pts.shape) == 3: 53 | trans_pts = trans[:, :3, :3] @ pts.permute(0,2,1) + trans[:, :3, 3:4] 54 | return trans_pts.permute(0,2,1) 55 | else: 56 | trans_pts = trans[:3, :3] @ pts.T + trans[:3, 3:4] 57 | return trans_pts.T 58 | 59 | def decompose_trans(trans): 60 | """ 61 | Decompose SE3 transformations into R and t; supports torch.Tensor and np.ndarray. 
62 | Input 63 | - trans: [4, 4] or [bs, 4, 4], SE3 transformation matrix 64 | Output 65 | - R: [3, 3] or [bs, 3, 3], rotation matrix 66 | - t: [3, 1] or [bs, 3, 1], translation matrix 67 | """ 68 | if len(trans.shape) == 3: 69 | return trans[:, :3, :3], trans[:, :3, 3:4] 70 | else: 71 | return trans[:3, :3], trans[:3, 3:4] 72 | 73 | def integrate_trans(R, t): 74 | """ 75 | Integrate SE3 transformations from R and t; supports torch.Tensor and np.ndarray. 76 | Input 77 | - R: [3, 3] or [bs, 3, 3], rotation matrix 78 | - t: [3, 1] or [bs, 3, 1], translation matrix 79 | Output 80 | - trans: [4, 4] or [bs, 4, 4], SE3 transformation matrix 81 | """ 82 | if len(R.shape) == 3: 83 | if isinstance(R, torch.Tensor): 84 | trans = torch.eye(4)[None].repeat(R.shape[0], 1, 1).to(R.device) 85 | else: 86 | trans = np.eye(4)[None] 87 | trans[:, :3, :3] = R 88 | trans[:, :3, 3:4] = t.view([-1, 3, 1]) 89 | else: 90 | if isinstance(R, torch.Tensor): 91 | trans = torch.eye(4).to(R.device) 92 | else: 93 | trans = np.eye(4) 94 | trans[:3, :3] = R 95 | trans[:3, 3:4] = t 96 | return trans 97 | 98 | def concatenate(trans1, trans2): 99 | """ 100 | Concatenate two SE3 transformations; supports torch.Tensor and np.ndarray. 101 | Input 102 | - trans1: [4, 4] or [bs, 4, 4], SE3 transformation matrix 103 | - trans2: [4, 4] or [bs, 4, 4], SE3 transformation matrix 104 | Output: 105 | - trans1 @ trans2 106 | """ 107 | R1, t1 = decompose_trans(trans1) 108 | R2, t2 = decompose_trans(trans2) 109 | R_cat = R1 @ R2 110 | t_cat = R1 @ t2 + t1 111 | trans_cat = integrate_trans(R_cat, t_cat) 112 | return trans_cat 113 | -------------------------------------------------------------------------------- /utils/libpmc.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ZhiChen902/SC2-PCR/19961eae0a4789442652bfd681589115dc6f0570/utils/libpmc.so -------------------------------------------------------------------------------- /utils/max_clique.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | Code borrowed from https://github.com/ryanrossi/pmc/blob/master/pmc.py 5 | Find the maximum clique in a given DIMACS-format graph 6 | based on: 7 | http://www.m-hikari.com/ams/ams-2014/ams-1-4-2014/mamatAMS1-4-2014-3.pdf 8 | ''' 9 | import os 10 | import numpy as np 11 | from numpy.ctypeslib import ndpointer 12 | import ctypes 13 | 14 | def pmc(ei,ej,nnodes,nnedges): # ei, ej: edge list with node indices starting from 0 15 | degrees = np.zeros(nnodes,dtype = np.int32) 16 | new_ei = [] 17 | new_ej = [] 18 | for i in range(nnedges): 19 | degrees[ei[i]] += 1 20 | if ej[i] <= ei[i] + 1: 21 | new_ei.append(ei[i]) 22 | new_ej.append(ej[i]) 23 | maxd = max(degrees) 24 | offset = 0 25 | new_ei = np.array(new_ei,dtype = np.int32) 26 | new_ej = np.array(new_ej,dtype = np.int32) 27 | outsize = maxd 28 | output = np.zeros(maxd,dtype = np.int32) 29 | lib = ctypes.cdll.LoadLibrary(os.path.abspath("utils/libpmc.so")) 30 | fun = lib.max_clique 31 | # call C function 32 | fun.restype = np.int32 33 | fun.argtypes = [ctypes.c_int32,ndpointer(ctypes.c_int32, flags="C_CONTIGUOUS"), 34 | ndpointer(ctypes.c_int32, flags="C_CONTIGUOUS"),ctypes.c_int32, 35 | ctypes.c_int32,ndpointer(ctypes.c_int32, flags="C_CONTIGUOUS")] 36 | clique_size = fun(len(new_ei),new_ei,new_ej,offset,outsize,output) 37 | max_clique = np.empty(clique_size,dtype = np.int32) 38 | max_clique[:]=[output[i] for i in range(clique_size)] 39 | 40 | return 
max_clique 41 | -------------------------------------------------------------------------------- /utils/pointcloud.py: -------------------------------------------------------------------------------- 1 | import open3d as o3d 2 | import torch 3 | 4 | 5 | def make_point_cloud(pts): 6 | if isinstance(pts, torch.Tensor): 7 | pts = pts.detach().cpu().numpy() 8 | pcd = o3d.geometry.PointCloud() 9 | pcd.points = o3d.utility.Vector3dVector(pts) 10 | return pcd 11 | 12 | def make_feature(data, dim, npts): 13 | if isinstance(data, torch.Tensor): 14 | data = data.detach().cpu().numpy() 15 | feature = o3d.registration.Feature() 16 | feature.resize(dim, npts) 17 | feature.data = data.astype('d').transpose() 18 | return feature 19 | 20 | def estimate_normal(pcd, radius=0.06, max_nn=30): 21 | pcd.estimate_normals(search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=radius, max_nn=max_nn)) -------------------------------------------------------------------------------- /utils/sinkhorn.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | ''' 4 | Reference: 5 | https://github.com/magicleap/SuperGluePretrainedNetwork/blob/c0626d58c843ee0464b0fa1dd4de4059bfae0ab4/models/superglue.py#L150 6 | ''' 7 | 8 | 9 | def log_sinkhorn_iterations(Z, log_mu, log_nu, iters: int): 10 | ''' 11 | Perform Sinkhorn Normalization in Log-space for stability 12 | :param Z: 13 | :param log_mu: 14 | :param log_nu: 15 | :param iters: 16 | :return: 17 | ''' 18 | u, v = torch.zeros_like(log_mu), torch.zeros_like(log_nu) 19 | for _ in range(iters): 20 | u = log_mu - torch.logsumexp(Z + v.unsqueeze(1), dim=2) 21 | v = log_nu - torch.logsumexp(Z + u.unsqueeze(2), dim=1) 22 | 23 | return Z + u.unsqueeze(2) + v.unsqueeze(1) 24 | 25 | 26 | def log_optimal_transport(scores, bins0=None, bins1=None, alpha=None, iters=100): 27 | ''' 28 | Perform Differentiable Optimal Transport in Log-space for stability 29 | :param scores: 30 | :param alpha: 31 | :param iters: 32 | :return: 33 | ''' 34 | 35 | b, m, n = scores.shape 36 | one = scores.new_tensor(1) 37 | ms, ns = (m * one).to(scores), (n * one).to(scores) 38 | 39 | if bins0 is None: 40 | bins0 = alpha.expand(b, m, 1) 41 | if bins1 is None: 42 | bins1 = alpha.expand(b, 1, n) 43 | 44 | alpha = alpha.expand(b, 1, 1) 45 | 46 | couplings = torch.cat([torch.cat([scores, bins0], -1), 47 | torch.cat([bins1, alpha], -1)], 1) 48 | 49 | norm = - (ms + ns).log() 50 | log_mu = torch.cat([norm.expand(m), ns.log()[None] + norm]) 51 | log_nu = torch.cat([norm.expand(n), ms.log()[None] + norm]) 52 | log_mu, log_nu = log_mu[None].expand(b, -1), log_nu[None].expand(b, -1) 53 | 54 | Z = log_sinkhorn_iterations(couplings, log_mu, log_nu, iters) 55 | Z = Z - norm #multiply probabilities by M + N 56 | return Z 57 | 58 | 59 | def rpmnet_sinkhorn(log_score, bins0, bins1, iters: int): 60 | b, m, n = log_score.shape 61 | alpha = torch.zeros(size=(b, 1, 1)).cuda() 62 | log_score_padded = torch.cat([torch.cat([log_score, bins0], -1), 63 | torch.cat([bins1, alpha], -1)], 1) 64 | 65 | for i in range(iters): 66 | #Row Normalization 67 | log_score_padded = torch.cat(( 68 | log_score_padded[:, :-1, :] - (torch.logsumexp(log_score_padded[:, :-1, :], dim=2, keepdim=True)), 69 | log_score_padded[:, -1, None, :]), 70 | dim=1) 71 | 72 | #Column Normalization 73 | log_score_padded = torch.cat(( 74 | log_score_padded[:, :, :-1] - (torch.logsumexp(log_score_padded[:, :, :-1], dim=1, keepdim=True)), 75 | log_score_padded[:, :, -1, None]), 76 | dim=2) 77 | 78 | 79 | return 
log_score_padded 80 | 81 | -------------------------------------------------------------------------------- /utils/timer.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | 4 | class AverageMeter(object): 5 | """Computes and stores the average and current value""" 6 | 7 | def __init__(self): 8 | self.reset() 9 | 10 | def reset(self): 11 | self.val = 0 12 | self.avg = 0 13 | self.sum = 0.0 14 | self.sq_sum = 0.0 15 | self.count = 0 16 | 17 | def update(self, val, n=1): 18 | self.val = val 19 | self.sum += val * n 20 | self.count += n 21 | self.avg = self.sum / self.count 22 | self.sq_sum += val**2 * n 23 | self.var = self.sq_sum / self.count - self.avg ** 2 24 | 25 | 26 | class Timer(object): 27 | """A simple timer.""" 28 | 29 | def __init__(self): 30 | self.total_time = 0. 31 | self.calls = 0 32 | self.start_time = 0. 33 | self.diff = 0. 34 | self.avg = 0. 35 | 36 | def reset(self): 37 | self.total_time = 0 38 | self.calls = 0 39 | self.start_time = 0 40 | self.diff = 0 41 | self.avg = 0 42 | 43 | def tic(self): 44 | # using time.time instead of time.clock because time.clock 45 | # does not normalize for multithreading 46 | self.start_time = time.time() 47 | 48 | def toc(self, average=True): 49 | self.diff = time.time() - self.start_time 50 | self.total_time += self.diff 51 | self.calls += 1 52 | self.avg = self.total_time / self.calls 53 | if average: 54 | return self.avg 55 | else: 56 | return self.diff --------------------------------------------------------------------------------
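For reference, a minimal usage sketch for the Timer and AverageMeter utilities above, mirroring how the test scripts time data loading and model inference (time.sleep is a stand-in for the actual workload):

import time
from utils.timer import Timer, AverageMeter

timer, meter = Timer(), AverageMeter()
for _ in range(3):
    timer.tic()
    time.sleep(0.01)                      # stand-in for matcher.estimator(...)
    elapsed = timer.toc(average=False)    # per-call time; toc() alone returns the running average
    meter.update(elapsed)
print(f"{meter.count} calls, avg {meter.avg:.4f}s (Timer running avg: {timer.avg:.4f}s)")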