├── outputs
│   └── best_weights
│       └── readme.txt
├── RandomSampler.py
├── LocalFeatureAggregation.py
├── configs.py
├── README.md
├── test.py
├── data
│   ├── val.txt
│   ├── test.txt
│   └── train.txt
├── net.py
├── process_data2accelerate.py
├── dataset.py
├── metrics.py
├── pytorch_utils.py
├── train.py
└── utils.py

/outputs/best_weights/readme.txt:
--------------------------------------------------------------------------------
1 | download the pretrained model and put it here
2 | 
--------------------------------------------------------------------------------
/RandomSampler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: UTF-8 -*-
3 | import os
4 | import numpy as np
5 | from tqdm import tqdm
6 | import configs as cfg
7 | tcfg = cfg.CONFIGS['Train']
8 | 
9 | def random_subsample(points, n_samples):
10 |     plane = np.zeros((0, points.shape[1]))  # stays empty if no ground plane is removed
11 |     if tcfg.remove_plane:
12 |         zmin = np.min(points[:, 2])
13 |         nthr = zmin + tcfg.plane_threshold
14 |         plane = points[points[:, 2] < nthr]
15 |         points = points[points[:, 2] >= nthr]  # remove the ground plane in each PC
16 | 
17 |     if points.shape[0] == 0:
18 |         points = np.zeros((n_samples, points.shape[1]))
19 |     if n_samples < points.shape[0]:
20 |         random_indices = np.random.choice(points.shape[0], n_samples, replace=False)
21 |         points = points[random_indices, :]
22 |     if n_samples > points.shape[0]:
23 |         np.random.shuffle(points)
24 |         apd = np.zeros((n_samples - points.shape[0], points.shape[1]))
25 |         points = np.vstack((points, apd))
26 |     return points, plane
27 | 
28 | def process_data(root, name, n=tcfg.n_samples):
29 |     path = os.path.join(root, name)
30 |     sname = name + '_split_plane_' + str(n) + '_thr_' + str(tcfg.plane_threshold)
31 | 
32 |     for dir in tqdm(os.listdir(path)):
33 |         for file in os.listdir(os.path.join(path, dir)):
34 | 
35 |             sp = os.path.join(root, sname, dir, file)
36 |             spp = os.path.join(root, sname, dir, file.replace('.txt', '_plane.txt'))
37 | 
38 |             if os.path.exists(sp) and os.path.exists(spp):
39 |                 continue
40 |             else:
41 |                 pc = np.loadtxt(os.path.join(path, dir, file), skiprows=2)
42 |                 pc, plane = random_subsample(pc, n)
43 | 
44 |                 if not os.path.exists(os.path.join(root, sname, dir)):
45 |                     os.makedirs(os.path.join(root, sname, dir))
46 | 
47 |                 np.savetxt(sp, pc, fmt="%.8f %.8f %.8f %.8f %.8f %.8f %.0f")
48 |                 head = '//X Y Z Rf Gf Bf label\n'
49 |                 with open(sp, 'r+') as f:
50 |                     content = f.read()
51 |                     f.seek(0, 0)
52 |                     f.write(head + (str(len(pc)) + '\n') + content)
53 | 
54 |                 np.savetxt(spp, plane, fmt="%.8f %.8f %.8f %.8f %.8f %.8f %.0f")
55 | 
56 | 
57 | def generate_removed_plane_dataset_train(root):
58 |     print('removing planes of train and val datasets...')
59 |     process_data(root, 'train_seg', n=tcfg.n_samples)
60 |     print('all point clouds have had their ground planes removed')
61 | 
62 | def generate_removed_plane_dataset_test(root):
63 |     print('removing planes of test datasets...')
64 |     process_data(root, 'test_seg', n=tcfg.n_samples)
65 |     print('all point clouds have had their ground planes removed')
66 | 
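67 | # Usage sketch (assumes the SLPCCD txt layout read by process_data above,
68 | # i.e. a two-line header followed by "X Y Z Rf Gf Bf label" rows):
69 | #   pc = np.loadtxt('point2016.txt', skiprows=2)      # (N, 7) array
70 | #   pc, plane = random_subsample(pc, tcfg.n_samples)  # (n_samples, 7), ground rows
71 | 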
72 | if __name__ == '__main__':
73 |     generate_removed_plane_dataset_train(tcfg.path.data_root)
74 |     generate_removed_plane_dataset_test(tcfg.path.data_root)
75 | 
--------------------------------------------------------------------------------
/LocalFeatureAggregation.py:
--------------------------------------------------------------------------------
1 | from torch.autograd import Variable
2 | import torch
3 | import torch.nn as nn
4 | import torch.nn.functional as F
5 | import pytorch_utils as pt_utils
6 | import numpy as np
7 | import configs as cfg
8 | tcfg = cfg.CONFIGS['Train']
9 | 
10 | def gather_neighbour(pc, neighbor_idx):
11 |     """
12 |     gather the coordinates or features of neighboring points
13 |     pc: [B, C, N, 1]
14 |     neighbor_idx: [B, N, K]
15 |     """
16 |     pc = pc.transpose(2, 1).squeeze(-1)
17 |     batch_size = pc.shape[0]
18 |     num_points = pc.shape[1]
19 |     d = pc.shape[2]
20 |     index_input = neighbor_idx.reshape(batch_size, -1)
21 |     features = torch.gather(pc, 1, index_input.unsqueeze(-1).repeat(1, 1, pc.shape[2]))
22 |     features = features.reshape(batch_size, num_points, neighbor_idx.shape[-1], d)  # b*n*k*d
23 |     features = features.permute(0, 3, 1, 2)  # b*c*n*k
24 |     return features
25 | 
26 | class SPE(nn.Module):
27 |     def __init__(self, d_in, d_out):
28 |         super().__init__()
29 |         self.mlp2 = pt_utils.Conv2d(d_in, d_out, kernel_size=(1, 1), bn=True)
30 | 
31 | 
32 |     def forward(self, feature, neigh_idx):
33 |         f_neigh = gather_neighbour(feature, neigh_idx)
34 |         f_neigh = torch.cat((feature, f_neigh), -1)
35 |         f_agg2 = self.mlp2(f_neigh)
36 |         f_agg2 = torch.sum(f_agg2, -1, keepdim=True)
37 |         return f_agg2
38 | 
39 | class LFE(nn.Module):
40 |     def __init__(self, d_in, d_out):
41 |         super().__init__()
42 |         self.mlp1 = pt_utils.Conv2d(d_in, d_in, kernel_size=(1, 1), bn=True)
43 |         self.mlp2 = pt_utils.Conv2d(d_in, d_out, kernel_size=(1, 1), bn=True)
44 |         self.mlp3 = pt_utils.Conv2d(d_in, d_out, kernel_size=(1, 1), bn=True)
45 | 
46 |     def forward(self, feature, neigh_idx):
47 |         f_neigh = gather_neighbour(feature, neigh_idx)
48 |         f_neigh = self.mlp1(f_neigh)
49 |         f_neigh = torch.sum(f_neigh, dim=-1, keepdim=True)
50 |         f_neigh = self.mlp2(f_neigh)
51 |         feature = self.mlp3(feature)
52 |         f_agg = f_neigh + feature
53 |         return f_agg
54 | 
55 | class LFA(nn.Module):
56 |     def __init__(self, d_in, d_out):
57 |         super().__init__()
58 |         self.spe = SPE(d_in, d_out)
59 |         self.lfe = LFE(d_in, d_out)
60 |         self.mlp = pt_utils.Conv2d(d_out, d_out, kernel_size=(1, 1), bn=True)
61 | 
62 |     def forward(self, feature, neigh_idx):
63 |         spe = self.spe(feature, neigh_idx)
64 |         lfe = self.lfe(feature, neigh_idx)
65 |         f_agg = spe + lfe
66 |         f_agg = self.mlp(f_agg)
67 |         return f_agg
68 | 
69 | 
70 | if __name__ == '__main__':
71 | 
72 |     xyz = Variable(torch.rand(4, 1024, 3))
73 |     feature = Variable(torch.rand(4, 64, 1024, 1))
74 |     nidx = np.random.randint(0, 1024, size=[4, 1024, 16])
75 |     nidx = torch.Tensor(nidx).type(torch.int64)
76 |     net = LFA(64, 128)
77 |     y = net(feature, nidx)
78 |     print('out', y.shape)
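79 |     # Expected output: out torch.Size([4, 128, 1024, 1]) -- SPE and LFE each
80 |     # lift 64 -> 128 channels and the summed neighbour axis collapses to 1.
81 | 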
--------------------------------------------------------------------------------
/configs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: UTF-8 -*-
3 | import ml_collections as mlc
4 | import numpy as np
5 | import os
6 | 
7 | def train_cfg():
8 | 
9 |     """returns training configuration."""
10 | 
11 |     cfg = mlc.ConfigDict()
12 |     cfg.resume = False
13 |     cfg.display = True
14 |     cfg.print_rate = 5
15 |     cfg.batch_size = 8
16 |     cfg.epoch = 40
17 | 
18 | 
19 |     # network setting
20 |     cfg.use_rgb = False  # use 'XYZRGB' (True) or 'XYZ' (False) as the input dimensions
21 |     if cfg.use_rgb:
22 |         cfg.in_dim = 6
23 |     else:
24 |         cfg.in_dim = 3
25 |     cfg.out_dim = 64
26 |     cfg.sub_sampling_ratio = [4, 4, 4, 4]  # downsampling rate of each layer's input point cloud
27 |     cfg.down_rate = np.prod(cfg.sub_sampling_ratio)
28 |     cfg.num_layers = len(cfg.sub_sampling_ratio)
29 |     cfg.k_neighbors = 16  # the k value in the LFA module
30 | 
31 | 
32 |     # dataset setting
33 |     cfg.n_samples = 8192  # the number of points per input point cloud
34 |     cfg.remove_plane = True  # whether to remove the ground plane of PCs
35 |     cfg.plane_threshold = 0.50
36 |     cfg.norm_data = True
37 | 
38 |     # path
39 |     cfg.path = mlc.ConfigDict()
40 |     cfg.path.data_root = 'F:\\WZXData\\SHREC2021-3DCD\\dataset7'
41 |     cfg.path.test_dataset = os.path.join(cfg.path.data_root, 'test_seg') + '_split_plane_' + str(cfg.n_samples) + '_thr_' + str(cfg.plane_threshold)
42 |     cfg.path.train_dataset = os.path.join(cfg.path.data_root, 'train_seg') + '_split_plane_' + str(cfg.n_samples) + '_thr_' + str(cfg.plane_threshold)
43 |     cfg.path.val_dataset = cfg.path.train_dataset
44 |     cfg.if_prepare_data = True  # save processed PCs as .npy files to accelerate the training phase
45 |     cfg.path.prepare_data = cfg.path.data_root + '/prepared_data_' + str(cfg.n_samples) + '_thr_' + str(cfg.plane_threshold) + '_' + str(cfg.k_neighbors)
46 |     cfg.path.save_txt = './data'
47 |     cfg.path.train_txt = './data/train.txt'
48 |     cfg.path.val_txt = './data/val.txt'
49 |     cfg.path.test_txt = './data/test.txt'
50 |     cfg.path.outputs = './outputs'
51 |     cfg.path.weights_save_dir = './outputs/weights'
52 |     cfg.path.best_weights_save_dir = './outputs/best_weights'
53 |     cfg.path.val_prediction = './outputs/val_prediction'
54 |     cfg.path.test_prediction = './outputs/test_prediction'
55 |     cfg.path.test_prediction_PCs = './outputs/test_prediction_PCs'
56 |     cfg.path.feature = './outputs/feature'
57 | 
58 | 
59 |     # optimizer setting
60 |     cfg.optimizer = mlc.ConfigDict()
61 |     cfg.optimizer.lr = 0.001
62 |     cfg.optimizer.momentum = 0.9
63 |     cfg.optimizer.weight_decay = 0.0005
64 |     cfg.optimizer.lr_step_size = 1
65 |     cfg.optimizer.gamma = 0.95
66 | 
67 |     # validation and testing setting
68 |     cfg.save_prediction = True  # whether to save the prediction results
69 |     cfg.criterion = 'miou'  # criterion for selecting models: 'miou' or 'oa'
70 | 
71 |     return cfg
72 | 
73 | CONFIGS = {
74 |     'Train': train_cfg(),
75 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # [3DCDNet: An End-to-end Point-based Method and A New Dataset for Street Level Point Cloud Change Detection](https://ieeexplore.ieee.org/document/10184135?source=authoralert)
2 | 
3 | ![image](https://github.com/wangle53/3DCDNet/assets/79884379/5a5efd38-c2e4-4f60-b670-28b6b51adc08)
4 | ![image](https://github.com/wangle53/3DCDNet/assets/79884379/7f28673f-579c-43f8-9082-643ea74e6045)
5 | 
6 | 
7 | 
8 | ## Requirement
9 | ```
10 | python 3.7.4
11 | torch 1.8.10
12 | visdom 0.1.8.9
13 | torchvision 0.9.0
14 | ```
15 | ## SLPCCD Dataset
16 | This dataset is developed from SHREC2021 (T. Ku, S. Galanakis, B. Boom et al., SHREC 2021: 3D point cloud change detection for street scenes, Computers & Graphics, https://doi.org/10.1016/j.cag.2021.07.004). It is a new 3D change detection benchmark dataset that aims to give researchers opportunities to develop novel 3D change detection algorithms. The dataset is available at [[Google Drive]](https://drive.google.com/drive/folders/15Wom0FQ6K6RcGxfLAnS-ELDrpZq-xYH1?usp=sharing) and [[Baiduyun]](https://pan.baidu.com/s/1onEEmQKkt7aXTLKJVB7agQ?pwd=8epz) (the password is: 8epz).
17 | ## Pretrained Model
18 | The pretrained model for SLPCCD is available at [[Google Drive]](https://drive.google.com/drive/folders/15Wom0FQ6K6RcGxfLAnS-ELDrpZq-xYH1?usp=sharing) and [[Baiduyun]](https://pan.baidu.com/s/1onEEmQKkt7aXTLKJVB7agQ?pwd=8epz) (the password is: 8epz).
19 | ## Test
20 | Before testing, please download the dataset and the pretrained model, and change the data paths in configs.py to your own. Copy the pretrained model to the folder './outputs/best_weights', then run the following commands:
21 | ```
22 | cd 3DCDNet_ROOT
23 | python test.py
24 | ```
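25 | If the checkpoint fails to load, you can sanity-check the downloaded file first -- a minimal sketch, assuming the checkpoint layout saved by train.py (a dict holding 'epoch' and 'model_state_dict'):
26 | ```
27 | import torch
28 | ckpt = torch.load('./outputs/best_weights/best_net.pth', map_location='cpu')
29 | print(ckpt['epoch'])                  # epoch at which the weights were saved
30 | print(len(ckpt['model_state_dict']))  # number of parameter tensors
31 | ```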
32 | ## Training
33 | Before training, please download the dataset and revise the dataset path in configs.py to your own path.
34 | ```
35 | cd 3DCDNet_ROOT
36 | python -m visdom.server
37 | python train.py
38 | ```
39 | To display the training process, open 'http://localhost:8097' in your browser.
40 | ## Experiments on Urb3DCD dataset
41 | The experiments on the Urb3DCD dataset can be found at [this link](https://github.com/wangle53/3DCDNet-Urb3DCD).
42 | ## Citing 3DCDNet
43 | If you use this repository or would like to refer to the paper, please use the following BibTeX entry.
44 | ```
45 | @ARTICLE{10184135,
46 |   author={Wang, Zhixue and Zhang, Yu and Luo, Lin and Yang, Kai and Xie, Liming},
47 |   journal={IEEE Transactions on Geoscience and Remote Sensing},
48 |   title={An End-to-End Point-Based Method and a New Dataset for Street-Level Point Cloud Change Detection},
49 |   year={2023},
50 |   volume={61},
51 |   number={},
52 |   pages={1-15},
53 |   doi={10.1109/TGRS.2023.3295386}}
54 | ```
55 | ## Reference
56 | - T. Ku, S. Galanakis, B. Boom et al., SHREC 2021: 3D point cloud change detection for street scenes, Computers & Graphics, https://doi.org/10.1016/j.cag.2021.07.004
57 | - Q. Hu et al., RandLA-Net: Efficient semantic segmentation of large-scale point clouds, in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2020, pp. 11108-11117.
58 | ## More
59 | [My personal Google Scholar page](https://scholar.google.com/citations?user=qdkY0jcAAAAJ&hl=zh-TW)


60 | 
--------------------------------------------------------------------------------
/test.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: UTF-8 -*-
3 | import os
4 | import time
5 | import shutil
6 | import torch
7 | import torch.nn as nn
8 | from torch.utils.data import Dataset, DataLoader
9 | import numpy as np
10 | 
11 | import utils as utl
12 | import configs as cfg
13 | import metrics as mc
14 | from net import Siam3DCDNet
15 | from dataset import CDDataset
16 | from tqdm import tqdm
17 | 
18 | 
19 | def test_network(tcfg):
20 |     test_txt = tcfg.path['test_txt']
21 |     test_data = CDDataset(tcfg.path['test_dataset'], tcfg.path['test_txt'], tcfg.n_samples, 'test', tcfg.path.prepare_data)
22 |     test_dataloader = DataLoader(test_data, batch_size=1, shuffle=False)
23 | 
24 |     best_model_path = os.path.join(tcfg.path['best_weights_save_dir'], 'best_net.pth')
25 |     pretrained_dict = torch.load(best_model_path)['model_state_dict']
26 |     device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
27 |     net = Siam3DCDNet(tcfg.in_dim, tcfg.out_dim).to(device=device)
28 |     net.load_state_dict(pretrained_dict, strict=False)
29 |     torch.set_grad_enabled(False)  # no gradients are needed at test time
30 |     net.eval()
31 |     dur = 0
32 |     iou_calc = mc.IoUCalculator()
33 |     tqdm_loader = tqdm(test_dataloader, total=len(test_dataloader))
34 |     for _, data in enumerate(tqdm_loader):
35 |         batch_data0, batch_data1, dir_name, pc0_name, pc1_name, raw_data = data
36 |         p0, p0_neighbors_idx, p0_pool_idx, p0_unsam_idx, lb0, knearest_idx0, raw_length0 = [i for i in batch_data0.values()]
37 |         p1, p1_neighbors_idx, p1_pool_idx, p1_unsam_idx, lb1, knearest_idx1, raw_length1 = [i for i in batch_data1.values()]
38 |         p0 = [i.to(device, dtype=torch.float) for i in p0]
39 |         p0_neighbors_idx = [i.to(device, dtype=torch.long) for i in p0_neighbors_idx]
40 | 
41 |         p0_pool_idx = [i.to(device, dtype=torch.long) for i in p0_pool_idx]
42 |         p0_unsam_idx = [i.to(device, dtype=torch.long) for i in p0_unsam_idx]
43 |         p1 = [i.to(device, dtype=torch.float) for i in p1]
44 |         p1_neighbors_idx = [i.to(device, dtype=torch.long) for i in p1_neighbors_idx]
45 |         p1_pool_idx = [i.to(device, dtype=torch.long) for i in p1_pool_idx]
46 |         p1_unsam_idx = [i.to(device, dtype=torch.long) for i in p1_unsam_idx]
47 |         knearest_idx = [knearest_idx0.to(device, dtype=torch.long), knearest_idx1.to(device, dtype=torch.long)]
48 | 
49 |         lb0 = lb0.squeeze(-1).to(device, dtype=torch.long)
50 |         lb1 = lb1.squeeze(-1).to(device, dtype=torch.long)
51 |         t0 = time.time()
52 |         out0, out1 = net([p0, p0_neighbors_idx, p0_pool_idx, p0_unsam_idx],
53 |                          [p1, p1_neighbors_idx, p1_pool_idx, p1_unsam_idx],
54 |                          knearest_idx)
55 |         dur += time.time() - t0
56 |         out0 = out0.max(dim=-1)[1]; out1 = out1.max(dim=-1)[1]
57 | 
58 |         iou_calc.add_data(out0.squeeze(0), out1.squeeze(0), lb0.squeeze(0), lb1.squeeze(0))
59 |         if tcfg.save_prediction:
60 |             utl.save_prediction3(raw_data[0], raw_data[1],
61 |                                  lb0, lb1,
62 |                                  out0.squeeze(-1), out1.squeeze(-1),
63 |                                  os.path.join(tcfg.path['test_prediction_PCs'], str(dir_name[0])),
64 |                                  pc0_name, pc1_name,
65 |                                  tcfg.path['test_dataset'])
66 | 
67 |     iou = iou_calc.metrics()
68 |     for k, v in iou.items():
69 |         print(k, v)
70 |     with open(os.path.join(tcfg.path['outputs'], 'test_IoU.txt'), 'a') as f:
71 |         f.write('Time:{},miou:{:.6f},oa:{:.6f},iou_list:{}'.format(time.strftime('%Y-%m-%d %H:%M:%S'), \
72 |                 iou['miou'], iou['oa'], iou['iou_list']))
73 |         f.write('\n')
74 | 
print('FPS: ', len(test_dataloader)/dur) 75 | 76 | 77 | if __name__ == '__main__': 78 | 79 | tcfg = cfg.CONFIGS['Train'] 80 | test_network(tcfg) 81 | 82 | 83 | 84 | 85 | -------------------------------------------------------------------------------- /data/val.txt: -------------------------------------------------------------------------------- 1 | 0_1\point2016.txt 0_1\point2020_seg.txt 2 | 0_10\point2016.txt 0_10\point2020.txt 3 | 0_4\point2016.txt 0_4\point2020.txt 4 | 0_8\point2016.txt 0_8\point2020_seg.txt 5 | 10_12\point2016_seg.txt 10_12\point2020.txt 6 | 10_4\point2016.txt 10_4\point2020.txt 7 | 11_3\point2016.txt 11_3\point2020.txt 8 | 13_4\point2016.txt 13_4\point2020.txt 9 | 14_10\point2016.txt 14_10\point2020.txt 10 | 14_3\point2016.txt 14_3\point2020.txt 11 | 14_4\point2016_seg.txt 14_4\point2020.txt 12 | 14_5\point2016.txt 14_5\point2020_seg.txt 13 | 17_1\point2016.txt 17_1\point2020.txt 14 | 19_5\point2016_seg.txt 19_5\point2020.txt 15 | 21_3\point2016.txt 21_3\point2020_seg.txt 16 | 23_5\point2016_seg.txt 23_5\point2020.txt 17 | 24_13\point2016_seg.txt 24_13\point2020.txt 18 | 24_18\point2016_seg.txt 24_18\point2020_seg.txt 19 | 24_4\point2016_seg.txt 24_4\point2020_seg.txt 20 | 24_8\point2016_seg.txt 24_8\point2020.txt 21 | 25_11\point2016_seg.txt 25_11\point2020.txt 22 | 25_22\point2016_seg.txt 25_22\point2020_seg.txt 23 | 25_7\point2016_seg.txt 25_7\point2020.txt 24 | 26_11\point2016_seg.txt 26_11\point2020_seg.txt 25 | 28_5\point2016_seg.txt 28_5\point2020_seg.txt 26 | 28_6\point2016_seg.txt 28_6\point2020.txt 27 | 28_7\point2016.txt 28_7\point2020_seg.txt 28 | 29_2\point2016.txt 29_2\point2020.txt 29 | 2_1\point2016.txt 2_1\point2020.txt 30 | 34_1\point2016.txt 34_1\point2020.txt 31 | 34_10\point2016.txt 34_10\point2020.txt 32 | 34_3\point2016.txt 34_3\point2020_seg.txt 33 | 34_4\point2016.txt 34_4\point2020.txt 34 | 34_8\point2016.txt 34_8\point2020_seg.txt 35 | 37_1\point2016.txt 37_1\point2020_seg.txt 36 | 38_11\point2016_seg.txt 38_11\point2020_seg.txt 37 | 38_4\point2016.txt 38_4\point2020.txt 38 | 39_6\point2016.txt 39_6\point2020_seg.txt 39 | 40_2\point2016_seg.txt 40_2\point2020.txt 40 | 40_6\point2016_seg.txt 40_6\point2020.txt 41 | 40_7\point2016.txt 40_7\point2020_seg.txt 42 | 42_2\point2016.txt 42_2\point2020.txt 43 | 42_4\point2016_seg.txt 42_4\point2020_seg.txt 44 | 43_1\point2016.txt 43_1\point2020_seg.txt 45 | 44_1\point2016.txt 44_1\point2020.txt 46 | 45_1\point2016.txt 45_1\point2020_seg.txt 47 | 46_4\point2016.txt 46_4\point2020.txt 48 | 47_4\point2016.txt 47_4\point2020.txt 49 | 48_5\point2016_seg.txt 48_5\point2020.txt 50 | 49_1\point2016.txt 49_1\point2020.txt 51 | 4_1\point2016.txt 4_1\point2020.txt 52 | 50_10\point2016.txt 50_10\point2020_seg.txt 53 | 50_9\point2016.txt 50_9\point2020_seg.txt 54 | 51_2\point2016_seg.txt 51_2\point2020_seg.txt 55 | 51_5\point2016_seg.txt 51_5\point2020_seg.txt 56 | 52_3\point2016_seg.txt 52_3\point2020_seg.txt 57 | 53_4\point2016_seg.txt 53_4\point2020.txt 58 | 53_8\point2016_seg.txt 53_8\point2020.txt 59 | 53_9\point2016_seg.txt 53_9\point2020_seg.txt 60 | 56_1\point2016.txt 56_1\point2020.txt 61 | 56_6\point2016.txt 56_6\point2020.txt 62 | 57_2\point2016.txt 57_2\point2020.txt 63 | 59_1\point2016.txt 59_1\point2020.txt 64 | 60_1\point2016.txt 60_1\point2020.txt 65 | 60_2\point2016.txt 60_2\point2020.txt 66 | 62_2\point2016.txt 62_2\point2020.txt 67 | 63_5\point2016.txt 63_5\point2020.txt 68 | 64_10\point2016.txt 64_10\point2020.txt 69 | 64_12\point2016.txt 64_12\point2020.txt 70 | 64_13\point2016.txt 
64_13\point2020.txt
71 | 64_16\point2016.txt 64_16\point2020.txt
72 | 64_18\point2016.txt 64_18\point2020.txt
73 | 65_10\point2016.txt 65_10\point2020.txt
74 | 65_4\point2016.txt 65_4\point2020.txt
75 | 65_6\point2016.txt 65_6\point2020_seg.txt
76 | 66_15\point2016.txt 66_15\point2020.txt
77 | 66_17\point2016_seg.txt 66_17\point2020.txt
78 | 66_20\point2016_seg.txt 66_20\point2020_seg.txt
79 | 66_24\point2016.txt 66_24\point2020.txt
80 | 66_27\point2016.txt 66_27\point2020.txt
81 | 66_29\point2016.txt 66_29\point2020.txt
82 | 66_30\point2016_seg.txt 66_30\point2020.txt
83 | 66_31\point2016_seg.txt 66_31\point2020_seg.txt
84 | 67_9\point2016.txt 67_9\point2020.txt
85 | 68_12\point2016_seg.txt 68_12\point2020.txt
86 | 68_2\point2016.txt 68_2\point2020.txt
87 | 68_5\point2016.txt 68_5\point2020.txt
88 | 69_3\point2016.txt 69_3\point2020.txt
89 | 69_8\point2016.txt 69_8\point2020.txt
90 | 69_9\point2016_seg.txt 69_9\point2020_seg.txt
91 | 6_1\point2016_seg.txt 6_1\point2020_seg.txt
92 | 73_1\point2016.txt 73_1\point2020_seg.txt
93 | 76_2\point2016.txt 76_2\point2020.txt
94 | 9_1\point2016.txt 9_1\point2020.txt
95 | 9_3\point2016.txt 9_3\point2020.txt
96 | 
--------------------------------------------------------------------------------
/net.py:
--------------------------------------------------------------------------------
1 | from torch.autograd import Variable
2 | import torch
3 | import torch.nn as nn
4 | import torch.nn.functional as F
5 | import pytorch_utils as pt_utils
6 | import numpy as np
7 | from LocalFeatureAggregation import LFA, gather_neighbour
8 | import configs as cfg
9 | tcfg = cfg.CONFIGS['Train']
10 | 
11 | class C3Dnet(nn.Module):
12 |     def __init__(self, in_d, out_d):
13 |         super(C3Dnet, self).__init__()
14 |         self.in_d = in_d
15 |         self.out_d = out_d
16 |         self.fc0 = pt_utils.Conv1d(self.in_d, 64, kernel_size=1, bn=True)
17 |         self.block1 = LFA(64, 128)
18 |         self.block2 = LFA(128, 256)
19 |         self.block3 = LFA(256, 512)
20 |         self.block4 = LFA(512, 1024)
21 |         self.dt = pt_utils.Conv2d(1024, 1024, kernel_size=(1, 1), bn=True)
22 |         self.d4 = pt_utils.Conv2d(1024*2, 512, kernel_size=(1, 1), bn=True)
23 |         self.d3 = pt_utils.Conv2d(512*2, 256, kernel_size=(1, 1), bn=True)
24 |         self.d2 = pt_utils.Conv2d(256*2, 128, kernel_size=(1, 1), bn=True)
25 |         self.d1 = pt_utils.Conv2d(128*2, 64, kernel_size=(1, 1), bn=True)
26 |         self.d0 = pt_utils.Conv2d(64, self.out_d, kernel_size=(1, 1), bn=True)
27 | 
28 |     def forward(self, end_points):
29 |         xyz, neigh_idx, pool_idx, unsam_idx = end_points  # xyz: list of per-layer point tensors [B, N, C]
30 |         # encoder
31 |         out0 = self.fc0(xyz[0].permute(0, 2, 1))
32 |         out0 = out0.unsqueeze(dim=3)  # out: B*C*N*1
33 |         out1 = self.block1(out0, neigh_idx[0])
34 |         out1p = self.random_sample(out1, pool_idx[0])
35 |         out2 = self.block2(out1p, neigh_idx[1])
36 |         out2p = self.random_sample(out2, pool_idx[1])
37 |         out3 = self.block3(out2p, neigh_idx[2])
38 |         out3p = self.random_sample(out3, pool_idx[2])
39 |         out4 = self.block4(out3p, neigh_idx[3])
40 |         out4p = self.random_sample(out4, pool_idx[3])
41 | 
42 |         out = self.dt(out4p)
43 |         out = torch.cat((out, out4p), 1)
44 |         out = self.d4(out)
45 |         out = self.nearest_interpolation(out, unsam_idx[3])
46 |         out = torch.cat((out, out3p), 1)
47 |         out = self.d3(out)
48 |         out = self.nearest_interpolation(out, unsam_idx[2])
49 |         out = torch.cat((out, out2p), 1)
50 |         out = self.d2(out)
51 |         out = self.nearest_interpolation(out, unsam_idx[1])
52 |         out = torch.cat((out, out1p), 1)
53 |         out = self.d1(out)
54 |         out = self.nearest_interpolation(out, unsam_idx[0])
55 |         out = self.d0(out)
56 | 
57 |         return out
58 | 
59 | 
60 |     @staticmethod
61 |     def random_sample(feature, pool_idx):
62 |         """
63 |         :param feature: [B, d, N, 1] input features matrix
64 |         :param pool_idx: [B, N', K] neighbour indices of the N' points kept after pooling (N' < N)
65 |         :return: pool_features = [B, d, N', 1] pooled features matrix
66 |         """
67 |         feature = feature.squeeze(dim=3)  # batch*channel*npoints
68 |         k = pool_idx.shape[-1]
69 |         d = feature.shape[1]
70 |         batch_size = pool_idx.shape[0]
71 |         pool_idx = pool_idx.reshape(batch_size, -1)  # batch*(npoints*nsamples)
72 |         pool_features = torch.gather(feature, 2, pool_idx.unsqueeze(1).repeat(1, feature.shape[1], 1))
73 |         pool_features = pool_features.reshape(batch_size, d, -1, k)
74 |         pool_features = pool_features.max(dim=3, keepdim=True)[0]  # batch*channel*npoints*1
75 |         return pool_features
76 | 
77 |     @staticmethod
78 |     def nearest_interpolation(feature, interp_idx):
79 |         """
80 |         :param feature: [B, d, N, 1] input features matrix
81 |         :param interp_idx: [B, up_num_points, 1] nearest neighbour index
82 |         :return: [B, d, up_num_points, 1] interpolated features matrix
83 |         """
84 |         feature = feature.squeeze(dim=3)  # batch*channel*npoints
85 |         batch_size = interp_idx.shape[0]
86 |         up_num_points = interp_idx.shape[1]
87 |         interp_idx = interp_idx.reshape(batch_size, up_num_points)
88 |         interpolated_features = torch.gather(feature, 2, interp_idx.unsqueeze(1).repeat(1, feature.shape[1], 1))
89 |         interpolated_features = interpolated_features.unsqueeze(3)  # batch*channel*npoints*1
90 |         return interpolated_features
91 | 
92 | class Siam3DCDNet(nn.Module):
93 |     def __init__(self, in_d, out_d):
94 |         super(Siam3DCDNet, self).__init__()
95 |         self.net = C3Dnet(in_d, out_d)
96 |         self.mlp1 = pt_utils.Conv1d(64, 32, kernel_size=1, bn=True)
97 |         self.mlp2 = pt_utils.Conv1d(32, 2, kernel_size=1, bias=False, bn=False, activation=None)
98 | 
99 |     def forward(self, end_points0, end_points1, knearest_idx):
100 |         out0 = self.net(end_points0)
101 |         out1 = self.net(end_points1)
102 |         knearest_01, knearest_10 = knearest_idx
103 |         fout0 = self.nearest_feature_difference(out0, out1, knearest_01)
104 |         fout1 = self.nearest_feature_difference(out1, out0, knearest_10)
105 |         fout0 = self.mlp1(fout0.squeeze(-1))
106 |         fout1 = self.mlp1(fout1.squeeze(-1))
107 |         fout0 = self.mlp2(fout0)
108 |         fout1 = self.mlp2(fout1)
109 |         fout0 = F.log_softmax(fout0.transpose(2, 1), dim=-1)
110 |         fout1 = F.log_softmax(fout1.transpose(2, 1), dim=-1)
111 |         return fout0, fout1
112 | 
113 |     @staticmethod
114 |     def nearest_feature_difference(raw, query, nearest_idx):
115 |         nearest_features = gather_neighbour(query, nearest_idx)
116 |         fused_features = torch.mean(torch.abs(raw - nearest_features), -1)
117 |         return fused_features
118 | 
119 | 
120 | 
121 | 
122 | 
--------------------------------------------------------------------------------
/data/test.txt:
--------------------------------------------------------------------------------
1 | 0_1\point2016.txt 0_1\point2020.txt
2 | 10_1\point2016.txt 10_1\point2020.txt
3 | 10_2\point2016_seg.txt 10_2\point2020.txt
4 | 10_3\point2016.txt 10_3\point2020.txt
5 | 10_4\point2016.txt 10_4\point2020.txt
6 | 11_1\point2016.txt 11_1\point2020_seg.txt
7 | 13_1\point2016_seg.txt 13_1\point2020.txt
8 | 13_2\point2016.txt 13_2\point2020.txt
9 | 13_3\point2016.txt 13_3\point2020_seg.txt
10 | 14_1\point2016.txt 14_1\point2020.txt
11 | 14_2\point2016.txt 14_2\point2020.txt
12 | 14_3\point2016.txt 14_3\point2020.txt
13 | 14_4\point2016.txt 14_4\point2020.txt
14 | 14_5\point2016_seg.txt 
14_5\point2020.txt 15 | 16_1\point2016.txt 16_1\point2020.txt 16 | 17_1\point2016.txt 17_1\point2020.txt 17 | 19_1\point2016_seg.txt 19_1\point2020.txt 18 | 19_2\point2016_seg.txt 19_2\point2020_seg.txt 19 | 19_3\point2016_seg.txt 19_3\point2020_seg.txt 20 | 22_1\point2016_seg.txt 22_1\point2020.txt 21 | 22_2\point2016_seg.txt 22_2\point2020.txt 22 | 22_3\point2016_seg.txt 22_3\point2020_seg.txt 23 | 24_1\point2016_seg.txt 24_1\point2020.txt 24 | 24_2\point2016_seg.txt 24_2\point2020.txt 25 | 24_5\point2016_seg.txt 24_5\point2020.txt 26 | 25_1\point2016_seg.txt 25_1\point2020.txt 27 | 25_10\point2016.txt 25_10\point2020_seg.txt 28 | 25_2\point2016_seg.txt 25_2\point2020.txt 29 | 25_3\point2016.txt 25_3\point2020.txt 30 | 25_4\point2016_seg.txt 25_4\point2020_seg.txt 31 | 25_5\point2016_seg.txt 25_5\point2020_seg.txt 32 | 25_6\point2016.txt 25_6\point2020.txt 33 | 25_7\point2016.txt 25_7\point2020.txt 34 | 25_8\point2016_seg.txt 25_8\point2020.txt 35 | 25_9\point2016.txt 25_9\point2020.txt 36 | 26_1\point2016_seg.txt 26_1\point2020_seg.txt 37 | 26_2\point2016_seg.txt 26_2\point2020.txt 38 | 26_3\point2016_seg.txt 26_3\point2020_seg.txt 39 | 27_1\point2016.txt 27_1\point2020.txt 40 | 28_1\point2016.txt 28_1\point2020.txt 41 | 28_3\point2016.txt 28_3\point2020_seg.txt 42 | 28_4\point2016.txt 28_4\point2020_seg.txt 43 | 29_1\point2016.txt 29_1\point2020.txt 44 | 29_2\point2016.txt 29_2\point2020_seg.txt 45 | 30_1\point2016.txt 30_1\point2020.txt 46 | 30_2\point2016.txt 30_2\point2020.txt 47 | 31_1\point2016_seg.txt 31_1\point2020_seg.txt 48 | 33_1\point2016.txt 33_1\point2020.txt 49 | 33_3\point2016.txt 33_3\point2020.txt 50 | 33_4\point2016_seg.txt 33_4\point2020.txt 51 | 33_6\point2016.txt 33_6\point2020.txt 52 | 34_1\point2016.txt 34_1\point2020.txt 53 | 34_2\point2016_seg.txt 34_2\point2020_seg.txt 54 | 34_3\point2016.txt 34_3\point2020_seg.txt 55 | 38_1\point2016_seg.txt 38_1\point2020_seg.txt 56 | 39_1\point2016_seg.txt 39_1\point2020_seg.txt 57 | 39_2\point2016_seg.txt 39_2\point2020_seg.txt 58 | 40_1\point2016.txt 40_1\point2020_seg.txt 59 | 40_2\point2016_seg.txt 40_2\point2020_seg.txt 60 | 43_1\point2016_seg.txt 43_1\point2020_seg.txt 61 | 43_2\point2016_seg.txt 43_2\point2020_seg.txt 62 | 46_1\point2016.txt 46_1\point2020.txt 63 | 46_2\point2016.txt 46_2\point2020_seg.txt 64 | 46_3\point2016_seg.txt 46_3\point2020_seg.txt 65 | 47_1\point2016.txt 47_1\point2020_seg.txt 66 | 47_2\point2016.txt 47_2\point2020_seg.txt 67 | 48_1\point2016_seg.txt 48_1\point2020.txt 68 | 49_1\point2016.txt 49_1\point2020.txt 69 | 49_2\point2016.txt 49_2\point2020.txt 70 | 50_1\point2016.txt 50_1\point2020_seg.txt 71 | 50_2\point2016_seg.txt 50_2\point2020_seg.txt 72 | 54_1\point2016_seg.txt 54_1\point2020_seg.txt 73 | 55_1\point2016.txt 55_1\point2020_seg.txt 74 | 56_1\point2016.txt 56_1\point2020.txt 75 | 57_1\point2016.txt 57_1\point2020.txt 76 | 57_2\point2016.txt 57_2\point2020.txt 77 | 57_3\point2016.txt 57_3\point2020.txt 78 | 57_4\point2016.txt 57_4\point2020.txt 79 | 57_5\point2016.txt 57_5\point2020.txt 80 | 5_1\point2016_seg.txt 5_1\point2020.txt 81 | 64_1\point2016.txt 64_1\point2020_seg.txt 82 | 64_2\point2016_seg.txt 64_2\point2020.txt 83 | 64_3\point2016_seg.txt 64_3\point2020.txt 84 | 64_4\point2016.txt 64_4\point2020_seg.txt 85 | 64_5\point2016_seg.txt 64_5\point2020.txt 86 | 64_6\point2016_seg.txt 64_6\point2020.txt 87 | 64_7\point2016_seg.txt 64_7\point2020_seg.txt 88 | 64_8\point2016.txt 64_8\point2020.txt 89 | 64_9\point2016_seg.txt 64_9\point2020.txt 90 | 65_1\point2016.txt 
65_1\point2020.txt
91 | 65_2\point2016.txt 65_2\point2020.txt
92 | 65_3\point2016.txt 65_3\point2020.txt
93 | 65_4\point2016.txt 65_4\point2020.txt
94 | 65_5\point2016.txt 65_5\point2020.txt
95 | 66_1\point2016.txt 66_1\point2020.txt
96 | 67_1\point2016.txt 67_1\point2020.txt
97 | 67_2\point2016_seg.txt 67_2\point2020.txt
98 | 68_1\point2016.txt 68_1\point2020.txt
99 | 68_2\point2016.txt 68_2\point2020.txt
100 | 69_1\point2016_seg.txt 69_1\point2020_seg.txt
101 | 69_2\point2016.txt 69_2\point2020.txt
102 | 70_1\point2016.txt 70_1\point2020_seg.txt
103 | 70_2\point2016.txt 70_2\point2020_seg.txt
104 | 71_1\point2016_seg.txt 71_1\point2020_seg.txt
105 | 72_1\point2016_seg.txt 72_1\point2020.txt
106 | 72_2\point2016.txt 72_2\point2020.txt
107 | 72_3\point2016.txt 72_3\point2020.txt
108 | 72_4\point2016.txt 72_4\point2020_seg.txt
109 | 72_5\point2016.txt 72_5\point2020_seg.txt
110 | 72_6\point2016.txt 72_6\point2020_seg.txt
111 | 73_1\point2016_seg.txt 73_1\point2020_seg.txt
112 | 73_2\point2016.txt 73_2\point2020_seg.txt
113 | 73_3\point2016.txt 73_3\point2020.txt
114 | 73_4\point2016.txt 73_4\point2020.txt
115 | 75_1\point2016.txt 75_1\point2020.txt
116 | 76_1\point2016.txt 76_1\point2020.txt
117 | 76_2\point2016.txt 76_2\point2020_seg.txt
118 | 78_1\point2016.txt 78_1\point2020.txt
119 | 79_1\point2016_seg.txt 79_1\point2020.txt
120 | 79_2\point2016.txt 79_2\point2020.txt
121 | 79_3\point2016_seg.txt 79_3\point2020.txt
122 | 79_4\point2016_seg.txt 79_4\point2020.txt
123 | 79_5\point2016_seg.txt 79_5\point2020.txt
124 | 79_6\point2016_seg.txt 79_6\point2020.txt
125 | 79_7\point2016_seg.txt 79_7\point2020_seg.txt
126 | 7_1\point2016.txt 7_1\point2020.txt
127 | 7_2\point2016.txt 7_2\point2020.txt
128 | 7_3\point2016.txt 7_3\point2020.txt
129 | 
--------------------------------------------------------------------------------
/process_data2accelerate.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import os.path as osp
4 | import random
5 | import numpy as np
6 | import configs as cfg
7 | import utils
8 | from tqdm import tqdm
9 | tcfg = cfg.CONFIGS['Train']
10 | import time
11 | 
12 | 
13 | def norm_data(p16, p20):
14 |     p16_raw, p20_raw = p16, p20
15 |     point_pair = np.vstack((p16, p20))
16 |     idx = np.where(np.any(point_pair != 0, axis=1))[0]  # rows that are not zero padding
17 |     point_set = point_pair[idx, :]
18 |     point_set = point_set - np.expand_dims(np.mean(point_set, axis=0), 0)  # center
19 |     dist = np.max(np.sqrt(np.sum(point_set ** 2, axis=1)), 0)
20 |     point_set = point_set / (dist + 1e-8)  # scale to a shared unit sphere
21 |     point_pair[idx, :] = point_set
22 |     p16 = point_pair[:tcfg.n_samples, :]
23 |     p20 = point_pair[tcfg.n_samples:, :]
24 |     return p16, p20, p16_raw, p20_raw
25 | 
26 | 
27 | class PrepareData():
28 |     def __init__(self, flag, data_path, txt_path, n_samples):
29 |         super(PrepareData, self).__init__()
30 |         self.flag = flag
31 |         self.data_path = data_path
32 |         self.txt_path = txt_path
33 |         self.n_samples = n_samples
34 |         with open(self.txt_path, 'r') as f:
35 |             self.list = f.readlines()
36 |         self.file_size = len(self.list)
37 | 
38 |     def load_and_pp_data(self):
39 |         for idx in tqdm(range(self.file_size)):
40 |             query_path = os.path.join(tcfg.path.prepare_data, self.flag, str(idx)+'.npy')
41 |             if os.path.exists(query_path):
42 |                 continue
43 |             p16_path = osp.join(self.data_path, self.list[idx].split(' ')[0])
44 |             p20_path = osp.join(self.data_path, self.list[idx].split(' ')[1].strip())
45 |             dir_name = self.list[idx].split(' ')[0].split('\\')[0]
46 |             pc0_name = self.list[idx].split(' ')[0].split('\\')[-1]
47 |             pc1_name = self.list[idx].split(' ')[1].split('\\')[-1].strip()
48 |             p16, p20, p16_raw_length, p20_raw_length = utils.align_length(p16_path, p20_path, self.n_samples)
49 |             p16_data = p16[:, :-1]; p20_data = p20[:, :-1]; p16_raw = p16_data; p20_raw = p20_data  # raw defaults when normalization is off
50 |             if tcfg.norm_data:
51 |                 p16_data, p20_data, p16_raw, p20_raw = norm_data(p16_data, p20_data)
52 |             label16, label20 = self.generate_label(p16, p20)
53 |             batch_data16 = self.process_data(p16_data)
54 |             batch_data20 = self.process_data(p20_data)
55 | 
56 |             p16ofp20 = utils.search_k_neighbors(p20_data[:, :3], p16_data[:, :3], tcfg.k_neighbors)
57 |             p20ofp16 = utils.search_k_neighbors(p16_data[:, :3], p20_data[:, :3], tcfg.k_neighbors)
58 | 
59 |             inputs16 = {}; inputs20 = {}
60 |             inputs16['xyz'] = [torch.from_numpy(data).float() for data in batch_data16[0]]
61 |             inputs16['neighbors_idx'] = [torch.from_numpy(data).long() for data in batch_data16[1]]
62 |             inputs16['pool_idx'] = [torch.from_numpy(data).long() for data in batch_data16[2]]
63 |             inputs16['unsam_idx'] = [torch.from_numpy(data).long() for data in batch_data16[3]]
64 |             inputs16['label'] = torch.from_numpy(label16).long()
65 |             inputs16['knearst_idx_in_another_pc'] = torch.from_numpy(p16ofp20).long()
66 |             inputs16['raw_length'] = p16_raw_length
67 |             inputs20['xyz'] = [torch.from_numpy(data).float() for data in batch_data20[0]]
68 |             inputs20['neighbors_idx'] = [torch.from_numpy(data).long() for data in batch_data20[1]]
69 |             inputs20['pool_idx'] = [torch.from_numpy(data).long() for data in batch_data20[2]]
70 |             inputs20['unsam_idx'] = [torch.from_numpy(data).long() for data in batch_data20[3]]
71 |             inputs20['label'] = torch.from_numpy(label20).long()
72 |             inputs20['knearst_idx_in_another_pc'] = torch.from_numpy(p20ofp16).long()
73 |             inputs20['raw_length'] = p20_raw_length
74 |             ppdata = inputs16, inputs20, dir_name, pc0_name, pc1_name, [p16_raw, p20_raw]
75 |             save = os.path.join(tcfg.path.prepare_data, self.flag)
76 |             if not os.path.exists(save):
77 |                 os.makedirs(save)
78 |             np.save(os.path.join(save, str(idx)+'.npy'), ppdata)
79 | 
80 |     def process_data(self, pc_data, subsam_rate=tcfg.sub_sampling_ratio):
81 |         if pc_data.shape[1] == 3:
82 |             xyz = pc_data
83 |         else:
84 |             xyz = pc_data[:, :3]
85 |         input_points = []
86 |         input_points.append(pc_data)
87 |         neighbors_idx = []
88 |         pool_idx = []
89 |         upsam_idx = []
90 |         for i in range(tcfg.num_layers):
91 |             k_neigh_idx = utils.search_k_neighbors(xyz, xyz, tcfg.k_neighbors)
92 |             sub_pc_data = pc_data[:pc_data.shape[0]//subsam_rate[i], :]
93 |             sub_xyz = xyz[:xyz.shape[0]//subsam_rate[i], :]
94 |             sub_idx = k_neigh_idx[:pc_data.shape[0]//subsam_rate[i], :]
95 |             up_idx = utils.search_k_neighbors(sub_xyz, xyz, 1)
96 |             input_points.append(sub_pc_data)
97 |             neighbors_idx.append(k_neigh_idx)
98 |             pool_idx.append(sub_idx)
99 |             upsam_idx.append(up_idx)
100 |             pc_data = sub_pc_data
101 |             xyz = sub_xyz
102 |         inputs_list = [input_points, neighbors_idx, pool_idx, upsam_idx]
103 | 
104 |         return inputs_list
105 | 
106 |     def generate_label(self, p16, p20):
107 |         label16 = np.expand_dims(p16[:, -1], 1)
108 |         label20 = np.expand_dims(p20[:, -1], 1)
109 |         return label16, label20
110 | 
111 | def prepare_data_train():
112 | 
113 |     pptrain = PrepareData('train', tcfg.path['train_dataset'], tcfg.path['train_txt'], tcfg.n_samples)
114 |     ppval = PrepareData('val', tcfg.path['val_dataset'], tcfg.path['val_txt'], tcfg.n_samples)
115 |     print('processing training data...')
116 |     pptrain.load_and_pp_data()
117 |     print('processing val data...')
118 |     ppval.load_and_pp_data()
119 |     print('data preparation finished.')
120 | 
121 | def prepare_data_test():
122 | 
123 |     pptest = PrepareData('test', tcfg.path['test_dataset'], tcfg.path['test_txt'], tcfg.n_samples)
124 |     print('processing testing data...')
125 |     pptest.load_and_pp_data()
126 | 
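127 | # Each prepared sample is cached as <prepare_data>/<flag>/<idx>.npy and bundles,
128 | # for both epochs: the per-layer point arrays, neighbour/pool/upsample indices,
129 | # labels, cross-cloud k-nearest indices and the raw (un-normalized) point clouds.
130 | 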
131 | if __name__ == '__main__':
132 |     prepare_data_train()
133 |     prepare_data_test()
--------------------------------------------------------------------------------
/dataset.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import os.path as osp
4 | import random
5 | from torch.utils.data import Dataset, DataLoader
6 | import numpy as np
7 | import configs as cfg
8 | import utils
9 | from tqdm import tqdm
10 | from process_data2accelerate import prepare_data_train, prepare_data_test, norm_data
11 | from RandomSampler import generate_removed_plane_dataset_train, generate_removed_plane_dataset_test
12 | tcfg = cfg.CONFIGS['Train']
13 | import time
14 | 
15 | class CDDataset(Dataset):
16 |     def __init__(self, data_path, txt_path, n_samples, flag, ppdata_path,):
17 |         super(CDDataset, self).__init__()
18 |         self.data_path = data_path
19 |         self.txt_path = txt_path
20 |         self.n_samples = n_samples
21 |         self.flag = flag
22 |         self.ppdata_path = ppdata_path
23 |         if self.flag == 'train' or self.flag == 'test':
24 |             if tcfg.remove_plane:
25 |                 if self.flag == 'train':
26 |                     generate_removed_plane_dataset_train(tcfg.path.data_root)
27 |                 if self.flag == 'test':
28 |                     generate_removed_plane_dataset_test(tcfg.path.data_root)
29 |             if tcfg.if_prepare_data:
30 |                 if self.flag == 'train':
31 |                     prepare_data_train()
32 |                 if self.flag == 'test':
33 |                     prepare_data_test()
34 |         with open(self.txt_path, 'r') as f:
35 |             self.list = f.readlines()
36 |         self.file_size = len(self.list)
37 | 
38 |     def __getitem__(self, idx):
39 |         if not tcfg.if_prepare_data:
40 |             p16_path = osp.join(self.data_path, self.list[idx].split(' ')[0])
41 |             p20_path = osp.join(self.data_path, self.list[idx].split(' ')[1].strip())
42 |             dir_name = self.list[idx].split(' ')[0].split('\\')[0]
43 |             pc0_name = self.list[idx].split(' ')[0].split('\\')[-1]
44 |             pc1_name = self.list[idx].split(' ')[1].split('\\')[-1].strip()
45 |             p16, p20, p16_raw_length, p20_raw_length = utils.align_length(p16_path, p20_path, self.n_samples)
46 |             p16_data = p16[:, :-1]; p20_data = p20[:, :-1]; p16_raw = p16_data; p20_raw = p20_data  # raw defaults when normalization is off
47 |             if tcfg.norm_data:
48 |                 p16_data, p20_data, p16_raw, p20_raw = norm_data(p16_data, p20_data)
49 |             label16, label20 = self.generate_label(p16, p20)
50 | 
51 |             batch_data16 = self.process_data(p16_data)
52 |             batch_data20 = self.process_data(p20_data)
53 | 
54 |             p16ofp20 = utils.search_k_neighbors(p20_data[:, :3], p16_data[:, :3], tcfg.k_neighbors)
55 |             p20ofp16 = utils.search_k_neighbors(p16_data[:, :3], p20_data[:, :3], tcfg.k_neighbors)
56 | 
57 |             inputs16 = {}; inputs20 = {}
58 |             inputs16['xyz'] = [torch.from_numpy(data).float() for data in batch_data16[0]]
59 |             inputs16['neighbors_idx'] = [torch.from_numpy(data).long() for data in batch_data16[1]]
60 |             inputs16['pool_idx'] = [torch.from_numpy(data).long() for data in batch_data16[2]]
61 |             inputs16['unsam_idx'] = [torch.from_numpy(data).long() for data in batch_data16[3]]
62 |             inputs16['label'] = torch.from_numpy(label16).long()
63 |             inputs16['knearst_idx_in_another_pc'] = torch.from_numpy(p16ofp20).long()
64 |             inputs16['raw_length'] = p16_raw_length
65 |             inputs20['xyz'] = [torch.from_numpy(data).float() for data in batch_data20[0]]
66 |             inputs20['neighbors_idx'] = [torch.from_numpy(data).long() for data in batch_data20[1]]
67 | 
inputs20['pool_idx'] = [torch.from_numpy(data).long() for data in batch_data20[2]] 68 | inputs20['unsam_idx'] = [torch.from_numpy(data).long() for data in batch_data20[3]] 69 | inputs20['label'] = torch.from_numpy(label20).long() 70 | inputs20['knearst_idx_in_another_pc'] = torch.from_numpy(p20ofp16).long() 71 | inputs20['raw_length'] = p20_raw_length 72 | 73 | if not tcfg.use_rgb: 74 | inputs16['xyz'] = [i[:, :3] for i in inputs16['xyz']] 75 | inputs20['xyz'] = [i[:, :3] for i in inputs20['xyz']] 76 | 77 | return inputs16, inputs20, dir_name, pc0_name, pc1_name, [p16_raw, p20_raw] 78 | 79 | else: 80 | ppdata = np.load(os.path.join(self.ppdata_path, self.flag, str(idx)+'.npy'), allow_pickle=True) 81 | inputs16, inputs20, dir_name, pc0_name, pc1_name, [p16_raw, p20_raw] = ppdata 82 | if not tcfg.use_rgb: 83 | inputs16['xyz'] = [i[:, :3] for i in inputs16['xyz']] 84 | inputs20['xyz'] = [i[:, :3] for i in inputs20['xyz']] 85 | return inputs16, inputs20, dir_name, pc0_name, pc1_name, [p16_raw, p20_raw] 86 | 87 | def __len__(self): 88 | return self.file_size 89 | 90 | def process_data(self, pc_data, subsam_rate=tcfg.sub_sampling_ratio): 91 | if pc_data.shape[1] == 3: 92 | xyz = pc_data 93 | else: 94 | xyz = pc_data[:, :3] 95 | input_points = [] 96 | input_points.append(pc_data) 97 | neighbors_idx = [] 98 | pool_idx = [] 99 | upsam_idx = [] 100 | for i in range(tcfg.num_layers): 101 | k_neigh_idx = utils.search_k_neighbors(xyz, xyz, tcfg.k_neighbors) 102 | sub_pc_data = pc_data[:pc_data.shape[0]//subsam_rate[i], :] 103 | sub_xyz = xyz[:xyz.shape[0]//subsam_rate[i], :] 104 | sub_idx = k_neigh_idx[:pc_data.shape[0]//subsam_rate[i], :] 105 | up_idx = utils.search_k_neighbors(sub_xyz, xyz, 1) 106 | input_points.append(sub_pc_data) 107 | neighbors_idx.append(k_neigh_idx) 108 | pool_idx.append(sub_idx) 109 | upsam_idx.append(up_idx) 110 | pc_data = sub_pc_data 111 | xyz = sub_xyz 112 | inputs_list = [input_points, neighbors_idx, pool_idx, upsam_idx] 113 | 114 | return inputs_list 115 | 116 | def generate_label(self, p16, p20): 117 | label16 = np.expand_dims(p16[:, -1], 1) 118 | label20 = np.expand_dims(p20[:, -1], 1) 119 | return label16, label20 120 | 121 | if __name__ == '__main__': 122 | 123 | train_data = CDDataset(tcfg.path['train_dataset'], tcfg.path['train_txt'], tcfg.n_samples, 'train', tcfg.path.prepare_data) 124 | train_dataloader = DataLoader(train_data, batch_size=2, shuffle=False) 125 | for i, data in enumerate(train_dataloader): 126 | data0, data1, _, _, _, _ = data 127 | 128 | print(data0['xyz'][0].shape, 129 | data0['neighbors_idx'][0].shape, 130 | data0['pool_idx'][0].shape, 131 | data0['unsam_idx'][0].shape, 132 | data0['label'].shape, 133 | data0['raw_length']) 134 | xyz = data0['xyz'] 135 | for j in range(5): 136 | print(xyz[j].shape) 137 | break 138 | """ 139 | output: 140 | torch.Size([2, 8192, 3]) torch.Size([2, 8192, 16]) torch.Size([2, 2048, 16]) torch.Size([2, 8192, 1]) torch.Size([2, 8192, 1]) tensor([8192, 8192]) 141 | torch.Size([2, 8192, 3]) 142 | torch.Size([2, 2048, 3]) 143 | torch.Size([2, 512, 3]) 144 | torch.Size([2, 128, 3]) 145 | torch.Size([2, 32, 3]) 146 | 147 | """ -------------------------------------------------------------------------------- /metrics.py: -------------------------------------------------------------------------------- 1 | import os 2 | from sklearn.metrics import roc_curve, auc, average_precision_score, f1_score, confusion_matrix 3 | import threading 4 | from scipy.optimize import brentq 5 | from scipy.interpolate import interp1d 6 | import 
matplotlib.pyplot as plt
7 | import numpy as np
8 | from torch.nn import functional as F
9 | import torch
10 | import configs as cfg
11 | tcfg = cfg.CONFIGS['Train']
12 | 
13 | def evaluate(labels, scores, metric, best_auc):
14 |     if metric == 'roc':
15 |         return roc(labels, scores, best_auc)
16 |     elif metric == 'auprc':
17 |         return auprc(labels, scores)
18 |     elif metric == 'f1_score':
19 |         threshold = 0.50
20 |         scores[scores >= threshold] = 1
21 |         scores[scores < threshold] = 0
22 |         return f1_score(labels.cpu(), scores.cpu())
23 |     else:
24 |         raise NotImplementedError("Check the evaluation metric.")
25 | 
26 | ##
27 | def roc(labels, scores, best_auc, saveto='./outputs', ):
28 |     """Compute ROC curve and ROC area for each class"""
29 |     fpr = dict()
30 |     tpr = dict()
31 |     roc_auc = dict()
32 | 
33 |     labels = labels.cpu()
34 |     scores = scores.cpu()
35 | 
36 |     # True/False Positive Rates.
37 |     fpr, tpr, _ = roc_curve(labels, scores)
38 |     roc_auc = auc(fpr, tpr)
39 | 
40 |     # Equal Error Rate
41 |     eer = brentq(lambda x: 1. - x - interp1d(fpr, tpr)(x), 0., 1.)
42 | 
43 |     if saveto:
44 |         plt.figure()
45 |         lw = 2
46 |         plt.plot(fpr, tpr, color='darkorange', lw=lw, label='(AUC = %0.3f, EER = %0.3f)' % (roc_auc, eer))
47 |         # plt.plot([eer], [1-eer], marker='o', markersize=5, color="navy")
48 |         plt.plot([0, 1], [1, 0], color='navy', lw=1, linestyle=':')
49 |         plt.xlim([0.0, 1.0])
50 |         plt.ylim([0.0, 1.05])
51 |         plt.xlabel('False Positive Rate')
52 |         plt.ylabel('True Positive Rate')
53 |         plt.title('Receiver operating characteristic')
54 |         plt.legend(loc="lower right")
55 |         plt.savefig(os.path.join(saveto, "Current_Epoch_ROC.pdf"))
56 |         if roc_auc > best_auc:
57 |             plt.savefig(os.path.join(saveto, "Best_ROC.pdf"))
58 |         plt.close()
59 | 
60 |     return roc_auc
61 | 
62 | def auprc(labels, scores):
63 |     ap = average_precision_score(labels.cpu(), scores.cpu())
64 |     return ap
65 | 
66 | class cal_distance(torch.nn.Module):
67 |     def __init__(self):
68 |         super(cal_distance, self).__init__()
69 | 
70 |     def forward(self, output1, output2):
71 |         euclidean_distance = F.pairwise_distance(output1, output2, p=2)  # L2 distance
72 |         return euclidean_distance
73 | 
74 | def confuse_matrix(score0, score1, lb0, lb1, raw_length, pc0_name, pc1_name):
75 |     def cal_matrix(score, lb, length):
76 |         threshold = 0.5
77 |         lb = lb.detach().squeeze(0).cpu().numpy()
78 |         score = np.array(score.detach().squeeze(0).cpu())
79 |         if length < tcfg.n_samples:
80 |             score = score[0:length]
81 |             lb = lb[0:length]
82 |         score[score <= threshold] = 0.0
83 |         score[score > threshold] = 1.0
84 |         tp = np.sum(lb*score)
85 |         fn = lb - score
86 |         fn[fn < 0] = 0
87 |         fn = np.sum(fn)
88 |         tn = lb + score
89 |         tn[tn > 0] = -1
90 |         tn[tn >= 0] = 1
91 |         tn[tn < 0] = 0
92 |         tn = np.sum(tn)
93 |         fp = score - lb
94 |         fp[fp < 0] = 0
95 |         fp = np.sum(fp)
96 |         return tp, fp, tn, fn
97 |     p0_raw_length, p1_raw_length = raw_length
98 |     tp0, fp0, tn0, fn0 = cal_matrix(score0, lb0, p0_raw_length)
99 |     tp1, fp1, tn1, fn1 = cal_matrix(score1, lb1, p1_raw_length)
100 | 
101 |     return tp0+tp1, fp0+fp1, tn0+tn1, fn0+fn1
102 | 
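103 | # The confusion counts above and in confuse_matrix2 below reduce to mask
104 | # arithmetic on the 0/1 arrays: TP = sum(lb*score), FN = sum(max(lb-score, 0)),
105 | # FP = sum(max(score-lb, 0)), and TN counts the positions where lb + score == 0.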
106 | def confuse_matrix2(score0, score1, lb0, lb1, raw_length, pc0_name, pc1_name):
107 |     def cal_matrix(score, lb, length):
108 |         threshold = 0.5
109 |         lb = lb.detach().squeeze(0).cpu().numpy()
110 |         score = np.array(score.detach().squeeze(0).cpu())
111 |         if length < tcfg.n_samples:
112 |             score = score[0:length]
113 |             lb = lb[0:length]
114 |         score[score <= threshold] = 0.0
115 |         score[score > threshold] = 1.0
116 |         tp = np.sum(lb*score)
117 |         fn = lb - score
118 |         fn[fn < 0] = 0
119 |         fn = np.sum(fn)
120 |         tn = lb + score
121 |         tn[tn > 0] = -1
122 |         tn[tn >= 0] = 1
123 |         tn[tn < 0] = 0
124 |         tn = np.sum(tn)
125 |         fp = score - lb
126 |         fp[fp < 0] = 0
127 |         fp = np.sum(fp)
128 |         return tp, fp, tn, fn
129 |     p0_raw_length, p1_raw_length = raw_length
130 |     tp0, fp0, tn0, fn0 = cal_matrix(score0, lb0, p0_raw_length)
131 |     tp1, fp1, tn1, fn1 = cal_matrix(score1, lb1, p1_raw_length)
132 | 
133 |     return tp0, fp0, tn0, fn0, tp1, fp1, tn1, fn1
134 | 
135 | def eva_metrics(TP, FP, TN, FN):
136 |     precision = TP/(TP+FP+1e-8)
137 |     oa = (TP+TN)/(TP+FN+TN+FP+1e-8)
138 |     recall = TP/(TP+FN+1e-8)
139 |     f1 = 2*precision*recall/(precision+recall+1e-8)
140 |     iou = TP/(FN+TP+FP+1e-8)
141 |     P = ((TP+FP)*(TP+FN)+(FN+TN)*(FP+TN))/((TP+TN+FP+FN)**2+1e-8)
142 |     kappa = (oa-P)/(1-P+1e-8)
143 |     results = {'iou':iou,'precision':precision,'oa':oa,'recall':recall,'f1':f1,'kappa':kappa}
144 |     return results
145 | 
146 | class IoUCalculator:
147 |     def __init__(self,):
148 |         self.num_classes = 3
149 |         self.gt_classes = [0 for _ in range(self.num_classes)]
150 |         self.positive_classes = [0 for _ in range(self.num_classes)]
151 |         self.true_positive_classes = [0 for _ in range(self.num_classes)]
152 |         self.lock = threading.Lock()
153 |         self.val_total_correct = 0
154 |         self.val_total_seen = 0
155 | 
156 |     def add_data(self, logits0, logits1, labels0, labels1):
157 |         pred0 = logits0.detach().cpu().numpy()
158 |         pred1 = logits1.detach().cpu().numpy()
159 |         pred_valid = np.hstack((pred0, pred1*2))
160 | 
161 |         lb0 = labels0.detach().cpu().numpy()
162 |         lb1 = labels1.detach().cpu().numpy()
163 |         labels_valid = np.hstack((lb0, lb1*2))
164 | 
165 |         correct = np.sum(pred_valid == labels_valid)
166 |         self.val_total_correct += correct
167 |         self.val_total_seen += len(labels_valid)
168 | 
169 |         conf_matrix = confusion_matrix(y_true=labels_valid, y_pred=pred_valid, labels=np.arange(0, self.num_classes, 1))
170 |         self.lock.acquire()
171 |         self.gt_classes += np.sum(conf_matrix, axis=1)
172 |         self.positive_classes += np.sum(conf_matrix, axis=0)
173 |         self.true_positive_classes += np.diagonal(conf_matrix)
174 |         self.lock.release()
175 | 
176 |     def compute_iou(self):
177 |         iou_list = []
178 |         for n in range(0, self.num_classes, 1):
179 |             if float(self.gt_classes[n] + self.positive_classes[n] - self.true_positive_classes[n]) != 0:
180 |                 iou = self.true_positive_classes[n] / \
181 |                     float(self.gt_classes[n] + self.positive_classes[n] - self.true_positive_classes[n])
182 |                 iou_list.append(iou)
183 |             else:
184 |                 iou_list.append(0.0)
185 |         mean_iou = sum(iou_list) / float(self.num_classes)
186 |         return mean_iou, iou_list
187 | 
188 |     def compute_oa(self):
189 |         return self.val_total_correct/self.val_total_seen
190 | 
191 |     def metrics(self):
192 |         mean_iou, iou_list = self.compute_iou()
193 |         oa = self.compute_oa()
194 |         return {'miou': mean_iou, 'iou_list': iou_list, 'oa': oa}
195 | 
--------------------------------------------------------------------------------
/pytorch_utils.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | from typing import List, Tuple
3 | 
4 | 
5 | class SharedMLP(nn.Sequential):
6 | 
7 |     def __init__(
8 |         self,
9 |         args: List[int],
10 |         *,
11 |         bn: bool = False,
12 |         activation=nn.ReLU(inplace=True),
13 |         preact: bool = False,
14 |         first: bool = False,
15 |         name: str = "",
16 |         instance_norm: bool = False
17 |     ):
18 |         super().__init__()
19 | 
20 |         for i in range(len(args) - 1):
21 |             self.add_module(
22 |                 name + 'layer{}'.format(i),
23 |                 Conv2d(
24 |                     args[i],
25 |                     args[i + 1],
26 |                     bn=(not first or not 
preact or (i != 0)) and bn, 27 | activation=activation 28 | if (not first or not preact or (i != 0)) else None, 29 | preact=preact, 30 | instance_norm=instance_norm 31 | ) 32 | ) 33 | 34 | 35 | class _ConvBase(nn.Sequential): 36 | 37 | def __init__( 38 | self, 39 | in_size, 40 | out_size, 41 | kernel_size, 42 | stride, 43 | padding, 44 | activation, 45 | bn, 46 | init, 47 | conv=None, 48 | batch_norm=None, 49 | bias=True, 50 | preact=False, 51 | name="", 52 | instance_norm=False, 53 | instance_norm_func=None 54 | ): 55 | super().__init__() 56 | 57 | bias = bias and (not bn) 58 | conv_unit = conv( 59 | in_size, 60 | out_size, 61 | kernel_size=kernel_size, 62 | stride=stride, 63 | padding=padding, 64 | bias=bias 65 | ) 66 | init(conv_unit.weight) 67 | if bias: 68 | nn.init.constant_(conv_unit.bias, 0) 69 | 70 | if bn: 71 | if not preact: 72 | bn_unit = batch_norm(out_size) 73 | else: 74 | bn_unit = batch_norm(in_size) 75 | if instance_norm: 76 | if not preact: 77 | in_unit = instance_norm_func(out_size, affine=False, track_running_stats=False) 78 | else: 79 | in_unit = instance_norm_func(in_size, affine=False, track_running_stats=False) 80 | 81 | if preact: 82 | if bn: 83 | self.add_module(name + 'bn', bn_unit) 84 | 85 | if activation is not None: 86 | self.add_module(name + 'activation', activation) 87 | 88 | if not bn and instance_norm: 89 | self.add_module(name + 'in', in_unit) 90 | 91 | self.add_module(name + 'conv', conv_unit) 92 | 93 | if not preact: 94 | if bn: 95 | self.add_module(name + 'bn', bn_unit) 96 | 97 | if activation is not None: 98 | self.add_module(name + 'activation', activation) 99 | 100 | if not bn and instance_norm: 101 | self.add_module(name + 'in', in_unit) 102 | 103 | 104 | class _BNBase(nn.Sequential): 105 | 106 | def __init__(self, in_size, batch_norm=None, name=""): 107 | super().__init__() 108 | self.add_module(name + "bn", batch_norm(in_size, eps=1e-6, momentum=0.99)) 109 | 110 | nn.init.constant_(self[0].weight, 1.0) 111 | nn.init.constant_(self[0].bias, 0) 112 | 113 | 114 | class BatchNorm1d(_BNBase): 115 | 116 | def __init__(self, in_size: int, *, name: str = ""): 117 | super().__init__(in_size, batch_norm=nn.BatchNorm1d, name=name) 118 | 119 | 120 | class BatchNorm2d(_BNBase): 121 | 122 | def __init__(self, in_size: int, name: str = ""): 123 | super().__init__(in_size, batch_norm=nn.BatchNorm2d, name=name) 124 | 125 | 126 | class Conv1d(_ConvBase): 127 | 128 | def __init__( 129 | self, 130 | in_size: int, 131 | out_size: int, 132 | *, 133 | kernel_size: int = 1, 134 | stride: int = 1, 135 | padding: int = 0, 136 | activation=nn.LeakyReLU(negative_slope=0.2, inplace=True), 137 | # activation=nn.ReLU(inplace=True), 138 | # activation=nn.Sigmoid(), 139 | bn: bool = False, 140 | init=nn.init.kaiming_normal_, 141 | bias: bool = True, 142 | preact: bool = False, 143 | name: str = "", 144 | instance_norm=False 145 | ): 146 | super().__init__( 147 | in_size, 148 | out_size, 149 | kernel_size, 150 | stride, 151 | padding, 152 | activation, 153 | bn, 154 | init, 155 | conv=nn.Conv1d, 156 | batch_norm=BatchNorm1d, 157 | bias=bias, 158 | preact=preact, 159 | name=name, 160 | instance_norm=instance_norm, 161 | instance_norm_func=nn.InstanceNorm1d 162 | ) 163 | 164 | 165 | class Conv2d(_ConvBase): 166 | 167 | def __init__( 168 | self, 169 | in_size: int, 170 | out_size: int, 171 | *, 172 | kernel_size: Tuple[int, int] = (1, 1), 173 | stride: Tuple[int, int] = (1, 1), 174 | padding: Tuple[int, int] = (0, 0), 175 | activation=nn.LeakyReLU(negative_slope=0.2, inplace=True), 
176 |         # activation=nn.ReLU(inplace=True),
177 |         # activation=nn.Sigmoid(),
178 |         bn: bool = False,
179 |         init=nn.init.kaiming_normal_,
180 |         bias: bool = True,
181 |         preact: bool = False,
182 |         name: str = "",
183 |         instance_norm=False
184 |     ):
185 |         super().__init__(
186 |             in_size,
187 |             out_size,
188 |             kernel_size,
189 |             stride,
190 |             padding,
191 |             activation,
192 |             bn,
193 |             init,
194 |             conv=nn.Conv2d,
195 |             batch_norm=BatchNorm2d,
196 |             bias=bias,
197 |             preact=preact,
198 |             name=name,
199 |             instance_norm=instance_norm,
200 |             instance_norm_func=nn.InstanceNorm2d
201 |         )
202 | 
203 | 
204 | class FC(nn.Sequential):
205 | 
206 |     def __init__(
207 |         self,
208 |         in_size: int,
209 |         out_size: int,
210 |         *,
211 |         activation=nn.ReLU(inplace=True),
212 |         bn: bool = False,
213 |         init=None,
214 |         preact: bool = False,
215 |         name: str = ""
216 |     ):
217 |         super().__init__()
218 | 
219 |         fc = nn.Linear(in_size, out_size, bias=not bn)
220 |         if init is not None:
221 |             init(fc.weight)
222 |         if not bn:
223 |             nn.init.constant_(fc.bias, 0)
224 | 
225 |         if preact:
226 |             if bn:
227 |                 self.add_module(name + 'bn', BatchNorm1d(in_size))
228 | 
229 |             if activation is not None:
230 |                 self.add_module(name + 'activation', activation)
231 | 
232 |         self.add_module(name + 'fc', fc)
233 | 
234 |         if not preact:
235 |             if bn:
236 |                 self.add_module(name + 'bn', BatchNorm1d(out_size))
237 | 
238 |             if activation is not None:
239 |                 self.add_module(name + 'activation', activation)
240 | 
241 | 
242 | def set_bn_momentum_default(bn_momentum):
243 | 
244 |     def fn(m):
245 |         if isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)):
246 |             m.momentum = bn_momentum
247 | 
248 |     return fn
249 | 
250 | 
251 | class BNMomentumScheduler(object):
252 | 
253 |     def __init__(
254 |         self, model, bn_lambda, last_epoch=-1,
255 |         setter=set_bn_momentum_default
256 |     ):
257 |         if not isinstance(model, nn.Module):
258 |             raise RuntimeError(
259 |                 "Class '{}' is not a PyTorch nn Module".format(
260 |                     type(model).__name__
261 |                 )
262 |             )
263 | 
264 |         self.model = model
265 |         self.setter = setter
266 |         self.lmbd = bn_lambda
267 | 
268 |         self.step(last_epoch + 1)
269 |         self.last_epoch = last_epoch
270 | 
271 |     def step(self, epoch=None):
272 |         if epoch is None:
273 |             epoch = self.last_epoch + 1
274 | 
275 |         self.last_epoch = epoch
276 |         self.model.apply(self.setter(self.lmbd(epoch)))
277 | 
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: UTF-8 -*-
3 | import os
4 | import time
5 | import visdom
6 | import shutil
7 | import torch
8 | import torch.nn as nn
9 | import torch.nn.functional as F
10 | from torch.utils.data import Dataset, DataLoader
11 | import torch.optim as optim
12 | import numpy as np
13 | 
14 | import utils as utl
15 | import configs as cfg
16 | import metrics as mc
17 | from net import Siam3DCDNet
18 | from dataset import CDDataset
19 | import test
20 | 
21 | 
22 | def train_network(tcfg, vis):
23 | 
24 |     utl.save_cfg(tcfg, tcfg.path['outputs'])
25 |     weights_save_dir = tcfg.path['weights_save_dir']
26 | 
27 |     init_epoch = 0
28 |     best_metric = -0.001
29 |     total_steps = 0
30 | 
31 |     device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
32 |     torch.backends.cudnn.benchmark = True
33 | 
34 |     train_txt = tcfg.path['train_txt']
35 |     val_txt = tcfg.path['val_txt']
36 |     train_data = CDDataset(tcfg.path['train_dataset'], tcfg.path['train_txt'], tcfg.n_samples, 'train', tcfg.path.prepare_data)
/train.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: UTF-8 -*-
3 | import os
4 | import time
5 | import visdom
6 | import shutil
7 | import torch
8 | import torch.nn as nn
9 | import torch.nn.functional as F
10 | from torch.utils.data import Dataset, DataLoader
11 | import torch.optim as optim
12 | import numpy as np
13 |
14 | import utils as utl
15 | import configs as cfg
16 | import metrics as mc
17 | from net import Siam3DCDNet
18 | from dataset import CDDataset
19 | import test
20 |
21 |
22 | def train_network(tcfg, vis):
23 |
24 | utl.save_cfg(tcfg, tcfg.path['outputs'])
25 | weights_save_dir = tcfg.path['weights_save_dir']
26 |
27 | init_epoch = 0
28 | best_metric = -0.001
29 | total_steps = 0
30 |
31 | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
32 | torch.backends.cudnn.benchmark = True
33 |
34 | train_txt = tcfg.path['train_txt']
35 | val_txt = tcfg.path['val_txt']
36 | train_data = CDDataset(tcfg.path['train_dataset'], tcfg.path['train_txt'], tcfg.n_samples, 'train', tcfg.path.prepare_data)
37 | train_dataloader = DataLoader(train_data, batch_size=tcfg.batch_size, shuffle=True)
38 | val_data = CDDataset(tcfg.path['val_dataset'], tcfg.path['val_txt'], tcfg.n_samples, 'val', tcfg.path.prepare_data)
39 | val_dataloader = DataLoader(val_data, batch_size=1, shuffle=False)
40 |
41 | if not tcfg.resume:
42 | if torch.cuda.device_count() > 1:
43 | print("Let's use", torch.cuda.device_count(), "GPUs!")
44 | net = nn.DataParallel(Siam3DCDNet(tcfg.in_dim, tcfg.out_dim)).to(device)
45 | else:
46 | net = Siam3DCDNet(tcfg.in_dim, tcfg.out_dim).to(device)
47 |
48 | if tcfg.resume:
49 | net = Siam3DCDNet(tcfg.in_dim, tcfg.out_dim).to(device)
50 | assert os.path.exists(os.path.join(weights_save_dir, 'current_net.pth')), 'No saved weights found to resume from'
51 | print("\nLoading pre-trained networks...")
52 | init_epoch = torch.load(os.path.join(weights_save_dir, 'current_net.pth'))['epoch']
53 | net.load_state_dict(torch.load(os.path.join(weights_save_dir, 'current_net.pth'))['model_state_dict'])
54 | with open(os.path.join(tcfg.path['outputs'], 'val_metric.txt')) as f:
55 | lines = f.readlines()
56 | best_metric = float(lines[-1].strip().split(':')[-1])
57 | print("\tDone.\n")
58 | if torch.cuda.device_count() > 1:
59 | net = nn.DataParallel(net).to(device)
60 |
61 | weight = torch.from_numpy(np.array(2.0)).to(device)
62 | bce = nn.BCELoss(weight=weight)
63 | optimizer = optim.Adam(net.parameters(), lr=tcfg.optimizer['lr'], betas=(0.5, 0.999))
64 | # optimizer = optim.SGD(net.parameters(), lr=tcfg.optimizer['lr'], momentum=tcfg.optimizer['momentum'], weight_decay=tcfg.optimizer['weight_decay'])
65 | scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=tcfg.optimizer['lr_step_size'], gamma=tcfg.optimizer['gamma'])
66 | start_time = time.time()
67 | for epoch in range(init_epoch, tcfg.epoch):
68 | loss = []
69 | net.train()
70 | epoch_iter = 0
71 | for i, data in enumerate(train_dataloader):
72 |
73 | batch_data0, batch_data1, _, _, _, _ = data
74 |
75 | p0, p0_neighbors_idx, p0_pool_idx, p0_unsam_idx, lb0, knearest_idx0, _ = [i for i in batch_data0.values()]
76 | p1, p1_neighbors_idx, p1_pool_idx, p1_unsam_idx, lb1, knearest_idx1, _ = [i for i in batch_data1.values()]
77 | p0 = [i.to(device) for i in p0]
78 | p0_neighbors_idx = [i.to(device) for i in p0_neighbors_idx]
79 | p0_pool_idx = [i.to(device) for i in p0_pool_idx]
80 | p0_unsam_idx = [i.to(device) for i in p0_unsam_idx]
81 | p1 = [i.to(device) for i in p1]
82 | p1_neighbors_idx = [i.to(device) for i in p1_neighbors_idx]
83 | p1_pool_idx = [i.to(device) for i in p1_pool_idx]
84 | p1_unsam_idx = [i.to(device) for i in p1_unsam_idx]
85 | knearest_idx0 = knearest_idx0.to(device)
86 | knearest_idx1 = knearest_idx1.to(device)
87 | knearest_idx = [knearest_idx0, knearest_idx1]
88 |
89 | lb0 = lb0.squeeze(-1).to(device, dtype=torch.long)
90 | lb1 = lb1.squeeze(-1).to(device, dtype=torch.long)
91 |
92 | fused_lb0 = torch.max(lb0, torch.gather(lb1, 1, knearest_idx0[:,:,0].squeeze(-1)))
93 | fused_lb1 = torch.max(lb1, torch.gather(lb0, 1, knearest_idx1[:,:,0].squeeze(-1)))
94 |
95 | epoch_iter += tcfg.batch_size
96 | total_steps += tcfg.batch_size
97 | # forward
98 | out0, out1 = net([p0, p0_neighbors_idx, p0_pool_idx, p0_unsam_idx],
99 | [p1, p1_neighbors_idx, p1_pool_idx, p1_unsam_idx],
100 | knearest_idx)
101 | err = F.nll_loss(out0.reshape(-1, 2), lb0.reshape(-1)) + F.nll_loss(out1.reshape(-1, 2), lb1.reshape(-1))
102 | # backward
103 | optimizer.zero_grad()
104 | err.backward()
105 | optimizer.step()
106 |
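# NOTE: F.nll_loss expects log-probabilities, so Siam3DCDNet presumably
# ends in a log_softmax over the two change / no-change classes. The
# BCELoss constructed above is defined but never used in this loop, and
# fused_lb0 / fused_lb1 are computed but not consumed.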
107 | errors = utl.get_errors(err)
108 | loss.append(err.item())
109 |
110 | counter_ratio = float(epoch_iter) / len(train_dataloader.dataset)
111 | if (i % tcfg.print_rate == 0 and i > 0):
112 | print('Time:{},epoch:{},iteration:{},loss:{}'.format(time.strftime('%Y-%m-%d %H:%M:%S'), epoch, i, np.mean(loss)))
113 | with open(os.path.join(tcfg.path['outputs'],'train_loss.txt'),'a') as f:
114 | f.write('Time:{},epoch:{}, iteration:{}, loss:{}'.format(time.strftime('%Y-%m-%d %H:%M:%S'), epoch, i, np.mean(loss)))
115 | f.write('\n')
116 | if tcfg.display:
117 | utl.plot_current_errors(epoch, counter_ratio, errors, vis)
118 | utl.mkdir(weights_save_dir)
119 | utl.save_weights(epoch, net, optimizer, weights_save_dir, 'net')
120 | scheduler.step()
121 | duration = time.time() - start_time
122 | print('training duration: {}, lr: {}'.format(duration, optimizer.state_dict()['param_groups'][0]['lr']))
123 |
124 |
125 | # val_phase
126 | print('Validating................')
127 | net.eval()
128 | with torch.no_grad():
129 | TP = 0
130 | FN = 0
131 | FP = 0
132 | TN = 0
133 | iou_calc = mc.IoUCalculator()
134 | for k, data in enumerate(val_dataloader):
135 |
136 | batch_data0, batch_data1, dir_name, pc0_name, pc1_name, raw_data = data
137 | p0, p0_neighbors_idx, p0_pool_idx, p0_unsam_idx, lb0, knearest_idx0, raw_length0 = [i for i in batch_data0.values()]
138 | p1, p1_neighbors_idx, p1_pool_idx, p1_unsam_idx, lb1, knearest_idx1, raw_length1 = [i for i in batch_data1.values()]
139 | p0 = [i.to(device) for i in p0]
140 | p0_neighbors_idx = [i.to(device) for i in p0_neighbors_idx]
141 | p0_pool_idx = [i.to(device) for i in p0_pool_idx]
142 | p0_unsam_idx = [i.to(device) for i in p0_unsam_idx]
143 | p1 = [i.to(device) for i in p1]
144 | p1_neighbors_idx = [i.to(device) for i in p1_neighbors_idx]
145 | p1_pool_idx = [i.to(device) for i in p1_pool_idx]
146 | p1_unsam_idx = [i.to(device) for i in p1_unsam_idx]
147 | knearest_idx = [knearest_idx0.to(device), knearest_idx1.to(device)]
148 |
149 | lb0 = lb0.squeeze(-1).to(device)
150 | lb1 = lb1.squeeze(-1).to(device)
151 |
152 | time_i = time.time()
153 | v_out0, v_out1 = net([p0, p0_neighbors_idx, p0_pool_idx, p0_unsam_idx],
154 | [p1, p1_neighbors_idx, p1_pool_idx, p1_unsam_idx],
155 | knearest_idx)
156 | v_out0 = v_out0.max(dim=-1)[1]; v_out1 = v_out1.max(dim=-1)[1]
157 | iou_calc.add_data(v_out0.squeeze(0), v_out1.squeeze(0), lb0.squeeze(0), lb1.squeeze(0))
158 | # if tcfg.save_prediction: # if save validation prediction
159 | if False:
160 | utl.save_prediction2(raw_data[0], raw_data[1],
161 | lb0, lb1,
162 | v_out0.squeeze(-1), v_out1.squeeze(-1),
163 | tcfg.plane_threshold, os.path.join(tcfg.path['val_prediction'], str(dir_name[0])),
164 | pc0_name, pc1_name,
165 | tcfg.path['train_dataset'])
166 | metrics = iou_calc.metrics()
167 | criterion = tcfg.criterion
168 |
169 | cur_metric = metrics[criterion]
170 | utl.mkdir(tcfg.path['best_weights_save_dir'])
171 | if cur_metric > best_metric:
172 | best_metric = cur_metric
173 | shutil.copy(os.path.join(tcfg.path['weights_save_dir'],'current_net.pth'),os.path.join(tcfg.path['best_weights_save_dir'], 'best_net.pth'))
174 | with open(os.path.join(tcfg.path['outputs'],'val_metric.txt'),'a') as f:
175 | f.write('Time:{},current_epoch:{},criterion: {}, current_metric:{},best_metric:{}'.format(time.strftime('%Y-%m-%d %H:%M:%S'), \
176 | epoch, criterion, cur_metric, best_metric))
177 | f.write('\n')
178 | with open(os.path.join(tcfg.path['outputs'], 'val_performance.txt'),'a') as f:
179 | f.write('Time:{},current_epoch:{},miou:{:.6f},oa:{:.6f},iou_list:{}'.format(time.strftime('%Y-%m-%d %H:%M:%S'), \
180 | epoch, metrics['miou'], metrics['oa'], metrics['iou_list']))
181 | f.write('\n')
182 | print('{}: current metric {}, best metric {}'.format(criterion, cur_metric, best_metric))
183 |
184 | if __name__ == '__main__':
185 |
186 | tcfg = cfg.CONFIGS['Train']
187 | if tcfg.display:
188 | vis = visdom.Visdom(server="http://localhost", port=8097)
189 | else:
190 | vis = None
191 |
192 | train_network(tcfg, vis)
193 | test.test_network(tcfg)
194 |
195 |
196 |
--------------------------------------------------------------------------------
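A minimal sketch (not part of the repository) of reloading the best checkpoint that train.py writes above, e.g. for offline evaluation. The dict keys mirror utils.save_weights; the CPU map_location and variable names are illustrative.

    import os
    import torch
    import configs as cfg
    from net import Siam3DCDNet

    tcfg = cfg.CONFIGS['Train']
    net = Siam3DCDNet(tcfg.in_dim, tcfg.out_dim)
    # Keys mirror the dict built in utils.save_weights:
    # epoch, model_state_dict, optimizer_state_dict, learning_rate.
    ckpt = torch.load(os.path.join(tcfg.path['best_weights_save_dir'], 'best_net.pth'),
                      map_location='cpu')
    net.load_state_dict(ckpt['model_state_dict'])
    net.eval()
    print('best checkpoint was saved at epoch', ckpt['epoch'])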
/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os
3 | import os.path as osp
4 | import random
5 | import time
6 | import torch
7 | from sklearn.neighbors import KDTree
8 | import open3d as o3d
9 | from collections import OrderedDict
10 | import configs as cfg
11 | tcfg = cfg.CONFIGS['Train']
12 |
13 | # np.random.seed(0)
14 |
15 | def mkdir(path):
16 | if not osp.exists(path):
17 | os.makedirs(path)
18 |
19 | def txt2sample(path):
20 |
21 | index = ['X', 'Y', 'Z', 'Rf', 'Gf', 'Bf', 'label']
22 | with open(path, 'r') as f:
23 | lines = f.readlines()
24 | head = lines[0][2:].strip('\n').split(' ')
25 | ids = tuple([head.index(i) for i in index])
26 | points = np.loadtxt(path, skiprows=2, usecols = ids)
27 |
28 | return points
29 |
30 |
31 | def random_subsample(points, n_samples):
32 | """
33 | Randomly subsample the points when the input has more than n_samples points,
34 | or pad with zeros when it has fewer.
35 | """
36 | if points.shape[0]==0:
37 | # print('No points found at this center replacing with dummy')
38 | points = np.zeros((n_samples,points.shape[1]))
39 | if n_samples < points.shape[0]:
40 | random_indices = np.random.choice(points.shape[0], n_samples, replace=False)
41 | points = points[random_indices, :]
42 | if n_samples > points.shape[0]:
43 | apd = np.zeros((n_samples-points.shape[0], points.shape[1]))
44 | points = np.vstack((points, apd))
45 | return points
46 |
47 |
48 | def align_length(p0_path, p1_path, length):
49 | """
50 | Output a pair of point clouds with the same length.
51 | """
52 | p0 = txt2sample(p0_path)
53 | p1 = txt2sample(p1_path)
54 | p0_raw_length = p0.shape[0]
55 | p1_raw_length = p1.shape[0]
56 | if p0.shape[0] != length:
57 | p0 = random_subsample(p0, length)
58 | if p1.shape[0] != length:
59 | p1 = random_subsample(p1, length)
60 | return p0, p1, p0_raw_length, p1_raw_length
61 |
62 | def get_errors(err):
63 | """ Get training errors.
64 |
65 | Returns:
66 | [OrderedDict]: Dictionary containing errors.
67 | """
68 | errors = OrderedDict([
69 | ('err', err.item()),
70 | ])
71 |
72 | return errors
73 |
74 | def plot_current_errors(epoch, counter_ratio, errors, vis):
75 | """Plot current errors.
76 |
77 | Args:
78 | epoch (int): Current epoch
79 | counter_ratio (float): Ratio to plot the range between two epochs.
80 | errors (OrderedDict): Error for the current epoch.
81 | """
82 |
83 | plot_data = {}
84 | plot_data = {'X': [], 'Y': [], 'legend': list(errors.keys())}
85 | plot_data['X'].append(epoch + counter_ratio)
86 | plot_data['Y'].append([errors[k] for k in plot_data['legend']])
87 |
88 | vis.line(win='wire train loss', update='append',
89 | X = np.stack([np.array(plot_data['X'])] * len(plot_data['legend']), 1),
90 | Y = np.array(plot_data['Y']),
91 | opts={
92 | 'title': 'Change Detection' + ' loss over time',
93 | 'legend': plot_data['legend'],
94 | 'xlabel': 'Epoch',
95 | 'ylabel': 'Loss'
96 | })
97 |
98 | def save_weights(epoch, net, optimizer, save_path, model_name):
99 | if isinstance(net, torch.nn.DataParallel):
100 | checkpoint = {
101 | 'epoch': epoch,
102 | 'model_state_dict': net.module.state_dict(),
103 | 'optimizer_state_dict': optimizer.state_dict(),
104 | 'learning_rate': optimizer.state_dict()['param_groups'][0]['lr'],
105 | }
106 | else:
107 | checkpoint = {
108 | 'epoch': epoch,
109 | 'model_state_dict': net.state_dict(),
110 | 'optimizer_state_dict': optimizer.state_dict(),
111 | 'learning_rate': optimizer.state_dict()['param_groups'][0]['lr'],
112 | }
113 | torch.save(checkpoint,os.path.join(save_path,'current_%s.pth'%(model_name)))
114 | if epoch % 1 == 0:
115 | torch.save(checkpoint,os.path.join(save_path,'%d_%s.pth'%(epoch,model_name)))
116 |
117 | def plot_performance(epoch, performance, vis):
118 | """ Plot performance
119 |
120 | Args:
121 | epoch (int): Current epoch
122 | performance (OrderedDict): Performance for the current epoch.
123 | vis (Visdom): Visdom instance used for plotting.
124 | """
125 | plot_res = []
126 | plot_res = {'X': [], 'Y': [], 'legend': list(performance.keys())}
127 | plot_res['X'].append(epoch)
128 | plot_res['Y'].append([performance[k] for k in plot_res['legend']])
129 | vis.line(win='AUC', update='append',
130 | X=np.stack([np.array(plot_res['X'])] * len(plot_res['legend']), 1),
131 | Y=np.array(plot_res['Y']),
132 | opts={
133 | 'title': 'Testing ' + 'Performance Metrics',
134 | 'legend': plot_res['legend'],
135 | 'xlabel': 'Epoch',
136 | 'ylabel': 'Stats'
137 | },
138 | )
139 |
140 | def save_cfg(cfg, path):
141 | mkdir(path)
142 | if not cfg.resume:
143 | if os.path.exists(os.path.join(path, 'configure.txt')):
144 | os.remove(os.path.join(path, 'configure.txt'))
145 | if os.path.exists(os.path.join(path, 'train_loss.txt')):
146 | os.remove(os.path.join(path, 'train_loss.txt'))
147 | if os.path.exists(os.path.join(path, 'val_metric.txt')):
148 | os.remove(os.path.join(path, 'val_metric.txt'))
149 | if os.path.exists(os.path.join(path, 'val_performance.txt')):
150 | os.remove(os.path.join(path, 'val_performance.txt'))
151 | if os.path.exists(os.path.join(path, 'test_performance.txt')):
152 | os.remove(os.path.join(path, 'test_performance.txt'))
153 | with open(os.path.join(path, 'configure.txt'), 'a') as f:
154 | f.write('---------------{}----------------'.format(time.strftime('%Y-%m-%d %H:%M:%S')))
155 | f.write('\n')
156 | f.write('----------------Network and training configure-----------------')
157 | f.write('\n')
158 | for k in cfg:
159 | f.write(str(k)+':')
160 | f.write(str(cfg[k]))
161 | f.write('\n')
162 |
163 | def save_prediction2(p0, p1, lb0, lb1, scores0, scores1, path, pc0_name, pc1_name, data_path):
164 | p0_name, p1_name = pc0_name[0], pc1_name[0]
165 | mkdir(path)
166 | p0 = p0[:, :, :3].squeeze(0).detach().cpu().numpy()
167 | p1 = p1[:, :, :3].squeeze(0).detach().cpu().numpy()
168 | # p00 = np.loadtxt(osp.join(data_path, path.split('\\')[-1], p0_name), usecols=(0, 1, 2, 6), skiprows=2)
169 | # p11 = np.loadtxt(osp.join(data_path, path.split('\\')[-1], p1_name), usecols=(0, 1, 2, 6), skiprows=2)
170 | lb0 = lb0.transpose(1,0).detach().cpu().numpy()
171 | lb1 = lb1.transpose(1,0).detach().cpu().numpy()
172 | scores0 = scores0.transpose(1,0).detach().cpu().numpy()
173 | scores1 = scores1.transpose(1,0).detach().cpu().numpy()
174 |
175 | p0 = np.hstack((p0, lb0, scores0))
176 | if tcfg.remove_plane:
177 | plane0 = np.loadtxt(osp.join(data_path, path.split('\\')[-1], p0_name.replace('.txt', '_plane.txt')), usecols=(0, 1, 2, -1))
178 | apl0 = np.zeros((plane0.shape[0], 1))
179 | plane0 = np.hstack((plane0, apl0))
180 | p0 = np.vstack((p0, plane0))
181 |
182 | p1 = np.hstack((p1, lb1, scores1))
183 | if tcfg.remove_plane:
184 | plane1 = np.loadtxt(osp.join(data_path, path.split('\\')[-1], p1_name.replace('.txt', '_plane.txt')), usecols=(0, 1, 2, -1))
185 | apl1 = np.zeros((plane1.shape[0], 1))
186 | plane1 = np.hstack((plane1, apl1))
187 | p1 = np.vstack((p1, plane1))
188 |
189 | np.savetxt(osp.join(path, p0_name), p0, fmt="%.8f %.8f %.8f %.0f %.0f")
190 | np.savetxt(osp.join(path, p1_name), p1, fmt="%.8f %.8f %.8f %.0f %.0f")
191 |
192 | head = '//X Y Z label prediction\n'
193 | with open(osp.join(path,p0_name), 'r+') as f:
194 | content = f.read()
195 | f.seek(0, 0)
196 | f.write(head + (str(len(p0))+'\n') + content)
197 | with open(osp.join(path,p1_name), 'r+') as f:
198 | content = f.read()
199 | f.seek(0, 0)
200 | f.write(head + (str(len(p1))+'\n') + content)
201 |
202 | def save_prediction3(p0, p1, lb0, lb1, scores0, scores1, path, pc0_name, pc1_name, data_path):
203 | p0_name, p1_name = pc0_name[0], pc1_name[0]
204 | mkdir(path)
205 | p0 = p0[:, :, :3].squeeze(0).detach().cpu().numpy()
206 | p1 = p1[:, :, :3].squeeze(0).detach().cpu().numpy()
207 | lb0 = lb0.transpose(1,0).detach().cpu().numpy()
208 | lb1 = lb1.transpose(1,0).detach().cpu().numpy()
209 | scores0 = scores0.transpose(1,0).detach().cpu().numpy()
210 | scores1 = scores1.transpose(1,0).detach().cpu().numpy()
211 | p0 = np.hstack((p0, lb0, scores0))
212 | if tcfg.remove_plane:
213 | plane0 = np.loadtxt(osp.join(data_path, path.split('\\')[-1], p0_name.replace('.txt', '_plane.txt')), usecols=(0, 1, 2, -1))
214 | apl0 = np.zeros((plane0.shape[0], 1))
215 | plane0 = np.hstack((plane0, apl0))
216 | p0 = np.vstack((p0, plane0))
217 |
218 | p1 = np.hstack((p1, lb1*2, scores1*2))
219 | if tcfg.remove_plane:
220 | plane1 = np.loadtxt(osp.join(data_path, path.split('\\')[-1], p1_name.replace('.txt', '_plane.txt')), usecols=(0, 1, 2, -1))
221 | apl1 = np.zeros((plane1.shape[0], 1))
222 | plane1 = np.hstack((plane1, apl1))
223 | p1 = np.vstack((p1, plane1))
224 |
225 | np.savetxt(osp.join(path, p0_name), p0, fmt="%.8f %.8f %.8f %.0f %.0f")
226 | np.savetxt(osp.join(path, p1_name), p1, fmt="%.8f %.8f %.8f %.0f %.0f")
227 |
228 | head = '//X Y Z label prediction\n'
229 | with open(osp.join(path,p0_name), 'r+') as f:
230 | content = f.read()
231 | f.seek(0, 0)
232 | f.write(head + (str(len(p0))+'\n') + content)
233 | with open(osp.join(path,p1_name), 'r+') as f:
234 | content = f.read()
235 | f.seek(0, 0)
236 | f.write(head + (str(len(p1))+'\n') + content)
237 |
238 | def search_k_neighbors(raw, query, k):
239 | search_tree = KDTree(raw)
240 | _, neigh_idx = search_tree.query(query, k)
241 | return neigh_idx
242 |
--------------------------------------------------------------------------------
/data/train.txt:
-------------------------------------------------------------------------------- 1 | 0_11\point2016.txt 0_11\point2020_seg.txt 2 | 0_2\point2016.txt 0_2\point2020.txt 3 | 0_3\point2016.txt 0_3\point2020.txt 4 | 0_5\point2016.txt 0_5\point2020.txt 5 | 0_7\point2016.txt 0_7\point2020.txt 6 | 0_9\point2016.txt 0_9\point2020_seg.txt 7 | 10_1\point2016_seg.txt 10_1\point2020_seg.txt 8 | 10_13\point2016.txt 10_13\point2020_seg.txt 9 | 10_2\point2016.txt 10_2\point2020.txt 10 | 10_3\point2016.txt 10_3\point2020.txt 11 | 10_5\point2016.txt 10_5\point2020_seg.txt 12 | 10_6\point2016.txt 10_6\point2020.txt 13 | 10_7\point2016.txt 10_7\point2020.txt 14 | 10_8\point2016.txt 10_8\point2020.txt 15 | 10_9\point2016.txt 10_9\point2020.txt 16 | 11_1\point2016_seg.txt 11_1\point2020.txt 17 | 11_11\point2016.txt 11_11\point2020.txt 18 | 11_12\point2016_seg.txt 11_12\point2020_seg.txt 19 | 11_2\point2016.txt 11_2\point2020.txt 20 | 11_4\point2016.txt 11_4\point2020.txt 21 | 11_5\point2016.txt 11_5\point2020.txt 22 | 11_6\point2016.txt 11_6\point2020.txt 23 | 11_7\point2016.txt 11_7\point2020.txt 24 | 11_9\point2016_seg.txt 11_9\point2020.txt 25 | 12_2\point2016.txt 12_2\point2020.txt 26 | 12_3\point2016.txt 12_3\point2020.txt 27 | 13_1\point2016_seg.txt 13_1\point2020.txt 28 | 13_2\point2016.txt 13_2\point2020_seg.txt 29 | 13_3\point2016.txt 13_3\point2020_seg.txt 30 | 13_6\point2016.txt 13_6\point2020_seg.txt 31 | 13_7\point2016_seg.txt 13_7\point2020_seg.txt 32 | 13_8\point2016.txt 13_8\point2020.txt 33 | 14_1\point2016.txt 14_1\point2020.txt 34 | 14_11\point2016_seg.txt 14_11\point2020.txt 35 | 14_12\point2016_seg.txt 14_12\point2020.txt 36 | 14_13\point2016.txt 14_13\point2020_seg.txt 37 | 14_14\point2016_seg.txt 14_14\point2020_seg.txt 38 | 14_15\point2016.txt 14_15\point2020.txt 39 | 14_16\point2016.txt 14_16\point2020.txt 40 | 14_18\point2016.txt 14_18\point2020.txt 41 | 14_2\point2016.txt 14_2\point2020.txt 42 | 14_8\point2016.txt 14_8\point2020.txt 43 | 14_9\point2016_seg.txt 14_9\point2020.txt 44 | 17_6\point2016.txt 17_6\point2020.txt 45 | 18_1\point2016.txt 18_1\point2020_seg.txt 46 | 18_2\point2016.txt 18_2\point2020_seg.txt 47 | 18_3\point2016.txt 18_3\point2020.txt 48 | 18_4\point2016.txt 18_4\point2020.txt 49 | 18_5\point2016_seg.txt 18_5\point2020_seg.txt 50 | 19_1\point2016_seg.txt 19_1\point2020_seg.txt 51 | 19_2\point2016_seg.txt 19_2\point2020.txt 52 | 19_3\point2016_seg.txt 19_3\point2020.txt 53 | 19_6\point2016_seg.txt 19_6\point2020_seg.txt 54 | 19_7\point2016_seg.txt 19_7\point2020.txt 55 | 21_1\point2016_seg.txt 21_1\point2020.txt 56 | 21_10\point2016.txt 21_10\point2020_seg.txt 57 | 21_2\point2016_seg.txt 21_2\point2020.txt 58 | 21_4\point2016_seg.txt 21_4\point2020.txt 59 | 21_5\point2016.txt 21_5\point2020_seg.txt 60 | 21_7\point2016.txt 21_7\point2020.txt 61 | 21_8\point2016.txt 21_8\point2020.txt 62 | 21_9\point2016.txt 21_9\point2020.txt 63 | 22_2\point2016_seg.txt 22_2\point2020.txt 64 | 22_3\point2016_seg.txt 22_3\point2020_seg.txt 65 | 22_4\point2016_seg.txt 22_4\point2020_seg.txt 66 | 23_1\point2016_seg.txt 23_1\point2020.txt 67 | 23_2\point2016_seg.txt 23_2\point2020_seg.txt 68 | 23_3\point2016_seg.txt 23_3\point2020_seg.txt 69 | 23_4\point2016_seg.txt 23_4\point2020_seg.txt 70 | 23_6\point2016_seg.txt 23_6\point2020.txt 71 | 23_7\point2016.txt 23_7\point2020.txt 72 | 23_8\point2016.txt 23_8\point2020_seg.txt 73 | 23_9\point2016_seg.txt 23_9\point2020_seg.txt 74 | 24_1\point2016.txt 24_1\point2020.txt 75 | 24_10\point2016_seg.txt 24_10\point2020.txt 76 | 
24_11\point2016.txt 24_11\point2020.txt 77 | 24_12\point2016_seg.txt 24_12\point2020.txt 78 | 24_14\point2016.txt 24_14\point2020.txt 79 | 24_15\point2016.txt 24_15\point2020.txt 80 | 24_17\point2016_seg.txt 24_17\point2020_seg.txt 81 | 24_19\point2016_seg.txt 24_19\point2020.txt 82 | 24_2\point2016.txt 24_2\point2020.txt 83 | 24_20\point2016.txt 24_20\point2020.txt 84 | 24_21\point2016_seg.txt 24_21\point2020_seg.txt 85 | 24_26\point2016.txt 24_26\point2020.txt 86 | 24_27\point2016.txt 24_27\point2020.txt 87 | 24_3\point2016.txt 24_3\point2020_seg.txt 88 | 24_5\point2016_seg.txt 24_5\point2020_seg.txt 89 | 24_6\point2016_seg.txt 24_6\point2020_seg.txt 90 | 24_7\point2016_seg.txt 24_7\point2020.txt 91 | 24_9\point2016_seg.txt 24_9\point2020.txt 92 | 25_1\point2016_seg.txt 25_1\point2020.txt 93 | 25_10\point2016_seg.txt 25_10\point2020.txt 94 | 25_12\point2016_seg.txt 25_12\point2020.txt 95 | 25_14\point2016.txt 25_14\point2020.txt 96 | 25_15\point2016.txt 25_15\point2020.txt 97 | 25_16\point2016.txt 25_16\point2020.txt 98 | 25_17\point2016_seg.txt 25_17\point2020_seg.txt 99 | 25_18\point2016_seg.txt 25_18\point2020.txt 100 | 25_19\point2016.txt 25_19\point2020.txt 101 | 25_21\point2016_seg.txt 25_21\point2020_seg.txt 102 | 25_23\point2016_seg.txt 25_23\point2020_seg.txt 103 | 25_3\point2016_seg.txt 25_3\point2020_seg.txt 104 | 25_4\point2016_seg.txt 25_4\point2020_seg.txt 105 | 25_5\point2016_seg.txt 25_5\point2020.txt 106 | 25_6\point2016_seg.txt 25_6\point2020.txt 107 | 25_8\point2016_seg.txt 25_8\point2020_seg.txt 108 | 25_9\point2016_seg.txt 25_9\point2020.txt 109 | 26_1\point2016_seg.txt 26_1\point2020_seg.txt 110 | 26_12\point2016_seg.txt 26_12\point2020_seg.txt 111 | 26_3\point2016_seg.txt 26_3\point2020_seg.txt 112 | 26_4\point2016_seg.txt 26_4\point2020_seg.txt 113 | 26_5\point2016_seg.txt 26_5\point2020.txt 114 | 27_1\point2016.txt 27_1\point2020.txt 115 | 27_4\point2016.txt 27_4\point2020.txt 116 | 28_1\point2016.txt 28_1\point2020.txt 117 | 28_12\point2016.txt 28_12\point2020_seg.txt 118 | 28_13\point2016.txt 28_13\point2020_seg.txt 119 | 28_2\point2016_seg.txt 28_2\point2020_seg.txt 120 | 28_3\point2016_seg.txt 28_3\point2020.txt 121 | 28_4\point2016_seg.txt 28_4\point2020_seg.txt 122 | 29_1\point2016.txt 29_1\point2020.txt 123 | 29_3\point2016_seg.txt 29_3\point2020.txt 124 | 30_1\point2016.txt 30_1\point2020_seg.txt 125 | 30_2\point2016.txt 30_2\point2020.txt 126 | 30_3\point2016.txt 30_3\point2020.txt 127 | 31_1\point2016_seg.txt 31_1\point2020_seg.txt 128 | 31_2\point2016_seg.txt 31_2\point2020_seg.txt 129 | 31_3\point2016.txt 31_3\point2020_seg.txt 130 | 31_4\point2016.txt 31_4\point2020_seg.txt 131 | 31_5\point2016_seg.txt 31_5\point2020_seg.txt 132 | 32_1\point2016.txt 32_1\point2020.txt 133 | 32_2\point2016.txt 32_2\point2020_seg.txt 134 | 32_3\point2016.txt 32_3\point2020.txt 135 | 33_1\point2016.txt 33_1\point2020.txt 136 | 33_10\point2016_seg.txt 33_10\point2020.txt 137 | 33_2\point2016.txt 33_2\point2020.txt 138 | 33_3\point2016.txt 33_3\point2020.txt 139 | 33_4\point2016.txt 33_4\point2020_seg.txt 140 | 33_5\point2016_seg.txt 33_5\point2020_seg.txt 141 | 33_6\point2016.txt 33_6\point2020.txt 142 | 33_7\point2016.txt 33_7\point2020.txt 143 | 33_8\point2016.txt 33_8\point2020.txt 144 | 34_11\point2016.txt 34_11\point2020.txt 145 | 34_12\point2016.txt 34_12\point2020.txt 146 | 34_13\point2016.txt 34_13\point2020.txt 147 | 34_2\point2016.txt 34_2\point2020_seg.txt 148 | 34_5\point2016.txt 34_5\point2020_seg.txt 149 | 34_6\point2016_seg.txt 34_6\point2020_seg.txt 150 | 
34_7\point2016.txt 34_7\point2020_seg.txt 151 | 35_1\point2016_seg.txt 35_1\point2020_seg.txt 152 | 35_2\point2016.txt 35_2\point2020.txt 153 | 36_1\point2016.txt 36_1\point2020_seg.txt 154 | 36_2\point2016.txt 36_2\point2020_seg.txt 155 | 36_3\point2016.txt 36_3\point2020.txt 156 | 36_4\point2016.txt 36_4\point2020.txt 157 | 36_5\point2016.txt 36_5\point2020.txt 158 | 36_6\point2016_seg.txt 36_6\point2020_seg.txt 159 | 36_7\point2016.txt 36_7\point2020.txt 160 | 37_2\point2016_seg.txt 37_2\point2020_seg.txt 161 | 37_3\point2016_seg.txt 37_3\point2020_seg.txt 162 | 38_10\point2016.txt 38_10\point2020_seg.txt 163 | 38_12\point2016_seg.txt 38_12\point2020_seg.txt 164 | 38_2\point2016.txt 38_2\point2020.txt 165 | 38_3\point2016_seg.txt 38_3\point2020_seg.txt 166 | 38_5\point2016_seg.txt 38_5\point2020.txt 167 | 38_6\point2016_seg.txt 38_6\point2020_seg.txt 168 | 38_7\point2016_seg.txt 38_7\point2020_seg.txt 169 | 38_8\point2016_seg.txt 38_8\point2020.txt 170 | 38_9\point2016_seg.txt 38_9\point2020_seg.txt 171 | 39_2\point2016_seg.txt 39_2\point2020.txt 172 | 39_3\point2016_seg.txt 39_3\point2020.txt 173 | 39_4\point2016_seg.txt 39_4\point2020_seg.txt 174 | 39_7\point2016.txt 39_7\point2020_seg.txt 175 | 3_1\point2016.txt 3_1\point2020.txt 176 | 40_1\point2016_seg.txt 40_1\point2020.txt 177 | 40_3\point2016_seg.txt 40_3\point2020.txt 178 | 40_4\point2016.txt 40_4\point2020.txt 179 | 40_5\point2016_seg.txt 40_5\point2020_seg.txt 180 | 40_8\point2016.txt 40_8\point2020_seg.txt 181 | 41_1\point2016_seg.txt 41_1\point2020.txt 182 | 42_1\point2016.txt 42_1\point2020.txt 183 | 42_3\point2016.txt 42_3\point2020.txt 184 | 42_5\point2016_seg.txt 42_5\point2020.txt 185 | 42_6\point2016_seg.txt 42_6\point2020.txt 186 | 44_2\point2016_seg.txt 44_2\point2020.txt 187 | 44_3\point2016.txt 44_3\point2020_seg.txt 188 | 44_4\point2016.txt 44_4\point2020.txt 189 | 44_5\point2016.txt 44_5\point2020.txt 190 | 44_6\point2016.txt 44_6\point2020.txt 191 | 44_7\point2016.txt 44_7\point2020.txt 192 | 45_2\point2016_seg.txt 45_2\point2020.txt 193 | 45_3\point2016.txt 45_3\point2020_seg.txt 194 | 45_4\point2016_seg.txt 45_4\point2020.txt 195 | 45_5\point2016.txt 45_5\point2020_seg.txt 196 | 46_1\point2016_seg.txt 46_1\point2020_seg.txt 197 | 46_2\point2016.txt 46_2\point2020.txt 198 | 46_3\point2016.txt 46_3\point2020.txt 199 | 46_6\point2016.txt 46_6\point2020.txt 200 | 47_1\point2016.txt 47_1\point2020.txt 201 | 47_10\point2016.txt 47_10\point2020_seg.txt 202 | 47_11\point2016.txt 47_11\point2020_seg.txt 203 | 47_2\point2016.txt 47_2\point2020.txt 204 | 47_3\point2016_seg.txt 47_3\point2020.txt 205 | 47_5\point2016.txt 47_5\point2020_seg.txt 206 | 47_6\point2016_seg.txt 47_6\point2020.txt 207 | 47_7\point2016.txt 47_7\point2020.txt 208 | 47_8\point2016.txt 47_8\point2020.txt 209 | 47_9\point2016.txt 47_9\point2020_seg.txt 210 | 48_1\point2016.txt 48_1\point2020.txt 211 | 48_2\point2016_seg.txt 48_2\point2020.txt 212 | 48_3\point2016_seg.txt 48_3\point2020.txt 213 | 48_4\point2016_seg.txt 48_4\point2020.txt 214 | 49_2\point2016.txt 49_2\point2020.txt 215 | 49_3\point2016.txt 49_3\point2020.txt 216 | 49_4\point2016.txt 49_4\point2020_seg.txt 217 | 49_5\point2016_seg.txt 49_5\point2020_seg.txt 218 | 49_6\point2016.txt 49_6\point2020.txt 219 | 4_2\point2016.txt 4_2\point2020.txt 220 | 50_1\point2016_seg.txt 50_1\point2020.txt 221 | 50_2\point2016_seg.txt 50_2\point2020.txt 222 | 50_3\point2016.txt 50_3\point2020_seg.txt 223 | 50_4\point2016_seg.txt 50_4\point2020_seg.txt 224 | 50_5\point2016_seg.txt 50_5\point2020_seg.txt 
225 | 50_6\point2016.txt 50_6\point2020_seg.txt 226 | 50_7\point2016.txt 50_7\point2020_seg.txt 227 | 50_8\point2016.txt 50_8\point2020_seg.txt 228 | 51_1\point2016_seg.txt 51_1\point2020_seg.txt 229 | 51_10\point2016.txt 51_10\point2020_seg.txt 230 | 51_11\point2016.txt 51_11\point2020_seg.txt 231 | 51_3\point2016_seg.txt 51_3\point2020.txt 232 | 51_4\point2016_seg.txt 51_4\point2020.txt 233 | 51_6\point2016.txt 51_6\point2020.txt 234 | 51_7\point2016_seg.txt 51_7\point2020.txt 235 | 51_8\point2016_seg.txt 51_8\point2020_seg.txt 236 | 51_9\point2016_seg.txt 51_9\point2020.txt 237 | 52_1\point2016_seg.txt 52_1\point2020_seg.txt 238 | 52_2\point2016_seg.txt 52_2\point2020.txt 239 | 52_4\point2016.txt 52_4\point2020.txt 240 | 53_1\point2016_seg.txt 53_1\point2020_seg.txt 241 | 53_10\point2016.txt 53_10\point2020_seg.txt 242 | 53_2\point2016_seg.txt 53_2\point2020.txt 243 | 53_3\point2016_seg.txt 53_3\point2020.txt 244 | 53_5\point2016.txt 53_5\point2020.txt 245 | 53_6\point2016_seg.txt 53_6\point2020.txt 246 | 53_7\point2016_seg.txt 53_7\point2020_seg.txt 247 | 54_1\point2016_seg.txt 54_1\point2020.txt 248 | 55_1\point2016_seg.txt 55_1\point2020_seg.txt 249 | 55_2\point2016.txt 55_2\point2020_seg.txt 250 | 55_3\point2016.txt 55_3\point2020.txt 251 | 55_4\point2016_seg.txt 55_4\point2020.txt 252 | 55_5\point2016_seg.txt 55_5\point2020_seg.txt 253 | 55_6\point2016_seg.txt 55_6\point2020.txt 254 | 56_2\point2016.txt 56_2\point2020.txt 255 | 56_3\point2016.txt 56_3\point2020.txt 256 | 56_4\point2016.txt 56_4\point2020.txt 257 | 56_7\point2016.txt 56_7\point2020_seg.txt 258 | 57_1\point2016.txt 57_1\point2020.txt 259 | 57_3\point2016.txt 57_3\point2020.txt 260 | 57_4\point2016.txt 57_4\point2020.txt 261 | 57_5\point2016.txt 57_5\point2020.txt 262 | 57_6\point2016.txt 57_6\point2020.txt 263 | 58_1\point2016.txt 58_1\point2020.txt 264 | 58_2\point2016.txt 58_2\point2020.txt 265 | 58_3\point2016.txt 58_3\point2020.txt 266 | 58_4\point2016.txt 58_4\point2020.txt 267 | 59_2\point2016.txt 59_2\point2020.txt 268 | 59_3\point2016_seg.txt 59_3\point2020.txt 269 | 5_1\point2016_seg.txt 5_1\point2020.txt 270 | 5_4\point2016.txt 5_4\point2020.txt 271 | 60_3\point2016.txt 60_3\point2020.txt 272 | 61_1\point2016_seg.txt 61_1\point2020.txt 273 | 61_2\point2016.txt 61_2\point2020.txt 274 | 62_1\point2016.txt 62_1\point2020.txt 275 | 62_3\point2016.txt 62_3\point2020.txt 276 | 62_4\point2016.txt 62_4\point2020.txt 277 | 63_1\point2016.txt 63_1\point2020.txt 278 | 63_2\point2016.txt 63_2\point2020.txt 279 | 63_3\point2016.txt 63_3\point2020.txt 280 | 63_4\point2016.txt 63_4\point2020.txt 281 | 64_1\point2016.txt 64_1\point2020.txt 282 | 64_11\point2016_seg.txt 64_11\point2020_seg.txt 283 | 64_14\point2016_seg.txt 64_14\point2020.txt 284 | 64_15\point2016.txt 64_15\point2020.txt 285 | 64_17\point2016_seg.txt 64_17\point2020.txt 286 | 64_2\point2016.txt 64_2\point2020.txt 287 | 64_3\point2016_seg.txt 64_3\point2020.txt 288 | 64_4\point2016.txt 64_4\point2020.txt 289 | 64_5\point2016.txt 64_5\point2020.txt 290 | 64_6\point2016_seg.txt 64_6\point2020.txt 291 | 64_7\point2016.txt 64_7\point2020.txt 292 | 64_8\point2016.txt 64_8\point2020.txt 293 | 64_9\point2016.txt 64_9\point2020_seg.txt 294 | 65_1\point2016.txt 65_1\point2020.txt 295 | 65_11\point2016.txt 65_11\point2020.txt 296 | 65_2\point2016.txt 65_2\point2020.txt 297 | 65_3\point2016.txt 65_3\point2020.txt 298 | 65_5\point2016.txt 65_5\point2020.txt 299 | 65_7\point2016.txt 65_7\point2020.txt 300 | 65_8\point2016.txt 65_8\point2020.txt 301 | 
65_9\point2016_seg.txt 65_9\point2020.txt 302 | 66_1\point2016_seg.txt 66_1\point2020.txt 303 | 66_10\point2016_seg.txt 66_10\point2020.txt 304 | 66_11\point2016_seg.txt 66_11\point2020.txt 305 | 66_12\point2016_seg.txt 66_12\point2020.txt 306 | 66_13\point2016_seg.txt 66_13\point2020.txt 307 | 66_14\point2016_seg.txt 66_14\point2020_seg.txt 308 | 66_16\point2016_seg.txt 66_16\point2020.txt 309 | 66_18\point2016_seg.txt 66_18\point2020.txt 310 | 66_19\point2016_seg.txt 66_19\point2020.txt 311 | 66_2\point2016_seg.txt 66_2\point2020.txt 312 | 66_21\point2016_seg.txt 66_21\point2020.txt 313 | 66_22\point2016.txt 66_22\point2020.txt 314 | 66_23\point2016_seg.txt 66_23\point2020_seg.txt 315 | 66_25\point2016.txt 66_25\point2020.txt 316 | 66_26\point2016_seg.txt 66_26\point2020.txt 317 | 66_28\point2016_seg.txt 66_28\point2020.txt 318 | 66_3\point2016_seg.txt 66_3\point2020.txt 319 | 66_32\point2016_seg.txt 66_32\point2020.txt 320 | 66_4\point2016_seg.txt 66_4\point2020.txt 321 | 66_5\point2016_seg.txt 66_5\point2020.txt 322 | 66_6\point2016.txt 66_6\point2020.txt 323 | 66_7\point2016_seg.txt 66_7\point2020.txt 324 | 66_8\point2016_seg.txt 66_8\point2020_seg.txt 325 | 66_9\point2016_seg.txt 66_9\point2020_seg.txt 326 | 67_1\point2016.txt 67_1\point2020.txt 327 | 67_10\point2016.txt 67_10\point2020.txt 328 | 67_11\point2016.txt 67_11\point2020.txt 329 | 67_12\point2016_seg.txt 67_12\point2020.txt 330 | 67_13\point2016.txt 67_13\point2020.txt 331 | 67_14\point2016_seg.txt 67_14\point2020.txt 332 | 67_2\point2016.txt 67_2\point2020.txt 333 | 67_3\point2016_seg.txt 67_3\point2020.txt 334 | 67_4\point2016.txt 67_4\point2020.txt 335 | 67_5\point2016.txt 67_5\point2020.txt 336 | 67_6\point2016.txt 67_6\point2020.txt 337 | 67_7\point2016.txt 67_7\point2020.txt 338 | 67_8\point2016.txt 67_8\point2020.txt 339 | 68_1\point2016.txt 68_1\point2020.txt 340 | 68_10\point2016_seg.txt 68_10\point2020.txt 341 | 68_11\point2016.txt 68_11\point2020.txt 342 | 68_13\point2016.txt 68_13\point2020.txt 343 | 68_14\point2016_seg.txt 68_14\point2020.txt 344 | 68_3\point2016.txt 68_3\point2020.txt 345 | 68_4\point2016_seg.txt 68_4\point2020.txt 346 | 68_6\point2016.txt 68_6\point2020.txt 347 | 68_7\point2016.txt 68_7\point2020.txt 348 | 68_8\point2016.txt 68_8\point2020.txt 349 | 68_9\point2016.txt 68_9\point2020.txt 350 | 69_1\point2016.txt 69_1\point2020.txt 351 | 69_2\point2016.txt 69_2\point2020.txt 352 | 69_4\point2016.txt 69_4\point2020.txt 353 | 69_5\point2016.txt 69_5\point2020.txt 354 | 69_6\point2016.txt 69_6\point2020.txt 355 | 69_7\point2016.txt 69_7\point2020.txt 356 | 6_2\point2016_seg.txt 6_2\point2020.txt 357 | 70_2\point2016.txt 70_2\point2020_seg.txt 358 | 71_1\point2016.txt 71_1\point2020_seg.txt 359 | 72_2\point2016_seg.txt 72_2\point2020.txt 360 | 72_3\point2016_seg.txt 72_3\point2020_seg.txt 361 | 72_4\point2016.txt 72_4\point2020_seg.txt 362 | 72_5\point2016.txt 72_5\point2020_seg.txt 363 | 72_6\point2016.txt 72_6\point2020_seg.txt 364 | 72_7\point2016.txt 72_7\point2020_seg.txt 365 | 73_2\point2016_seg.txt 73_2\point2020_seg.txt 366 | 73_5\point2016.txt 73_5\point2020_seg.txt 367 | 75_1\point2016.txt 75_1\point2020_seg.txt 368 | 75_2\point2016_seg.txt 75_2\point2020.txt 369 | 76_1\point2016.txt 76_1\point2020.txt 370 | 76_3\point2016.txt 76_3\point2020.txt 371 | 76_4\point2016.txt 76_4\point2020.txt 372 | 77_1\point2016_seg.txt 77_1\point2020.txt 373 | 77_2\point2016_seg.txt 77_2\point2020.txt 374 | 77_3\point2016.txt 77_3\point2020.txt 375 | 77_4\point2016_seg.txt 77_4\point2020.txt 376 | 
77_5\point2016.txt 77_5\point2020.txt 377 | 77_6\point2016.txt 77_6\point2020.txt 378 | 77_7\point2016.txt 77_7\point2020.txt 379 | 78_1\point2016.txt 78_1\point2020_seg.txt 380 | 78_2\point2016.txt 78_2\point2020.txt 381 | 78_3\point2016.txt 78_3\point2020.txt 382 | 79_1\point2016_seg.txt 79_1\point2020.txt 383 | 79_14\point2016_seg.txt 79_14\point2020.txt 384 | 79_15\point2016.txt 79_15\point2020.txt 385 | 79_16\point2016_seg.txt 79_16\point2020.txt 386 | 79_17\point2016_seg.txt 79_17\point2020.txt 387 | 79_18\point2016_seg.txt 79_18\point2020_seg.txt 388 | 79_2\point2016.txt 79_2\point2020.txt 389 | 79_3\point2016_seg.txt 79_3\point2020.txt 390 | 79_4\point2016.txt 79_4\point2020_seg.txt 391 | 79_5\point2016.txt 79_5\point2020_seg.txt 392 | 79_6\point2016.txt 79_6\point2020.txt 393 | 79_7\point2016.txt 79_7\point2020.txt 394 | 79_8\point2016.txt 79_8\point2020.txt 395 | 7_1\point2016.txt 7_1\point2020.txt 396 | 7_2\point2016.txt 7_2\point2020.txt 397 | 7_5\point2016.txt 7_5\point2020.txt 398 | 9_2\point2016.txt 9_2\point2020.txt 399 | --------------------------------------------------------------------------------
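The pair lists above (and the matching val.txt / test.txt) put two Windows-style relative paths on each line: the 2016 epoch and the 2020 epoch of the same tile. A minimal parsing sketch, assuming dataset.py does the equivalent; read_pairs is an illustrative helper, not a function from this repository.

    import os

    def read_pairs(txt_path, data_root):
        """Return (pc2016_path, pc2020_path) tuples from a train/val/test list."""
        pairs = []
        with open(txt_path) as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                a, b = line.split()
                # Normalise the backslashes so the paths also resolve on Linux/macOS.
                pairs.append((os.path.join(data_root, *a.split('\\')),
                              os.path.join(data_root, *b.split('\\'))))
        return pairs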