├── BGNet.png ├── README.md ├── dataloader ├── .gitignore └── __init__.py ├── datasets ├── __init__.py ├── __pycache__ │ ├── D435_dateset.cpython-36.pyc │ ├── ETH3D_dataset.cpython-36.pyc │ ├── IRS_dataset.cpython-36.pyc │ ├── __init__.cpython-36.pyc │ ├── data_io.cpython-36.pyc │ ├── kitti12_dataset.cpython-36.pyc │ ├── kitti_dataset.cpython-36.pyc │ ├── lmdb_dataset.cpython-36.pyc │ ├── mb_dataset.cpython-36.pyc │ ├── mb_test_dataset.cpython-36.pyc │ ├── no_encode_lmdb_dataset.cpython-36.pyc │ ├── sceneflow_dataset.cpython-36.pyc │ ├── sceneflow_x.cpython-36.pyc │ └── sceneflow_y.cpython-36.pyc ├── data_io.py ├── data_io.pyc ├── kitti12_dataset.py └── kitti_dataset.py ├── filenames ├── KITTI-12-Test.txt ├── KITTI-15-Test.txt ├── kitti12_train.txt └── kitti15_train.txt ├── models ├── Sceneflow-BGNet-Plus.pth ├── Sceneflow-BGNet.pth ├── Sceneflow-IRS-BGNet-Plus.pth ├── Sceneflow-IRS-BGNet.pth ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-36.pyc │ ├── bgnet.cpython-36.pyc │ ├── bgnet_plus.cpython-36.pyc │ ├── deeppruner.cpython-36.pyc │ ├── feature_extractor_fast.cpython-36.pyc │ ├── submodules.cpython-36.pyc │ ├── submodules2d.cpython-36.pyc │ └── submodules3d.cpython-36.pyc ├── bgnet.py ├── bgnet_plus.py ├── feature_extractor_fast.py ├── kitti_12_BGNet.pth ├── kitti_12_BGNet_Plus.pth ├── kitti_15_BGNet_Plus.pth ├── submodules.py ├── submodules2d.py └── submodules3d.py ├── nets ├── __pycache__ │ └── warp.cpython-36.pyc └── warp.py ├── predict.py ├── predict.sh ├── predict_sample.py └── sample ├── im0.png └── im1.png /BGNet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/BGNet.png -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## BGNet 2 | 3 | This repository contains the code for our CVPR 2021 
paper `Bilateral Grid Learning for Stereo Matching Network` [[BGNet](https://arxiv.org/pdf/2101.01601.pdf)] 4 | 5 | ![alt text](./BGNet.png) 6 | 7 | 8 | 9 | ### Environment 10 | 11 | 1. Python 3.6.* 12 | 2. CUDA 10.1 13 | 3. PyTorch 1.7.1 14 | 4. TorchVision 0.8.2 15 | 16 | ### Dataset 17 | To evaluate/train our BGNet network, you will need to download the required datasets. 18 | 19 | * [SceneFlow](https://lmb.informatik.uni-freiburg.de/resources/datasets/SceneFlowDatasets.en.html) 20 | * [IRS](https://pan.baidu.com/s/1VKVVdljNdhoyJ8JdQUCwKQ#list/path=%2F) 21 | * [KITTI2015](http://www.cvlibs.net/datasets/kitti/eval_scene_flow.php?benchmark=stereo) 22 | * [KITTI2012](http://www.cvlibs.net/datasets/kitti/eval_stereo_flow.php?benchmark=stereo) 23 | * [Middlebury 2014](https://vision.middlebury.edu/stereo/submit3/) 24 | 25 | ### Pretrained model 26 | 27 | We provide seven pretrained model under the folder [models](./models) . 28 | 29 | ### Evaluation 30 | 31 | We provided a script to get the kitti benchmark result,check [predict.sh](./predict.sh) for an example usage. 32 | 33 | ### Prediction 34 | 35 | We support predicting on any rectified stereo pairs. [predict_sample.py](./predict_sample.py) provides an example usage. 36 | 37 | 38 | 39 | ### Acknowledgements 40 | Part of the code is adopted from the previous works: [DeepPruner](https://github.com/uber-research/DeepPruner), [GwcNet](https://github.com/xy-guo/GwcNet), [GANet](https://github.com/feihuzhang/GANet) and [AANet](https://github.com/haofeixu/aanet). We thank the original authors for their contributions. 41 | 42 | ### Citing 43 | If you find this code useful, please consider to cite our work. 
44 | 45 | ``` 46 | @inproceedings{xu2021bilateral, 47 | title={Bilateral Grid Learning for Stereo Matching Networks}, 48 | author={Xu, Bin and Xu, Yuhua and Yang, Xiaoli and Jia, Wei and Guo, Yulan}, 49 | booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, 50 | pages={1--10}, 51 | year={2021} 52 | } 53 | ``` 54 | 55 | -------------------------------------------------------------------------------- /dataloader/.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc -------------------------------------------------------------------------------- /dataloader/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/dataloader/__init__.py -------------------------------------------------------------------------------- /datasets/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | from .kitti_dataset import KITTIDataset 7 | from .kitti12_dataset import KITTI_12_Dataset 8 | __datasets__ = { 9 | "kitti": KITTIDataset, 10 | "kitti_12": KITTI_12_Dataset, 11 | } 12 | -------------------------------------------------------------------------------- /datasets/__pycache__/D435_dateset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/D435_dateset.cpython-36.pyc -------------------------------------------------------------------------------- 
/datasets/__pycache__/ETH3D_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/ETH3D_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/IRS_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/IRS_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/data_io.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/data_io.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/kitti12_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/kitti12_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/kitti_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/kitti_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/lmdb_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/lmdb_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/mb_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/mb_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/mb_test_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/mb_test_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/no_encode_lmdb_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/no_encode_lmdb_dataset.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/sceneflow_dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/sceneflow_dataset.cpython-36.pyc 
-------------------------------------------------------------------------------- /datasets/__pycache__/sceneflow_x.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/sceneflow_x.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/__pycache__/sceneflow_y.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/__pycache__/sceneflow_y.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/data_io.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | import torch 7 | import numpy as np 8 | import re 9 | import torchvision.transforms as transforms 10 | 11 | def mean_std_transform(img,flag): 12 | 13 | size = np.shape(img) 14 | height = size[0] 15 | width = size[1] 16 | temp_data = np.zeros([3, height, width], 'float32') 17 | 18 | img = np.ascontiguousarray(img) 19 | r = img[:, :, 0]/255.0 20 | g = img[:, :, 1]/255.0 21 | b = img[:, :, 2]/255.0 22 | 23 | 24 | if(flag ==1): 25 | temp_data[0, :, :] = (r - 0.353720247746) 26 | temp_data[1, :, :] = (g - 0.384273201227) 27 | temp_data[2, :, :] = (b - 0.405834376812) 28 | else: 29 | temp_data[0, :, :] = (r - 0.353581100702) 30 | temp_data[1, :, :] = (g - 0.384512037039) 31 | temp_data[2, :, :] = (b - 0.406228214502) 32 | 33 | 34 | img = temp_data[0: 3, :, :] 35 | #img = np.expand_dims(img, 0) 
36 | img = torch.from_numpy(img) 37 | return img 38 | 39 | __imagenet_stats = {'mean': [0.485, 0.456, 0.406], 40 | 'std': [0.229, 0.224, 0.225]} 41 | 42 | def get_transform(): 43 | 44 | normalize = __imagenet_stats 45 | t_list = [ 46 | transforms.ToTensor(), 47 | #transforms.Normalize(**normalize), 48 | ] 49 | 50 | return transforms.Compose(t_list) 51 | 52 | 53 | # read all lines in a file 54 | def read_all_lines(filename): 55 | with open(filename) as f: 56 | lines = [line.rstrip() for line in f.readlines()] 57 | return lines 58 | 59 | 60 | # read an .pfm file into numpy array, used to load SceneFlow disparity files 61 | def pfm_imread(filename): 62 | file = open(filename, 'rb') 63 | color = None 64 | width = None 65 | height = None 66 | scale = None 67 | endian = None 68 | 69 | header = file.readline().decode('utf-8').rstrip() 70 | if header == 'PF': 71 | color = True 72 | elif header == 'Pf': 73 | color = False 74 | else: 75 | raise Exception('Not a PFM file.') 76 | 77 | dim_match = re.match(r'^(\d+)\s(\d+)\s$', file.readline().decode('utf-8')) 78 | if dim_match: 79 | width, height = map(int, dim_match.groups()) 80 | else: 81 | raise Exception('Malformed PFM header.') 82 | 83 | scale = float(file.readline().rstrip()) 84 | if scale < 0: # little-endian 85 | endian = '<' 86 | scale = -scale 87 | else: 88 | endian = '>' # big-endian 89 | 90 | data = np.fromfile(file, endian + 'f') 91 | shape = (height, width, 3) if color else (height, width) 92 | 93 | data = np.reshape(data, shape) 94 | data = np.flipud(data) 95 | return data, scale 96 | -------------------------------------------------------------------------------- /datasets/data_io.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/datasets/data_io.pyc -------------------------------------------------------------------------------- /datasets/kitti12_dataset.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | import os 7 | import random 8 | from torch.utils.data import Dataset 9 | from PIL import Image 10 | import numpy as np 11 | from datasets.data_io import get_transform, read_all_lines 12 | import cv2 13 | import torchvision 14 | from PIL import ImageFilter 15 | class KITTI_12_Dataset(Dataset): 16 | def __init__(self, datapath, list_filename, training): 17 | self.datapath = datapath 18 | self.left_filenames, self.right_filenames, self.disp_filenames,self.mask_filenames = self.load_path(list_filename) 19 | self.training = training 20 | if self.training: 21 | assert self.disp_filenames is not None 22 | def load_path(self, list_filename): 23 | lines = read_all_lines(list_filename) 24 | splits = [line.split() for line in lines] 25 | left_images = [x[0] for x in splits] 26 | right_images = [x[1] for x in splits] 27 | if len(splits[0]) == 2: # ground truth not available 28 | return left_images, right_images, None 29 | elif len(splits[0]) == 3: 30 | disp_images = [x[2] for x in splits] 31 | return left_images, right_images, disp_images,None 32 | else: 33 | disp_images = [x[2] for x in splits] 34 | mask_images = [x[3] for x in splits] 35 | return left_images, right_images, disp_images,mask_images 36 | def load_image(self, filename): 37 | return Image.open(filename).convert('L') 38 | 39 | def load_disp(self, filename): 40 | data = Image.open(filename) 41 | data = np.array(data, dtype=np.float32) / 256. 
42 | return data 43 | 44 | def __len__(self): 45 | return len(self.left_filenames) 46 | 47 | def __getitem__(self, index): 48 | left_img = self.load_image(os.path.join(self.datapath, self.left_filenames[index])) 49 | right_img = self.load_image(os.path.join(self.datapath, self.right_filenames[index])) 50 | if self.mask_filenames: 51 | mask = self.load_image(os.path.join(self.datapath, self.mask_filenames[index])) 52 | else: 53 | mask = None 54 | if self.disp_filenames: # has disparity ground truth 55 | disparity = self.load_disp(os.path.join(self.datapath, self.disp_filenames[index])) 56 | if self.mask_filenames: 57 | mask = np.asarray(mask) 58 | temp = mask > 0 59 | disparity = disparity * temp 60 | else: 61 | disparity = None 62 | 63 | if self.training: 64 | #rgb2gray 65 | left_img = left_img.convert('L') 66 | right_img = right_img.convert('L') 67 | 68 | w, h = left_img.size 69 | crop_w, crop_h = 512, 256 70 | 71 | x1 = random.randint(0, w - crop_w) 72 | y1 = random.randint(0, h - crop_h) 73 | 74 | # random crop 75 | left_img = left_img.crop((x1, y1, x1 + crop_w, y1 + crop_h)) 76 | right_img = right_img.crop((x1, y1, x1 + crop_w, y1 + crop_h)) 77 | disparity = disparity[y1:y1 + crop_h, x1:x1 + crop_w] 78 | 79 | left_img = np.ascontiguousarray(left_img, dtype=np.float32) 80 | right_img = np.ascontiguousarray(right_img, dtype=np.float32) 81 | # to tensor, normalize 82 | preprocess = get_transform() 83 | left_img = preprocess(left_img) 84 | right_img = preprocess(right_img) 85 | disparity = np.expand_dims(disparity, 0) 86 | # return [left_img,right_img],-disparity 87 | return {"left": left_img, 88 | "right": right_img, 89 | "disparity": disparity} 90 | 91 | else: 92 | w, h = left_img.size 93 | top_pad = 384 - h 94 | right_pad = 1280 - w 95 | assert top_pad > 0 and right_pad > 0 96 | 97 | 98 | left_img = np.ascontiguousarray(left_img, dtype=np.float32) 99 | right_img = np.ascontiguousarray(right_img, dtype=np.float32) 100 | 101 | left_img = np.lib.pad(left_img, 
((top_pad, 0), (0, right_pad)), mode='symmetric', ) 102 | right_img = np.lib.pad(right_img, ((top_pad, 0), (0, right_pad)), mode='symmetric') 103 | disparity = np.lib.pad(disparity, ((top_pad, 0), (0, right_pad)), mode='constant', constant_values=0) 104 | 105 | preprocess = get_transform() 106 | left_img = preprocess(left_img) 107 | right_img = preprocess(right_img) 108 | 109 | disparity = np.expand_dims(disparity, 0) 110 | # return [left_img,right_img],-disparity 111 | return {"left": left_img, 112 | "right": right_img, 113 | "disparity": disparity} -------------------------------------------------------------------------------- /datasets/kitti_dataset.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | import os 7 | import random 8 | from torch.utils.data import Dataset 9 | from PIL import Image 10 | import numpy as np 11 | from datasets.data_io import get_transform, read_all_lines 12 | import cv2 13 | import torchvision 14 | from PIL import ImageFilter 15 | class KITTIDataset(Dataset): 16 | def __init__(self, datapath, list_filename, training): 17 | self.datapath = datapath 18 | self.left_filenames, self.right_filenames, self.disp_filenames,self.mask_filenames = self.load_path(list_filename) 19 | self.training = training 20 | if self.training: 21 | assert self.disp_filenames is not None 22 | def load_path(self, list_filename): 23 | lines = read_all_lines(list_filename) 24 | splits = [line.split() for line in lines] 25 | left_images = [x[0] for x in splits] 26 | right_images = [x[1] for x in splits] 27 | if len(splits[0]) == 2: # ground truth not available 28 | return left_images, right_images, None 29 | elif len(splits[0]) == 3: 
30 | disp_images = [x[2] for x in splits] 31 | return left_images, right_images, disp_images,None 32 | else: 33 | disp_images = [x[2] for x in splits] 34 | mask_images = [x[3] for x in splits] 35 | return left_images, right_images, disp_images,mask_images 36 | def load_image(self, filename): 37 | return Image.open(filename).convert('L') 38 | 39 | def load_disp(self, filename): 40 | data = Image.open(filename) 41 | data = np.array(data, dtype=np.float32) / 256. 42 | return data 43 | 44 | def __len__(self): 45 | return len(self.left_filenames) 46 | 47 | def __getitem__(self, index): 48 | left_img = self.load_image(os.path.join(self.datapath, self.left_filenames[index])) 49 | right_img = self.load_image(os.path.join(self.datapath, self.right_filenames[index])) 50 | if self.mask_filenames: 51 | mask = self.load_image(os.path.join(self.datapath, self.mask_filenames[index])) 52 | else: 53 | mask = None 54 | if self.disp_filenames: # has disparity ground truth 55 | disparity = self.load_disp(os.path.join(self.datapath, self.disp_filenames[index])) 56 | if self.mask_filenames: 57 | mask = np.asarray(mask) 58 | temp = mask > 0 59 | disparity = disparity * temp 60 | else: 61 | disparity = None 62 | 63 | if self.training: 64 | #rgb2gray 65 | left_img = left_img.convert('L') 66 | right_img = right_img.convert('L') 67 | 68 | w, h = left_img.size 69 | crop_w, crop_h = 512, 256 70 | 71 | x1 = random.randint(0, w - crop_w) 72 | y1 = random.randint(0, h - crop_h) 73 | 74 | # random crop 75 | left_img = left_img.crop((x1, y1, x1 + crop_w, y1 + crop_h)) 76 | right_img = right_img.crop((x1, y1, x1 + crop_w, y1 + crop_h)) 77 | disparity = disparity[y1:y1 + crop_h, x1:x1 + crop_w] 78 | 79 | left_img = np.ascontiguousarray(left_img, dtype=np.float32) 80 | right_img = np.ascontiguousarray(right_img, dtype=np.float32) 81 | # to tensor, normalize 82 | preprocess = get_transform() 83 | left_img = preprocess(left_img) 84 | right_img = preprocess(right_img) 85 | disparity = 
np.expand_dims(disparity, 0) 86 | # return [left_img,right_img],-disparity 87 | return {"left": left_img, 88 | "right": right_img, 89 | "disparity": disparity} 90 | else: 91 | w, h = left_img.size 92 | top_pad = 384 - h 93 | right_pad = 1280 - w 94 | assert top_pad > 0 and right_pad > 0 95 | 96 | left_img = np.ascontiguousarray(left_img, dtype=np.float32) 97 | right_img = np.ascontiguousarray(right_img, dtype=np.float32) 98 | 99 | left_img = np.lib.pad(left_img, ((top_pad, 0), (0, right_pad)), mode='symmetric', ) 100 | right_img = np.lib.pad(right_img, ((top_pad, 0), (0, right_pad)), mode='symmetric') 101 | disparity = np.lib.pad(disparity, ((top_pad, 0), (0, right_pad)), mode='constant', constant_values=0) 102 | 103 | preprocess = get_transform() 104 | left_img = preprocess(left_img) 105 | right_img = preprocess(right_img) 106 | 107 | disparity = np.expand_dims(disparity, 0) 108 | # return [left_img,right_img],-disparity 109 | return {"left": left_img, 110 | "right": right_img, 111 | "disparity": disparity} -------------------------------------------------------------------------------- /filenames/KITTI-12-Test.txt: -------------------------------------------------------------------------------- 1 | testing/colored_0/000000_10.png testing/colored_1/000000_10.png testing/GANet_disp_0/000000_10.png 2 | testing/colored_0/000001_10.png testing/colored_1/000001_10.png testing/GANet_disp_0/000001_10.png 3 | testing/colored_0/000002_10.png testing/colored_1/000002_10.png testing/GANet_disp_0/000002_10.png 4 | testing/colored_0/000003_10.png testing/colored_1/000003_10.png testing/GANet_disp_0/000003_10.png 5 | testing/colored_0/000004_10.png testing/colored_1/000004_10.png testing/GANet_disp_0/000004_10.png 6 | testing/colored_0/000005_10.png testing/colored_1/000005_10.png testing/GANet_disp_0/000005_10.png 7 | testing/colored_0/000006_10.png testing/colored_1/000006_10.png testing/GANet_disp_0/000006_10.png 8 | testing/colored_0/000007_10.png 
testing/colored_1/000007_10.png testing/GANet_disp_0/000007_10.png 9 | testing/colored_0/000008_10.png testing/colored_1/000008_10.png testing/GANet_disp_0/000008_10.png 10 | testing/colored_0/000009_10.png testing/colored_1/000009_10.png testing/GANet_disp_0/000009_10.png 11 | testing/colored_0/000010_10.png testing/colored_1/000010_10.png testing/GANet_disp_0/000010_10.png 12 | testing/colored_0/000011_10.png testing/colored_1/000011_10.png testing/GANet_disp_0/000011_10.png 13 | testing/colored_0/000012_10.png testing/colored_1/000012_10.png testing/GANet_disp_0/000012_10.png 14 | testing/colored_0/000013_10.png testing/colored_1/000013_10.png testing/GANet_disp_0/000013_10.png 15 | testing/colored_0/000014_10.png testing/colored_1/000014_10.png testing/GANet_disp_0/000014_10.png 16 | testing/colored_0/000015_10.png testing/colored_1/000015_10.png testing/GANet_disp_0/000015_10.png 17 | testing/colored_0/000016_10.png testing/colored_1/000016_10.png testing/GANet_disp_0/000016_10.png 18 | testing/colored_0/000017_10.png testing/colored_1/000017_10.png testing/GANet_disp_0/000017_10.png 19 | testing/colored_0/000018_10.png testing/colored_1/000018_10.png testing/GANet_disp_0/000018_10.png 20 | testing/colored_0/000019_10.png testing/colored_1/000019_10.png testing/GANet_disp_0/000019_10.png 21 | testing/colored_0/000020_10.png testing/colored_1/000020_10.png testing/GANet_disp_0/000020_10.png 22 | testing/colored_0/000021_10.png testing/colored_1/000021_10.png testing/GANet_disp_0/000021_10.png 23 | testing/colored_0/000022_10.png testing/colored_1/000022_10.png testing/GANet_disp_0/000022_10.png 24 | testing/colored_0/000023_10.png testing/colored_1/000023_10.png testing/GANet_disp_0/000023_10.png 25 | testing/colored_0/000024_10.png testing/colored_1/000024_10.png testing/GANet_disp_0/000024_10.png 26 | testing/colored_0/000025_10.png testing/colored_1/000025_10.png testing/GANet_disp_0/000025_10.png 27 | testing/colored_0/000026_10.png 
testing/colored_1/000026_10.png testing/GANet_disp_0/000026_10.png 28 | testing/colored_0/000027_10.png testing/colored_1/000027_10.png testing/GANet_disp_0/000027_10.png 29 | testing/colored_0/000028_10.png testing/colored_1/000028_10.png testing/GANet_disp_0/000028_10.png 30 | testing/colored_0/000029_10.png testing/colored_1/000029_10.png testing/GANet_disp_0/000029_10.png 31 | testing/colored_0/000030_10.png testing/colored_1/000030_10.png testing/GANet_disp_0/000030_10.png 32 | testing/colored_0/000031_10.png testing/colored_1/000031_10.png testing/GANet_disp_0/000031_10.png 33 | testing/colored_0/000032_10.png testing/colored_1/000032_10.png testing/GANet_disp_0/000032_10.png 34 | testing/colored_0/000033_10.png testing/colored_1/000033_10.png testing/GANet_disp_0/000033_10.png 35 | testing/colored_0/000034_10.png testing/colored_1/000034_10.png testing/GANet_disp_0/000034_10.png 36 | testing/colored_0/000035_10.png testing/colored_1/000035_10.png testing/GANet_disp_0/000035_10.png 37 | testing/colored_0/000036_10.png testing/colored_1/000036_10.png testing/GANet_disp_0/000036_10.png 38 | testing/colored_0/000037_10.png testing/colored_1/000037_10.png testing/GANet_disp_0/000037_10.png 39 | testing/colored_0/000038_10.png testing/colored_1/000038_10.png testing/GANet_disp_0/000038_10.png 40 | testing/colored_0/000039_10.png testing/colored_1/000039_10.png testing/GANet_disp_0/000039_10.png 41 | testing/colored_0/000040_10.png testing/colored_1/000040_10.png testing/GANet_disp_0/000040_10.png 42 | testing/colored_0/000041_10.png testing/colored_1/000041_10.png testing/GANet_disp_0/000041_10.png 43 | testing/colored_0/000042_10.png testing/colored_1/000042_10.png testing/GANet_disp_0/000042_10.png 44 | testing/colored_0/000043_10.png testing/colored_1/000043_10.png testing/GANet_disp_0/000043_10.png 45 | testing/colored_0/000044_10.png testing/colored_1/000044_10.png testing/GANet_disp_0/000044_10.png 46 | testing/colored_0/000045_10.png 
testing/colored_1/000045_10.png testing/GANet_disp_0/000045_10.png 47 | testing/colored_0/000046_10.png testing/colored_1/000046_10.png testing/GANet_disp_0/000046_10.png 48 | testing/colored_0/000047_10.png testing/colored_1/000047_10.png testing/GANet_disp_0/000047_10.png 49 | testing/colored_0/000048_10.png testing/colored_1/000048_10.png testing/GANet_disp_0/000048_10.png 50 | testing/colored_0/000049_10.png testing/colored_1/000049_10.png testing/GANet_disp_0/000049_10.png 51 | testing/colored_0/000050_10.png testing/colored_1/000050_10.png testing/GANet_disp_0/000050_10.png 52 | testing/colored_0/000051_10.png testing/colored_1/000051_10.png testing/GANet_disp_0/000051_10.png 53 | testing/colored_0/000052_10.png testing/colored_1/000052_10.png testing/GANet_disp_0/000052_10.png 54 | testing/colored_0/000053_10.png testing/colored_1/000053_10.png testing/GANet_disp_0/000053_10.png 55 | testing/colored_0/000054_10.png testing/colored_1/000054_10.png testing/GANet_disp_0/000054_10.png 56 | testing/colored_0/000055_10.png testing/colored_1/000055_10.png testing/GANet_disp_0/000055_10.png 57 | testing/colored_0/000056_10.png testing/colored_1/000056_10.png testing/GANet_disp_0/000056_10.png 58 | testing/colored_0/000057_10.png testing/colored_1/000057_10.png testing/GANet_disp_0/000057_10.png 59 | testing/colored_0/000058_10.png testing/colored_1/000058_10.png testing/GANet_disp_0/000058_10.png 60 | testing/colored_0/000059_10.png testing/colored_1/000059_10.png testing/GANet_disp_0/000059_10.png 61 | testing/colored_0/000060_10.png testing/colored_1/000060_10.png testing/GANet_disp_0/000060_10.png 62 | testing/colored_0/000061_10.png testing/colored_1/000061_10.png testing/GANet_disp_0/000061_10.png 63 | testing/colored_0/000062_10.png testing/colored_1/000062_10.png testing/GANet_disp_0/000062_10.png 64 | testing/colored_0/000063_10.png testing/colored_1/000063_10.png testing/GANet_disp_0/000063_10.png 65 | testing/colored_0/000064_10.png 
testing/colored_1/000064_10.png testing/GANet_disp_0/000064_10.png 66 | testing/colored_0/000065_10.png testing/colored_1/000065_10.png testing/GANet_disp_0/000065_10.png 67 | testing/colored_0/000066_10.png testing/colored_1/000066_10.png testing/GANet_disp_0/000066_10.png 68 | testing/colored_0/000067_10.png testing/colored_1/000067_10.png testing/GANet_disp_0/000067_10.png 69 | testing/colored_0/000068_10.png testing/colored_1/000068_10.png testing/GANet_disp_0/000068_10.png 70 | testing/colored_0/000069_10.png testing/colored_1/000069_10.png testing/GANet_disp_0/000069_10.png 71 | testing/colored_0/000070_10.png testing/colored_1/000070_10.png testing/GANet_disp_0/000070_10.png 72 | testing/colored_0/000071_10.png testing/colored_1/000071_10.png testing/GANet_disp_0/000071_10.png 73 | testing/colored_0/000072_10.png testing/colored_1/000072_10.png testing/GANet_disp_0/000072_10.png 74 | testing/colored_0/000073_10.png testing/colored_1/000073_10.png testing/GANet_disp_0/000073_10.png 75 | testing/colored_0/000074_10.png testing/colored_1/000074_10.png testing/GANet_disp_0/000074_10.png 76 | testing/colored_0/000075_10.png testing/colored_1/000075_10.png testing/GANet_disp_0/000075_10.png 77 | testing/colored_0/000076_10.png testing/colored_1/000076_10.png testing/GANet_disp_0/000076_10.png 78 | testing/colored_0/000077_10.png testing/colored_1/000077_10.png testing/GANet_disp_0/000077_10.png 79 | testing/colored_0/000078_10.png testing/colored_1/000078_10.png testing/GANet_disp_0/000078_10.png 80 | testing/colored_0/000079_10.png testing/colored_1/000079_10.png testing/GANet_disp_0/000079_10.png 81 | testing/colored_0/000080_10.png testing/colored_1/000080_10.png testing/GANet_disp_0/000080_10.png 82 | testing/colored_0/000081_10.png testing/colored_1/000081_10.png testing/GANet_disp_0/000081_10.png 83 | testing/colored_0/000082_10.png testing/colored_1/000082_10.png testing/GANet_disp_0/000082_10.png 84 | testing/colored_0/000083_10.png 
testing/colored_1/000083_10.png testing/GANet_disp_0/000083_10.png 85 | testing/colored_0/000084_10.png testing/colored_1/000084_10.png testing/GANet_disp_0/000084_10.png 86 | testing/colored_0/000085_10.png testing/colored_1/000085_10.png testing/GANet_disp_0/000085_10.png 87 | testing/colored_0/000086_10.png testing/colored_1/000086_10.png testing/GANet_disp_0/000086_10.png 88 | testing/colored_0/000087_10.png testing/colored_1/000087_10.png testing/GANet_disp_0/000087_10.png 89 | testing/colored_0/000088_10.png testing/colored_1/000088_10.png testing/GANet_disp_0/000088_10.png 90 | testing/colored_0/000089_10.png testing/colored_1/000089_10.png testing/GANet_disp_0/000089_10.png 91 | testing/colored_0/000090_10.png testing/colored_1/000090_10.png testing/GANet_disp_0/000090_10.png 92 | testing/colored_0/000091_10.png testing/colored_1/000091_10.png testing/GANet_disp_0/000091_10.png 93 | testing/colored_0/000092_10.png testing/colored_1/000092_10.png testing/GANet_disp_0/000092_10.png 94 | testing/colored_0/000093_10.png testing/colored_1/000093_10.png testing/GANet_disp_0/000093_10.png 95 | testing/colored_0/000094_10.png testing/colored_1/000094_10.png testing/GANet_disp_0/000094_10.png 96 | testing/colored_0/000095_10.png testing/colored_1/000095_10.png testing/GANet_disp_0/000095_10.png 97 | testing/colored_0/000096_10.png testing/colored_1/000096_10.png testing/GANet_disp_0/000096_10.png 98 | testing/colored_0/000097_10.png testing/colored_1/000097_10.png testing/GANet_disp_0/000097_10.png 99 | testing/colored_0/000098_10.png testing/colored_1/000098_10.png testing/GANet_disp_0/000098_10.png 100 | testing/colored_0/000099_10.png testing/colored_1/000099_10.png testing/GANet_disp_0/000099_10.png 101 | testing/colored_0/000100_10.png testing/colored_1/000100_10.png testing/GANet_disp_0/000100_10.png 102 | testing/colored_0/000101_10.png testing/colored_1/000101_10.png testing/GANet_disp_0/000101_10.png 103 | testing/colored_0/000102_10.png 
testing/colored_1/000102_10.png testing/GANet_disp_0/000102_10.png 104 | testing/colored_0/000103_10.png testing/colored_1/000103_10.png testing/GANet_disp_0/000103_10.png 105 | testing/colored_0/000104_10.png testing/colored_1/000104_10.png testing/GANet_disp_0/000104_10.png 106 | testing/colored_0/000105_10.png testing/colored_1/000105_10.png testing/GANet_disp_0/000105_10.png 107 | testing/colored_0/000106_10.png testing/colored_1/000106_10.png testing/GANet_disp_0/000106_10.png 108 | testing/colored_0/000107_10.png testing/colored_1/000107_10.png testing/GANet_disp_0/000107_10.png 109 | testing/colored_0/000108_10.png testing/colored_1/000108_10.png testing/GANet_disp_0/000108_10.png 110 | testing/colored_0/000109_10.png testing/colored_1/000109_10.png testing/GANet_disp_0/000109_10.png 111 | testing/colored_0/000110_10.png testing/colored_1/000110_10.png testing/GANet_disp_0/000110_10.png 112 | testing/colored_0/000111_10.png testing/colored_1/000111_10.png testing/GANet_disp_0/000111_10.png 113 | testing/colored_0/000112_10.png testing/colored_1/000112_10.png testing/GANet_disp_0/000112_10.png 114 | testing/colored_0/000113_10.png testing/colored_1/000113_10.png testing/GANet_disp_0/000113_10.png 115 | testing/colored_0/000114_10.png testing/colored_1/000114_10.png testing/GANet_disp_0/000114_10.png 116 | testing/colored_0/000115_10.png testing/colored_1/000115_10.png testing/GANet_disp_0/000115_10.png 117 | testing/colored_0/000116_10.png testing/colored_1/000116_10.png testing/GANet_disp_0/000116_10.png 118 | testing/colored_0/000117_10.png testing/colored_1/000117_10.png testing/GANet_disp_0/000117_10.png 119 | testing/colored_0/000118_10.png testing/colored_1/000118_10.png testing/GANet_disp_0/000118_10.png 120 | testing/colored_0/000119_10.png testing/colored_1/000119_10.png testing/GANet_disp_0/000119_10.png 121 | testing/colored_0/000120_10.png testing/colored_1/000120_10.png testing/GANet_disp_0/000120_10.png 122 | testing/colored_0/000121_10.png 
testing/colored_1/000121_10.png testing/GANet_disp_0/000121_10.png 123 | testing/colored_0/000122_10.png testing/colored_1/000122_10.png testing/GANet_disp_0/000122_10.png 124 | testing/colored_0/000123_10.png testing/colored_1/000123_10.png testing/GANet_disp_0/000123_10.png 125 | testing/colored_0/000124_10.png testing/colored_1/000124_10.png testing/GANet_disp_0/000124_10.png 126 | testing/colored_0/000125_10.png testing/colored_1/000125_10.png testing/GANet_disp_0/000125_10.png 127 | testing/colored_0/000126_10.png testing/colored_1/000126_10.png testing/GANet_disp_0/000126_10.png 128 | testing/colored_0/000127_10.png testing/colored_1/000127_10.png testing/GANet_disp_0/000127_10.png 129 | testing/colored_0/000128_10.png testing/colored_1/000128_10.png testing/GANet_disp_0/000128_10.png 130 | testing/colored_0/000129_10.png testing/colored_1/000129_10.png testing/GANet_disp_0/000129_10.png 131 | testing/colored_0/000130_10.png testing/colored_1/000130_10.png testing/GANet_disp_0/000130_10.png 132 | testing/colored_0/000131_10.png testing/colored_1/000131_10.png testing/GANet_disp_0/000131_10.png 133 | testing/colored_0/000132_10.png testing/colored_1/000132_10.png testing/GANet_disp_0/000132_10.png 134 | testing/colored_0/000133_10.png testing/colored_1/000133_10.png testing/GANet_disp_0/000133_10.png 135 | testing/colored_0/000134_10.png testing/colored_1/000134_10.png testing/GANet_disp_0/000134_10.png 136 | testing/colored_0/000135_10.png testing/colored_1/000135_10.png testing/GANet_disp_0/000135_10.png 137 | testing/colored_0/000136_10.png testing/colored_1/000136_10.png testing/GANet_disp_0/000136_10.png 138 | testing/colored_0/000137_10.png testing/colored_1/000137_10.png testing/GANet_disp_0/000137_10.png 139 | testing/colored_0/000138_10.png testing/colored_1/000138_10.png testing/GANet_disp_0/000138_10.png 140 | testing/colored_0/000139_10.png testing/colored_1/000139_10.png testing/GANet_disp_0/000139_10.png 141 | testing/colored_0/000140_10.png 
testing/colored_1/000140_10.png testing/GANet_disp_0/000140_10.png 142 | testing/colored_0/000141_10.png testing/colored_1/000141_10.png testing/GANet_disp_0/000141_10.png 143 | testing/colored_0/000142_10.png testing/colored_1/000142_10.png testing/GANet_disp_0/000142_10.png 144 | testing/colored_0/000143_10.png testing/colored_1/000143_10.png testing/GANet_disp_0/000143_10.png 145 | testing/colored_0/000144_10.png testing/colored_1/000144_10.png testing/GANet_disp_0/000144_10.png 146 | testing/colored_0/000145_10.png testing/colored_1/000145_10.png testing/GANet_disp_0/000145_10.png 147 | testing/colored_0/000146_10.png testing/colored_1/000146_10.png testing/GANet_disp_0/000146_10.png 148 | testing/colored_0/000147_10.png testing/colored_1/000147_10.png testing/GANet_disp_0/000147_10.png 149 | testing/colored_0/000148_10.png testing/colored_1/000148_10.png testing/GANet_disp_0/000148_10.png 150 | testing/colored_0/000149_10.png testing/colored_1/000149_10.png testing/GANet_disp_0/000149_10.png 151 | testing/colored_0/000150_10.png testing/colored_1/000150_10.png testing/GANet_disp_0/000150_10.png 152 | testing/colored_0/000151_10.png testing/colored_1/000151_10.png testing/GANet_disp_0/000151_10.png 153 | testing/colored_0/000152_10.png testing/colored_1/000152_10.png testing/GANet_disp_0/000152_10.png 154 | testing/colored_0/000153_10.png testing/colored_1/000153_10.png testing/GANet_disp_0/000153_10.png 155 | testing/colored_0/000154_10.png testing/colored_1/000154_10.png testing/GANet_disp_0/000154_10.png 156 | testing/colored_0/000155_10.png testing/colored_1/000155_10.png testing/GANet_disp_0/000155_10.png 157 | testing/colored_0/000156_10.png testing/colored_1/000156_10.png testing/GANet_disp_0/000156_10.png 158 | testing/colored_0/000157_10.png testing/colored_1/000157_10.png testing/GANet_disp_0/000157_10.png 159 | testing/colored_0/000158_10.png testing/colored_1/000158_10.png testing/GANet_disp_0/000158_10.png 160 | testing/colored_0/000159_10.png 
testing/colored_1/000159_10.png testing/GANet_disp_0/000159_10.png 161 | testing/colored_0/000160_10.png testing/colored_1/000160_10.png testing/GANet_disp_0/000160_10.png 162 | testing/colored_0/000161_10.png testing/colored_1/000161_10.png testing/GANet_disp_0/000161_10.png 163 | testing/colored_0/000162_10.png testing/colored_1/000162_10.png testing/GANet_disp_0/000162_10.png 164 | testing/colored_0/000163_10.png testing/colored_1/000163_10.png testing/GANet_disp_0/000163_10.png 165 | testing/colored_0/000164_10.png testing/colored_1/000164_10.png testing/GANet_disp_0/000164_10.png 166 | testing/colored_0/000165_10.png testing/colored_1/000165_10.png testing/GANet_disp_0/000165_10.png 167 | testing/colored_0/000166_10.png testing/colored_1/000166_10.png testing/GANet_disp_0/000166_10.png 168 | testing/colored_0/000167_10.png testing/colored_1/000167_10.png testing/GANet_disp_0/000167_10.png 169 | testing/colored_0/000168_10.png testing/colored_1/000168_10.png testing/GANet_disp_0/000168_10.png 170 | testing/colored_0/000169_10.png testing/colored_1/000169_10.png testing/GANet_disp_0/000169_10.png 171 | testing/colored_0/000170_10.png testing/colored_1/000170_10.png testing/GANet_disp_0/000170_10.png 172 | testing/colored_0/000171_10.png testing/colored_1/000171_10.png testing/GANet_disp_0/000171_10.png 173 | testing/colored_0/000172_10.png testing/colored_1/000172_10.png testing/GANet_disp_0/000172_10.png 174 | testing/colored_0/000173_10.png testing/colored_1/000173_10.png testing/GANet_disp_0/000173_10.png 175 | testing/colored_0/000174_10.png testing/colored_1/000174_10.png testing/GANet_disp_0/000174_10.png 176 | testing/colored_0/000175_10.png testing/colored_1/000175_10.png testing/GANet_disp_0/000175_10.png 177 | testing/colored_0/000176_10.png testing/colored_1/000176_10.png testing/GANet_disp_0/000176_10.png 178 | testing/colored_0/000177_10.png testing/colored_1/000177_10.png testing/GANet_disp_0/000177_10.png 179 | testing/colored_0/000178_10.png 
testing/colored_1/000178_10.png testing/GANet_disp_0/000178_10.png 180 | testing/colored_0/000179_10.png testing/colored_1/000179_10.png testing/GANet_disp_0/000179_10.png 181 | testing/colored_0/000180_10.png testing/colored_1/000180_10.png testing/GANet_disp_0/000180_10.png 182 | testing/colored_0/000181_10.png testing/colored_1/000181_10.png testing/GANet_disp_0/000181_10.png 183 | testing/colored_0/000182_10.png testing/colored_1/000182_10.png testing/GANet_disp_0/000182_10.png 184 | testing/colored_0/000183_10.png testing/colored_1/000183_10.png testing/GANet_disp_0/000183_10.png 185 | testing/colored_0/000184_10.png testing/colored_1/000184_10.png testing/GANet_disp_0/000184_10.png 186 | testing/colored_0/000185_10.png testing/colored_1/000185_10.png testing/GANet_disp_0/000185_10.png 187 | testing/colored_0/000186_10.png testing/colored_1/000186_10.png testing/GANet_disp_0/000186_10.png 188 | testing/colored_0/000187_10.png testing/colored_1/000187_10.png testing/GANet_disp_0/000187_10.png 189 | testing/colored_0/000188_10.png testing/colored_1/000188_10.png testing/GANet_disp_0/000188_10.png 190 | testing/colored_0/000189_10.png testing/colored_1/000189_10.png testing/GANet_disp_0/000189_10.png 191 | testing/colored_0/000190_10.png testing/colored_1/000190_10.png testing/GANet_disp_0/000190_10.png 192 | testing/colored_0/000191_10.png testing/colored_1/000191_10.png testing/GANet_disp_0/000191_10.png 193 | testing/colored_0/000192_10.png testing/colored_1/000192_10.png testing/GANet_disp_0/000192_10.png 194 | testing/colored_0/000193_10.png testing/colored_1/000193_10.png testing/GANet_disp_0/000193_10.png 195 | testing/colored_0/000194_10.png testing/colored_1/000194_10.png testing/GANet_disp_0/000194_10.png 196 | -------------------------------------------------------------------------------- /filenames/KITTI-15-Test.txt: -------------------------------------------------------------------------------- 1 | testing/image_2/000000_10.png 
testing/image_3/000000_10.png testing/GANet_disp_0/000000_10.png 2 | testing/image_2/000001_10.png testing/image_3/000001_10.png testing/GANet_disp_0/000001_10.png 3 | testing/image_2/000002_10.png testing/image_3/000002_10.png testing/GANet_disp_0/000002_10.png 4 | testing/image_2/000003_10.png testing/image_3/000003_10.png testing/GANet_disp_0/000003_10.png 5 | testing/image_2/000004_10.png testing/image_3/000004_10.png testing/GANet_disp_0/000004_10.png 6 | testing/image_2/000005_10.png testing/image_3/000005_10.png testing/GANet_disp_0/000005_10.png 7 | testing/image_2/000006_10.png testing/image_3/000006_10.png testing/GANet_disp_0/000006_10.png 8 | testing/image_2/000007_10.png testing/image_3/000007_10.png testing/GANet_disp_0/000007_10.png 9 | testing/image_2/000008_10.png testing/image_3/000008_10.png testing/GANet_disp_0/000008_10.png 10 | testing/image_2/000009_10.png testing/image_3/000009_10.png testing/GANet_disp_0/000009_10.png 11 | testing/image_2/000010_10.png testing/image_3/000010_10.png testing/GANet_disp_0/000010_10.png 12 | testing/image_2/000011_10.png testing/image_3/000011_10.png testing/GANet_disp_0/000011_10.png 13 | testing/image_2/000012_10.png testing/image_3/000012_10.png testing/GANet_disp_0/000012_10.png 14 | testing/image_2/000013_10.png testing/image_3/000013_10.png testing/GANet_disp_0/000013_10.png 15 | testing/image_2/000014_10.png testing/image_3/000014_10.png testing/GANet_disp_0/000014_10.png 16 | testing/image_2/000015_10.png testing/image_3/000015_10.png testing/GANet_disp_0/000015_10.png 17 | testing/image_2/000016_10.png testing/image_3/000016_10.png testing/GANet_disp_0/000016_10.png 18 | testing/image_2/000017_10.png testing/image_3/000017_10.png testing/GANet_disp_0/000017_10.png 19 | testing/image_2/000018_10.png testing/image_3/000018_10.png testing/GANet_disp_0/000018_10.png 20 | testing/image_2/000019_10.png testing/image_3/000019_10.png testing/GANet_disp_0/000019_10.png 21 | testing/image_2/000020_10.png 
testing/image_3/000020_10.png testing/GANet_disp_0/000020_10.png 22 | testing/image_2/000021_10.png testing/image_3/000021_10.png testing/GANet_disp_0/000021_10.png 23 | testing/image_2/000022_10.png testing/image_3/000022_10.png testing/GANet_disp_0/000022_10.png 24 | testing/image_2/000023_10.png testing/image_3/000023_10.png testing/GANet_disp_0/000023_10.png 25 | testing/image_2/000024_10.png testing/image_3/000024_10.png testing/GANet_disp_0/000024_10.png 26 | testing/image_2/000025_10.png testing/image_3/000025_10.png testing/GANet_disp_0/000025_10.png 27 | testing/image_2/000026_10.png testing/image_3/000026_10.png testing/GANet_disp_0/000026_10.png 28 | testing/image_2/000027_10.png testing/image_3/000027_10.png testing/GANet_disp_0/000027_10.png 29 | testing/image_2/000028_10.png testing/image_3/000028_10.png testing/GANet_disp_0/000028_10.png 30 | testing/image_2/000029_10.png testing/image_3/000029_10.png testing/GANet_disp_0/000029_10.png 31 | testing/image_2/000030_10.png testing/image_3/000030_10.png testing/GANet_disp_0/000030_10.png 32 | testing/image_2/000031_10.png testing/image_3/000031_10.png testing/GANet_disp_0/000031_10.png 33 | testing/image_2/000032_10.png testing/image_3/000032_10.png testing/GANet_disp_0/000032_10.png 34 | testing/image_2/000033_10.png testing/image_3/000033_10.png testing/GANet_disp_0/000033_10.png 35 | testing/image_2/000034_10.png testing/image_3/000034_10.png testing/GANet_disp_0/000034_10.png 36 | testing/image_2/000035_10.png testing/image_3/000035_10.png testing/GANet_disp_0/000035_10.png 37 | testing/image_2/000036_10.png testing/image_3/000036_10.png testing/GANet_disp_0/000036_10.png 38 | testing/image_2/000037_10.png testing/image_3/000037_10.png testing/GANet_disp_0/000037_10.png 39 | testing/image_2/000038_10.png testing/image_3/000038_10.png testing/GANet_disp_0/000038_10.png 40 | testing/image_2/000039_10.png testing/image_3/000039_10.png testing/GANet_disp_0/000039_10.png 41 | testing/image_2/000040_10.png 
testing/image_3/000040_10.png testing/GANet_disp_0/000040_10.png 42 | testing/image_2/000041_10.png testing/image_3/000041_10.png testing/GANet_disp_0/000041_10.png 43 | testing/image_2/000042_10.png testing/image_3/000042_10.png testing/GANet_disp_0/000042_10.png 44 | testing/image_2/000043_10.png testing/image_3/000043_10.png testing/GANet_disp_0/000043_10.png 45 | testing/image_2/000044_10.png testing/image_3/000044_10.png testing/GANet_disp_0/000044_10.png 46 | testing/image_2/000045_10.png testing/image_3/000045_10.png testing/GANet_disp_0/000045_10.png 47 | testing/image_2/000046_10.png testing/image_3/000046_10.png testing/GANet_disp_0/000046_10.png 48 | testing/image_2/000047_10.png testing/image_3/000047_10.png testing/GANet_disp_0/000047_10.png 49 | testing/image_2/000048_10.png testing/image_3/000048_10.png testing/GANet_disp_0/000048_10.png 50 | testing/image_2/000049_10.png testing/image_3/000049_10.png testing/GANet_disp_0/000049_10.png 51 | testing/image_2/000050_10.png testing/image_3/000050_10.png testing/GANet_disp_0/000050_10.png 52 | testing/image_2/000051_10.png testing/image_3/000051_10.png testing/GANet_disp_0/000051_10.png 53 | testing/image_2/000052_10.png testing/image_3/000052_10.png testing/GANet_disp_0/000052_10.png 54 | testing/image_2/000053_10.png testing/image_3/000053_10.png testing/GANet_disp_0/000053_10.png 55 | testing/image_2/000054_10.png testing/image_3/000054_10.png testing/GANet_disp_0/000054_10.png 56 | testing/image_2/000055_10.png testing/image_3/000055_10.png testing/GANet_disp_0/000055_10.png 57 | testing/image_2/000056_10.png testing/image_3/000056_10.png testing/GANet_disp_0/000056_10.png 58 | testing/image_2/000057_10.png testing/image_3/000057_10.png testing/GANet_disp_0/000057_10.png 59 | testing/image_2/000058_10.png testing/image_3/000058_10.png testing/GANet_disp_0/000058_10.png 60 | testing/image_2/000059_10.png testing/image_3/000059_10.png testing/GANet_disp_0/000059_10.png 61 | testing/image_2/000060_10.png 
testing/image_3/000060_10.png testing/GANet_disp_0/000060_10.png 62 | testing/image_2/000061_10.png testing/image_3/000061_10.png testing/GANet_disp_0/000061_10.png 63 | testing/image_2/000062_10.png testing/image_3/000062_10.png testing/GANet_disp_0/000062_10.png 64 | testing/image_2/000063_10.png testing/image_3/000063_10.png testing/GANet_disp_0/000063_10.png 65 | testing/image_2/000064_10.png testing/image_3/000064_10.png testing/GANet_disp_0/000064_10.png 66 | testing/image_2/000065_10.png testing/image_3/000065_10.png testing/GANet_disp_0/000065_10.png 67 | testing/image_2/000066_10.png testing/image_3/000066_10.png testing/GANet_disp_0/000066_10.png 68 | testing/image_2/000067_10.png testing/image_3/000067_10.png testing/GANet_disp_0/000067_10.png 69 | testing/image_2/000068_10.png testing/image_3/000068_10.png testing/GANet_disp_0/000068_10.png 70 | testing/image_2/000069_10.png testing/image_3/000069_10.png testing/GANet_disp_0/000069_10.png 71 | testing/image_2/000070_10.png testing/image_3/000070_10.png testing/GANet_disp_0/000070_10.png 72 | testing/image_2/000071_10.png testing/image_3/000071_10.png testing/GANet_disp_0/000071_10.png 73 | testing/image_2/000072_10.png testing/image_3/000072_10.png testing/GANet_disp_0/000072_10.png 74 | testing/image_2/000073_10.png testing/image_3/000073_10.png testing/GANet_disp_0/000073_10.png 75 | testing/image_2/000074_10.png testing/image_3/000074_10.png testing/GANet_disp_0/000074_10.png 76 | testing/image_2/000075_10.png testing/image_3/000075_10.png testing/GANet_disp_0/000075_10.png 77 | testing/image_2/000076_10.png testing/image_3/000076_10.png testing/GANet_disp_0/000076_10.png 78 | testing/image_2/000077_10.png testing/image_3/000077_10.png testing/GANet_disp_0/000077_10.png 79 | testing/image_2/000078_10.png testing/image_3/000078_10.png testing/GANet_disp_0/000078_10.png 80 | testing/image_2/000079_10.png testing/image_3/000079_10.png testing/GANet_disp_0/000079_10.png 81 | testing/image_2/000080_10.png 
testing/image_3/000080_10.png testing/GANet_disp_0/000080_10.png 82 | testing/image_2/000081_10.png testing/image_3/000081_10.png testing/GANet_disp_0/000081_10.png 83 | testing/image_2/000082_10.png testing/image_3/000082_10.png testing/GANet_disp_0/000082_10.png 84 | testing/image_2/000083_10.png testing/image_3/000083_10.png testing/GANet_disp_0/000083_10.png 85 | testing/image_2/000084_10.png testing/image_3/000084_10.png testing/GANet_disp_0/000084_10.png 86 | testing/image_2/000085_10.png testing/image_3/000085_10.png testing/GANet_disp_0/000085_10.png 87 | testing/image_2/000086_10.png testing/image_3/000086_10.png testing/GANet_disp_0/000086_10.png 88 | testing/image_2/000087_10.png testing/image_3/000087_10.png testing/GANet_disp_0/000087_10.png 89 | testing/image_2/000088_10.png testing/image_3/000088_10.png testing/GANet_disp_0/000088_10.png 90 | testing/image_2/000089_10.png testing/image_3/000089_10.png testing/GANet_disp_0/000089_10.png 91 | testing/image_2/000090_10.png testing/image_3/000090_10.png testing/GANet_disp_0/000090_10.png 92 | testing/image_2/000091_10.png testing/image_3/000091_10.png testing/GANet_disp_0/000091_10.png 93 | testing/image_2/000092_10.png testing/image_3/000092_10.png testing/GANet_disp_0/000092_10.png 94 | testing/image_2/000093_10.png testing/image_3/000093_10.png testing/GANet_disp_0/000093_10.png 95 | testing/image_2/000094_10.png testing/image_3/000094_10.png testing/GANet_disp_0/000094_10.png 96 | testing/image_2/000095_10.png testing/image_3/000095_10.png testing/GANet_disp_0/000095_10.png 97 | testing/image_2/000096_10.png testing/image_3/000096_10.png testing/GANet_disp_0/000096_10.png 98 | testing/image_2/000097_10.png testing/image_3/000097_10.png testing/GANet_disp_0/000097_10.png 99 | testing/image_2/000098_10.png testing/image_3/000098_10.png testing/GANet_disp_0/000098_10.png 100 | testing/image_2/000099_10.png testing/image_3/000099_10.png testing/GANet_disp_0/000099_10.png 101 | 
testing/image_2/000100_10.png testing/image_3/000100_10.png testing/GANet_disp_0/000100_10.png 102 | testing/image_2/000101_10.png testing/image_3/000101_10.png testing/GANet_disp_0/000101_10.png 103 | testing/image_2/000102_10.png testing/image_3/000102_10.png testing/GANet_disp_0/000102_10.png 104 | testing/image_2/000103_10.png testing/image_3/000103_10.png testing/GANet_disp_0/000103_10.png 105 | testing/image_2/000104_10.png testing/image_3/000104_10.png testing/GANet_disp_0/000104_10.png 106 | testing/image_2/000105_10.png testing/image_3/000105_10.png testing/GANet_disp_0/000105_10.png 107 | testing/image_2/000106_10.png testing/image_3/000106_10.png testing/GANet_disp_0/000106_10.png 108 | testing/image_2/000107_10.png testing/image_3/000107_10.png testing/GANet_disp_0/000107_10.png 109 | testing/image_2/000108_10.png testing/image_3/000108_10.png testing/GANet_disp_0/000108_10.png 110 | testing/image_2/000109_10.png testing/image_3/000109_10.png testing/GANet_disp_0/000109_10.png 111 | testing/image_2/000110_10.png testing/image_3/000110_10.png testing/GANet_disp_0/000110_10.png 112 | testing/image_2/000111_10.png testing/image_3/000111_10.png testing/GANet_disp_0/000111_10.png 113 | testing/image_2/000112_10.png testing/image_3/000112_10.png testing/GANet_disp_0/000112_10.png 114 | testing/image_2/000113_10.png testing/image_3/000113_10.png testing/GANet_disp_0/000113_10.png 115 | testing/image_2/000114_10.png testing/image_3/000114_10.png testing/GANet_disp_0/000114_10.png 116 | testing/image_2/000115_10.png testing/image_3/000115_10.png testing/GANet_disp_0/000115_10.png 117 | testing/image_2/000116_10.png testing/image_3/000116_10.png testing/GANet_disp_0/000116_10.png 118 | testing/image_2/000117_10.png testing/image_3/000117_10.png testing/GANet_disp_0/000117_10.png 119 | testing/image_2/000118_10.png testing/image_3/000118_10.png testing/GANet_disp_0/000118_10.png 120 | testing/image_2/000119_10.png testing/image_3/000119_10.png 
testing/GANet_disp_0/000119_10.png 121 | testing/image_2/000120_10.png testing/image_3/000120_10.png testing/GANet_disp_0/000120_10.png 122 | testing/image_2/000121_10.png testing/image_3/000121_10.png testing/GANet_disp_0/000121_10.png 123 | testing/image_2/000122_10.png testing/image_3/000122_10.png testing/GANet_disp_0/000122_10.png 124 | testing/image_2/000123_10.png testing/image_3/000123_10.png testing/GANet_disp_0/000123_10.png 125 | testing/image_2/000124_10.png testing/image_3/000124_10.png testing/GANet_disp_0/000124_10.png 126 | testing/image_2/000125_10.png testing/image_3/000125_10.png testing/GANet_disp_0/000125_10.png 127 | testing/image_2/000126_10.png testing/image_3/000126_10.png testing/GANet_disp_0/000126_10.png 128 | testing/image_2/000127_10.png testing/image_3/000127_10.png testing/GANet_disp_0/000127_10.png 129 | testing/image_2/000128_10.png testing/image_3/000128_10.png testing/GANet_disp_0/000128_10.png 130 | testing/image_2/000129_10.png testing/image_3/000129_10.png testing/GANet_disp_0/000129_10.png 131 | testing/image_2/000130_10.png testing/image_3/000130_10.png testing/GANet_disp_0/000130_10.png 132 | testing/image_2/000131_10.png testing/image_3/000131_10.png testing/GANet_disp_0/000131_10.png 133 | testing/image_2/000132_10.png testing/image_3/000132_10.png testing/GANet_disp_0/000132_10.png 134 | testing/image_2/000133_10.png testing/image_3/000133_10.png testing/GANet_disp_0/000133_10.png 135 | testing/image_2/000134_10.png testing/image_3/000134_10.png testing/GANet_disp_0/000134_10.png 136 | testing/image_2/000135_10.png testing/image_3/000135_10.png testing/GANet_disp_0/000135_10.png 137 | testing/image_2/000136_10.png testing/image_3/000136_10.png testing/GANet_disp_0/000136_10.png 138 | testing/image_2/000137_10.png testing/image_3/000137_10.png testing/GANet_disp_0/000137_10.png 139 | testing/image_2/000138_10.png testing/image_3/000138_10.png testing/GANet_disp_0/000138_10.png 140 | testing/image_2/000139_10.png 
testing/image_3/000139_10.png testing/GANet_disp_0/000139_10.png 141 | testing/image_2/000140_10.png testing/image_3/000140_10.png testing/GANet_disp_0/000140_10.png 142 | testing/image_2/000141_10.png testing/image_3/000141_10.png testing/GANet_disp_0/000141_10.png 143 | testing/image_2/000142_10.png testing/image_3/000142_10.png testing/GANet_disp_0/000142_10.png 144 | testing/image_2/000143_10.png testing/image_3/000143_10.png testing/GANet_disp_0/000143_10.png 145 | testing/image_2/000144_10.png testing/image_3/000144_10.png testing/GANet_disp_0/000144_10.png 146 | testing/image_2/000145_10.png testing/image_3/000145_10.png testing/GANet_disp_0/000145_10.png 147 | testing/image_2/000146_10.png testing/image_3/000146_10.png testing/GANet_disp_0/000146_10.png 148 | testing/image_2/000147_10.png testing/image_3/000147_10.png testing/GANet_disp_0/000147_10.png 149 | testing/image_2/000148_10.png testing/image_3/000148_10.png testing/GANet_disp_0/000148_10.png 150 | testing/image_2/000149_10.png testing/image_3/000149_10.png testing/GANet_disp_0/000149_10.png 151 | testing/image_2/000150_10.png testing/image_3/000150_10.png testing/GANet_disp_0/000150_10.png 152 | testing/image_2/000151_10.png testing/image_3/000151_10.png testing/GANet_disp_0/000151_10.png 153 | testing/image_2/000152_10.png testing/image_3/000152_10.png testing/GANet_disp_0/000152_10.png 154 | testing/image_2/000153_10.png testing/image_3/000153_10.png testing/GANet_disp_0/000153_10.png 155 | testing/image_2/000154_10.png testing/image_3/000154_10.png testing/GANet_disp_0/000154_10.png 156 | testing/image_2/000155_10.png testing/image_3/000155_10.png testing/GANet_disp_0/000155_10.png 157 | testing/image_2/000156_10.png testing/image_3/000156_10.png testing/GANet_disp_0/000156_10.png 158 | testing/image_2/000157_10.png testing/image_3/000157_10.png testing/GANet_disp_0/000157_10.png 159 | testing/image_2/000158_10.png testing/image_3/000158_10.png testing/GANet_disp_0/000158_10.png 160 | 
testing/image_2/000159_10.png testing/image_3/000159_10.png testing/GANet_disp_0/000159_10.png 161 | testing/image_2/000160_10.png testing/image_3/000160_10.png testing/GANet_disp_0/000160_10.png 162 | testing/image_2/000161_10.png testing/image_3/000161_10.png testing/GANet_disp_0/000161_10.png 163 | testing/image_2/000162_10.png testing/image_3/000162_10.png testing/GANet_disp_0/000162_10.png 164 | testing/image_2/000163_10.png testing/image_3/000163_10.png testing/GANet_disp_0/000163_10.png 165 | testing/image_2/000164_10.png testing/image_3/000164_10.png testing/GANet_disp_0/000164_10.png 166 | testing/image_2/000165_10.png testing/image_3/000165_10.png testing/GANet_disp_0/000165_10.png 167 | testing/image_2/000166_10.png testing/image_3/000166_10.png testing/GANet_disp_0/000166_10.png 168 | testing/image_2/000167_10.png testing/image_3/000167_10.png testing/GANet_disp_0/000167_10.png 169 | testing/image_2/000168_10.png testing/image_3/000168_10.png testing/GANet_disp_0/000168_10.png 170 | testing/image_2/000169_10.png testing/image_3/000169_10.png testing/GANet_disp_0/000169_10.png 171 | testing/image_2/000170_10.png testing/image_3/000170_10.png testing/GANet_disp_0/000170_10.png 172 | testing/image_2/000171_10.png testing/image_3/000171_10.png testing/GANet_disp_0/000171_10.png 173 | testing/image_2/000172_10.png testing/image_3/000172_10.png testing/GANet_disp_0/000172_10.png 174 | testing/image_2/000173_10.png testing/image_3/000173_10.png testing/GANet_disp_0/000173_10.png 175 | testing/image_2/000174_10.png testing/image_3/000174_10.png testing/GANet_disp_0/000174_10.png 176 | testing/image_2/000175_10.png testing/image_3/000175_10.png testing/GANet_disp_0/000175_10.png 177 | testing/image_2/000176_10.png testing/image_3/000176_10.png testing/GANet_disp_0/000176_10.png 178 | testing/image_2/000177_10.png testing/image_3/000177_10.png testing/GANet_disp_0/000177_10.png 179 | testing/image_2/000178_10.png testing/image_3/000178_10.png 
testing/GANet_disp_0/000178_10.png 180 | testing/image_2/000179_10.png testing/image_3/000179_10.png testing/GANet_disp_0/000179_10.png 181 | testing/image_2/000180_10.png testing/image_3/000180_10.png testing/GANet_disp_0/000180_10.png 182 | testing/image_2/000181_10.png testing/image_3/000181_10.png testing/GANet_disp_0/000181_10.png 183 | testing/image_2/000182_10.png testing/image_3/000182_10.png testing/GANet_disp_0/000182_10.png 184 | testing/image_2/000183_10.png testing/image_3/000183_10.png testing/GANet_disp_0/000183_10.png 185 | testing/image_2/000184_10.png testing/image_3/000184_10.png testing/GANet_disp_0/000184_10.png 186 | testing/image_2/000185_10.png testing/image_3/000185_10.png testing/GANet_disp_0/000185_10.png 187 | testing/image_2/000186_10.png testing/image_3/000186_10.png testing/GANet_disp_0/000186_10.png 188 | testing/image_2/000187_10.png testing/image_3/000187_10.png testing/GANet_disp_0/000187_10.png 189 | testing/image_2/000188_10.png testing/image_3/000188_10.png testing/GANet_disp_0/000188_10.png 190 | testing/image_2/000189_10.png testing/image_3/000189_10.png testing/GANet_disp_0/000189_10.png 191 | testing/image_2/000190_10.png testing/image_3/000190_10.png testing/GANet_disp_0/000190_10.png 192 | testing/image_2/000191_10.png testing/image_3/000191_10.png testing/GANet_disp_0/000191_10.png 193 | testing/image_2/000192_10.png testing/image_3/000192_10.png testing/GANet_disp_0/000192_10.png 194 | testing/image_2/000193_10.png testing/image_3/000193_10.png testing/GANet_disp_0/000193_10.png 195 | testing/image_2/000194_10.png testing/image_3/000194_10.png testing/GANet_disp_0/000194_10.png 196 | testing/image_2/000195_10.png testing/image_3/000195_10.png testing/GANet_disp_0/000195_10.png 197 | testing/image_2/000196_10.png testing/image_3/000196_10.png testing/GANet_disp_0/000196_10.png 198 | testing/image_2/000197_10.png testing/image_3/000197_10.png testing/GANet_disp_0/000197_10.png 199 | testing/image_2/000198_10.png 
testing/image_3/000198_10.png testing/GANet_disp_0/000198_10.png 200 | testing/image_2/000199_10.png testing/image_3/000199_10.png testing/GANet_disp_0/000199_10.png 201 | -------------------------------------------------------------------------------- /filenames/kitti12_train.txt: -------------------------------------------------------------------------------- 1 | training/colored_0/000000_10.png training/colored_1/000000_10.png training/disp_occ/000000_10.png 2 | training/colored_0/000001_10.png training/colored_1/000001_10.png training/disp_occ/000001_10.png 3 | training/colored_0/000002_10.png training/colored_1/000002_10.png training/disp_occ/000002_10.png 4 | training/colored_0/000003_10.png training/colored_1/000003_10.png training/disp_occ/000003_10.png 5 | training/colored_0/000004_10.png training/colored_1/000004_10.png training/disp_occ/000004_10.png 6 | training/colored_0/000005_10.png training/colored_1/000005_10.png training/disp_occ/000005_10.png 7 | training/colored_0/000006_10.png training/colored_1/000006_10.png training/disp_occ/000006_10.png 8 | training/colored_0/000007_10.png training/colored_1/000007_10.png training/disp_occ/000007_10.png 9 | training/colored_0/000008_10.png training/colored_1/000008_10.png training/disp_occ/000008_10.png 10 | training/colored_0/000009_10.png training/colored_1/000009_10.png training/disp_occ/000009_10.png 11 | training/colored_0/000010_10.png training/colored_1/000010_10.png training/disp_occ/000010_10.png 12 | training/colored_0/000011_10.png training/colored_1/000011_10.png training/disp_occ/000011_10.png 13 | training/colored_0/000012_10.png training/colored_1/000012_10.png training/disp_occ/000012_10.png 14 | training/colored_0/000013_10.png training/colored_1/000013_10.png training/disp_occ/000013_10.png 15 | training/colored_0/000014_10.png training/colored_1/000014_10.png training/disp_occ/000014_10.png 16 | training/colored_0/000015_10.png training/colored_1/000015_10.png 
training/disp_occ/000015_10.png 17 | training/colored_0/000016_10.png training/colored_1/000016_10.png training/disp_occ/000016_10.png 18 | training/colored_0/000017_10.png training/colored_1/000017_10.png training/disp_occ/000017_10.png 19 | training/colored_0/000018_10.png training/colored_1/000018_10.png training/disp_occ/000018_10.png 20 | training/colored_0/000019_10.png training/colored_1/000019_10.png training/disp_occ/000019_10.png 21 | training/colored_0/000020_10.png training/colored_1/000020_10.png training/disp_occ/000020_10.png 22 | training/colored_0/000021_10.png training/colored_1/000021_10.png training/disp_occ/000021_10.png 23 | training/colored_0/000022_10.png training/colored_1/000022_10.png training/disp_occ/000022_10.png 24 | training/colored_0/000023_10.png training/colored_1/000023_10.png training/disp_occ/000023_10.png 25 | training/colored_0/000024_10.png training/colored_1/000024_10.png training/disp_occ/000024_10.png 26 | training/colored_0/000025_10.png training/colored_1/000025_10.png training/disp_occ/000025_10.png 27 | training/colored_0/000026_10.png training/colored_1/000026_10.png training/disp_occ/000026_10.png 28 | training/colored_0/000027_10.png training/colored_1/000027_10.png training/disp_occ/000027_10.png 29 | training/colored_0/000028_10.png training/colored_1/000028_10.png training/disp_occ/000028_10.png 30 | training/colored_0/000029_10.png training/colored_1/000029_10.png training/disp_occ/000029_10.png 31 | training/colored_0/000030_10.png training/colored_1/000030_10.png training/disp_occ/000030_10.png 32 | training/colored_0/000031_10.png training/colored_1/000031_10.png training/disp_occ/000031_10.png 33 | training/colored_0/000032_10.png training/colored_1/000032_10.png training/disp_occ/000032_10.png 34 | training/colored_0/000033_10.png training/colored_1/000033_10.png training/disp_occ/000033_10.png 35 | training/colored_0/000034_10.png training/colored_1/000034_10.png training/disp_occ/000034_10.png 36 | 
training/colored_0/000035_10.png training/colored_1/000035_10.png training/disp_occ/000035_10.png 37 | training/colored_0/000036_10.png training/colored_1/000036_10.png training/disp_occ/000036_10.png 38 | training/colored_0/000037_10.png training/colored_1/000037_10.png training/disp_occ/000037_10.png 39 | training/colored_0/000038_10.png training/colored_1/000038_10.png training/disp_occ/000038_10.png 40 | training/colored_0/000039_10.png training/colored_1/000039_10.png training/disp_occ/000039_10.png 41 | training/colored_0/000040_10.png training/colored_1/000040_10.png training/disp_occ/000040_10.png 42 | training/colored_0/000041_10.png training/colored_1/000041_10.png training/disp_occ/000041_10.png 43 | training/colored_0/000042_10.png training/colored_1/000042_10.png training/disp_occ/000042_10.png 44 | training/colored_0/000043_10.png training/colored_1/000043_10.png training/disp_occ/000043_10.png 45 | training/colored_0/000044_10.png training/colored_1/000044_10.png training/disp_occ/000044_10.png 46 | training/colored_0/000045_10.png training/colored_1/000045_10.png training/disp_occ/000045_10.png 47 | training/colored_0/000046_10.png training/colored_1/000046_10.png training/disp_occ/000046_10.png 48 | training/colored_0/000047_10.png training/colored_1/000047_10.png training/disp_occ/000047_10.png 49 | training/colored_0/000048_10.png training/colored_1/000048_10.png training/disp_occ/000048_10.png 50 | training/colored_0/000049_10.png training/colored_1/000049_10.png training/disp_occ/000049_10.png 51 | training/colored_0/000050_10.png training/colored_1/000050_10.png training/disp_occ/000050_10.png 52 | training/colored_0/000051_10.png training/colored_1/000051_10.png training/disp_occ/000051_10.png 53 | training/colored_0/000052_10.png training/colored_1/000052_10.png training/disp_occ/000052_10.png 54 | training/colored_0/000053_10.png training/colored_1/000053_10.png training/disp_occ/000053_10.png 55 | training/colored_0/000054_10.png 
training/colored_1/000054_10.png training/disp_occ/000054_10.png 56 | training/colored_0/000055_10.png training/colored_1/000055_10.png training/disp_occ/000055_10.png 57 | training/colored_0/000056_10.png training/colored_1/000056_10.png training/disp_occ/000056_10.png 58 | training/colored_0/000057_10.png training/colored_1/000057_10.png training/disp_occ/000057_10.png 59 | training/colored_0/000058_10.png training/colored_1/000058_10.png training/disp_occ/000058_10.png 60 | training/colored_0/000059_10.png training/colored_1/000059_10.png training/disp_occ/000059_10.png 61 | training/colored_0/000060_10.png training/colored_1/000060_10.png training/disp_occ/000060_10.png 62 | training/colored_0/000061_10.png training/colored_1/000061_10.png training/disp_occ/000061_10.png 63 | training/colored_0/000062_10.png training/colored_1/000062_10.png training/disp_occ/000062_10.png 64 | training/colored_0/000063_10.png training/colored_1/000063_10.png training/disp_occ/000063_10.png 65 | training/colored_0/000064_10.png training/colored_1/000064_10.png training/disp_occ/000064_10.png 66 | training/colored_0/000065_10.png training/colored_1/000065_10.png training/disp_occ/000065_10.png 67 | training/colored_0/000066_10.png training/colored_1/000066_10.png training/disp_occ/000066_10.png 68 | training/colored_0/000067_10.png training/colored_1/000067_10.png training/disp_occ/000067_10.png 69 | training/colored_0/000068_10.png training/colored_1/000068_10.png training/disp_occ/000068_10.png 70 | training/colored_0/000069_10.png training/colored_1/000069_10.png training/disp_occ/000069_10.png 71 | training/colored_0/000070_10.png training/colored_1/000070_10.png training/disp_occ/000070_10.png 72 | training/colored_0/000071_10.png training/colored_1/000071_10.png training/disp_occ/000071_10.png 73 | training/colored_0/000072_10.png training/colored_1/000072_10.png training/disp_occ/000072_10.png 74 | training/colored_0/000073_10.png training/colored_1/000073_10.png 
training/disp_occ/000073_10.png 75 | training/colored_0/000074_10.png training/colored_1/000074_10.png training/disp_occ/000074_10.png 76 | training/colored_0/000075_10.png training/colored_1/000075_10.png training/disp_occ/000075_10.png 77 | training/colored_0/000076_10.png training/colored_1/000076_10.png training/disp_occ/000076_10.png 78 | training/colored_0/000077_10.png training/colored_1/000077_10.png training/disp_occ/000077_10.png 79 | training/colored_0/000078_10.png training/colored_1/000078_10.png training/disp_occ/000078_10.png 80 | training/colored_0/000079_10.png training/colored_1/000079_10.png training/disp_occ/000079_10.png 81 | training/colored_0/000080_10.png training/colored_1/000080_10.png training/disp_occ/000080_10.png 82 | training/colored_0/000081_10.png training/colored_1/000081_10.png training/disp_occ/000081_10.png 83 | training/colored_0/000082_10.png training/colored_1/000082_10.png training/disp_occ/000082_10.png 84 | training/colored_0/000083_10.png training/colored_1/000083_10.png training/disp_occ/000083_10.png 85 | training/colored_0/000084_10.png training/colored_1/000084_10.png training/disp_occ/000084_10.png 86 | training/colored_0/000085_10.png training/colored_1/000085_10.png training/disp_occ/000085_10.png 87 | training/colored_0/000086_10.png training/colored_1/000086_10.png training/disp_occ/000086_10.png 88 | training/colored_0/000087_10.png training/colored_1/000087_10.png training/disp_occ/000087_10.png 89 | training/colored_0/000088_10.png training/colored_1/000088_10.png training/disp_occ/000088_10.png 90 | training/colored_0/000089_10.png training/colored_1/000089_10.png training/disp_occ/000089_10.png 91 | training/colored_0/000090_10.png training/colored_1/000090_10.png training/disp_occ/000090_10.png 92 | training/colored_0/000091_10.png training/colored_1/000091_10.png training/disp_occ/000091_10.png 93 | training/colored_0/000092_10.png training/colored_1/000092_10.png training/disp_occ/000092_10.png 94 | 
training/colored_0/000093_10.png training/colored_1/000093_10.png training/disp_occ/000093_10.png 95 | training/colored_0/000094_10.png training/colored_1/000094_10.png training/disp_occ/000094_10.png 96 | training/colored_0/000095_10.png training/colored_1/000095_10.png training/disp_occ/000095_10.png 97 | training/colored_0/000096_10.png training/colored_1/000096_10.png training/disp_occ/000096_10.png 98 | training/colored_0/000097_10.png training/colored_1/000097_10.png training/disp_occ/000097_10.png 99 | training/colored_0/000098_10.png training/colored_1/000098_10.png training/disp_occ/000098_10.png 100 | training/colored_0/000099_10.png training/colored_1/000099_10.png training/disp_occ/000099_10.png 101 | training/colored_0/000100_10.png training/colored_1/000100_10.png training/disp_occ/000100_10.png 102 | training/colored_0/000101_10.png training/colored_1/000101_10.png training/disp_occ/000101_10.png 103 | training/colored_0/000102_10.png training/colored_1/000102_10.png training/disp_occ/000102_10.png 104 | training/colored_0/000103_10.png training/colored_1/000103_10.png training/disp_occ/000103_10.png 105 | training/colored_0/000104_10.png training/colored_1/000104_10.png training/disp_occ/000104_10.png 106 | training/colored_0/000105_10.png training/colored_1/000105_10.png training/disp_occ/000105_10.png 107 | training/colored_0/000106_10.png training/colored_1/000106_10.png training/disp_occ/000106_10.png 108 | training/colored_0/000107_10.png training/colored_1/000107_10.png training/disp_occ/000107_10.png 109 | training/colored_0/000108_10.png training/colored_1/000108_10.png training/disp_occ/000108_10.png 110 | training/colored_0/000109_10.png training/colored_1/000109_10.png training/disp_occ/000109_10.png 111 | training/colored_0/000110_10.png training/colored_1/000110_10.png training/disp_occ/000110_10.png 112 | training/colored_0/000111_10.png training/colored_1/000111_10.png training/disp_occ/000111_10.png 113 | 
training/colored_0/000112_10.png training/colored_1/000112_10.png training/disp_occ/000112_10.png 114 | training/colored_0/000113_10.png training/colored_1/000113_10.png training/disp_occ/000113_10.png 115 | training/colored_0/000114_10.png training/colored_1/000114_10.png training/disp_occ/000114_10.png 116 | training/colored_0/000115_10.png training/colored_1/000115_10.png training/disp_occ/000115_10.png 117 | training/colored_0/000116_10.png training/colored_1/000116_10.png training/disp_occ/000116_10.png 118 | training/colored_0/000117_10.png training/colored_1/000117_10.png training/disp_occ/000117_10.png 119 | training/colored_0/000118_10.png training/colored_1/000118_10.png training/disp_occ/000118_10.png 120 | training/colored_0/000119_10.png training/colored_1/000119_10.png training/disp_occ/000119_10.png 121 | training/colored_0/000120_10.png training/colored_1/000120_10.png training/disp_occ/000120_10.png 122 | training/colored_0/000121_10.png training/colored_1/000121_10.png training/disp_occ/000121_10.png 123 | training/colored_0/000122_10.png training/colored_1/000122_10.png training/disp_occ/000122_10.png 124 | training/colored_0/000123_10.png training/colored_1/000123_10.png training/disp_occ/000123_10.png 125 | training/colored_0/000124_10.png training/colored_1/000124_10.png training/disp_occ/000124_10.png 126 | training/colored_0/000125_10.png training/colored_1/000125_10.png training/disp_occ/000125_10.png 127 | training/colored_0/000126_10.png training/colored_1/000126_10.png training/disp_occ/000126_10.png 128 | training/colored_0/000127_10.png training/colored_1/000127_10.png training/disp_occ/000127_10.png 129 | training/colored_0/000128_10.png training/colored_1/000128_10.png training/disp_occ/000128_10.png 130 | training/colored_0/000129_10.png training/colored_1/000129_10.png training/disp_occ/000129_10.png 131 | training/colored_0/000130_10.png training/colored_1/000130_10.png training/disp_occ/000130_10.png 132 | 
training/colored_0/000131_10.png training/colored_1/000131_10.png training/disp_occ/000131_10.png 133 | training/colored_0/000132_10.png training/colored_1/000132_10.png training/disp_occ/000132_10.png 134 | training/colored_0/000133_10.png training/colored_1/000133_10.png training/disp_occ/000133_10.png 135 | training/colored_0/000134_10.png training/colored_1/000134_10.png training/disp_occ/000134_10.png 136 | training/colored_0/000135_10.png training/colored_1/000135_10.png training/disp_occ/000135_10.png 137 | training/colored_0/000136_10.png training/colored_1/000136_10.png training/disp_occ/000136_10.png 138 | training/colored_0/000137_10.png training/colored_1/000137_10.png training/disp_occ/000137_10.png 139 | training/colored_0/000138_10.png training/colored_1/000138_10.png training/disp_occ/000138_10.png 140 | training/colored_0/000139_10.png training/colored_1/000139_10.png training/disp_occ/000139_10.png 141 | training/colored_0/000140_10.png training/colored_1/000140_10.png training/disp_occ/000140_10.png 142 | training/colored_0/000141_10.png training/colored_1/000141_10.png training/disp_occ/000141_10.png 143 | training/colored_0/000142_10.png training/colored_1/000142_10.png training/disp_occ/000142_10.png 144 | training/colored_0/000143_10.png training/colored_1/000143_10.png training/disp_occ/000143_10.png 145 | training/colored_0/000144_10.png training/colored_1/000144_10.png training/disp_occ/000144_10.png 146 | training/colored_0/000145_10.png training/colored_1/000145_10.png training/disp_occ/000145_10.png 147 | training/colored_0/000146_10.png training/colored_1/000146_10.png training/disp_occ/000146_10.png 148 | training/colored_0/000147_10.png training/colored_1/000147_10.png training/disp_occ/000147_10.png 149 | training/colored_0/000148_10.png training/colored_1/000148_10.png training/disp_occ/000148_10.png 150 | training/colored_0/000149_10.png training/colored_1/000149_10.png training/disp_occ/000149_10.png 151 | 
training/colored_0/000150_10.png training/colored_1/000150_10.png training/disp_occ/000150_10.png 152 | training/colored_0/000151_10.png training/colored_1/000151_10.png training/disp_occ/000151_10.png 153 | training/colored_0/000152_10.png training/colored_1/000152_10.png training/disp_occ/000152_10.png 154 | training/colored_0/000153_10.png training/colored_1/000153_10.png training/disp_occ/000153_10.png 155 | training/colored_0/000154_10.png training/colored_1/000154_10.png training/disp_occ/000154_10.png 156 | training/colored_0/000155_10.png training/colored_1/000155_10.png training/disp_occ/000155_10.png 157 | training/colored_0/000156_10.png training/colored_1/000156_10.png training/disp_occ/000156_10.png 158 | training/colored_0/000157_10.png training/colored_1/000157_10.png training/disp_occ/000157_10.png 159 | training/colored_0/000158_10.png training/colored_1/000158_10.png training/disp_occ/000158_10.png 160 | training/colored_0/000159_10.png training/colored_1/000159_10.png training/disp_occ/000159_10.png 161 | training/colored_0/000160_10.png training/colored_1/000160_10.png training/disp_occ/000160_10.png 162 | training/colored_0/000161_10.png training/colored_1/000161_10.png training/disp_occ/000161_10.png 163 | training/colored_0/000162_10.png training/colored_1/000162_10.png training/disp_occ/000162_10.png 164 | training/colored_0/000163_10.png training/colored_1/000163_10.png training/disp_occ/000163_10.png 165 | training/colored_0/000164_10.png training/colored_1/000164_10.png training/disp_occ/000164_10.png 166 | training/colored_0/000165_10.png training/colored_1/000165_10.png training/disp_occ/000165_10.png 167 | training/colored_0/000166_10.png training/colored_1/000166_10.png training/disp_occ/000166_10.png 168 | training/colored_0/000167_10.png training/colored_1/000167_10.png training/disp_occ/000167_10.png 169 | training/colored_0/000168_10.png training/colored_1/000168_10.png training/disp_occ/000168_10.png 170 | 
training/colored_0/000169_10.png training/colored_1/000169_10.png training/disp_occ/000169_10.png 171 | training/colored_0/000170_10.png training/colored_1/000170_10.png training/disp_occ/000170_10.png 172 | training/colored_0/000171_10.png training/colored_1/000171_10.png training/disp_occ/000171_10.png 173 | training/colored_0/000172_10.png training/colored_1/000172_10.png training/disp_occ/000172_10.png 174 | training/colored_0/000173_10.png training/colored_1/000173_10.png training/disp_occ/000173_10.png 175 | training/colored_0/000174_10.png training/colored_1/000174_10.png training/disp_occ/000174_10.png 176 | training/colored_0/000175_10.png training/colored_1/000175_10.png training/disp_occ/000175_10.png 177 | training/colored_0/000176_10.png training/colored_1/000176_10.png training/disp_occ/000176_10.png 178 | training/colored_0/000177_10.png training/colored_1/000177_10.png training/disp_occ/000177_10.png 179 | training/colored_0/000178_10.png training/colored_1/000178_10.png training/disp_occ/000178_10.png 180 | training/colored_0/000179_10.png training/colored_1/000179_10.png training/disp_occ/000179_10.png 181 | training/colored_0/000180_10.png training/colored_1/000180_10.png training/disp_occ/000180_10.png 182 | training/colored_0/000181_10.png training/colored_1/000181_10.png training/disp_occ/000181_10.png 183 | training/colored_0/000182_10.png training/colored_1/000182_10.png training/disp_occ/000182_10.png 184 | training/colored_0/000183_10.png training/colored_1/000183_10.png training/disp_occ/000183_10.png 185 | training/colored_0/000184_10.png training/colored_1/000184_10.png training/disp_occ/000184_10.png 186 | training/colored_0/000185_10.png training/colored_1/000185_10.png training/disp_occ/000185_10.png 187 | training/colored_0/000186_10.png training/colored_1/000186_10.png training/disp_occ/000186_10.png 188 | training/colored_0/000187_10.png training/colored_1/000187_10.png training/disp_occ/000187_10.png 189 | 
training/colored_0/000188_10.png training/colored_1/000188_10.png training/disp_occ/000188_10.png 190 | training/colored_0/000189_10.png training/colored_1/000189_10.png training/disp_occ/000189_10.png 191 | training/colored_0/000190_10.png training/colored_1/000190_10.png training/disp_occ/000190_10.png 192 | training/colored_0/000191_10.png training/colored_1/000191_10.png training/disp_occ/000191_10.png 193 | training/colored_0/000192_10.png training/colored_1/000192_10.png training/disp_occ/000192_10.png 194 | training/colored_0/000193_10.png training/colored_1/000193_10.png training/disp_occ/000193_10.png -------------------------------------------------------------------------------- /filenames/kitti15_train.txt: -------------------------------------------------------------------------------- 1 | training/image_2/000000_10.png training/image_3/000000_10.png training/disp_occ_0/000000_10.png 2 | training/image_2/000002_10.png training/image_3/000002_10.png training/disp_occ_0/000002_10.png 3 | training/image_2/000003_10.png training/image_3/000003_10.png training/disp_occ_0/000003_10.png 4 | training/image_2/000004_10.png training/image_3/000004_10.png training/disp_occ_0/000004_10.png 5 | training/image_2/000005_10.png training/image_3/000005_10.png training/disp_occ_0/000005_10.png 6 | training/image_2/000007_10.png training/image_3/000007_10.png training/disp_occ_0/000007_10.png 7 | training/image_2/000008_10.png training/image_3/000008_10.png training/disp_occ_0/000008_10.png 8 | training/image_2/000009_10.png training/image_3/000009_10.png training/disp_occ_0/000009_10.png 9 | training/image_2/000010_10.png training/image_3/000010_10.png training/disp_occ_0/000010_10.png 10 | training/image_2/000011_10.png training/image_3/000011_10.png training/disp_occ_0/000011_10.png 11 | training/image_2/000012_10.png training/image_3/000012_10.png training/disp_occ_0/000012_10.png 12 | training/image_2/000013_10.png training/image_3/000013_10.png 
training/disp_occ_0/000013_10.png 13 | training/image_2/000014_10.png training/image_3/000014_10.png training/disp_occ_0/000014_10.png 14 | training/image_2/000015_10.png training/image_3/000015_10.png training/disp_occ_0/000015_10.png 15 | training/image_2/000016_10.png training/image_3/000016_10.png training/disp_occ_0/000016_10.png 16 | training/image_2/000017_10.png training/image_3/000017_10.png training/disp_occ_0/000017_10.png 17 | training/image_2/000018_10.png training/image_3/000018_10.png training/disp_occ_0/000018_10.png 18 | training/image_2/000019_10.png training/image_3/000019_10.png training/disp_occ_0/000019_10.png 19 | training/image_2/000020_10.png training/image_3/000020_10.png training/disp_occ_0/000020_10.png 20 | training/image_2/000021_10.png training/image_3/000021_10.png training/disp_occ_0/000021_10.png 21 | training/image_2/000022_10.png training/image_3/000022_10.png training/disp_occ_0/000022_10.png 22 | training/image_2/000023_10.png training/image_3/000023_10.png training/disp_occ_0/000023_10.png 23 | training/image_2/000024_10.png training/image_3/000024_10.png training/disp_occ_0/000024_10.png 24 | training/image_2/000025_10.png training/image_3/000025_10.png training/disp_occ_0/000025_10.png 25 | training/image_2/000027_10.png training/image_3/000027_10.png training/disp_occ_0/000027_10.png 26 | training/image_2/000028_10.png training/image_3/000028_10.png training/disp_occ_0/000028_10.png 27 | training/image_2/000029_10.png training/image_3/000029_10.png training/disp_occ_0/000029_10.png 28 | training/image_2/000030_10.png training/image_3/000030_10.png training/disp_occ_0/000030_10.png 29 | training/image_2/000031_10.png training/image_3/000031_10.png training/disp_occ_0/000031_10.png 30 | training/image_2/000032_10.png training/image_3/000032_10.png training/disp_occ_0/000032_10.png 31 | training/image_2/000033_10.png training/image_3/000033_10.png training/disp_occ_0/000033_10.png 32 | training/image_2/000034_10.png 
training/image_3/000034_10.png training/disp_occ_0/000034_10.png 33 | training/image_2/000035_10.png training/image_3/000035_10.png training/disp_occ_0/000035_10.png 34 | training/image_2/000036_10.png training/image_3/000036_10.png training/disp_occ_0/000036_10.png 35 | training/image_2/000037_10.png training/image_3/000037_10.png training/disp_occ_0/000037_10.png 36 | training/image_2/000039_10.png training/image_3/000039_10.png training/disp_occ_0/000039_10.png 37 | training/image_2/000040_10.png training/image_3/000040_10.png training/disp_occ_0/000040_10.png 38 | training/image_2/000041_10.png training/image_3/000041_10.png training/disp_occ_0/000041_10.png 39 | training/image_2/000042_10.png training/image_3/000042_10.png training/disp_occ_0/000042_10.png 40 | training/image_2/000044_10.png training/image_3/000044_10.png training/disp_occ_0/000044_10.png 41 | training/image_2/000045_10.png training/image_3/000045_10.png training/disp_occ_0/000045_10.png 42 | training/image_2/000046_10.png training/image_3/000046_10.png training/disp_occ_0/000046_10.png 43 | training/image_2/000047_10.png training/image_3/000047_10.png training/disp_occ_0/000047_10.png 44 | training/image_2/000048_10.png training/image_3/000048_10.png training/disp_occ_0/000048_10.png 45 | training/image_2/000050_10.png training/image_3/000050_10.png training/disp_occ_0/000050_10.png 46 | training/image_2/000051_10.png training/image_3/000051_10.png training/disp_occ_0/000051_10.png 47 | training/image_2/000052_10.png training/image_3/000052_10.png training/disp_occ_0/000052_10.png 48 | training/image_2/000053_10.png training/image_3/000053_10.png training/disp_occ_0/000053_10.png 49 | training/image_2/000054_10.png training/image_3/000054_10.png training/disp_occ_0/000054_10.png 50 | training/image_2/000055_10.png training/image_3/000055_10.png training/disp_occ_0/000055_10.png 51 | training/image_2/000056_10.png training/image_3/000056_10.png training/disp_occ_0/000056_10.png 52 | 
training/image_2/000057_10.png training/image_3/000057_10.png training/disp_occ_0/000057_10.png 53 | training/image_2/000058_10.png training/image_3/000058_10.png training/disp_occ_0/000058_10.png 54 | training/image_2/000059_10.png training/image_3/000059_10.png training/disp_occ_0/000059_10.png 55 | training/image_2/000060_10.png training/image_3/000060_10.png training/disp_occ_0/000060_10.png 56 | training/image_2/000061_10.png training/image_3/000061_10.png training/disp_occ_0/000061_10.png 57 | training/image_2/000062_10.png training/image_3/000062_10.png training/disp_occ_0/000062_10.png 58 | training/image_2/000063_10.png training/image_3/000063_10.png training/disp_occ_0/000063_10.png 59 | training/image_2/000064_10.png training/image_3/000064_10.png training/disp_occ_0/000064_10.png 60 | training/image_2/000065_10.png training/image_3/000065_10.png training/disp_occ_0/000065_10.png 61 | training/image_2/000066_10.png training/image_3/000066_10.png training/disp_occ_0/000066_10.png 62 | training/image_2/000068_10.png training/image_3/000068_10.png training/disp_occ_0/000068_10.png 63 | training/image_2/000069_10.png training/image_3/000069_10.png training/disp_occ_0/000069_10.png 64 | training/image_2/000070_10.png training/image_3/000070_10.png training/disp_occ_0/000070_10.png 65 | training/image_2/000071_10.png training/image_3/000071_10.png training/disp_occ_0/000071_10.png 66 | training/image_2/000072_10.png training/image_3/000072_10.png training/disp_occ_0/000072_10.png 67 | training/image_2/000073_10.png training/image_3/000073_10.png training/disp_occ_0/000073_10.png 68 | training/image_2/000074_10.png training/image_3/000074_10.png training/disp_occ_0/000074_10.png 69 | training/image_2/000075_10.png training/image_3/000075_10.png training/disp_occ_0/000075_10.png 70 | training/image_2/000076_10.png training/image_3/000076_10.png training/disp_occ_0/000076_10.png 71 | training/image_2/000077_10.png training/image_3/000077_10.png 
training/disp_occ_0/000077_10.png 72 | training/image_2/000078_10.png training/image_3/000078_10.png training/disp_occ_0/000078_10.png 73 | training/image_2/000079_10.png training/image_3/000079_10.png training/disp_occ_0/000079_10.png 74 | training/image_2/000080_10.png training/image_3/000080_10.png training/disp_occ_0/000080_10.png 75 | training/image_2/000082_10.png training/image_3/000082_10.png training/disp_occ_0/000082_10.png 76 | training/image_2/000083_10.png training/image_3/000083_10.png training/disp_occ_0/000083_10.png 77 | training/image_2/000084_10.png training/image_3/000084_10.png training/disp_occ_0/000084_10.png 78 | training/image_2/000085_10.png training/image_3/000085_10.png training/disp_occ_0/000085_10.png 79 | training/image_2/000086_10.png training/image_3/000086_10.png training/disp_occ_0/000086_10.png 80 | training/image_2/000087_10.png training/image_3/000087_10.png training/disp_occ_0/000087_10.png 81 | training/image_2/000088_10.png training/image_3/000088_10.png training/disp_occ_0/000088_10.png 82 | training/image_2/000090_10.png training/image_3/000090_10.png training/disp_occ_0/000090_10.png 83 | training/image_2/000091_10.png training/image_3/000091_10.png training/disp_occ_0/000091_10.png 84 | training/image_2/000092_10.png training/image_3/000092_10.png training/disp_occ_0/000092_10.png 85 | training/image_2/000093_10.png training/image_3/000093_10.png training/disp_occ_0/000093_10.png 86 | training/image_2/000094_10.png training/image_3/000094_10.png training/disp_occ_0/000094_10.png 87 | training/image_2/000095_10.png training/image_3/000095_10.png training/disp_occ_0/000095_10.png 88 | training/image_2/000096_10.png training/image_3/000096_10.png training/disp_occ_0/000096_10.png 89 | training/image_2/000097_10.png training/image_3/000097_10.png training/disp_occ_0/000097_10.png 90 | training/image_2/000098_10.png training/image_3/000098_10.png training/disp_occ_0/000098_10.png 91 | training/image_2/000099_10.png 
training/image_3/000099_10.png training/disp_occ_0/000099_10.png 92 | training/image_2/000100_10.png training/image_3/000100_10.png training/disp_occ_0/000100_10.png 93 | training/image_2/000101_10.png training/image_3/000101_10.png training/disp_occ_0/000101_10.png 94 | training/image_2/000102_10.png training/image_3/000102_10.png training/disp_occ_0/000102_10.png 95 | training/image_2/000103_10.png training/image_3/000103_10.png training/disp_occ_0/000103_10.png 96 | training/image_2/000104_10.png training/image_3/000104_10.png training/disp_occ_0/000104_10.png 97 | training/image_2/000105_10.png training/image_3/000105_10.png training/disp_occ_0/000105_10.png 98 | training/image_2/000106_10.png training/image_3/000106_10.png training/disp_occ_0/000106_10.png 99 | training/image_2/000107_10.png training/image_3/000107_10.png training/disp_occ_0/000107_10.png 100 | training/image_2/000108_10.png training/image_3/000108_10.png training/disp_occ_0/000108_10.png 101 | training/image_2/000110_10.png training/image_3/000110_10.png training/disp_occ_0/000110_10.png 102 | training/image_2/000111_10.png training/image_3/000111_10.png training/disp_occ_0/000111_10.png 103 | training/image_2/000112_10.png training/image_3/000112_10.png training/disp_occ_0/000112_10.png 104 | training/image_2/000113_10.png training/image_3/000113_10.png training/disp_occ_0/000113_10.png 105 | training/image_2/000114_10.png training/image_3/000114_10.png training/disp_occ_0/000114_10.png 106 | training/image_2/000115_10.png training/image_3/000115_10.png training/disp_occ_0/000115_10.png 107 | training/image_2/000116_10.png training/image_3/000116_10.png training/disp_occ_0/000116_10.png 108 | training/image_2/000117_10.png training/image_3/000117_10.png training/disp_occ_0/000117_10.png 109 | training/image_2/000118_10.png training/image_3/000118_10.png training/disp_occ_0/000118_10.png 110 | training/image_2/000119_10.png training/image_3/000119_10.png training/disp_occ_0/000119_10.png 111 
| training/image_2/000120_10.png training/image_3/000120_10.png training/disp_occ_0/000120_10.png 112 | training/image_2/000121_10.png training/image_3/000121_10.png training/disp_occ_0/000121_10.png 113 | training/image_2/000123_10.png training/image_3/000123_10.png training/disp_occ_0/000123_10.png 114 | training/image_2/000124_10.png training/image_3/000124_10.png training/disp_occ_0/000124_10.png 115 | training/image_2/000125_10.png training/image_3/000125_10.png training/disp_occ_0/000125_10.png 116 | training/image_2/000126_10.png training/image_3/000126_10.png training/disp_occ_0/000126_10.png 117 | training/image_2/000127_10.png training/image_3/000127_10.png training/disp_occ_0/000127_10.png 118 | training/image_2/000128_10.png training/image_3/000128_10.png training/disp_occ_0/000128_10.png 119 | training/image_2/000130_10.png training/image_3/000130_10.png training/disp_occ_0/000130_10.png 120 | training/image_2/000131_10.png training/image_3/000131_10.png training/disp_occ_0/000131_10.png 121 | training/image_2/000133_10.png training/image_3/000133_10.png training/disp_occ_0/000133_10.png 122 | training/image_2/000134_10.png training/image_3/000134_10.png training/disp_occ_0/000134_10.png 123 | training/image_2/000135_10.png training/image_3/000135_10.png training/disp_occ_0/000135_10.png 124 | training/image_2/000136_10.png training/image_3/000136_10.png training/disp_occ_0/000136_10.png 125 | training/image_2/000137_10.png training/image_3/000137_10.png training/disp_occ_0/000137_10.png 126 | training/image_2/000138_10.png training/image_3/000138_10.png training/disp_occ_0/000138_10.png 127 | training/image_2/000139_10.png training/image_3/000139_10.png training/disp_occ_0/000139_10.png 128 | training/image_2/000140_10.png training/image_3/000140_10.png training/disp_occ_0/000140_10.png 129 | training/image_2/000142_10.png training/image_3/000142_10.png training/disp_occ_0/000142_10.png 130 | training/image_2/000143_10.png 
training/image_3/000143_10.png training/disp_occ_0/000143_10.png 131 | training/image_2/000144_10.png training/image_3/000144_10.png training/disp_occ_0/000144_10.png 132 | training/image_2/000145_10.png training/image_3/000145_10.png training/disp_occ_0/000145_10.png 133 | training/image_2/000146_10.png training/image_3/000146_10.png training/disp_occ_0/000146_10.png 134 | training/image_2/000147_10.png training/image_3/000147_10.png training/disp_occ_0/000147_10.png 135 | training/image_2/000148_10.png training/image_3/000148_10.png training/disp_occ_0/000148_10.png 136 | training/image_2/000149_10.png training/image_3/000149_10.png training/disp_occ_0/000149_10.png 137 | training/image_2/000150_10.png training/image_3/000150_10.png training/disp_occ_0/000150_10.png 138 | training/image_2/000151_10.png training/image_3/000151_10.png training/disp_occ_0/000151_10.png 139 | training/image_2/000153_10.png training/image_3/000153_10.png training/disp_occ_0/000153_10.png 140 | training/image_2/000154_10.png training/image_3/000154_10.png training/disp_occ_0/000154_10.png 141 | training/image_2/000155_10.png training/image_3/000155_10.png training/disp_occ_0/000155_10.png 142 | training/image_2/000156_10.png training/image_3/000156_10.png training/disp_occ_0/000156_10.png 143 | training/image_2/000157_10.png training/image_3/000157_10.png training/disp_occ_0/000157_10.png 144 | training/image_2/000158_10.png training/image_3/000158_10.png training/disp_occ_0/000158_10.png 145 | training/image_2/000160_10.png training/image_3/000160_10.png training/disp_occ_0/000160_10.png 146 | training/image_2/000161_10.png training/image_3/000161_10.png training/disp_occ_0/000161_10.png 147 | training/image_2/000162_10.png training/image_3/000162_10.png training/disp_occ_0/000162_10.png 148 | training/image_2/000163_10.png training/image_3/000163_10.png training/disp_occ_0/000163_10.png 149 | training/image_2/000164_10.png training/image_3/000164_10.png 
training/disp_occ_0/000164_10.png 150 | training/image_2/000165_10.png training/image_3/000165_10.png training/disp_occ_0/000165_10.png 151 | training/image_2/000166_10.png training/image_3/000166_10.png training/disp_occ_0/000166_10.png 152 | training/image_2/000167_10.png training/image_3/000167_10.png training/disp_occ_0/000167_10.png 153 | training/image_2/000168_10.png training/image_3/000168_10.png training/disp_occ_0/000168_10.png 154 | training/image_2/000169_10.png training/image_3/000169_10.png training/disp_occ_0/000169_10.png 155 | training/image_2/000170_10.png training/image_3/000170_10.png training/disp_occ_0/000170_10.png 156 | training/image_2/000172_10.png training/image_3/000172_10.png training/disp_occ_0/000172_10.png 157 | training/image_2/000173_10.png training/image_3/000173_10.png training/disp_occ_0/000173_10.png 158 | training/image_2/000174_10.png training/image_3/000174_10.png training/disp_occ_0/000174_10.png 159 | training/image_2/000175_10.png training/image_3/000175_10.png training/disp_occ_0/000175_10.png 160 | training/image_2/000176_10.png training/image_3/000176_10.png training/disp_occ_0/000176_10.png 161 | training/image_2/000177_10.png training/image_3/000177_10.png training/disp_occ_0/000177_10.png 162 | training/image_2/000178_10.png training/image_3/000178_10.png training/disp_occ_0/000178_10.png 163 | training/image_2/000180_10.png training/image_3/000180_10.png training/disp_occ_0/000180_10.png 164 | training/image_2/000181_10.png training/image_3/000181_10.png training/disp_occ_0/000181_10.png 165 | training/image_2/000182_10.png training/image_3/000182_10.png training/disp_occ_0/000182_10.png 166 | training/image_2/000183_10.png training/image_3/000183_10.png training/disp_occ_0/000183_10.png 167 | training/image_2/000185_10.png training/image_3/000185_10.png training/disp_occ_0/000185_10.png 168 | training/image_2/000186_10.png training/image_3/000186_10.png training/disp_occ_0/000186_10.png 169 | 
training/image_2/000188_10.png training/image_3/000188_10.png training/disp_occ_0/000188_10.png 170 | training/image_2/000189_10.png training/image_3/000189_10.png training/disp_occ_0/000189_10.png 171 | training/image_2/000190_10.png training/image_3/000190_10.png training/disp_occ_0/000190_10.png 172 | training/image_2/000191_10.png training/image_3/000191_10.png training/disp_occ_0/000191_10.png 173 | training/image_2/000192_10.png training/image_3/000192_10.png training/disp_occ_0/000192_10.png 174 | training/image_2/000193_10.png training/image_3/000193_10.png training/disp_occ_0/000193_10.png 175 | training/image_2/000194_10.png training/image_3/000194_10.png training/disp_occ_0/000194_10.png 176 | training/image_2/000195_10.png training/image_3/000195_10.png training/disp_occ_0/000195_10.png 177 | training/image_2/000196_10.png training/image_3/000196_10.png training/disp_occ_0/000196_10.png 178 | training/image_2/000197_10.png training/image_3/000197_10.png training/disp_occ_0/000197_10.png 179 | training/image_2/000198_10.png training/image_3/000198_10.png training/disp_occ_0/000198_10.png 180 | training/image_2/000199_10.png training/image_3/000199_10.png training/disp_occ_0/000199_10.png 181 | training/image_2/000001_10.png training/image_3/000001_10.png training/disp_occ_0/000001_10.png 182 | training/image_2/000006_10.png training/image_3/000006_10.png training/disp_occ_0/000006_10.png 183 | training/image_2/000026_10.png training/image_3/000026_10.png training/disp_occ_0/000026_10.png 184 | training/image_2/000038_10.png training/image_3/000038_10.png training/disp_occ_0/000038_10.png 185 | training/image_2/000043_10.png training/image_3/000043_10.png training/disp_occ_0/000043_10.png 186 | training/image_2/000049_10.png training/image_3/000049_10.png training/disp_occ_0/000049_10.png 187 | training/image_2/000067_10.png training/image_3/000067_10.png training/disp_occ_0/000067_10.png 188 | training/image_2/000081_10.png training/image_3/000081_10.png 
training/disp_occ_0/000081_10.png 189 | training/image_2/000089_10.png training/image_3/000089_10.png training/disp_occ_0/000089_10.png 190 | training/image_2/000109_10.png training/image_3/000109_10.png training/disp_occ_0/000109_10.png 191 | training/image_2/000122_10.png training/image_3/000122_10.png training/disp_occ_0/000122_10.png 192 | training/image_2/000129_10.png training/image_3/000129_10.png training/disp_occ_0/000129_10.png 193 | training/image_2/000132_10.png training/image_3/000132_10.png training/disp_occ_0/000132_10.png 194 | training/image_2/000141_10.png training/image_3/000141_10.png training/disp_occ_0/000141_10.png 195 | training/image_2/000152_10.png training/image_3/000152_10.png training/disp_occ_0/000152_10.png 196 | training/image_2/000159_10.png training/image_3/000159_10.png training/disp_occ_0/000159_10.png 197 | training/image_2/000171_10.png training/image_3/000171_10.png training/disp_occ_0/000171_10.png 198 | training/image_2/000179_10.png training/image_3/000179_10.png training/disp_occ_0/000179_10.png 199 | training/image_2/000184_10.png training/image_3/000184_10.png training/disp_occ_0/000184_10.png 200 | training/image_2/000187_10.png training/image_3/000187_10.png training/disp_occ_0/000187_10.png 201 | -------------------------------------------------------------------------------- /models/Sceneflow-BGNet-Plus.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/Sceneflow-BGNet-Plus.pth -------------------------------------------------------------------------------- /models/Sceneflow-BGNet.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/Sceneflow-BGNet.pth -------------------------------------------------------------------------------- 
/models/Sceneflow-IRS-BGNet-Plus.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/Sceneflow-IRS-BGNet-Plus.pth -------------------------------------------------------------------------------- /models/Sceneflow-IRS-BGNet.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/Sceneflow-IRS-BGNet.pth -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__init__.py -------------------------------------------------------------------------------- /models/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/bgnet.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/bgnet.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/bgnet_plus.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/bgnet_plus.cpython-36.pyc -------------------------------------------------------------------------------- 
/models/__pycache__/deeppruner.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/deeppruner.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/feature_extractor_fast.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/feature_extractor_fast.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/submodules.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/submodules.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/submodules2d.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/submodules2d.cpython-36.pyc -------------------------------------------------------------------------------- /models/__pycache__/submodules3d.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/__pycache__/submodules3d.cpython-36.pyc -------------------------------------------------------------------------------- /models/bgnet.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # 
Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | from __future__ import print_function 7 | from models.feature_extractor_fast import feature_extraction 8 | from models.submodules3d import CoeffsPredictor 9 | from models.submodules2d import HourglassRefinement 10 | from models.submodules import SubModule,convbn_2d_lrelu,convbn_3d_lrelu,convbn_2d_Tanh 11 | from nets.warp import disp_warp 12 | import torch 13 | import torch.nn as nn 14 | import torch.nn.functional as F 15 | import time 16 | class Slice(SubModule): 17 | def __init__(self): 18 | super(Slice, self).__init__() 19 | def forward(self, bilateral_grid, wg, hg, guidemap): 20 | guidemap = guidemap.permute(0,2,3,1).contiguous() #[B,C,H,W]-> [B,H,W,C] 21 | guidemap_guide = torch.cat([wg, hg, guidemap], dim=3).unsqueeze(1) # Nx1xHxWx3 22 | coeff = F.grid_sample(bilateral_grid, guidemap_guide,align_corners =False) 23 | return coeff.squeeze(2) #[B,1,H,W] 24 | 25 | 26 | class GuideNN(SubModule): 27 | def __init__(self, params=None): 28 | super(GuideNN, self).__init__() 29 | self.params = params 30 | self.conv1 = convbn_2d_lrelu(32, 16, 1, 1, 0) 31 | self.conv2 = convbn_2d_Tanh(16, 1, 1, 1, 0) 32 | 33 | def forward(self, x): 34 | return self.conv2(self.conv1(x)) 35 | 36 | def groupwise_correlation(fea1, fea2, num_groups): 37 | B, C, H, W = fea1.shape 38 | assert C % num_groups == 0 39 | channels_per_group = C // num_groups 40 | cost = (fea1 * fea2).view([B, num_groups, channels_per_group, H, W]).mean(dim=2) 41 | assert cost.shape == (B, num_groups, H, W) 42 | return cost 43 | 44 | 45 | def build_gwc_volume(refimg_fea, targetimg_fea, maxdisp, num_groups): 46 | B, C, H, W = refimg_fea.shape 47 | #[B,G,D,H,W] 48 | volume = refimg_fea.new_zeros([B, num_groups, maxdisp, H, W]) 49 | for i in range(maxdisp): 50 | if i > 0: 51 | volume[:, :, i, :, i:] = groupwise_correlation(refimg_fea[:, 
:, :, i:], targetimg_fea[:, :, :, :-i], num_groups) 52 | else: 53 | volume[:, :, i, :, :] = groupwise_correlation(refimg_fea, targetimg_fea, num_groups) 54 | volume = volume.contiguous() 55 | return volume 56 | def correlation(fea1, fea2): 57 | B, C, H, W = fea1.shape 58 | cost = (fea1 * fea2).mean(dim=1) 59 | assert cost.shape == (B, H, W) 60 | return cost 61 | 62 | def disparity_regression(x, maxdisp): 63 | assert len(x.shape) == 4 64 | disp_values = torch.arange(0, maxdisp + 1, dtype=x.dtype, device=x.device) 65 | disp_values = disp_values.view(1, maxdisp + 1, 1, 1) 66 | return torch.sum(x * disp_values, 1, keepdim=True) 67 | class BGNet(SubModule): 68 | def __init__(self): 69 | super(BGNet, self).__init__() 70 | self.softmax = nn.Softmax(dim = 1) 71 | # self.refinement_net = HourglassRefinement() 72 | 73 | self.feature_extraction = feature_extraction() 74 | self.coeffs_disparity_predictor = CoeffsPredictor() 75 | 76 | 77 | self.dres0 = nn.Sequential(convbn_3d_lrelu(44, 32, 3, 1, 1), 78 | convbn_3d_lrelu(32, 16, 3, 1, 1)) 79 | self.guide = GuideNN() 80 | self.slice = Slice() 81 | self.weight_init() 82 | 83 | 84 | 85 | 86 | 87 | 88 | def forward(self, left_input, right_input): 89 | left_low_level_features_1, left_gwc_feature = self.feature_extraction(left_input) 90 | _, right_gwc_feature = self.feature_extraction(right_input) 91 | 92 | guide = self.guide(left_low_level_features_1) #[B,1,H,W] 93 | # torch.cuda.synchronize() 94 | # start = time.time() 95 | cost_volume = build_gwc_volume(left_gwc_feature,right_gwc_feature,25,44) 96 | cost_volume = self.dres0(cost_volume) 97 | #coeffs:[B,D,G,H,W] 98 | coeffs = self.coeffs_disparity_predictor(cost_volume) 99 | 100 | list_coeffs = torch.split(coeffs,1,dim = 1) 101 | index = torch.arange(0,97) 102 | index_float = index/4.0 103 | index_a = torch.floor(index_float) 104 | index_b = index_a + 1 105 | 106 | index_a = torch.clamp(index_a, min=0, max= 24) 107 | index_b = torch.clamp(index_b, min=0, max= 24) 108 | 109 | wa = 
index_b - index_float 110 | wb = index_float - index_a 111 | 112 | list_float = [] 113 | device = list_coeffs[0].get_device() 114 | wa = wa.view(1,-1,1,1) 115 | wb = wb.view(1,-1,1,1) 116 | wa = wa.to(device) 117 | wb = wb.to(device) 118 | wa = wa.float() 119 | wb = wb.float() 120 | 121 | N, _, H, W = guide.shape 122 | #[H,W] 123 | hg, wg = torch.meshgrid([torch.arange(0, H), torch.arange(0, W)]) # [0,511] HxW 124 | if device >= 0: 125 | hg = hg.to(device) 126 | wg = wg.to(device) 127 | #[B,H,W,1] 128 | hg = hg.float().repeat(N, 1, 1).unsqueeze(3) / (H-1) * 2 - 1 # norm to [-1,1] NxHxWx1 129 | wg = wg.float().repeat(N, 1, 1).unsqueeze(3) / (W-1) * 2 - 1 # norm to [-1,1] NxHxWx1 130 | slice_dict = [] 131 | # torch.cuda.synchronize() 132 | # start = time.time() 133 | for i in range(25): 134 | slice_dict.append(self.slice(list_coeffs[i], wg, hg, guide)) #[B,1,H,W] 135 | slice_dict_a = [] 136 | slice_dict_b = [] 137 | for i in range(97): 138 | inx_a = i//4 139 | inx_b = inx_a + 1 140 | inx_b = min(inx_b,24) 141 | slice_dict_a.append(slice_dict[inx_a]) 142 | slice_dict_b.append(slice_dict[inx_b]) 143 | 144 | final_cost_volume = wa * torch.cat(slice_dict_a,dim = 1) + wb * torch.cat(slice_dict_b,dim = 1) 145 | slice = self.softmax(final_cost_volume) 146 | disparity_samples = torch.arange(0, 97, dtype=slice.dtype, device=slice.device).view(1, 97, 1, 1) 147 | 148 | disparity_samples = disparity_samples.repeat(slice.size()[0],1,slice.size()[2],slice.size()[3]) 149 | half_disp = torch.sum(disparity_samples * slice,dim = 1).unsqueeze(1) 150 | out2 = F.interpolate(half_disp * 2.0, scale_factor=(2.0, 2.0), 151 | mode='bilinear',align_corners =False).squeeze(1) 152 | 153 | return out2,out2 154 | 155 | 156 | -------------------------------------------------------------------------------- /models/bgnet_plus.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # 
--------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | from __future__ import print_function 7 | from models.feature_extractor_fast import feature_extraction 8 | from models.submodules3d import CoeffsPredictor 9 | from models.submodules2d import HourglassRefinement 10 | from models.submodules import SubModule, convbn_2d_lrelu, convbn_3d_lrelu,convbn_2d_Tanh 11 | from nets.warp import disp_warp 12 | import torch 13 | import torch.nn as nn 14 | import torch.nn.functional as F 15 | import time 16 | class Slice(SubModule): 17 | def __init__(self): 18 | super(Slice, self).__init__() 19 | 20 | def forward(self, bilateral_grid, wg, hg, guidemap): 21 | guidemap = guidemap.permute(0,2,3,1).contiguous() #[B,C,H,W]-> [B,H,W,C] 22 | guidemap_guide = torch.cat([wg, hg, guidemap], dim=3).unsqueeze(1) # Nx1xHxWx3 23 | coeff = F.grid_sample(bilateral_grid, guidemap_guide,align_corners =False) 24 | return coeff.squeeze(2) #[B,1,H,W] 25 | 26 | 27 | class GuideNN(SubModule): 28 | def __init__(self, params=None): 29 | super(GuideNN, self).__init__() 30 | self.params = params 31 | self.conv1 = convbn_2d_lrelu(32, 16, 1, 1, 0) 32 | self.conv2 = convbn_2d_Tanh(16, 1, 1, 1, 0) 33 | 34 | def forward(self, x): 35 | return self.conv2(self.conv1(x)) 36 | 37 | def groupwise_correlation(fea1, fea2, num_groups): 38 | B, C, H, W = fea1.shape 39 | assert C % num_groups == 0 40 | channels_per_group = C // num_groups 41 | 42 | cost = (fea1 * fea2).view([B, num_groups, channels_per_group, H, W]).mean(dim=2) 43 | assert cost.shape == (B, num_groups, H, W) 44 | return cost 45 | 46 | 47 | def build_gwc_volume(refimg_fea, targetimg_fea, maxdisp, num_groups): 48 | B, C, H, W = refimg_fea.shape 49 | #[B,G,D,H,W] 50 | volume = refimg_fea.new_zeros([B, num_groups, maxdisp, H, W]) 51 | for i in 
range(maxdisp): 52 | if i > 0: 53 | volume[:, :, i, :, i:] = groupwise_correlation(refimg_fea[:, :, :, i:], targetimg_fea[:, :, :, :-i], 54 | num_groups) 55 | else: 56 | volume[:, :, i, :, :] = groupwise_correlation(refimg_fea, targetimg_fea, num_groups) 57 | volume = volume.contiguous() 58 | return volume 59 | def correlation(fea1, fea2): 60 | B, C, H, W = fea1.shape 61 | cost = (fea1 * fea2).mean(dim=1) 62 | assert cost.shape == (B, H, W) 63 | return cost 64 | 65 | def disparity_regression(x, maxdisp): 66 | assert len(x.shape) == 4 67 | disp_values = torch.arange(0, maxdisp + 1, dtype=x.dtype, device=x.device) 68 | disp_values = disp_values.view(1, maxdisp + 1, 1, 1) 69 | return torch.sum(x * disp_values, 1, keepdim=True) 70 | class BGNet_Plus(SubModule): 71 | def __init__(self): 72 | super(BGNet_Plus, self).__init__() 73 | self.softmax = nn.Softmax(dim = 1) 74 | self.refinement_net = HourglassRefinement() 75 | self.feature_extraction = feature_extraction() 76 | self.coeffs_disparity_predictor = CoeffsPredictor() 77 | 78 | self.dres0 = nn.Sequential(convbn_3d_lrelu(44, 32, 3, 1, 1), 79 | convbn_3d_lrelu(32, 16, 3, 1, 1)) 80 | self.guide = GuideNN() 81 | self.slice = Slice() 82 | self.weight_init() 83 | 84 | def forward(self, left_input, right_input): 85 | 86 | 87 | left_low_level_features_1, left_gwc_feature = self.feature_extraction(left_input) 88 | _, right_gwc_feature = self.feature_extraction(right_input) 89 | guide = self.guide(left_low_level_features_1) #[B,1,H,W] 90 | cost_volume = build_gwc_volume(left_gwc_feature,right_gwc_feature,25,44) 91 | 92 | cost_volume = self.dres0(cost_volume) 93 | coeffs = self.coeffs_disparity_predictor(cost_volume) 94 | 95 | list_coeffs = torch.split(coeffs,1,dim = 1) 96 | index = torch.arange(0,97) 97 | index_float = index/4.0 98 | index_a = torch.floor(index_float) 99 | index_b = index_a + 1 100 | 101 | index_a = torch.clamp(index_a, min=0, max= 24) 102 | index_b = torch.clamp(index_b, min=0, max= 24) 103 | 104 | wa = 
index_b - index_float 105 | wb = index_float - index_a 106 | 107 | list_float = [] 108 | device = list_coeffs[0].get_device() 109 | wa = wa.view(1,-1,1,1) 110 | wb = wb.view(1,-1,1,1) 111 | wa = wa.to(device) 112 | wb = wb.to(device) 113 | wa = wa.float() 114 | wb = wb.float() 115 | 116 | N, _, H, W = guide.shape 117 | #[H,W] 118 | hg, wg = torch.meshgrid([torch.arange(0, H), torch.arange(0, W)]) # [0,511] HxW 119 | if device >= 0: 120 | hg = hg.to(device) 121 | wg = wg.to(device) 122 | #[B,H,W,1] 123 | hg = hg.float().repeat(N, 1, 1).unsqueeze(3) / (H-1) * 2 - 1 # norm to [-1,1] NxHxWx1 124 | wg = wg.float().repeat(N, 1, 1).unsqueeze(3) / (W-1) * 2 - 1 # norm to [-1,1] NxHxWx1 125 | slice_dict = [] 126 | # torch.cuda.synchronize() 127 | # start = time.time() 128 | for i in range(25): 129 | slice_dict.append(self.slice(list_coeffs[i], wg, hg, guide)) #[B,1,H,W] 130 | slice_dict_a = [] 131 | slice_dict_b = [] 132 | for i in range(97): 133 | inx_a = i//4 134 | inx_b = inx_a + 1 135 | inx_b = min(inx_b,24) 136 | slice_dict_a.append(slice_dict[inx_a]) 137 | slice_dict_b.append(slice_dict[inx_b]) 138 | 139 | final_cost_volume = wa * torch.cat(slice_dict_a,dim = 1) + wb * torch.cat(slice_dict_b,dim = 1) 140 | slice = self.softmax(final_cost_volume) 141 | disparity_samples = torch.arange(0, 97, dtype=slice.dtype, device=slice.device).view(1, 97, 1, 1) 142 | 143 | disparity_samples = disparity_samples.repeat(slice.size()[0],1,slice.size()[2],slice.size()[3]) 144 | half_disp = torch.sum(disparity_samples * slice,dim = 1).unsqueeze(1) 145 | left_half = F.interpolate( 146 | left_input, 147 | scale_factor=1 / pow(2, 1), 148 | mode='bilinear', 149 | align_corners=False) 150 | right_half = F.interpolate( 151 | right_input, 152 | scale_factor=1 / pow(2, 1), 153 | mode='bilinear', 154 | align_corners=False) 155 | refinement_disp = self.refinement_net(half_disp,left_half,right_half) 156 | out1 = F.interpolate(refinement_disp * 2.0, scale_factor=(2.0, 2.0), 157 | 
mode='bilinear',align_corners =False).squeeze(1) 158 | out2 = F.interpolate(half_disp * 2.0, scale_factor=(2.0, 2.0), 159 | mode='bilinear',align_corners =False).squeeze(1) 160 | 161 | return out1,out2 162 | -------------------------------------------------------------------------------- /models/feature_extractor_fast.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | from __future__ import print_function 7 | import torch 8 | import torch.nn as nn 9 | import torch.utils.data 10 | import torch.nn.functional as F 11 | def convbn_relu(in_planes, out_planes, kernel_size, stride, pad, dilation): 12 | 13 | return nn.Sequential(nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, 14 | padding=dilation if dilation > 1 else pad, dilation=dilation, bias=False), 15 | nn.BatchNorm2d(out_planes), 16 | nn.ReLU(inplace=True)) 17 | def convbn(in_planes, out_planes, kernel_size, stride, pad, dilation): 18 | 19 | return nn.Sequential(nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, 20 | padding=dilation if dilation > 1 else pad, dilation=dilation, bias=False), 21 | nn.BatchNorm2d(out_planes)) 22 | 23 | class BasicBlock(nn.Module): 24 | expansion = 1 25 | 26 | def __init__(self, inplanes, planes, stride, downsample, pad, dilation): 27 | super(BasicBlock, self).__init__() 28 | 29 | self.conv1 = convbn_relu(inplanes, planes, 3, stride, pad, dilation) 30 | self.conv2 = convbn(planes, planes, 3, 1, pad, dilation) 31 | 32 | self.downsample = downsample 33 | self.stride = stride 34 | def forward(self, x): 35 | out = self.conv1(x) 36 | out = self.conv2(out) 37 | 38 | if self.downsample is not None: 39 | x = 
self.downsample(x) 40 | 41 | out += x 42 | 43 | return out 44 | class BasicConv(nn.Module): 45 | 46 | def __init__(self, in_channels, out_channels, deconv=False, is_3d=False, bn=True, relu=True, **kwargs): 47 | super(BasicConv, self).__init__() 48 | self.relu = relu 49 | self.use_bn = bn 50 | if is_3d: 51 | if deconv: 52 | self.conv = nn.ConvTranspose3d(in_channels, out_channels, bias=False, **kwargs) 53 | else: 54 | self.conv = nn.Conv3d(in_channels, out_channels, bias=False, **kwargs) 55 | self.bn = nn.BatchNorm3d(out_channels) 56 | else: 57 | if deconv: 58 | self.conv = nn.ConvTranspose2d(in_channels, out_channels, bias=False, **kwargs) 59 | else: 60 | self.conv = nn.Conv2d(in_channels, out_channels, bias=False, **kwargs) 61 | self.bn = nn.BatchNorm2d(out_channels) 62 | 63 | def forward(self, x): 64 | x = self.conv(x) 65 | # if self.use_bn: 66 | # x = self.bn(x) 67 | if self.relu: 68 | x = F.relu(x, inplace=True) 69 | return x 70 | 71 | 72 | class Conv2x(nn.Module): 73 | 74 | def __init__(self, in_channels, out_channels, deconv=False, is_3d=False, concat=True, bn=True, relu=True): 75 | super(Conv2x, self).__init__() 76 | self.concat = concat 77 | 78 | if deconv and is_3d: 79 | kernel = (3, 4, 4) 80 | 81 | elif deconv: 82 | kernel = 4 83 | else: 84 | kernel = 3 85 | self.conv1 = BasicConv(in_channels, out_channels, deconv, is_3d, bn=False, relu=True, kernel_size=kernel, stride=2, padding=1) 86 | 87 | if self.concat: 88 | self.conv2 = BasicConv(out_channels*2, out_channels, False, is_3d, bn, relu, kernel_size=3, stride=1, padding=1) 89 | else: 90 | self.conv2 = BasicConv(out_channels, out_channels, False, is_3d, bn, relu, kernel_size=3, stride=1, padding=1) 91 | 92 | def forward(self, x, rem): 93 | 94 | x = self.conv1(x) 95 | assert(x.size() == rem.size()) 96 | if self.concat: 97 | x = torch.cat((x, rem), 1) 98 | else: 99 | x = x + rem 100 | x = self.conv2(x) 101 | return x 102 | 103 | 104 | class feature_extraction(nn.Module): 105 | def __init__(self): 106 | 
super(feature_extraction, self).__init__() 107 | 108 | self.inplanes = 32 109 | # self.firstconv = convbn_relu(1, 32, 7, 2, 3, 1) 110 | self.firstconv = nn.Sequential(convbn_relu(1, 32, 3, 2, 1, 1), 111 | convbn_relu(32, 32, 3, 1, 1, 1), 112 | convbn_relu(32, 32, 3, 1, 1, 1)) 113 | self.layer1 = self._make_layer(BasicBlock, 32, 1, 1, 1, 1) 114 | self.layer2 = self._make_layer(BasicBlock, 64, 1, 2, 1, 1) 115 | self.layer3 = self._make_layer(BasicBlock, 128, 1, 2, 1, 1) 116 | self.layer4 = self._make_layer(BasicBlock, 128, 1, 1, 1, 1) 117 | self.reduce = convbn_relu(128, 32, 3, 1, 1, 1) 118 | 119 | self.conv1a = BasicConv(32, 48, kernel_size=3, stride=2, padding=1) 120 | self.conv2a = BasicConv(48, 64, kernel_size=3, stride=2, padding=1) 121 | self.conv3a = BasicConv(64, 96, kernel_size=3, stride=2, padding=1) 122 | # self.conv4a = BasicConv(96, 128, kernel_size=3, stride=2, padding=1) 123 | 124 | # self.deconv4a = Conv2x(128, 96, deconv=True) 125 | self.deconv3a = Conv2x(96, 64, deconv=True) 126 | self.deconv2a = Conv2x(64, 48, deconv=True) 127 | self.deconv1a = Conv2x(48, 32, deconv=True) 128 | 129 | self.conv1b = Conv2x(32, 48) 130 | self.conv2b = Conv2x(48, 64) 131 | self.conv3b = Conv2x(64, 96) 132 | # self.conv4b = Conv2x(96, 128) 133 | 134 | # self.deconv4b = Conv2x(128, 96, deconv=True) 135 | self.deconv3b = Conv2x(96, 64, deconv=True) 136 | self.deconv2b = Conv2x(64, 48, deconv=True) 137 | self.deconv1b = Conv2x(48, 32, deconv=True) 138 | 139 | def _make_layer(self, block, planes, blocks, stride, pad, dilation): 140 | downsample = None 141 | if stride != 1 or self.inplanes != planes * block.expansion: 142 | downsample = nn.Sequential( 143 | nn.Conv2d(self.inplanes, planes * block.expansion, 144 | kernel_size=1, stride=stride, bias=False), 145 | nn.BatchNorm2d(planes * block.expansion),) 146 | 147 | layers = [] 148 | layers.append(block(self.inplanes, planes, stride, downsample, pad, dilation)) 149 | self.inplanes = planes * block.expansion 150 | for i in 
range(1, blocks): 151 | layers.append(block(self.inplanes, planes, 1, None, pad, dilation)) 152 | 153 | return nn.Sequential(*layers) 154 | def forward(self, x): 155 | #1/2 156 | x = self.firstconv(x) 157 | x = self.layer1(x) 158 | conv0a = x 159 | x = self.layer2(x) #1/4 160 | conv1a = x 161 | x = self.layer3(x) #1/8 * 128 162 | feat0 = x 163 | x = self.layer4(x) #1/8 * 128 164 | feat1 = x 165 | x = self.reduce(x) #1/8 * 32 166 | feat2 = x 167 | rem0 = x 168 | #1/2 * 1/2 * 48 169 | x = self.conv1a(x) 170 | rem1 = x 171 | #1/4 * 1/4 * 64 172 | x = self.conv2a(x) 173 | rem2 = x 174 | #1/8 * 1/8 * 96 175 | x = self.conv3a(x) 176 | rem3 = x 177 | #1/16 * 1/16 * 128 178 | # x = self.conv4a(x) 179 | # rem4 = x 180 | # x = self.deconv4a(x, rem3) 181 | # rem3 = x 182 | 183 | x = self.deconv3a(x, rem2) 184 | rem2 = x 185 | x = self.deconv2a(x, rem1) 186 | rem1 = x 187 | x = self.deconv1a(x, rem0) 188 | feat3 = x 189 | rem0 = x 190 | #1/2 191 | x = self.conv1b(x, rem1) 192 | rem1 = x 193 | x = self.conv2b(x, rem2) 194 | rem2 = x 195 | x = self.conv3b(x, rem3) 196 | rem3 = x 197 | #1/16 198 | # x = self.conv4b(x, rem4) 199 | 200 | # x = self.deconv4b(x, rem3) 201 | x = self.deconv3b(x, rem2) 202 | x = self.deconv2b(x, rem1) 203 | x = self.deconv1b(x, rem0) 204 | feat4 = x 205 | gwc_feature = torch.cat((feat0,feat1,feat2,feat3,feat4),dim = 1) 206 | return conv0a,gwc_feature 207 | 208 | -------------------------------------------------------------------------------- /models/kitti_12_BGNet.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/kitti_12_BGNet.pth -------------------------------------------------------------------------------- /models/kitti_12_BGNet_Plus.pth: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/kitti_12_BGNet_Plus.pth -------------------------------------------------------------------------------- /models/kitti_15_BGNet_Plus.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3DCVdeveloper/BGNet/19b9c1bc17ebf653b344e52470ae6f85953c17a9/models/kitti_15_BGNet_Plus.pth -------------------------------------------------------------------------------- /models/submodules.py: -------------------------------------------------------------------------------- 1 | # -*- coding: UTF-8 -*- 2 | # --------------------------------------------------------------------------- 3 | # Official code of our paper:Bilateral Grid Learning for Stereo Matching Network 4 | # Written by Bin Xu 5 | # --------------------------------------------------------------------------- 6 | from __future__ import print_function 7 | import torch.nn as nn 8 | import math 9 | def convbn_2d_lrelu(in_planes, out_planes, kernel_size, stride, pad, dilation=1, bias=False): 10 | return nn.Sequential( 11 | nn.Conv2d(in_planes, out_planes, kernel_size=(kernel_size, kernel_size), 12 | stride=(stride, stride), padding=(pad, pad), dilation=(dilation, dilation), bias=bias), 13 | nn.BatchNorm2d(out_planes), 14 | nn.LeakyReLU(0.1, inplace=True)) 15 | 16 | def convbn_2d_Tanh(in_planes, out_planes, kernel_size, stride, pad, dilation=1, bias=False): 17 | return nn.Sequential( 18 | nn.Conv2d(in_planes, out_planes, kernel_size=(kernel_size, kernel_size), 19 | stride=(stride, stride), padding=(pad, pad), dilation=(dilation, dilation), bias=bias), 20 | nn.BatchNorm2d(out_planes), 21 | nn.Tanh()) 22 | 23 | def deconvbn_2d_lrelu(in_planes, out_planes, kernel_size, stride, pad, dilation=1,bias=False): 24 | return nn.Sequential( 25 | nn.ConvTranspose2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=pad, 26 | dilation=dilation, 
# NOTE: this region of the repository dump spans the tail of
# models/submodules.py, then models/submodules2d.py and models/submodules3d.py.
# The code below is the consolidated, documented version of those units.
import math

import torch
import torch.nn as nn
import torch.nn.functional as F


# --------------------------- models/submodules.py ---------------------------

def convbn_3d_lrelu(in_planes, out_planes, kernel_size, stride, pad):
    """3D conv + BatchNorm3d + LeakyReLU(0.1).

    The stride applies only to the spatial axes (H, W); the disparity axis
    keeps stride 1 so the number of disparity planes is preserved.
    """
    return nn.Sequential(
        nn.Conv3d(in_planes, out_planes, kernel_size=kernel_size,
                  padding=(pad, pad, pad), stride=(1, stride, stride),
                  bias=False),
        nn.BatchNorm3d(out_planes),
        nn.LeakyReLU(0.1, inplace=True))


def conv_relu(in_planes, out_planes, kernel_size, stride, pad, bias=True):
    """Plain 2D conv followed by ReLU (no batch norm)."""
    return nn.Sequential(
        nn.Conv2d(in_planes, out_planes, kernel_size, stride, pad, bias=bias),
        nn.ReLU(inplace=True))


def convbn(in_planes, out_planes, kernel_size, stride, pad, dilation):
    """2D conv + BatchNorm2d (no activation).

    For dilated convolutions the padding equals the dilation so a 3x3
    kernel keeps the spatial size.
    """
    return nn.Sequential(
        nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size,
                  stride=stride, padding=dilation if dilation > 1 else pad,
                  dilation=dilation, bias=False),
        nn.BatchNorm2d(out_planes))


def convbn_relu(in_planes, out_planes, kernel_size, stride, pad, dilation):
    """convbn() followed by ReLU."""
    return nn.Sequential(
        nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size,
                  stride=stride, padding=dilation if dilation > 1 else pad,
                  dilation=dilation, bias=False),
        nn.BatchNorm2d(out_planes),
        nn.ReLU(inplace=True))


def convbn_transpose_3d(inplanes, outplanes, kernel_size, padding,
                        output_padding, stride, bias):
    """3D transposed conv + BatchNorm3d (no activation)."""
    return nn.Sequential(
        nn.ConvTranspose3d(inplanes, outplanes, kernel_size, padding=padding,
                           output_padding=output_padding, stride=stride,
                           bias=bias),
        nn.BatchNorm3d(outplanes))


class BasicBlock(nn.Module):
    """ResNet-style basic residual block (two 3x3 convs plus skip)."""
    expansion = 1

    def __init__(self, inplanes, planes, stride, downsample, pad, dilation):
        super(BasicBlock, self).__init__()
        self.conv1 = convbn_relu(inplanes, planes, 3, stride, pad, dilation)
        self.conv2 = convbn(planes, planes, 3, 1, pad, dilation)
        # ``downsample`` projects the identity branch when shapes differ;
        # pass None when input and output shapes already match.
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        out = self.conv1(x)
        out = self.conv2(out)
        if self.downsample is not None:
            x = self.downsample(x)
        out += x
        return out


class SubModule(nn.Module):
    """Base class providing He-style weight initialisation for subclasses."""

    def __init__(self):
        super(SubModule, self).__init__()

    def weight_init(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.Conv3d):
                n = (m.kernel_size[0] * m.kernel_size[1] * m.kernel_size[2]
                     * m.out_channels)
                m.weight.data.normal_(0, math.sqrt(2. / n))
            # FIX: the original tested nn.SyncBatchNorm twice (the second
            # elif was unreachable dead code) and never matched the plain
            # BatchNorm2d/3d layers these models actually build.  The values
            # written (weight=1, bias=0) are PyTorch's BatchNorm defaults,
            # so broadening the check keeps behaviour identical in practice.
            elif isinstance(m, (nn.BatchNorm2d, nn.BatchNorm3d,
                                nn.SyncBatchNorm)):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()


# -------------------------- models/submodules2d.py --------------------------

def disparity_regression(x, maxdisp):
    """Soft-argmin disparity regression.

    x: probability volume [B, maxdisp+1, H, W] (softmax over dim 1).
    Returns the expected disparity, shape [B, 1, H, W].
    """
    assert len(x.shape) == 4
    disp_values = torch.arange(0, maxdisp + 1, dtype=x.dtype, device=x.device)
    disp_values = disp_values.view(1, maxdisp + 1, 1, 1)
    return torch.sum(x * disp_values, 1, keepdim=True)


def conv2d(in_channels, out_channels, kernel_size=3, stride=1, dilation=1,
           groups=1):
    """2D conv + BatchNorm2d + LeakyReLU(0.1); padding tracks dilation."""
    return nn.Sequential(
        nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                  stride=stride, padding=dilation, dilation=dilation,
                  bias=False, groups=groups),
        nn.BatchNorm2d(out_channels),
        nn.LeakyReLU(0.1, inplace=True))


# GANet-style feature blocks
class BasicConv(nn.Module):
    """Conv or transposed conv (2D or 3D) with optional BN and ReLU."""

    def __init__(self, in_channels, out_channels, deconv=False, is_3d=False,
                 bn=True, relu=True, **kwargs):
        super(BasicConv, self).__init__()
        self.relu = relu
        self.use_bn = bn
        if is_3d:
            conv_cls = nn.ConvTranspose3d if deconv else nn.Conv3d
            self.conv = conv_cls(in_channels, out_channels, bias=False,
                                 **kwargs)
            self.bn = nn.BatchNorm3d(out_channels)
        else:
            conv_cls = nn.ConvTranspose2d if deconv else nn.Conv2d
            self.conv = conv_cls(in_channels, out_channels, bias=False,
                                 **kwargs)
            self.bn = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        x = self.conv(x)
        if self.use_bn:
            x = self.bn(x)
        if self.relu:
            x = F.relu(x, inplace=True)
        return x


class Conv2x(nn.Module):
    """Scale by 2 (down or up) then fuse with a same-shaped skip tensor."""

    def __init__(self, in_channels, out_channels, deconv=False, is_3d=False,
                 concat=True, bn=True, relu=True, mdconv=False):
        super(Conv2x, self).__init__()
        self.concat = concat

        # Transposed convs need kernel 4 (kernel 3 on the depth axis in 3D)
        # to exactly double the resolution with stride 2 / padding 1.
        if deconv and is_3d:
            kernel = (3, 4, 4)
        elif deconv:
            kernel = 4
        else:
            kernel = 3
        self.conv1 = BasicConv(in_channels, out_channels, deconv, is_3d,
                               bn=True, relu=True, kernel_size=kernel,
                               stride=2, padding=1)

        if self.concat:
            # ``mdconv`` originally selected a deformable conv here; that
            # variant is disabled in the released code.
            self.conv2 = BasicConv(out_channels * 2, out_channels, False,
                                   is_3d, bn, relu, kernel_size=3, stride=1,
                                   padding=1)
        else:
            self.conv2 = BasicConv(out_channels, out_channels, False, is_3d,
                                   bn, relu, kernel_size=3, stride=1,
                                   padding=1)

    def forward(self, x, rem):
        x = self.conv1(x)
        assert (x.size() == rem.size())
        if self.concat:
            x = torch.cat((x, rem), 1)
        else:
            x = x + rem
        x = self.conv2(x)
        return x


class HourglassRefinement(SubModule):
    """Hourglass disparity refinement; height and width must divide by 16."""

    def __init__(self):
        super(HourglassRefinement, self).__init__()

        # Inputs are single-channel (grayscale) images: 1 channel of warp
        # error + 1 channel of the left image.
        in_channels = 2
        self.conv1 = conv2d(in_channels, 16)
        self.conv2 = conv2d(1, 16)  # on low disparity

        self.conv_start = conv2d(32, 32)

        self.conv1a = BasicConv(32, 48, kernel_size=3, stride=2, padding=1)
        self.conv2a = BasicConv(48, 64, kernel_size=3, stride=2, padding=1)
        self.conv3a = BasicConv(64, 96, kernel_size=3, stride=2, padding=1)
        self.conv4a = BasicConv(96, 128, kernel_size=3, stride=2, padding=1)

        self.deconv4a = Conv2x(128, 96, deconv=True)
        self.deconv3a = Conv2x(96, 64, deconv=True)
        self.deconv2a = Conv2x(64, 48, deconv=True)
        self.deconv1a = Conv2x(48, 32, deconv=True)

        self.conv1b = Conv2x(32, 48)
        self.conv2b = Conv2x(48, 64)
        self.conv3b = Conv2x(64, 96)
        self.conv4b = Conv2x(96, 128)

        self.deconv4b = Conv2x(128, 96, deconv=True)
        self.deconv3b = Conv2x(96, 64, deconv=True)
        self.deconv2b = Conv2x(64, 48, deconv=True)
        self.deconv1b = Conv2x(48, 32, deconv=True)

        self.final_conv = nn.Conv2d(32, 1, 3, 1, 1)

    def forward(self, low_disp, left_img, right_img):
        # Local import keeps this module importable without nets/ on the
        # path; it only binds when refinement is actually executed.
        from nets.warp import disp_warp

        # Disparity values scale with image width when upsampling.
        scale_factor = left_img.size(-1) / low_disp.size(-1)
        if scale_factor == 1.0:
            disp = low_disp
        else:
            disp = F.interpolate(low_disp, size=left_img.size()[-2:],
                                 mode='bilinear', align_corners=False)
            disp = disp * scale_factor
        # NOTE(review): disp_warp returns a single tensor, so the [0] below
        # selects batch element 0 and relies on broadcasting; this looks
        # correct only for batch size 1 — confirm against training code.
        warped_right = disp_warp(right_img, disp)[0]
        error = warped_right - left_img  # photometric error, [B, C, H, W]
        concat1 = torch.cat((error, left_img), dim=1)  # [B, 2, H, W]
        conv1 = self.conv1(concat1)  # [B, 16, H, W]
        conv2 = self.conv2(disp)  # [B, 16, H, W]
        x = torch.cat((conv1, conv2), dim=1)  # [B, 32, H, W]

        x = self.conv_start(x)
        rem0 = x
        x = self.conv1a(x)
        rem1 = x
        x = self.conv2a(x)
        rem2 = x
        x = self.conv3a(x)
        rem3 = x
        x = self.conv4a(x)
        rem4 = x

        x = self.deconv4a(x, rem3)
        rem3 = x
        x = self.deconv3a(x, rem2)
        rem2 = x
        x = self.deconv2a(x, rem1)
        rem1 = x
        x = self.deconv1a(x, rem0)
        rem0 = x

        x = self.conv1b(x, rem1)
        rem1 = x
        x = self.conv2b(x, rem2)
        rem2 = x
        x = self.conv3b(x, rem3)
        rem3 = x
        x = self.conv4b(x, rem4)

        x = self.deconv4b(x, rem3)
        x = self.deconv3b(x, rem2)
        x = self.deconv2b(x, rem1)
        x = self.deconv1b(x, rem0)  # [B, 32, H, W]

        residual_disp = self.final_conv(x)  # [B, 1, H, W]

        disp = F.relu(disp + residual_disp, inplace=True)  # [B, 1, H, W]
        return disp


# -------------------------- models/submodules3d.py --------------------------

class HourGlass(SubModule):
    """3-level 3D hourglass over a [B, C, D, H, W] cost volume.

    Only H and W are down/up-sampled (all 3D strides are (1, s, s)); the
    disparity axis D is preserved end to end.
    """

    def __init__(self, inplanes=16):
        super(HourGlass, self).__init__()

        self.conv1 = convbn_3d_lrelu(inplanes, inplanes * 2,
                                     kernel_size=3, stride=2, pad=1)
        self.conv2 = convbn_3d_lrelu(inplanes * 2, inplanes * 2,
                                     kernel_size=3, stride=1, pad=1)

        self.conv1_1 = convbn_3d_lrelu(inplanes * 2, inplanes * 4,
                                       kernel_size=3, stride=2, pad=1)
        self.conv2_1 = convbn_3d_lrelu(inplanes * 4, inplanes * 4,
                                       kernel_size=3, stride=1, pad=1)

        self.conv3 = convbn_3d_lrelu(inplanes * 4, inplanes * 8,
                                     kernel_size=3, stride=2, pad=1)
        self.conv4 = convbn_3d_lrelu(inplanes * 8, inplanes * 8,
                                     kernel_size=3, stride=1, pad=1)

        # Decoder: transposed convs exactly invert the (1, 2, 2) strides.
        self.conv5 = convbn_transpose_3d(inplanes * 8, inplanes * 4,
                                         kernel_size=3, padding=1,
                                         output_padding=(0, 1, 1),
                                         stride=(1, 2, 2), bias=False)
        self.conv6 = convbn_transpose_3d(inplanes * 4, inplanes * 2,
                                         kernel_size=3, padding=1,
                                         output_padding=(0, 1, 1),
                                         stride=(1, 2, 2), bias=False)
        self.conv7 = convbn_transpose_3d(inplanes * 2, inplanes,
                                         kernel_size=3, padding=1,
                                         output_padding=(0, 1, 1),
                                         stride=(1, 2, 2), bias=False)
        self.last_for_guidance = convbn_3d_lrelu(inplanes, 32, kernel_size=3,
                                                 stride=1, pad=1)
        self.weight_init()


# modified from the DeepPruner code
class CoeffsPredictor(HourGlass):
    """Predict bilateral-grid guidance coefficients from a cost volume."""

    def __init__(self, hourglass_inplanes=16):
        super(CoeffsPredictor, self).__init__(hourglass_inplanes)

    def forward(self, input):
        output0 = self.conv1(input)
        output0_a = self.conv2(output0) + output0

        output0 = self.conv1_1(output0_a)
        output0_c = self.conv2_1(output0) + output0

        output0 = self.conv3(output0_c)
        output0 = self.conv4(output0) + output0

        # Decoder with skip connections from the matching encoder levels.
        output1 = self.conv5(output0) + output0_c
        output1 = self.conv6(output1) + output0_a
        output1 = self.conv7(output1)
        # [B, G, D, H, W] -> [B, D, G, H, W]
        coeffs = self.last_for_guidance(output1).permute(
            0, 2, 1, 3, 4).contiguous()
        return coeffs
# NOTE: this region of the repository dump spans nets/warp.py, predict.py,
# predict.sh and predict_sample.py.  Script bodies are wrapped in functions
# (with their heavy project imports deferred) so the module can be imported
# without side effects; each function is the entry point of the
# corresponding standalone script.
import argparse
import os
import time  # kept: used by the commented-out timing probes in disp_warp

import torch
import torch.nn.functional as F


# ------------------------------ nets/warp.py --------------------------------

def disp_warp(right_input, disparity_samples, padding_mode='border'):
    """Warp the right image/feature map into the left view.

    right_input: [B, C, H, W]; disparity_samples: [B, S, H, W] with S
    disparity samples per pixel (S == 1 for a dense disparity map).
    Pixels whose source column falls outside the image are set to 0.
    For S == 1 the sample axis is squeezed, so the result is [B, C, H, W].
    """
    # FIX: Tensor.get_device() is CUDA-only (returns -1 / raises for CPU
    # tensors, which then breaks torch.arange); Tensor.device works for both.
    device = right_input.device
    batch, channels, height, width = right_input.size()
    num_samples = disparity_samples.size(1)

    # Column (x) index of every pixel in the left view, shape [B, H, W].
    # (The original called these "y" coordinates; they are columns.)
    left_x = torch.arange(0.0, width, device=device).repeat(height)
    left_x = left_x.view(height, width)
    left_x = torch.clamp(left_x, min=0, max=width - 1)
    left_x = left_x.expand(batch, -1, -1)

    # Broadcast the right map over the sample axis: [B, C, S, H, W].
    right_feature_map = right_input.expand(
        num_samples, -1, -1, -1, -1).permute([1, 2, 0, 3, 4])
    disparity_samples = disparity_samples.float()

    # Matching column in the right view: x_right = x_left - d.  [B, S, H, W]
    right_x = left_x.expand(num_samples, -1, -1, -1).permute(
        [1, 0, 2, 3]) - disparity_samples

    raw_right_x = right_x  # unclamped copy, used for the validity mask
    right_x = torch.clamp(right_x, min=0, max=width - 1)
    # torch.cuda.synchronize()
    # start = time.time()
    warped_right_feature_map = torch.gather(
        right_feature_map, dim=4,
        index=right_x.expand(channels, -1, -1, -1, -1).permute(
            [1, 0, 2, 3, 4]).long())
    # torch.cuda.synchronize()
    # print('gather_time = {:3f}'.format(time.time() - start))

    # Zero out samples warped from outside the image (the original also
    # added torch.zeros_like(...), a no-op removed here).
    raw_right_x = raw_right_x.unsqueeze(1)
    invalid = ((raw_right_x < 0) + (raw_right_x > width - 1)).float()
    warped_right_feature_map = (1 - invalid) * warped_right_feature_map

    return warped_right_feature_map.squeeze(2)


# ------------------------------- predict.py ---------------------------------

def predict_main():
    """Entry point of predict.py: write KITTI benchmark disparity maps.

    In the standalone script this runs under ``if __name__ == '__main__'``.
    Project/third-party imports are deferred so importing this module does
    not require the full repository to be on the path.
    """
    import skimage.io
    from PIL import Image
    from torch.utils.data import DataLoader
    from datasets import __datasets__
    from models.bgnet import BGNet
    from models.bgnet_plus import BGNet_Plus

    parser = argparse.ArgumentParser(description='BGNet')
    parser.add_argument('--model', default='bgnet',
                        help='select a model structure')
    parser.add_argument('--dataset', required=True, help='dataset name',
                        choices=__datasets__.keys())
    parser.add_argument('--datapath', required=True, help='data path')
    parser.add_argument('--savepath', required=True, help='save path')
    parser.add_argument('--testlist', required=True, help='testing list')
    parser.add_argument('--resume', required=True,
                        help='the directory to save logs and checkpoints')
    args = parser.parse_args()

    StereoDataset = __datasets__[args.dataset]
    test_dataset = StereoDataset(args.datapath, args.testlist, False)
    loader = DataLoader(test_dataset, batch_size=1, shuffle=False,
                        num_workers=1, drop_last=False)

    # FIX: the original left these error branches commented out (a bare
    # ``return`` is illegal at module level), so an unexpected --model or
    # --dataset crashed much later with a NameError.  Fail fast instead.
    if args.model == 'bgnet':
        model = BGNet().cuda()
    elif args.model == 'bgnet_plus':
        model = BGNet_Plus().cuda()
    else:
        raise ValueError('wrong model: {}'.format(args.model))

    if args.dataset == 'kitti_12':
        sub_name = 'testing/colored_0/'
    elif args.dataset == 'kitti':
        sub_name = 'testing/image_2/'
    else:
        raise ValueError('wrong dataset: {}'.format(args.dataset))

    checkpoint = torch.load(args.resume,
                            map_location=lambda storage, loc: storage)
    model.load_state_dict(checkpoint)
    model.eval()
    os.makedirs(args.savepath, exist_ok=True)  # hoisted out of the loop

    with torch.no_grad():  # inference only: skip autograd bookkeeping
        for batch_idx, sample in enumerate(loader):
            print('predict the sample:', batch_idx)
            imgL = sample['left'].cuda()
            imgR = sample['right'].cuda()
            pred, _ = model(imgL, imgR)
            pred = pred[0].data.cpu().numpy() * 256

            # Undo the dataloader padding (inputs were padded to 384x1280);
            # the raw test image supplies the true size.
            name = '{:0>6d}'.format(batch_idx) + '_10.png'
            gt = Image.open(args.datapath + sub_name + name)
            w, h = gt.size
            top_pad = 384 - h
            right_pad = 1280 - w
            # FIX: ``pred[top_pad:, :-right_pad]`` returns an EMPTY array
            # when right_pad == 0; slice with an explicit end index instead.
            temp = pred[top_pad:, :pred.shape[1] - right_pad]
            # os.path.join also tolerates a savepath without a trailing '/'.
            skimage.io.imsave(os.path.join(args.savepath, name),
                              temp.astype('uint16'))


# ------------------------------- predict.sh ---------------------------------
# Shell driver (not Python).  Active command in the released script:
#
#   python predict.py --dataset 'kitti' \
#       --model 'bgnet_plus' \
#       --datapath '/data/home/user_name/dataset/dataset/kitti/' \
#       --testlist './filenames/KITTI-15-Test.txt' \
#       --savepath './kitti-15-BGNet-Plus/' \
#       --resume './models/kitti_15_BGNet_Plus.pth'
#
# The original script also carries commented-out KITTI-12 / BGNet variants.


# ---------------------------- predict_sample.py -----------------------------

def predict_sample_main():
    """Entry point of predict_sample.py: run BGNet+ on the bundled stereo
    pair (sample/im0.png, sample/im1.png) and save sample_disp.png."""
    import numpy as np
    import skimage.io
    from PIL import Image
    from datasets.data_io import get_transform
    from models.bgnet_plus import BGNet_Plus

    model = BGNet_Plus().cuda()
    checkpoint = torch.load('models/Sceneflow-IRS-BGNet-Plus.pth',
                            map_location=lambda storage, loc: storage)
    model.load_state_dict(checkpoint)
    model.eval()

    # The network expects single-channel input with H and W multiples of 64.
    left_img = Image.open('sample/im0.png').convert('L')
    right_img = Image.open('sample/im1.png').convert('L')
    w, h = left_img.size
    h1 = int(h - h % 64)
    w1 = int(w - w % 64)
    # NOTE(review): Image.ANTIALIAS is removed in Pillow >= 10; this repo
    # pins an older environment where it is still the LANCZOS alias.
    left_img = left_img.resize((w1, h1), Image.ANTIALIAS)
    right_img = right_img.resize((w1, h1), Image.ANTIALIAS)

    preprocess = get_transform()
    left_t = preprocess(np.ascontiguousarray(left_img, dtype=np.float32))
    right_t = preprocess(np.ascontiguousarray(right_img, dtype=np.float32))

    with torch.no_grad():  # inference only
        pred, _ = model(left_t.unsqueeze(0).cuda(),
                        right_t.unsqueeze(0).cuda())
    pred = pred[0].data.cpu().numpy() * 256
    skimage.io.imsave('sample_disp.png', pred.astype('uint16'))