├── README.md
├── dataloader.py
├── filenames
│   ├── nyu_depth_v2_test_654.txt
│   ├── nyu_depth_v2_train_795.txt
│   ├── nyu_depth_v2_train_even.txt
│   └── nyu_depth_v2_val_2k.txt
├── losses.py
├── metrics.py
├── nets
│   ├── resnet_utils.py
│   ├── resnet_v2.py
│   └── vgg.py
├── network.py
├── test_nyu_depth_v2.py
├── train.py
└── utils.py

/README.md:
--------------------------------------------------------------------------------
1 | # Tensorflow implementation for HIGH-QUALITY-MONOCULAR-DEPTH-ESTIMATION
2 | **[High Quality Monocular Depth Estimation via a Multi-scale Network and Detail-preserving Objective](https://ieeexplore.ieee.org/document/8803168)**
3 | 
4 | 1. To train the model with the full loss of the paper on the NYU Depth V2 dataset, \
5 | python train.py \
6 | --dataset nyu_depth_v2 \
7 | --train_file "../filenames/nyu_depth_v2_train_even.txt" \
8 | --val_file "../filenames/nyu_depth_v2_val_2k.txt" \
9 | --cnn_model "resnet_v2_50" \
10 | --decoding_at_image_size \
11 | --output_stride 16 \
12 | --multi_grid 1 2 4 \
13 | --aspp_rates 4 8 12 \
14 | --loss_depth_norm berhu \
15 | --loss_gradient_magnitude_norm l1 \
16 | --loss_gradient_magnitude_weight 1.0 \
17 | --loss_gradient_direction_norm l1 \
18 | --loss_gradient_direction_weight 1.0 \
19 | --loss_normal_weight 0.0 \
20 | --batch_size 8 \
21 | --num_epochs 20 \
22 | --learning_rate 1e-4 \
23 | --num_gpus 1 \
24 | --num_threads 4 \
25 | --batch_norm_epsilon 1e-5 \
26 | --batch_norm_decay 0.9997 \
27 | --l2_regularizer 1e-4
28 | 
29 | 2. To evaluate the model, \
30 | python test_nyu_depth_v2.py --process_id_for_evaluation pid(12582) \
31 | You can download the trained model and results of the test set from Baidu NetDisk, \
32 | link: https://pan.baidu.com/s/1tihXtR72Y-M_PyyOhYEryQ code: gmfx
33 | 
34 | Test performance: \
35 | |abs_rel: 0.127 |sq_rel: 0.088 |rmse: 0.468 |rmse_log: 0.165 |log10: 0.054 |acc1: 0.841 |acc2: 0.967 |acc3: 0.993
36 | 
--------------------------------------------------------------------------------
/dataloader.py:
--------------------------------------------------------------------------------
1 | """deep monocular depth regression data loader. """
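# A minimal usage sketch (an illustrative example, assuming TensorFlow 1.x and an
# existing split file such as filenames/nyu_depth_v2_train_even.txt; the variable
# names are placeholders, not identifiers from train.py):
#
#     loader = MDEdataloader(dataset='nyu_depth_v2', num_threads=4, batch_size=8,
#                            train_file='filenames/nyu_depth_v2_train_even.txt')
#     rgb, depth, max_depth = loader.train_dataset.make_one_shot_iterator().get_next()
#     with tf.Session() as sess:
#         rgb_batch, depth_batch, _ = sess.run([rgb, depth, max_depth])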
""" 2 | 3 | 4 | from __future__ import absolute_import, division, print_function 5 | import tensorflow as tf 6 | 7 | _RESIZE_WIDTH_nyu_depth_v2 = 400 8 | _RESIZE_HEIGHT_nyu_depth_v2 = 300 9 | _INPUT_WIDTH_nyu_depth_v2 = 385 10 | _INPUT_HEIGHT_nyu_depth_v2 = 289 11 | _MAX_DEPTH_nyu_depth_v2 = 10.0 12 | _MIN_DEPTH_nyu_depth_v2 = 0.01 13 | 14 | 15 | def string_length_tf(t): 16 | return tf.py_func(len, [t], [tf.int64]) 17 | 18 | class MDEdataloader(object): 19 | """MDE dataloader""" 20 | 21 | def __init__(self, dataset='nyu_depth_v2', 22 | num_threads=4, 23 | batch_size=1, 24 | epochs=1, 25 | train_file=None, 26 | val_file=None, 27 | test_file=None): 28 | 29 | self.dataset = dataset 30 | self.batch_size = batch_size 31 | self.epochs = epochs 32 | 33 | self.train_dataset = None 34 | self.val_dataset = None 35 | self.test_dataset = None 36 | 37 | 38 | if dataset == 'nyu_depth_v2': 39 | self.RESIZE_WIDTH = _RESIZE_WIDTH_nyu_depth_v2 40 | self.RESIZE_HEIGHT = _RESIZE_HEIGHT_nyu_depth_v2 41 | self.INPUT_WIDTH = _INPUT_WIDTH_nyu_depth_v2 42 | self.INPUT_HEIGHT = _INPUT_HEIGHT_nyu_depth_v2 43 | self.MAX_DEPTH = _MAX_DEPTH_nyu_depth_v2 44 | self.MIN_DEPTH = _MIN_DEPTH_nyu_depth_v2 45 | self.max_scale = 1.5 46 | self.min_scale = 1.0 47 | else: 48 | raise ValueError('dataset must be nyu_depth_v2.') 49 | 50 | self.num_train_samples = 0 51 | self.num_val_samples = 0 52 | self.num_test_samples = 0 53 | 54 | if train_file: 55 | with open(train_file, 'r') as f: 56 | train_filelist = [line.strip().split() for line in f.readlines()] 57 | self.num_train_samples = len(train_filelist) 58 | self.train_dataset = tf.data.Dataset.from_tensor_slices(train_filelist) 59 | 60 | def train_parse_fn(image_paths): 61 | rgb, depth = self.read_image(image_paths) 62 | rgb, depth = self.resize_and_scale(rgb, depth) 63 | if dataset is 'nyu_depth_v2': 64 | rgb, depth = self.rotate(rgb, depth) 65 | rgb, depth = self.crop(rgb, depth) 66 | rgb, _ = self.augment_color(rgb) 67 | rgb, depth = self.random_flip(rgb, depth) 68 | max_depth = tf.constant(self.MAX_DEPTH, shape=[1, 1, 1]) 69 | return rgb, depth, max_depth 70 | 71 | self.train_dataset = self.train_dataset.map(train_parse_fn, num_parallel_calls=num_threads) 72 | self.train_dataset = self.train_dataset.repeat() 73 | self.train_dataset = self.train_dataset.shuffle(buffer_size=500+5*batch_size) 74 | self.train_dataset = self.train_dataset.batch(batch_size) 75 | self.train_dataset = self.train_dataset.prefetch(5*batch_size) 76 | 77 | 78 | if val_file: 79 | with open(val_file, 'r') as f: 80 | val_filelist = [line.strip().split() for line in f.readlines()] 81 | self.num_val_samples = len(val_filelist) 82 | """ """ 83 | def val_parse_fn(image_paths): 84 | rgb, depth = self.read_image(image_paths) 85 | rgb, depth = self.resize(rgb, depth) 86 | rgb, depth = self.crop(rgb, depth) 87 | max_depth = tf.constant(self.MAX_DEPTH, shape=[1, 1, 1]) 88 | return rgb, depth, max_depth 89 | 90 | 91 | self.val_dataset = tf.data.Dataset.from_tensor_slices(val_filelist) 92 | self.val_dataset = self.val_dataset.map(val_parse_fn, num_parallel_calls=num_threads) 93 | 94 | self.val_dataset = self.val_dataset.repeat() 95 | self.val_dataset = self.val_dataset.shuffle(buffer_size=100+2*batch_size) 96 | self.val_dataset = self.val_dataset.batch(batch_size) 97 | self.val_dataset = self.val_dataset.prefetch(2*batch_size) 98 | 99 | 100 | if test_file: 101 | with open(test_file, 'r') as f: 102 | test_filelist = [line.strip().split() for line in f.readlines()] 103 | self.num_test_samples = len(test_filelist) 104 | 
104 |             self.test_dataset = tf.data.Dataset.from_tensor_slices(test_filelist)
105 |             def test_parse_fn(image_paths):
106 |                 rgb, depth = self.read_image(image_paths)
107 |                 im_size = tf.shape(rgb)[0:2]
108 |                 rgb = tf.image.resize_images(rgb, [self.INPUT_HEIGHT, self.INPUT_WIDTH])
109 |                 return rgb, depth, im_size
110 | 
111 |             self.test_dataset = self.test_dataset.map(test_parse_fn, num_parallel_calls=num_threads)
112 |             self.test_dataset = self.test_dataset.batch(1)
113 | 
114 |     def read_image(self, image_paths):
115 |         rgb_string = tf.read_file(image_paths[0])
116 |         #rgb_decoded = tf.image.decode_image(rgb_string, channels=3)
117 | 
118 |         path_length = string_length_tf(image_paths[0])[0]
119 |         file_extension = tf.substr(image_paths[0], path_length-3, 3)
120 |         file_cond = tf.equal(file_extension, 'jpg')
121 | 
122 |         rgb_decoded = tf.cond(file_cond,
123 |                               lambda: tf.image.decode_jpeg(rgb_string, channels=3),
124 |                               lambda: tf.image.decode_png(rgb_string, channels=3))
125 |         depth_file = tf.read_file(image_paths[1])
126 |         depth_decoded = tf.image.decode_png(depth_file, channels=1, dtype=tf.uint16)
127 | 
128 |         rgb_float = tf.to_float(rgb_decoded)
129 |         depth_float = tf.divide(tf.to_float(depth_decoded), [256.0])
130 |         return rgb_float, depth_float
131 | 
132 |     def resize(self, rgb, depth):
133 |         size = tf.shape(rgb)
134 |         downsize_scale = tf.divide(tf.to_float(self.RESIZE_HEIGHT), tf.to_float(size[0])) if self.RESIZE_HEIGHT \
135 |             else tf.divide(tf.to_float(self.RESIZE_WIDTH), tf.to_float(size[1]))
136 |         h_scaled = tf.to_int32(tf.multiply(tf.to_float(size[0]), downsize_scale))
137 |         w_scaled = tf.to_int32(tf.multiply(tf.to_float(size[1]), downsize_scale))
138 |         size_scaled = tf.stack([h_scaled, w_scaled], axis=0)
139 |         rgb_resized = tf.image.resize_images(rgb, size_scaled)
140 |         depth_resized = tf.image.resize_images(depth, size_scaled,
141 |                                                 method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
142 |         return rgb_resized, depth_resized
143 | 
144 | 
145 |     def resize_and_scale(self, rgb, depth):
146 |         size = tf.shape(rgb)
147 |         downsize_scale = tf.divide(tf.to_float(self.RESIZE_HEIGHT), tf.to_float(size[0])) if self.RESIZE_HEIGHT \
148 |             else tf.divide(tf.to_float(self.RESIZE_WIDTH), tf.to_float(size[1]))
149 |         scale = tf.random_uniform([1], minval=self.min_scale, maxval=self.max_scale, dtype=tf.float32, seed=None)
150 |         h_scaled = tf.to_int32(tf.multiply(tf.to_float(size[0]), scale*downsize_scale))
151 |         w_scaled = tf.to_int32(tf.multiply(tf.to_float(size[1]), scale*downsize_scale))
152 |         size_scaled = tf.concat([h_scaled, w_scaled], axis=0)
153 |         rgb_scaled = tf.image.resize_images(rgb, size_scaled)
154 |         depth_scaled = tf.image.resize_images(depth, size_scaled,
155 |                                                method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
156 |         depth_scaled = tf.divide(depth_scaled, scale)
157 |         return rgb_scaled, depth_scaled
158 | 
159 | 
160 |     def rotate(self, rgb, depth):
161 |         angle = tf.random_uniform([1], minval=-5, maxval=5, dtype=tf.float32, seed=None)
162 |         angle = angle*3.14/180
163 |         rgb_rotated = tf.contrib.image.rotate(rgb, angle, interpolation='BILINEAR')
164 |         depth_rotated = tf.contrib.image.rotate(depth, angle)
165 |         return rgb_rotated, depth_rotated
166 | 
167 |     def crop(self, rgb, depth):
168 |         combine = tf.concat(axis=2, values=[rgb, depth])
169 |         combine_cropped = tf.random_crop(combine, [self.INPUT_HEIGHT, self.INPUT_WIDTH, 4])
170 |         rgb_cropped = combine_cropped[:, :, :3]
171 |         depth_cropped = combine_cropped[:, :, 3:]
172 |         rgb_cropped.set_shape((self.INPUT_HEIGHT, self.INPUT_WIDTH, 3))
173 | 
depth_cropped.set_shape((self.INPUT_HEIGHT, self.INPUT_WIDTH, 1)) 174 | return rgb_cropped, depth_cropped 175 | 176 | def augment_color(self, rgb, depth=None): 177 | rgb_normalized = rgb/255 178 | 179 | # randomly shift gamma 180 | random_gamma = tf.random_uniform([], 0.8, 1.2) 181 | rgb_aug = rgb_normalized ** random_gamma 182 | 183 | # randomly shift brightness 184 | random_brightness = tf.random_uniform([], 0.8, 1.25) 185 | rgb_aug = rgb_aug * random_brightness 186 | 187 | # randomly shift color 188 | random_colors = tf.random_uniform([3], 0.8, 1.2) 189 | white = tf.ones([tf.shape(rgb_aug)[0], tf.shape(rgb_aug)[1]]) 190 | color_image = tf.stack([white * random_colors[i] for i in range(3)], axis=2) 191 | rgb_aug *= color_image 192 | 193 | # saturate 194 | rgb_aug = tf.clip_by_value(rgb_aug, 0, 1) 195 | rgb_aug = rgb_aug*255 196 | return rgb_aug, depth 197 | 198 | 199 | def random_flip(self, rgb, depth): 200 | random_var = tf.random_uniform([], 0, 1) 201 | rgb_randomly_flipped = tf.cond(pred=tf.greater(random_var, 0.5), 202 | true_fn=lambda: tf.image.flip_left_right(rgb), 203 | false_fn=lambda: rgb) 204 | depth_randomly_flipped = tf.cond(pred=tf.greater(random_var, 0.5), 205 | true_fn=lambda: tf.image.flip_left_right(depth), 206 | false_fn=lambda: depth) 207 | return rgb_randomly_flipped, depth_randomly_flipped 208 | -------------------------------------------------------------------------------- /filenames/nyu_depth_v2_test_654.txt: -------------------------------------------------------------------------------- 1 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/001.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/001.png 2 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/002.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/002.png 3 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/003.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/003.png 4 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/004.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/004.png 5 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/005.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/005.png 6 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/006.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/006.png 7 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/007.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/007.png 8 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/008.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/008.png 9 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/009.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/009.png 10 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/010.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/010.png 11 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/011.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/011.png 12 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/012.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/012.png 13 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/013.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/013.png 14 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/014.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/014.png 15 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/015.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/015.png 16 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/016.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/016.png 17 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/017.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/017.png 18 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/018.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/018.png 19 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/019.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/019.png 20 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/020.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/020.png 21 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/021.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/021.png 22 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/022.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/022.png 23 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/023.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/023.png 24 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/024.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/024.png 25 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/025.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/025.png 26 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/026.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/026.png 27 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/027.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/027.png 28 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/028.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/028.png 29 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/029.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/029.png 30 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/030.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/030.png 31 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/031.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/031.png 32 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/032.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/032.png 33 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/033.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/033.png 34 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/034.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/034.png 35 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/035.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/035.png 36 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/036.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/036.png 37 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/037.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/037.png 38 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/038.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/038.png 39 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/039.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/039.png 40 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/040.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/040.png 41 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/041.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/041.png 42 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/042.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/042.png 43 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/043.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/043.png 44 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/044.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/044.png 45 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/045.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/045.png 46 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/046.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/046.png 47 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/047.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/047.png 48 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/048.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/048.png 49 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/049.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/049.png 50 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/050.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/050.png 51 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/051.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/051.png 52 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/052.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/052.png 53 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/053.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/053.png 54 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/054.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/054.png 55 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/055.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/055.png 56 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/056.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/056.png 57 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/057.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/057.png 58 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/058.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/058.png 59 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/059.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/059.png 60 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/060.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/060.png 61 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/061.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/061.png 62 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/062.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/062.png 63 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/063.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/063.png 64 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/064.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/064.png 65 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/065.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/065.png 66 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/066.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/066.png 67 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/067.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/067.png 68 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/068.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/068.png 69 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/069.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/069.png 70 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/070.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/070.png 71 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/071.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/071.png 72 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/072.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/072.png 73 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/073.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/073.png 74 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/074.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/074.png 75 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/075.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/075.png 76 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/076.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/076.png 77 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/077.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/077.png 78 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/078.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/078.png 79 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/079.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/079.png 80 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/080.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/080.png 81 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/081.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/081.png 82 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/082.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/082.png 83 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/083.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/083.png 84 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/084.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/084.png 85 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/085.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/085.png 86 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/086.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/086.png 87 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/087.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/087.png 88 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/088.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/088.png 89 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/089.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/089.png 90 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/090.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/090.png 91 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/091.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/091.png 92 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/092.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/092.png 93 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/093.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/093.png 94 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/094.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/094.png 95 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/095.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/095.png 96 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/096.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/096.png 97 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/097.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/097.png 98 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/098.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/098.png 99 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/099.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/099.png 100 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/100.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/100.png 101 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/101.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/101.png 102 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/102.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/102.png 103 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/103.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/103.png 104 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/104.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/104.png 105 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/105.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/105.png 106 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/106.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/106.png 107 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/107.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/107.png 108 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/108.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/108.png 109 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/109.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/109.png 110 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/110.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/110.png 111 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/111.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/111.png 112 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/112.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/112.png 113 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/113.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/113.png 114 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/114.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/114.png 115 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/115.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/115.png 116 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/116.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/116.png 117 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/117.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/117.png 118 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/118.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/118.png 119 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/119.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/119.png 120 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/120.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/120.png 121 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/121.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/121.png 122 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/122.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/122.png 123 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/123.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/123.png 124 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/124.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/124.png 125 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/125.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/125.png 126 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/126.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/126.png 127 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/127.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/127.png 128 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/128.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/128.png 129 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/129.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/129.png 130 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/130.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/130.png 131 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/131.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/131.png 132 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/132.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/132.png 133 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/133.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/133.png 134 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/134.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/134.png 135 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/135.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/135.png 136 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/136.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/136.png 137 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/137.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/137.png 138 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/138.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/138.png 139 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/139.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/139.png 140 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/140.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/140.png 141 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/141.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/141.png 142 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/142.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/142.png 143 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/143.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/143.png 144 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/144.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/144.png 145 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/145.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/145.png 146 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/146.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/146.png 147 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/147.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/147.png 148 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/148.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/148.png 149 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/149.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/149.png 150 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/150.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/150.png 151 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/151.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/151.png 152 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/152.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/152.png 153 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/153.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/153.png 154 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/154.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/154.png 155 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/155.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/155.png 156 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/156.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/156.png 157 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/157.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/157.png 158 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/158.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/158.png 159 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/159.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/159.png 160 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/160.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/160.png 161 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/161.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/161.png 162 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/162.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/162.png 163 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/163.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/163.png 164 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/164.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/164.png 165 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/165.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/165.png 166 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/166.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/166.png 167 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/167.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/167.png 168 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/168.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/168.png 169 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/169.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/169.png 170 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/170.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/170.png 171 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/171.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/171.png 172 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/172.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/172.png 173 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/173.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/173.png 174 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/174.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/174.png 175 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/175.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/175.png 176 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/176.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/176.png 177 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/177.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/177.png 178 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/178.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/178.png 179 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/179.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/179.png 180 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/180.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/180.png 181 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/181.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/181.png 182 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/182.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/182.png 183 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/183.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/183.png 184 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/184.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/184.png 185 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/185.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/185.png 186 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/186.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/186.png 187 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/187.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/187.png 188 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/188.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/188.png 189 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/189.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/189.png 190 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/190.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/190.png 191 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/191.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/191.png 192 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/192.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/192.png 193 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/193.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/193.png 194 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/194.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/194.png 195 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/195.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/195.png 196 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/196.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/196.png 197 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/197.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/197.png 198 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/198.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/198.png 199 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/199.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/199.png 200 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/200.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/200.png 201 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/201.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/201.png 202 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/202.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/202.png 203 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/203.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/203.png 204 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/204.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/204.png 205 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/205.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/205.png 206 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/206.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/206.png 207 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/207.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/207.png 208 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/208.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/208.png 209 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/209.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/209.png 210 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/210.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/210.png 211 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/211.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/211.png 212 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/212.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/212.png 213 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/213.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/213.png 214 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/214.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/214.png 215 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/215.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/215.png 216 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/216.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/216.png 217 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/217.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/217.png 218 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/218.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/218.png 219 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/219.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/219.png 220 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/220.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/220.png 221 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/221.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/221.png 222 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/222.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/222.png 223 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/223.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/223.png 224 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/224.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/224.png 225 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/225.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/225.png 226 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/226.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/226.png 227 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/227.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/227.png 228 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/228.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/228.png 229 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/229.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/229.png 230 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/230.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/230.png 231 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/231.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/231.png 232 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/232.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/232.png 233 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/233.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/233.png 234 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/234.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/234.png 235 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/235.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/235.png 236 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/236.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/236.png 237 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/237.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/237.png 238 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/238.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/238.png 239 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/239.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/239.png 240 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/240.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/240.png 241 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/241.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/241.png 242 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/242.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/242.png 243 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/243.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/243.png 244 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/244.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/244.png 245 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/245.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/245.png 246 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/246.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/246.png 247 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/247.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/247.png 248 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/248.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/248.png 249 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/249.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/249.png 250 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/250.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/250.png 251 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/251.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/251.png 252 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/252.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/252.png 253 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/253.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/253.png 254 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/254.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/254.png 255 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/255.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/255.png 256 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/256.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/256.png 257 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/257.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/257.png 258 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/258.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/258.png 259 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/259.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/259.png 260 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/260.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/260.png 261 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/261.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/261.png 262 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/262.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/262.png 263 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/263.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/263.png 264 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/264.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/264.png 265 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/265.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/265.png 266 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/266.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/266.png 267 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/267.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/267.png 268 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/268.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/268.png 269 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/269.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/269.png 270 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/270.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/270.png 271 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/271.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/271.png 272 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/272.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/272.png 273 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/273.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/273.png 274 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/274.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/274.png 275 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/275.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/275.png 276 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/276.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/276.png 277 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/277.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/277.png 278 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/278.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/278.png 279 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/279.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/279.png 280 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/280.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/280.png 281 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/281.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/281.png 282 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/282.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/282.png 283 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/283.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/283.png 284 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/284.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/284.png 285 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/285.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/285.png 286 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/286.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/286.png 287 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/287.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/287.png 288 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/288.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/288.png 289 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/289.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/289.png 290 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/290.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/290.png 291 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/291.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/291.png 292 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/292.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/292.png 293 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/293.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/293.png 294 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/294.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/294.png 295 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/295.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/295.png 296 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/296.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/296.png 297 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/297.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/297.png 298 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/298.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/298.png 299 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/299.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/299.png 300 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/300.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/300.png 301 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/301.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/301.png 302 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/302.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/302.png 303 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/303.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/303.png 304 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/304.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/304.png 305 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/305.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/305.png 306 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/306.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/306.png 307 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/307.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/307.png 308 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/308.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/308.png 309 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/309.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/309.png 310 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/310.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/310.png 311 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/311.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/311.png 312 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/312.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/312.png 313 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/313.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/313.png 314 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/314.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/314.png 315 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/315.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/315.png 316 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/316.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/316.png 317 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/317.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/317.png 318 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/318.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/318.png 319 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/319.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/319.png 320 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/320.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/320.png 321 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/321.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/321.png 322 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/322.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/322.png 323 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/323.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/323.png 324 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/324.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/324.png 325 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/325.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/325.png 326 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/326.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/326.png 327 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/327.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/327.png 328 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/328.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/328.png 329 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/329.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/329.png 330 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/330.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/330.png 331 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/331.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/331.png 332 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/332.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/332.png 333 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/333.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/333.png 334 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/334.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/334.png 335 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/335.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/335.png 336 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/336.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/336.png 337 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/337.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/337.png 338 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/338.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/338.png 339 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/339.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/339.png 340 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/340.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/340.png 341 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/341.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/341.png 342 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/342.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/342.png 343 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/343.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/343.png 344 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/344.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/344.png 345 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/345.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/345.png 346 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/346.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/346.png 347 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/347.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/347.png 348 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/348.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/348.png 349 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/349.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/349.png 350 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/350.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/350.png 351 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/351.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/351.png 352 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/352.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/352.png 353 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/353.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/353.png 354 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/354.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/354.png 355 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/355.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/355.png 356 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/356.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/356.png 357 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/357.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/357.png 358 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/358.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/358.png 359 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/359.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/359.png 360 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/360.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/360.png 361 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/361.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/361.png 362 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/362.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/362.png 363 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/363.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/363.png 364 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/364.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/364.png 365 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/365.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/365.png 366 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/366.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/366.png 367 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/367.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/367.png 368 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/368.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/368.png 369 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/369.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/369.png 370 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/370.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/370.png 371 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/371.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/371.png 372 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/372.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/372.png 373 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/373.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/373.png 374 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/374.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/374.png 375 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/375.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/375.png 376 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/376.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/376.png 377 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/377.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/377.png 378 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/378.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/378.png 379 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/379.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/379.png 380 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/380.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/380.png 381 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/381.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/381.png 382 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/382.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/382.png 383 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/383.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/383.png 384 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/384.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/384.png 385 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/385.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/385.png 386 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/386.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/386.png 387 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/387.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/387.png 388 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/388.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/388.png 389 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/389.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/389.png 390 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/390.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/390.png 391 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/391.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/391.png 392 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/392.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/392.png 393 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/393.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/393.png 394 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/394.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/394.png 395 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/395.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/395.png 396 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/396.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/396.png 397 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/397.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/397.png 398 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/398.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/398.png 399 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/399.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/399.png 400 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/400.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/400.png 401 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/401.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/401.png 402 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/402.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/402.png 403 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/403.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/403.png 404 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/404.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/404.png 405 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/405.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/405.png 406 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/406.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/406.png 407 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/407.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/407.png 408 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/408.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/408.png 409 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/409.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/409.png 410 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/410.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/410.png 411 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/411.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/411.png 412 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/412.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/412.png 413 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/413.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/413.png 414 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/414.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/414.png 415 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/415.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/415.png 416 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/416.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/416.png 417 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/417.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/417.png 418 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/418.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/418.png 419 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/419.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/419.png 420 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/420.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/420.png 421 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/421.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/421.png 422 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/422.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/422.png 423 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/423.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/423.png 424 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/424.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/424.png 425 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/425.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/425.png 426 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/426.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/426.png 427 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/427.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/427.png 428 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/428.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/428.png 429 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/429.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/429.png 430 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/430.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/430.png 431 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/431.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/431.png 432 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/432.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/432.png 433 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/433.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/433.png 434 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/434.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/434.png 435 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/435.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/435.png 436 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/436.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/436.png 437 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/437.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/437.png 438 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/438.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/438.png 439 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/439.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/439.png 440 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/440.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/440.png 441 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/441.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/441.png 442 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/442.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/442.png 443 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/443.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/443.png 444 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/444.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/444.png 445 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/445.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/445.png 446 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/446.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/446.png 447 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/447.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/447.png 448 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/448.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/448.png 449 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/449.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/449.png 450 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/450.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/450.png 451 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/451.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/451.png 452 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/452.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/452.png 453 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/453.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/453.png 454 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/454.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/454.png 455 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/455.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/455.png 456 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/456.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/456.png 457 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/457.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/457.png 458 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/458.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/458.png 459 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/459.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/459.png 460 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/460.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/460.png 461 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/461.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/461.png 462 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/462.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/462.png 463 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/463.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/463.png 464 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/464.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/464.png 465 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/465.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/465.png 466 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/466.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/466.png 467 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/467.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/467.png 468 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/468.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/468.png 469 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/469.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/469.png 470 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/470.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/470.png 471 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/471.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/471.png 472 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/472.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/472.png 473 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/473.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/473.png 474 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/474.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/474.png 475 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/475.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/475.png 476 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/476.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/476.png 477 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/477.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/477.png 478 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/478.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/478.png 479 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/479.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/479.png 480 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/480.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/480.png 481 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/481.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/481.png 482 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/482.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/482.png 483 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/483.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/483.png 484 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/484.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/484.png 485 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/485.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/485.png 486 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/486.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/486.png 487 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/487.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/487.png 488 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/488.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/488.png 489 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/489.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/489.png 490 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/490.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/490.png 491 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/491.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/491.png 492 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/492.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/492.png 493 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/493.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/493.png 494 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/494.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/494.png 495 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/495.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/495.png 496 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/496.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/496.png 497 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/497.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/497.png 498 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/498.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/498.png 499 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/499.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/499.png 500 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/500.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/500.png 501 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/501.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/501.png 502 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/502.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/502.png 503 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/503.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/503.png 504 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/504.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/504.png 505 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/505.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/505.png 506 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/506.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/506.png 507 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/507.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/507.png 508 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/508.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/508.png 509 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/509.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/509.png 510 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/510.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/510.png 511 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/511.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/511.png 512 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/512.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/512.png 513 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/513.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/513.png 514 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/514.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/514.png 515 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/515.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/515.png 516 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/516.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/516.png 517 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/517.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/517.png 518 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/518.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/518.png 519 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/519.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/519.png 520 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/520.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/520.png 521 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/521.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/521.png 522 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/522.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/522.png 523 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/523.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/523.png 524 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/524.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/524.png 525 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/525.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/525.png 526 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/526.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/526.png 527 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/527.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/527.png 528 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/528.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/528.png 529 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/529.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/529.png 530 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/530.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/530.png 531 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/531.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/531.png 532 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/532.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/532.png 533 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/533.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/533.png 534 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/534.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/534.png 535 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/535.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/535.png 536 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/536.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/536.png 537 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/537.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/537.png 538 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/538.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/538.png 539 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/539.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/539.png 540 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/540.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/540.png 541 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/541.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/541.png 542 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/542.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/542.png 543 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/543.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/543.png 544 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/544.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/544.png 545 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/545.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/545.png 546 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/546.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/546.png 547 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/547.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/547.png 548 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/548.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/548.png 549 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/549.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/549.png 550 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/550.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/550.png 551 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/551.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/551.png 552 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/552.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/552.png 553 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/553.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/553.png 554 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/554.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/554.png 555 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/555.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/555.png 556 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/556.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/556.png 557 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/557.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/557.png 558 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/558.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/558.png 559 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/559.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/559.png 560 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/560.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/560.png 561 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/561.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/561.png 562 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/562.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/562.png 563 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/563.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/563.png 564 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/564.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/564.png 565 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/565.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/565.png 566 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/566.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/566.png 567 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/567.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/567.png 568 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/568.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/568.png 569 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/569.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/569.png 570 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/570.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/570.png 571 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/571.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/571.png 572 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/572.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/572.png 573 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/573.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/573.png 574 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/574.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/574.png 575 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/575.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/575.png 576 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/576.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/576.png 577 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/577.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/577.png 578 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/578.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/578.png 579 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/579.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/579.png 580 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/580.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/580.png 581 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/581.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/581.png 582 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/582.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/582.png 583 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/583.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/583.png 584 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/584.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/584.png 585 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/585.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/585.png 586 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/586.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/586.png 587 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/587.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/587.png 588 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/588.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/588.png 589 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/589.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/589.png 590 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/590.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/590.png 591 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/591.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/591.png 592 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/592.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/592.png 593 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/593.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/593.png 594 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/594.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/594.png 595 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/595.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/595.png 596 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/596.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/596.png 597 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/597.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/597.png 598 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/598.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/598.png 599 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/599.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/599.png 600 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/600.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/600.png 601 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/601.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/601.png 602 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/602.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/602.png 603 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/603.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/603.png 604 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/604.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/604.png 605 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/605.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/605.png 606 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/606.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/606.png 607 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/607.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/607.png 608 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/608.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/608.png 609 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/609.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/609.png 610 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/610.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/610.png 611 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/611.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/611.png 612 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/612.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/612.png 613 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/613.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/613.png 614 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/614.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/614.png 615 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/615.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/615.png 616 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/616.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/616.png 617 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/617.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/617.png 618 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/618.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/618.png 619 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/619.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/619.png 620 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/620.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/620.png 621 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/621.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/621.png 622 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/622.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/622.png 623 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/623.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/623.png 624 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/624.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/624.png 625 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/625.jpg 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/625.png 626 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/626.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/626.png 627 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/627.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/627.png 628 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/628.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/628.png 629 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/629.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/629.png 630 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/630.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/630.png 631 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/631.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/631.png 632 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/632.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/632.png 633 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/633.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/633.png 634 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/634.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/634.png 635 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/635.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/635.png 636 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/636.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/636.png 637 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/637.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/637.png 638 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/638.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/638.png 639 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/639.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/639.png 640 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/640.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/640.png 641 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/641.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/641.png 642 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/642.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/642.png 643 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/643.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/643.png 644 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/644.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/644.png 645 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/645.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/645.png 646 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/646.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/646.png 647 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/647.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/647.png 648 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/648.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/648.png 649 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/649.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/649.png 650 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/650.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/650.png 651 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/651.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/651.png 652 | 
/home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/652.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/652.png 653 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/653.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/653.png 654 | /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/rgb/654.jpg /home/vslam/work/NYU_Depth_V2/nyu_depth_v2/test654/depth/654.png 655 | -------------------------------------------------------------------------------- /losses.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | 4 | def berhu(targets, predictions): 5 | residual = tf.abs(predictions - targets) 6 | max_residual = tf.reduce_max(residual, axis=0) 7 | delta = 0.2 * max_residual 8 | condition = tf.less(residual, delta) 9 | large_res = 0.5 * (delta + tf.square(residual) / delta) 10 | return tf.where(condition, residual, large_res) 11 | 12 | 13 | def compute_depth_loss(gt_depth_maps, pred_depth_maps, loss_depth_norm, using_log_depth): 14 | 15 | if using_log_depth: 16 | gt_depth_maps = tf.log(gt_depth_maps+0.5) 17 | pred_depth_maps = tf.log(pred_depth_maps+0.5) 18 | if loss_depth_norm == 'l1': 19 | depth_diff = tf.abs(gt_depth_maps - pred_depth_maps) 20 | elif loss_depth_norm == 'l2': 21 | depth_diff = tf.square(gt_depth_maps - pred_depth_maps) 22 | elif loss_depth_norm == 'berhu': 23 | depth_diff = berhu(gt_depth_maps, pred_depth_maps) 24 | else: 25 | raise NameError("loss_depth_norm must be l1, l2 or berhu.") 26 | loss_depth = tf.reduce_mean(depth_diff) 27 | 28 | return loss_depth 29 | 30 | 31 | def compute_gradient_loss(gt_depth_maps, pred_depth_maps, using_log_gradient_magnitude, loss_gradient_magnitude_norm, loss_gradient_direction_norm): 32 | 33 | gt_gradients = tf.image.sobel_edges(gt_depth_maps) 34 | pd_gradients = tf.image.sobel_edges(pred_depth_maps) 35 | gt_gradients_y = gt_gradients[:, :, :, :, 0] 36 | gt_gradients_x = gt_gradients[:, :, :, :, 1] 37 | pd_gradients_y = pd_gradients[:, :, :, :, 0] 38 | pd_gradients_x = pd_gradients[:, :, :, :, 1] 39 | 40 | if loss_gradient_direction_norm == 'l1': 41 | grad_direc_diff = tf.abs(gt_gradients_y*pd_gradients_x - gt_gradients_x*pd_gradients_y) 42 | elif loss_gradient_direction_norm == 'l2': 43 | grad_direc_diff = tf.square(gt_gradients_y*pd_gradients_x - gt_gradients_x*pd_gradients_y) 44 | elif loss_gradient_direction_norm == 'berhu': 45 | grad_direc_diff = berhu(gt_gradients_y*pd_gradients_x, gt_gradients_x*pd_gradients_y) 46 | else: 47 | raise NameError("loss_gradient_direction_norm must be l1, l2 or berhu.") 48 | loss_grad_direc = tf.reduce_mean(grad_direc_diff) 49 | 50 | normal_product = 1 + gt_gradients_y*pd_gradients_y + gt_gradients_x*pd_gradients_x 51 | gt_normal_magni = tf.sqrt(1 + tf.square(gt_gradients_y) + tf.square(gt_gradients_x)) 52 | pd_normal_magni = tf.sqrt(1 + tf.square(pd_gradients_y) + tf.square(pd_gradients_x)) 53 | loss_normal = tf.reduce_mean(1 - tf.divide(normal_product, gt_normal_magni*pd_normal_magni)) 54 | 55 | if using_log_gradient_magnitude: 56 | gt_gradients = tf.log(gt_gradients_y+0.5) 57 | pd_gradients = tf.log(pd_gradients_y+0.5) 58 | 59 | if loss_gradient_magnitude_norm == 'l1': 60 | grad_magni_diff = tf.abs(gt_gradients - pd_gradients) 61 | elif loss_gradient_magnitude_norm == 'l2': 62 | grad_magni_diff = tf.square(gt_gradients - pd_gradients) 63 | elif loss_gradient_magnitude_norm == 'berhu': 64 | grad_magni_diff = berhu(gt_gradients, pd_gradients) 65 | else: 66 | raise NameError("loss_gradient_magnitude_norm must be l1, l2 or berhu.") 67 | loss_grad_magni = tf.reduce_mean(grad_magni_diff) 68 | 69 | return loss_grad_magni, loss_grad_direc, loss_normal 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 |
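A minimal usage sketch of how the loss terms defined in losses.py above could be combined into a single training objective. It is illustrative only and not taken from this repository's training code: the depth tensors and the weight values are assumed placeholders, and only compute_depth_loss and compute_gradient_loss come from the file above.

import tensorflow as tf
from losses import compute_depth_loss, compute_gradient_loss  # assumes losses.py is on the import path

# Stand-in depth tensors of shape [batch, height, width, 1]; real values come from the data pipeline.
gt_depth_maps = tf.ones([2, 64, 64, 1])
pred_depth_maps = 1.1 * tf.ones([2, 64, 64, 1])

# Illustrative weights for the gradient-magnitude, gradient-direction and surface-normal terms.
w_grad_magni, w_grad_direc, w_normal = 1.0, 1.0, 1.0

loss_depth = compute_depth_loss(gt_depth_maps, pred_depth_maps,
                                loss_depth_norm='berhu', using_log_depth=False)
loss_grad_magni, loss_grad_direc, loss_normal = compute_gradient_loss(
    gt_depth_maps, pred_depth_maps,
    using_log_gradient_magnitude=False,
    loss_gradient_magnitude_norm='l1',
    loss_gradient_direction_norm='l1')

# Weighted sum of the four terms; the result can then be handed to any TF1 optimizer,
# e.g. tf.train.AdamOptimizer(1e-4).minimize(total_loss).
total_loss = (loss_depth
              + w_grad_magni * loss_grad_magni
              + w_grad_direc * loss_grad_direc
              + w_normal * loss_normal)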
or beuhu.") 67 | loss_grad_magni = tf.reduce_mean(grad_magni_diff) 68 | 69 | return loss_grad_magni, loss_grad_direc, loss_normal 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /metrics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def compute_metrics_for_multi_maps(gt_depth_maps, pd_depth_maps, cap = None): 5 | gt_depth_maps = np.squeeze(gt_depth_maps) 6 | pd_depth_maps = np.squeeze(pd_depth_maps) 7 | if len(np.shape(gt_depth_maps)) == 2: 8 | return compute_metrics_for_single_map(gt_depth_maps, pd_depth_maps, cap) 9 | else: 10 | num_maps, height, width = np.shape(gt_depth_maps) 11 | ABS_REL = np.zeros(num_maps, np.float32) 12 | SQ_REL = np.zeros(num_maps, np.float32) 13 | RMSE = np.zeros(num_maps, np.float32) 14 | RMSE_log = np.zeros(num_maps, np.float32) 15 | Log10 = np.zeros(num_maps, np.float32) 16 | ACCURACY1 = np.zeros(num_maps, np.float32) 17 | ACCURACY2 = np.zeros(num_maps, np.float32) 18 | ACCURACY3 = np.zeros(num_maps, np.float32) 19 | for i in range(num_maps): 20 | ABS_REL[i], SQ_REL[i], RMSE[i], RMSE_log[i], Log10[i], ACCURACY1[i], ACCURACY2[i], ACCURACY3[i] = \ 21 | compute_metrics_for_single_map(gt_depth_maps[i], pd_depth_maps[i], cap) 22 | return ABS_REL.mean(), SQ_REL.mean(), RMSE.mean(), RMSE_log.mean(), Log10.mean(), ACCURACY1.mean(), ACCURACY2.mean(), ACCURACY3.mean() 23 | 24 | 25 | def compute_metrics_for_single_map(gt_depth_map, pd_depth_map, cap = None): 26 | # Create mask for valid pixels 27 | mask = gt_depth_map > 0.01 28 | gt_depth_map[gt_depth_map <= 0.01] = 0.01 # epsilon 29 | pd_depth_map[pd_depth_map <= 0.01] = 0.01 30 | if cap: 31 | mask_cap = gt_depth_map <= cap 32 | mask = np.logical_and(mask, mask_cap) 33 | 34 | # Count number of valid pixels 35 | val_pxls = np.sum(mask) 36 | 37 | # Compute absolute relative error 38 | abs_rel = np.abs(gt_depth_map - pd_depth_map) / gt_depth_map 39 | abs_rel[~mask] = 0 40 | S_abs_rel = np.sum(abs_rel) 41 | 42 | # Compute square relative error 43 | sq_rel = np.square(gt_depth_map - pd_depth_map) / gt_depth_map 44 | sq_rel[~mask] = 0 45 | S_sq_rel = np.sum(sq_rel) 46 | 47 | # Compute root mean square error 48 | rmse = np.square(gt_depth_map - pd_depth_map) 49 | rmse[~mask] = 0 50 | S_rmse = np.sum(rmse) 51 | 52 | # Compute root mean square error log 53 | rmse_log = np.square(np.log(gt_depth_map) - np.log(pd_depth_map)) 54 | rmse_log[~mask] = 0 55 | S_rmse_log = np.sum(rmse_log) 56 | 57 | # Compute log10 error 58 | log10 = np.abs(np.log10(gt_depth_map) - np.log10(pd_depth_map)) 59 | log10[~mask] = 0 60 | S_log10 = np.sum(log10) 61 | 62 | max_ratio = np.maximum(gt_depth_map / pd_depth_map, pd_depth_map / gt_depth_map) 63 | 64 | # Compute accuracies for different deltas(thresholds) 65 | acc1 = np.asarray(np.logical_and(max_ratio < 1.25, mask), dtype=np.float32) 66 | acc2 = np.asarray(np.logical_and(max_ratio < 1.25 ** 2, mask), dtype=np.float32) 67 | acc3 = np.asarray(np.logical_and(max_ratio < 1.25 ** 3, mask), dtype=np.float32) 68 | 69 | S_acc1 = np.sum(acc1) 70 | S_acc2 = np.sum(acc2) 71 | S_acc3 = np.sum(acc3) 72 | 73 | ABS_REL = S_abs_rel / val_pxls 74 | SQ_REL = S_sq_rel / val_pxls 75 | RMSE = np.sqrt(S_rmse / val_pxls) 76 | RMSE_log = np.sqrt(S_rmse_log / val_pxls) 77 | Log10 = S_log10/val_pxls 78 | ACCURACY1 = S_acc1 / val_pxls 79 | ACCURACY2 = S_acc2 / val_pxls 80 | ACCURACY3 = S_acc3 / val_pxls 81 | 82 | return ABS_REL, SQ_REL, RMSE, RMSE_log, Log10, ACCURACY1, ACCURACY2, 
ACCURACY3 83 | -------------------------------------------------------------------------------- /nets/resnet_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Contains building blocks for various versions of Residual Networks. 16 | 17 | Residual networks (ResNets) were proposed in: 18 | Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun 19 | Deep Residual Learning for Image Recognition. arXiv:1512.03385, 2015 20 | 21 | More variants were introduced in: 22 | Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun 23 | Identity Mappings in Deep Residual Networks. arXiv: 1603.05027, 2016 24 | 25 | We can obtain different ResNet variants by changing the network depth, width, 26 | and form of residual unit. This module implements the infrastructure for 27 | building them. Concrete ResNet units and full ResNet networks are implemented in 28 | the accompanying resnet_v1.py and resnet_v2.py modules. 29 | 30 | Compared to https://github.com/KaimingHe/deep-residual-networks, in the current 31 | implementation we subsample the output activations in the last residual unit of 32 | each block, instead of subsampling the input activations in the first residual 33 | unit of each block. The two implementations give identical results but our 34 | implementation is more memory efficient. 35 | """ 36 | from __future__ import absolute_import 37 | from __future__ import division 38 | from __future__ import print_function 39 | 40 | import collections 41 | import tensorflow as tf 42 | 43 | slim = tf.contrib.slim 44 | 45 | 46 | class Block(collections.namedtuple('Block', ['scope', 'unit_fn', 'args'])): 47 | """A named tuple describing a ResNet block. 48 | 49 | Its parts are: 50 | scope: The scope of the `Block`. 51 | unit_fn: The ResNet unit function which takes as input a `Tensor` and 52 | returns another `Tensor` with the output of the ResNet unit. 53 | args: A list of length equal to the number of units in the `Block`. The list 54 | contains one (depth, depth_bottleneck, stride) tuple for each unit in the 55 | block to serve as argument to unit_fn. 56 | """ 57 | 58 | 59 | def subsample(inputs, factor, scope=None): 60 | """Subsamples the input along the spatial dimensions. 61 | 62 | Args: 63 | inputs: A `Tensor` of size [batch, height_in, width_in, channels]. 64 | factor: The subsampling factor. 65 | scope: Optional variable_scope. 66 | 67 | Returns: 68 | output: A `Tensor` of size [batch, height_out, width_out, channels] with the 69 | input, either intact (if factor == 1) or subsampled (if factor > 1). 
70 | """ 71 | if factor == 1: 72 | return inputs 73 | else: 74 | return slim.max_pool2d(inputs, [1, 1], stride=factor, scope=scope) 75 | 76 | 77 | def conv2d_same(inputs, num_outputs, kernel_size, stride, rate=1, scope=None, trainable=True): 78 | """Strided 2-D convolution with 'SAME' padding. 79 | 80 | When stride > 1, then we do explicit zero-padding, followed by conv2d with 81 | 'VALID' padding. 82 | 83 | Note that 84 | 85 | net = conv2d_same(inputs, num_outputs, 3, stride=stride) 86 | 87 | is equivalent to 88 | 89 | net = slim.conv2d(inputs, num_outputs, 3, stride=1, padding='SAME') 90 | net = subsample(net, factor=stride) 91 | 92 | whereas 93 | 94 | net = slim.conv2d(inputs, num_outputs, 3, stride=stride, padding='SAME') 95 | 96 | is different when the input's height or width is even, which is why we add the 97 | current function. For more details, see ResnetUtilsTest.testConv2DSameEven(). 98 | 99 | Args: 100 | inputs: A 4-D tensor of size [batch, height_in, width_in, channels]. 101 | num_outputs: An integer, the number of output filters. 102 | kernel_size: An int with the kernel_size of the filters. 103 | stride: An integer, the output stride. 104 | rate: An integer, rate for atrous convolution. 105 | scope: Scope. 106 | 107 | Returns: 108 | output: A 4-D tensor of size [batch, height_out, width_out, channels] with 109 | the convolution output. 110 | """ 111 | if stride == 1: 112 | return slim.conv2d(inputs, num_outputs, kernel_size, stride=1, rate=rate, 113 | padding='SAME', scope=scope, trainable=trainable) 114 | else: 115 | kernel_size_effective = kernel_size + (kernel_size - 1) * (rate - 1) 116 | pad_total = kernel_size_effective - 1 117 | pad_beg = pad_total // 2 118 | pad_end = pad_total - pad_beg 119 | inputs = tf.pad(inputs, 120 | [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]]) 121 | return slim.conv2d(inputs, num_outputs, kernel_size, stride=stride, 122 | rate=rate, padding='VALID', scope=scope, trainable=trainable) 123 | 124 | 125 | @slim.add_arg_scope 126 | def stack_blocks_dense(net, blocks, multi_grid, output_stride=None, 127 | outputs_collections=None): 128 | """Stacks ResNet `Blocks` and controls output feature density. 129 | 130 | First, this function creates scopes for the ResNet in the form of 131 | 'block_name/unit_1', 'block_name/unit_2', etc. 132 | 133 | Second, this function allows the user to explicitly control the ResNet 134 | output_stride, which is the ratio of the input to output spatial resolution. 135 | This is useful for dense prediction tasks such as semantic segmentation or 136 | object detection. 137 | 138 | Most ResNets consist of 4 ResNet blocks and subsample the activations by a 139 | factor of 2 when transitioning between consecutive ResNet blocks. This results 140 | to a nominal ResNet output_stride equal to 8. If we set the output_stride to 141 | half the nominal network stride (e.g., output_stride=4), then we compute 142 | responses twice. 143 | 144 | Control of the output feature density is implemented by atrous convolution. 145 | 146 | Args: 147 | net: A `Tensor` of size [batch, height, width, channels]. 148 | blocks: A list of length equal to the number of ResNet `Blocks`. Each 149 | element is a ResNet `Block` object describing the units in the `Block`. 150 | output_stride: If `None`, then the output will be computed at the nominal 151 | network stride. 
If output_stride is not `None`, it specifies the requested 152 | ratio of input to output spatial resolution, which needs to be equal to 153 | the product of unit strides from the start up to some level of the ResNet. 154 | For example, if the ResNet employs units with strides 1, 2, 1, 3, 4, 1, 155 | then valid values for the output_stride are 1, 2, 6, 24 or None (which 156 | is equivalent to output_stride=24). 157 | outputs_collections: Collection to add the ResNet block outputs. 158 | 159 | Returns: 160 | net: Output tensor with stride equal to the specified output_stride. 161 | 162 | Raises: 163 | ValueError: If the target output_stride is not valid. 164 | """ 165 | # The current_stride variable keeps track of the effective stride of the 166 | # activations. This allows us to invoke atrous convolution whenever applying 167 | # the next residual unit would result in the activations having stride larger 168 | # than the target output_stride. 169 | current_stride = 1 170 | 171 | # The atrous convolution rate parameter. 172 | rate = 1 173 | trainable = False 174 | for block in blocks: 175 | with tf.variable_scope(block.scope, 'block', [net]) as sc: 176 | for i, unit in enumerate(block.args): 177 | if output_stride is not None and current_stride > output_stride: 178 | raise ValueError('The target output_stride cannot be reached.') 179 | 180 | with tf.variable_scope('unit_%d' % (i + 1), values=[net]): 181 | # If we have reached the target output_stride, then we need to employ 182 | # atrous convolution with stride=1 and multiply the atrous rate by the 183 | # current unit's stride for use in subsequent layers. 184 | if output_stride is not None and current_stride == output_stride: 185 | # Only uses atrous convolutions with multi-graid rates in the last (block4) block 186 | if block.scope == "block4": 187 | net = block.unit_fn(net, rate=rate * multi_grid[i], **dict(unit, stride=1)) 188 | else: 189 | net = block.unit_fn(net, rate=rate, **dict(unit, stride=1)) 190 | rate *= unit.get('stride', 1) 191 | else: 192 | net = block.unit_fn(net, rate=1, **unit, trainable=trainable) 193 | current_stride *= unit.get('stride', 1) 194 | trainable = True 195 | net = slim.utils.collect_named_outputs(outputs_collections, sc.name, net) 196 | 197 | if output_stride is not None and current_stride != output_stride: 198 | raise ValueError('The target output_stride cannot be reached.') 199 | 200 | return net 201 | 202 | 203 | def resnet_arg_scope(weight_decay=0.0001, 204 | is_training=True, 205 | batch_norm_decay=0.997, 206 | batch_norm_epsilon=1e-5, 207 | batch_norm_scale=True, 208 | activation_fn=tf.nn.relu, 209 | use_batch_norm=True): 210 | """Defines the default ResNet arg scope. 211 | 212 | TODO(gpapan): The batch-normalization related default values above are 213 | appropriate for use in conjunction with the reference ResNet models 214 | released at https://github.com/KaimingHe/deep-residual-networks. When 215 | training ResNets from scratch, they might need to be tuned. 216 | 217 | Args: 218 | weight_decay: The weight decay to use for regularizing the model. 219 | batch_norm_decay: The moving average decay when estimating layer activation 220 | statistics in batch normalization. 221 | batch_norm_epsilon: Small constant to prevent division by zero when 222 | normalizing activations by their variance in batch normalization. 223 | batch_norm_scale: If True, uses an explicit `gamma` multiplier to scale the 224 | activations in the batch normalization layer. 
225 | activation_fn: The activation function which is used in ResNet. 226 | use_batch_norm: Whether or not to use batch normalization. 227 | 228 | Returns: 229 | An `arg_scope` to use for the resnet models. 230 | """ 231 | batch_norm_params = { 232 | 'decay': batch_norm_decay, 233 | 'epsilon': batch_norm_epsilon, 234 | 'scale': batch_norm_scale, 235 | 'updates_collections': None, 236 | 'is_training': is_training, 237 | 'fused': True, # Use fused batch norm if possible. 238 | } 239 | 240 | with slim.arg_scope( 241 | [slim.conv2d], 242 | weights_regularizer=slim.l2_regularizer(weight_decay), 243 | weights_initializer=slim.variance_scaling_initializer(), 244 | activation_fn=activation_fn, 245 | normalizer_fn=slim.batch_norm if use_batch_norm else None, 246 | normalizer_params=batch_norm_params): 247 | with slim.arg_scope([slim.batch_norm], **batch_norm_params): 248 | # The following implies padding='SAME' for pool1, which makes feature 249 | # alignment easier for dense prediction tasks. This is also used in 250 | # https://github.com/facebook/fb.resnet.torch. However the accompanying 251 | # code of 'Deep Residual Learning for Image Recognition' uses 252 | # padding='VALID' for pool1. You can switch to that choice by setting 253 | # slim.arg_scope([slim.max_pool2d], padding='VALID'). 254 | with slim.arg_scope([slim.max_pool2d], padding='SAME') as arg_sc: 255 | return arg_sc 256 | 257 | -------------------------------------------------------------------------------- /nets/resnet_v2.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Contains definitions for the preactivation form of Residual Networks. 16 | 17 | Residual networks (ResNets) were originally proposed in: 18 | [1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun 19 | Deep Residual Learning for Image Recognition. arXiv:1512.03385 20 | 21 | The full preactivation 'v2' ResNet variant implemented in this module was 22 | introduced by: 23 | [2] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun 24 | Identity Mappings in Deep Residual Networks. arXiv: 1603.05027 25 | 26 | The key difference of the full preactivation 'v2' variant compared to the 27 | 'v1' variant in [1] is the use of batch normalization before every weight layer. 
28 | 29 | Typical use: 30 | 31 | from tensorflow.contrib.slim.nets import resnet_v2 32 | 33 | ResNet-101 for image classification into 1000 classes: 34 | 35 | # inputs has shape [batch, 224, 224, 3] 36 | with slim.arg_scope(resnet_v2.resnet_arg_scope()): 37 | net, end_points = resnet_v2.resnet_v2_101(inputs, 1000, is_training=False) 38 | 39 | ResNet-101 for semantic segmentation into 21 classes: 40 | 41 | # inputs has shape [batch, 513, 513, 3] 42 | with slim.arg_scope(resnet_v2.resnet_arg_scope()): 43 | net, end_points = resnet_v2.resnet_v2_101(inputs, 44 | 21, 45 | is_training=False, 46 | global_pool=False, 47 | output_stride=16) 48 | """ 49 | from __future__ import absolute_import 50 | from __future__ import division 51 | from __future__ import print_function 52 | 53 | import tensorflow as tf 54 | 55 | from nets import resnet_utils 56 | 57 | slim = tf.contrib.slim 58 | resnet_arg_scope = resnet_utils.resnet_arg_scope 59 | 60 | 61 | @slim.add_arg_scope 62 | def bottleneck(inputs, depth, depth_bottleneck, stride, rate=1, 63 | outputs_collections=None, scope=None, trainable=True): 64 | """Bottleneck residual unit variant with BN before convolutions. 65 | 66 | This is the full preactivation residual unit variant proposed in [2]. See 67 | Fig. 1(b) of [2] for its definition. Note that we use here the bottleneck 68 | variant which has an extra bottleneck layer. 69 | 70 | When putting together two consecutive ResNet blocks that use this unit, one 71 | should use stride = 2 in the last unit of the first block. 72 | 73 | Args: 74 | inputs: A tensor of size [batch, height, width, channels]. 75 | depth: The depth of the ResNet unit output. 76 | depth_bottleneck: The depth of the bottleneck layers. 77 | stride: The ResNet unit's stride. Determines the amount of downsampling of 78 | the units output compared to its input. 79 | rate: An integer, rate for atrous convolution. 80 | outputs_collections: Collection to add the ResNet unit output. 81 | scope: Optional variable_scope. 82 | 83 | Returns: 84 | The ResNet unit's output. 85 | """ 86 | with tf.variable_scope(scope, 'bottleneck_v2', [inputs]) as sc: 87 | depth_in = slim.utils.last_dimension(inputs.get_shape(), min_rank=4) 88 | preact = slim.batch_norm(inputs, activation_fn=tf.nn.relu, scope='preact') 89 | if depth == depth_in: 90 | shortcut = resnet_utils.subsample(inputs, stride, 'shortcut') 91 | else: 92 | shortcut = slim.conv2d(preact, depth, [1, 1], stride=stride, 93 | normalizer_fn=None, activation_fn=None, 94 | scope='shortcut', trainable=trainable) 95 | 96 | residual = slim.conv2d(preact, depth_bottleneck, [1, 1], stride=1, 97 | scope='conv1', trainable=trainable) 98 | residual = resnet_utils.conv2d_same(residual, depth_bottleneck, 3, stride, 99 | rate=rate, scope='conv2', trainable=trainable) 100 | residual = slim.conv2d(residual, depth, [1, 1], stride=1, 101 | normalizer_fn=None, activation_fn=None, 102 | scope='conv3', trainable=trainable) 103 | 104 | output = shortcut + residual 105 | 106 | return slim.utils.collect_named_outputs(outputs_collections, 107 | sc.name, 108 | output) 109 | 110 | 111 | def resnet_v2(inputs, 112 | blocks, 113 | num_classes=None, 114 | multi_grid=None, 115 | is_training=True, 116 | global_pool=True, 117 | output_stride=None, 118 | include_root_block=True, 119 | spatial_squeeze=True, 120 | reuse=None, 121 | scope=None): 122 | """Generator for v2 (preactivation) ResNet models. 123 | 124 | This function generates a family of ResNet v2 models. 
See the resnet_v2_*() 125 | methods for specific model instantiations, obtained by selecting different 126 | block instantiations that produce ResNets of various depths. 127 | 128 | Training for image classification on Imagenet is usually done with [224, 224] 129 | inputs, resulting in [7, 7] feature maps at the output of the last ResNet 130 | block for the ResNets defined in [1] that have nominal stride equal to 32. 131 | However, for dense prediction tasks we advise that one uses inputs with 132 | spatial dimensions that are multiples of 32 plus 1, e.g., [321, 321]. In 133 | this case the feature maps at the ResNet output will have spatial shape 134 | [(height - 1) / output_stride + 1, (width - 1) / output_stride + 1] 135 | and corners exactly aligned with the input image corners, which greatly 136 | facilitates alignment of the features to the image. Using as input [225, 225] 137 | images results in [8, 8] feature maps at the output of the last ResNet block. 138 | 139 | For dense prediction tasks, the ResNet needs to run in fully-convolutional 140 | (FCN) mode and global_pool needs to be set to False. The ResNets in [1, 2] all 141 | have nominal stride equal to 32 and a good choice in FCN mode is to use 142 | output_stride=16 in order to increase the density of the computed features at 143 | small computational and memory overhead, cf. http://arxiv.org/abs/1606.00915. 144 | 145 | Args: 146 | inputs: A tensor of size [batch, height_in, width_in, channels]. 147 | blocks: A list of length equal to the number of ResNet blocks. Each element 148 | is a resnet_utils.Block object describing the units in the block. 149 | num_classes: Number of predicted classes for classification tasks. 150 | If 0 or None, we return the features before the logit layer. 151 | is_training: whether batch_norm layers are in training mode. 152 | global_pool: If True, we perform global average pooling before computing the 153 | logits. Set to True for image classification, False for dense prediction. 154 | output_stride: If None, then the output will be computed at the nominal 155 | network stride. If output_stride is not None, it specifies the requested 156 | ratio of input to output spatial resolution. 157 | include_root_block: If True, include the initial convolution followed by 158 | max-pooling, if False excludes it. If excluded, `inputs` should be the 159 | results of an activation-less convolution. 160 | spatial_squeeze: if True, logits is of shape [B, C], if false logits is 161 | of shape [B, 1, 1, C], where B is batch_size and C is number of classes. 162 | To use this parameter, the input images must be smaller than 300x300 163 | pixels, in which case the output logit layer does not contain spatial 164 | information and can be removed. 165 | reuse: whether or not the network and its variables should be reused. To be 166 | able to reuse 'scope' must be given. 167 | scope: Optional variable_scope. 168 | 169 | 170 | Returns: 171 | net: A rank-4 tensor of size [batch, height_out, width_out, channels_out]. 172 | If global_pool is False, then height_out and width_out are reduced by a 173 | factor of output_stride compared to the respective height_in and width_in, 174 | else both height_out and width_out equal one. If num_classes is 0 or None, 175 | then net is the output of the last ResNet block, potentially after global 176 | average pooling. If num_classes is a non-zero integer, net contains the 177 | pre-softmax activations. 
178 | end_points: A dictionary from components of the network to the corresponding 179 | activation. 180 | 181 | Raises: 182 | ValueError: If the target output_stride is not valid. 183 | """ 184 | with tf.variable_scope(scope, 'resnet_v2', [inputs], reuse=reuse) as sc: 185 | end_points_collection = sc.original_name_scope + '_end_points' 186 | with slim.arg_scope([slim.conv2d, bottleneck, 187 | resnet_utils.stack_blocks_dense], 188 | outputs_collections=end_points_collection): 189 | 190 | net = inputs 191 | if include_root_block: 192 | if output_stride is not None: 193 | if output_stride % 4 != 0: 194 | raise ValueError('The output_stride needs to be a multiple of 4.') 195 | output_stride /= 4 196 | # We do not include batch normalization or activation functions in 197 | # conv1 because the first ResNet unit will perform these. Cf. 198 | # Appendix of [2]. 199 | with slim.arg_scope([slim.conv2d], 200 | activation_fn=None, normalizer_fn=None): 201 | net = resnet_utils.conv2d_same(net, 64, 7, stride=2, scope='conv1', trainable=False) 202 | net = slim.max_pool2d(net, [3, 3], stride=2, scope='pool1') 203 | net = resnet_utils.stack_blocks_dense(net, blocks, multi_grid, output_stride) 204 | # This is needed because the pre-activation variant does not have batch 205 | # normalization or activation functions in the residual unit output. See 206 | # Appendix of [2]. 207 | net = slim.batch_norm(net, activation_fn=tf.nn.relu, scope='postnorm') 208 | # Convert end_points_collection into a dictionary of end_points. 209 | end_points = slim.utils.convert_collection_to_dict( 210 | end_points_collection) 211 | 212 | if global_pool: 213 | # Global average pooling. 214 | net = tf.reduce_mean(net, [1, 2], name='pool5', keep_dims=True) 215 | end_points['global_pool'] = net 216 | if num_classes is not None: 217 | net = slim.conv2d(net, num_classes, [1, 1], activation_fn=None, 218 | normalizer_fn=None, scope='logits') 219 | end_points[sc.name + '/logits'] = net 220 | if spatial_squeeze: 221 | net = tf.squeeze(net, [1, 2], name='SpatialSqueeze') 222 | end_points[sc.name + '/spatial_squeeze'] = net 223 | end_points['predictions'] = slim.softmax(net, scope='predictions') 224 | 225 | 226 | return net, end_points 227 | 228 | 229 | resnet_v2.default_image_size = 224 230 | 231 | 232 | def resnet_v2_block(scope, base_depth, num_units, stride): 233 | """Helper function for creating a resnet_v2 bottleneck block. 234 | 235 | Args: 236 | scope: The scope of the block. 237 | base_depth: The depth of the bottleneck layer for each unit. 238 | num_units: The number of units in the block. 239 | stride: The stride of the block, implemented as a stride in the last unit. 240 | All other units have stride=1. 241 | 242 | Returns: 243 | A resnet_v2 bottleneck block. 244 | """ 245 | return resnet_utils.Block(scope, bottleneck, [{ 246 | 'depth': base_depth * 4, 247 | 'depth_bottleneck': base_depth, 248 | 'stride': 1 249 | }] * (num_units - 1) + [{ 250 | 'depth': base_depth * 4, 251 | 'depth_bottleneck': base_depth, 252 | 'stride': stride 253 | }]) 254 | 255 | 256 | resnet_v2.default_image_size = 224 257 | 258 | 259 | def resnet_v2_50(inputs, 260 | num_classes=None, 261 | is_training=True, 262 | multi_grid=[1, 2, 4], 263 | global_pool=True, 264 | output_stride=None, 265 | spatial_squeeze=True, 266 | reuse=None, 267 | scope='resnet_v2_50'): 268 | """ResNet-50 model of [1]. 
See resnet_v2() for arg and return description.""" 269 | blocks = [ 270 | resnet_v2_block('block1', base_depth=64, num_units=3, stride=2), 271 | resnet_v2_block('block2', base_depth=128, num_units=4, stride=2), 272 | resnet_v2_block('block3', base_depth=256, num_units=6, stride=2), 273 | resnet_v2_block('block4', base_depth=512, num_units=3, stride=1), 274 | ] 275 | return resnet_v2(inputs, blocks, num_classes, is_training=is_training, 276 | global_pool=global_pool, output_stride=output_stride, multi_grid=multi_grid, 277 | include_root_block=True, spatial_squeeze=spatial_squeeze, 278 | reuse=reuse, scope=scope) 279 | 280 | 281 | resnet_v2_50.default_image_size = resnet_v2.default_image_size 282 | 283 | 284 | def resnet_v2_101(inputs, 285 | num_classes=None, 286 | is_training=True, 287 | multi_grid=[1, 2, 4], 288 | global_pool=True, 289 | output_stride=None, 290 | spatial_squeeze=True, 291 | reuse=None, 292 | scope='resnet_v2_101'): 293 | """ResNet-101 model of [1]. See resnet_v2() for arg and return description.""" 294 | blocks = [ 295 | resnet_v2_block('block1', base_depth=64, num_units=3, stride=2), 296 | resnet_v2_block('block2', base_depth=128, num_units=4, stride=2), 297 | resnet_v2_block('block3', base_depth=256, num_units=23, stride=2), 298 | resnet_v2_block('block4', base_depth=512, num_units=3, stride=1), 299 | ] 300 | return resnet_v2(inputs, blocks, num_classes, is_training=is_training, 301 | global_pool=global_pool, output_stride=output_stride, multi_grid=multi_grid, 302 | include_root_block=True, spatial_squeeze=spatial_squeeze, 303 | reuse=reuse, scope=scope) 304 | 305 | 306 | resnet_v2_101.default_image_size = resnet_v2.default_image_size 307 | 308 | 309 | def resnet_v2_152(inputs, 310 | num_classes=None, 311 | is_training=True, 312 | multi_grid=[1, 2, 4], 313 | global_pool=True, 314 | output_stride=None, 315 | spatial_squeeze=True, 316 | reuse=None, 317 | scope='resnet_v2_152'): 318 | """ResNet-152 model of [1]. See resnet_v2() for arg and return description.""" 319 | blocks = [ 320 | resnet_v2_block('block1', base_depth=64, num_units=3, stride=2), 321 | resnet_v2_block('block2', base_depth=128, num_units=8, stride=2), 322 | resnet_v2_block('block3', base_depth=256, num_units=36, stride=2), 323 | resnet_v2_block('block4', base_depth=512, num_units=3, stride=1), 324 | ] 325 | return resnet_v2(inputs, blocks, num_classes, is_training=is_training, 326 | global_pool=global_pool, output_stride=output_stride, multi_grid=multi_grid, 327 | include_root_block=True, spatial_squeeze=spatial_squeeze, 328 | reuse=reuse, scope=scope) 329 | 330 | 331 | resnet_v2_152.default_image_size = resnet_v2.default_image_size 332 | 333 | 334 | def resnet_v2_200(inputs, 335 | num_classes=None, 336 | is_training=True, 337 | multi_grid=[1, 2, 4], 338 | global_pool=True, 339 | output_stride=None, 340 | spatial_squeeze=True, 341 | reuse=None, 342 | scope='resnet_v2_200'): 343 | """ResNet-200 model of [2]. 
See resnet_v2() for arg and return description.""" 344 | blocks = [ 345 | resnet_v2_block('block1', base_depth=64, num_units=3, stride=2), 346 | resnet_v2_block('block2', base_depth=128, num_units=24, stride=2), 347 | resnet_v2_block('block3', base_depth=256, num_units=36, stride=2), 348 | resnet_v2_block('block4', base_depth=512, num_units=3, stride=1), 349 | ] 350 | return resnet_v2(inputs, blocks, num_classes, is_training=is_training, 351 | global_pool=global_pool, output_stride=output_stride, multi_grid=multi_grid, 352 | include_root_block=True, spatial_squeeze=spatial_squeeze, 353 | reuse=reuse, scope=scope) 354 | 355 | 356 | resnet_v2_200.default_image_size = resnet_v2.default_image_size 357 | 358 | -------------------------------------------------------------------------------- /nets/vgg.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Contains model definitions for versions of the Oxford VGG network. 16 | 17 | These model definitions were introduced in the following technical report: 18 | 19 | Very Deep Convolutional Networks For Large-Scale Image Recognition 20 | Karen Simonyan and Andrew Zisserman 21 | arXiv technical report, 2015 22 | PDF: http://arxiv.org/pdf/1409.1556.pdf 23 | ILSVRC 2014 Slides: http://www.robots.ox.ac.uk/~karen/pdf/ILSVRC_2014.pdf 24 | CC-BY-4.0 25 | 26 | More information can be obtained from the VGG website: 27 | www.robots.ox.ac.uk/~vgg/research/very_deep/ 28 | 29 | Usage: 30 | with slim.arg_scope(vgg.vgg_arg_scope()): 31 | outputs, end_points = vgg.vgg_a(inputs) 32 | 33 | with slim.arg_scope(vgg.vgg_arg_scope()): 34 | outputs, end_points = vgg.vgg_16(inputs) 35 | 36 | @@vgg_a 37 | @@vgg_16 38 | @@vgg_19 39 | """ 40 | from __future__ import absolute_import 41 | from __future__ import division 42 | from __future__ import print_function 43 | 44 | import tensorflow as tf 45 | 46 | slim = tf.contrib.slim 47 | 48 | 49 | def vgg_arg_scope(weight_decay=0.0005, 50 | is_training=True, 51 | batch_norm_decay=0.997, 52 | batch_norm_epsilon=1e-5, 53 | batch_norm_scale=True, 54 | activation_fn=tf.nn.relu, 55 | use_batch_norm=True): 56 | """Defines the VGG arg scope. 57 | 58 | Args: 59 | weight_decay: The l2 regularization coefficient. 60 | 61 | Returns: 62 | An arg_scope. 63 | """ 64 | batch_norm_params = { 65 | 'decay': batch_norm_decay, 66 | 'epsilon': batch_norm_epsilon, 67 | 'scale': batch_norm_scale, 68 | 'updates_collections': tf.GraphKeys.UPDATE_OPS, 69 | 'is_training': is_training, 70 | 'fused': True, # Use fused batch norm if possible. 
71 | } 72 | 73 | with slim.arg_scope( 74 | [slim.conv2d], 75 | weights_regularizer=slim.l2_regularizer(weight_decay), 76 | weights_initializer=slim.variance_scaling_initializer(), 77 | biases_initializer=tf.zeros_initializer(), 78 | activation_fn=activation_fn, 79 | normalizer_fn=slim.batch_norm if use_batch_norm else None, 80 | normalizer_params=batch_norm_params): 81 | with slim.arg_scope([slim.batch_norm], **batch_norm_params): 82 | with slim.arg_scope([slim.conv2d, slim.max_pool2d], padding='SAME') as arg_sc: 83 | return arg_sc 84 | 85 | 86 | def vgg_16(inputs, 87 | output_stride=None, 88 | multi_grid = [1, 2, 4], 89 | scope='vgg_16', 90 | reuse=None): 91 | """Oxford Net VGG 16-Layers version D Example. 92 | 93 | Note: All the fully_connected layers have been transformed to conv2d layers. 94 | To use in classification mode, resize input to 224x224. 95 | 96 | Args: 97 | inputs: a tensor of size [batch_size, height, width, channels]. 98 | num_classes: number of predicted classes. If 0 or None, the logits layer is 99 | omitted and the input features to the logits layer are returned instead. 100 | is_training: whether or not the model is being trained. 101 | dropout_keep_prob: the probability that activations are kept in the dropout 102 | layers during training. 103 | spatial_squeeze: whether or not should squeeze the spatial dimensions of the 104 | outputs. Useful to remove unnecessary dimensions for classification. 105 | scope: Optional scope for the variables. 106 | fc_conv_padding: the type of padding to use for the fully connected layer 107 | that is implemented as a convolutional layer. Use 'SAME' padding if you 108 | are applying the network in a fully convolutional manner and want to 109 | get a prediction map downsampled by a factor of 32 as an output. 110 | Otherwise, the output prediction map will be (input / 32) - 6 in case of 111 | 'VALID' padding. 112 | global_pool: Optional boolean flag. If True, the input to the classification 113 | layer is avgpooled to size 1x1, for any input size. (This is not part 114 | of the original VGG architecture.) 115 | 116 | Returns: 117 | net: the output of the logits layer (if num_classes is a non-zero integer), 118 | or the input to the logits layer (if num_classes is 0 or None). 119 | end_points: a dict of tensors with intermediate activations. 120 | """ 121 | 122 | # The current_stride variable keeps track of the effective stride of the 123 | # activations. This allows us to invoke atrous convolution whenever applying 124 | # the next residual unit would result in the activations having stride larger 125 | # than the target output_stride. 126 | current_stride = 1 127 | 128 | # The atrous convolution rate parameter. 129 | rate = 1 130 | 131 | with tf.variable_scope(scope, 'vgg_16', [inputs], reuse=reuse) as sc: 132 | end_points_collection = sc.original_name_scope + '_end_points' 133 | # Collect outputs for conv2d, fully_connected and max_pool2d. 
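# As in the ResNet backbone, each pooling step below is applied only while current_stride is
# still below the requested output_stride; once the target stride is reached, the remaining
# pool layers are skipped and the atrous rate of the later convolutions is doubled instead, so
# the final feature map keeps the requested resolution. conv1 and conv2 are kept frozen
# (trainable=False), and the conv5 block applies the multi_grid rate multipliers.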
134 | with slim.arg_scope([slim.conv2d], 135 | outputs_collections=end_points_collection): 136 | net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], rate=rate, scope='conv1', trainable=False) 137 | if output_stride is None or current_stride < output_stride: 138 | net = slim.max_pool2d(net, [3, 3], padding='SAME', scope='pool1') 139 | net = slim.utils.collect_named_outputs(end_points_collection, sc.name + '/pool1', net) 140 | current_stride *=2 141 | else: 142 | rate *=2 143 | 144 | net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], rate=rate, scope='conv2', trainable=False) 145 | if output_stride is None or current_stride < output_stride: 146 | net = slim.max_pool2d(net, [3, 3], padding='SAME', scope='pool2') 147 | net = slim.utils.collect_named_outputs(end_points_collection, sc.name + '/pool2', net) 148 | current_stride *=2 149 | else: 150 | rate *=2 151 | 152 | net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], rate=rate, scope='conv3') 153 | if output_stride is None or current_stride < output_stride: 154 | net = slim.max_pool2d(net, [3, 3], padding='SAME', scope='pool3') 155 | net = slim.utils.collect_named_outputs(end_points_collection, sc.name + '/pool3', net) 156 | current_stride *=2 157 | else: 158 | rate *=2 159 | 160 | net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], rate=rate, scope='conv4') 161 | if output_stride is None or current_stride < output_stride: 162 | net = slim.max_pool2d(net, [3, 3], padding='SAME', scope='pool4') 163 | net = slim.utils.collect_named_outputs(end_points_collection, sc.name+ '/pool4', net) 164 | current_stride *=2 165 | else: 166 | rate *=2 167 | #net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5') 168 | 169 | net = slim.conv2d(net, 512, [3, 3], rate=rate*multi_grid[0], scope='conv5/conv5_1') 170 | net = slim.conv2d(net, 512, [3, 3], rate=rate*multi_grid[1], scope='conv5/conv5_2') 171 | net = slim.conv2d(net, 512, [3, 3], rate=rate*multi_grid[2], scope='conv5/conv5_3') 172 | 173 | # Convert end_points_collection into a end_point dict. 174 | end_points = slim.utils.convert_collection_to_dict(end_points_collection) 175 | 176 | return net, end_points 177 | 178 | 179 | vgg_16.default_image_size = 224 180 | vgg_d = vgg_16 181 | 182 | 183 | -------------------------------------------------------------------------------- /network.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | slim = tf.contrib.slim 3 | from nets import resnet_v2, resnet_utils, vgg 4 | 5 | # ImageNet mean statistics 6 | _R_MEAN = 123.68 7 | _G_MEAN = 116.78 8 | _B_MEAN = 103.94 9 | 10 | 11 | def mdr_arg_scope(weight_decay=0.0001, 12 | is_training=True, 13 | batch_norm_decay=0.997, 14 | batch_norm_epsilon=1e-5, 15 | batch_norm_scale=True, 16 | activation_fn=tf.nn.relu, 17 | use_batch_norm=True): 18 | 19 | batch_norm_params = { 20 | 'decay': batch_norm_decay, 21 | 'epsilon': batch_norm_epsilon, 22 | 'scale': batch_norm_scale, 23 | 'updates_collections': tf.GraphKeys.UPDATE_OPS, 24 | 'is_training': is_training, 25 | 'fused': True, # Use fused batch norm if possible. 
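# Note: updates_collections is tf.GraphKeys.UPDATE_OPS here (unlike resnet_utils.resnet_arg_scope,
# which uses None), so the batch norm moving-average update ops must be run explicitly; train.py
# wraps the train op in tf.control_dependencies(update_ops) for this reason.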
26 | } 27 | 28 | with slim.arg_scope( 29 | [slim.conv2d, slim.separable_conv2d], 30 | weights_regularizer=slim.l2_regularizer(weight_decay), 31 | weights_initializer=slim.variance_scaling_initializer(), 32 | activation_fn=activation_fn, 33 | normalizer_fn=slim.batch_norm if use_batch_norm else None, 34 | normalizer_params=batch_norm_params): 35 | with slim.arg_scope([slim.batch_norm], **batch_norm_params): 36 | with slim.arg_scope([slim.max_pool2d], padding='SAME') as arg_sc: 37 | return arg_sc 38 | 39 | 40 | @slim.add_arg_scope 41 | def atrous_spatial_pyramid_pooling(net, scope, is_training, fea_map_size, aspp_rates=[6,12,18], depth=256, reuse=None): 42 | """ 43 | ASPP consists of (a) one 1×1 convolution and three 3×3 convolutions with rates = (6, 12, 18) when output stride = 16 44 | (all with 256 filters and batch normalization), and (b) the image-level features as described in https://arxiv.org/abs/1706.05587 45 | :param net: tensor of shape [BATCH_SIZE, WIDTH, HEIGHT, DEPTH] 46 | :param scope: scope name of the aspp layer 47 | :return: network layer with aspp applyed to it. 48 | """ 49 | with tf.variable_scope(scope, reuse=reuse): 50 | 51 | 52 | # apply global average pooling 53 | image_level_features = tf.reduce_mean(net, [1, 2], name='image_level_global_pool', keepdims=True) 54 | image_level_features = slim.conv2d(image_level_features, depth, [1, 1], scope="image_level_conv_1x1") 55 | image_level_features = tf.image.resize_bilinear(image_level_features, 56 | (fea_map_size[0], fea_map_size[1]), 57 | align_corners=True) 58 | 59 | 60 | at_pool1x1 = slim.conv2d(net, depth, [1, 1], scope="conv_1x1_0_0") 61 | at_pool3x3_1 = slim.conv2d(net, depth, [3, 3], scope="conv_3x3_1_0", rate=aspp_rates[0]) 62 | at_pool3x3_2 = slim.conv2d(net, depth, [3, 3], scope="conv_3x3_2_0", rate=aspp_rates[1]) 63 | at_pool3x3_3 = slim.conv2d(net, depth, [3, 3], scope="conv_3x3_3_0", rate=aspp_rates[2]) 64 | 65 | net = tf.concat((image_level_features, at_pool1x1, at_pool3x3_1, at_pool3x3_2, at_pool3x3_3), 66 | axis=3, 67 | name="concat") 68 | net = slim.conv2d(net, depth, [1, 1], scope="conv_1x1_output") 69 | 70 | return net 71 | 72 | 73 | def mdr_net(inputs, args, image_size, is_training, reuse): 74 | 75 | # mean subtraction normalization 76 | inputs = inputs - [_R_MEAN, _G_MEAN, _B_MEAN] 77 | 78 | if "resnet" in args.cnn_model: 79 | # inputs has shape - Original: [batch, height, width, 3] 80 | with slim.arg_scope(resnet_utils.resnet_arg_scope(args.l2_regularizer, 81 | is_training, 82 | args.batch_norm_decay, 83 | args.batch_norm_epsilon)): 84 | resnet = getattr(resnet_v2, args.cnn_model) 85 | net, end_points = resnet(inputs, 86 | multi_grid=args.multi_grid, 87 | output_stride=args.output_stride, 88 | global_pool=False, 89 | num_classes=None, 90 | reuse=reuse) 91 | lower_level_features = end_points[args.cnn_model + '/block1/unit_3/bottleneck_v2/conv1'] 92 | # low_level_features = end_points[args.cnn_model + 'block1/unit_2/bottleneck_v1/conv3'] 93 | elif "vgg" in args.cnn_model: 94 | with slim.arg_scope(vgg.vgg_arg_scope(args.l2_regularizer, 95 | is_training, 96 | args.batch_norm_decay, 97 | args.batch_norm_epsilon)): 98 | net, end_points = vgg.vgg_16(inputs, 99 | multi_grid=args.multi_grid, 100 | output_stride=args.output_stride, 101 | reuse=reuse) 102 | lower_level_features = end_points[args.cnn_model + '/pool2'] 103 | else: 104 | raise NameError("cnn_model must contain resnet or vgg!") 105 | 106 | feature_map_size = [int((sz-1)/args.output_stride+1) for sz in image_size] 107 | 108 | arg_sc = 
mdr_arg_scope(args.l2_regularizer, 109 | is_training, 110 | args.batch_norm_decay, 111 | args.batch_norm_epsilon) 112 | 113 | with slim.arg_scope(arg_sc): 114 | with tf.variable_scope("MDR_Net", reuse=reuse): 115 | 116 | encoder_output = atrous_spatial_pyramid_pooling(net, 117 | "ASPP_layer", 118 | is_training, 119 | feature_map_size, 120 | args.aspp_rates, 121 | depth=256, 122 | reuse=reuse) 123 | 124 | with tf.variable_scope("decoder", reuse=reuse): 125 | decoder_depth_1 = 256 126 | if args.decoding_at_image_size: 127 | decoder_depth_2 = 16 128 | else: 129 | decoder_depth_2 = 1 130 | lower_level_feature_depth = 48 131 | with tf.variable_scope("lower_level_features"): 132 | lower_level_features = slim.conv2d(lower_level_features, 133 | lower_level_feature_depth, 134 | [1, 1], 135 | stride=1, 136 | scope='conv_1x1') 137 | lower_level_features_size = tf.shape(lower_level_features)[1:3] 138 | with tf.variable_scope("upsampling_logits_1"): 139 | net = tf.image.resize_bilinear(encoder_output, 140 | lower_level_features_size, 141 | name='upsample_1', 142 | align_corners=True) 143 | net = tf.concat([net, lower_level_features], axis=3, name='concat') 144 | 145 | num_convs = 2 146 | decoder_features = slim.repeat(net, 147 | num_convs, 148 | slim.conv2d, 149 | decoder_depth_1, 150 | 3, 151 | scope='decoder_conv1') 152 | 153 | 154 | with tf.variable_scope("upsampling_logits_2"): 155 | if args.decoding_at_image_size: 156 | decoder_features = slim.conv2d(decoder_features, 157 | decoder_depth_2, 158 | [3, 3], 159 | scope='decoder_conv2') 160 | net = tf.image.resize_bilinear(decoder_features, 161 | image_size, 162 | name='upsample_2', 163 | align_corners=True) 164 | logits = slim.conv2d(net, 165 | 1, 166 | [1, 1], 167 | activation_fn=None, 168 | normalizer_fn=None, 169 | scope='conv_logits') 170 | else: 171 | decoder_features = slim.conv2d(decoder_features, 172 | decoder_depth_2, 173 | [1, 1], 174 | activation_fn=None, 175 | normalizer_fn=None, 176 | scope='conv_logits') 177 | logits = tf.image.resize_bilinear(decoder_features, 178 | image_size, 179 | name='upsample_2', 180 | align_corners=True) 181 | return logits 182 | 183 | -------------------------------------------------------------------------------- /test_nyu_depth_v2.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | # only keep warnings and errors 4 | import os 5 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1' 6 | import json 7 | import numpy as np 8 | import argparse 9 | import time 10 | import tensorflow as tf 11 | slim = tf.contrib.slim 12 | from dataloader import MDEdataloader 13 | import network 14 | import cv2 15 | from metrics import compute_metrics_for_single_map 16 | 17 | 18 | parser = argparse.ArgumentParser(description="Evaluation code for nyu_depth_v2 of " 19 | "Regression for Depth for Monocular Depth Prediction.") 20 | parser.add_argument("--output_stride", 21 | type=int, 22 | default=16, 23 | help="output stride for testing.") 24 | ######################################################################################################################## 25 | # checkpoint path to restore trained model from, and the process id of the model 26 | parser.add_argument("--checkpoint_path", 27 | type=str, 28 | default="./checkpoint/", 29 | help="path to a specific checkpoint to load.") 30 | parser.add_argument("--process_id_for_evaluation", 31 | type=int, 32 | default=12582, 33 | help="process_id_of_the_trained_model.") 34 | 35 | 
test_args = parser.parse_args() 36 | 37 | test_file = "./filenames/nyu_depth_v2_test_654.txt" 38 | dataset = "nyu_depth_v2" 39 | 40 | # eigen's crop for evaluation 41 | crop = [-460, -20, 25, 617] 42 | 43 | process_str_id = str(test_args.process_id_for_evaluation) 44 | CKPT_PATH = os.path.join(test_args.checkpoint_path, dataset, process_str_id) 45 | result_path = os.path.join(CKPT_PATH, 'result/') 46 | if not os.path.exists(result_path): 47 | print("CheckPoint folder:", result_path) 48 | os.makedirs(result_path) 49 | os.makedirs(result_path+'/preds') 50 | 51 | with open(CKPT_PATH+'/args'+process_str_id+'.json', 'r') as fp: 52 | train_args = json.load(fp) 53 | 54 | 55 | class Dotdict(dict): 56 | """dot.notation access to dictionary attributes""" 57 | __getattr__ = dict.get 58 | __setattr__ = dict.__setitem__ 59 | __delattr__ = dict.__delitem__ 60 | 61 | train_args = Dotdict(train_args) 62 | 63 | train_args.aspp_rates = list(map(lambda r: int(r*train_args.output_stride/test_args.output_stride), 64 | train_args.aspp_rates)) 65 | train_args.output_stride = test_args.output_stride 66 | 67 | 68 | with tf.Graph().as_default(): 69 | # Dataloader 70 | dataloader = MDEdataloader(dataset, 71 | 1, 1, 1, 72 | test_file=test_file) 73 | 74 | test_dataset = dataloader.test_dataset 75 | print("total number of test samples: {}".format(dataloader.num_test_samples)) 76 | 77 | iterator = test_dataset.make_one_shot_iterator() 78 | image_ts, depth_ts, im_size_ts = iterator.get_next() 79 | 80 | logits_ts = network.mdr_net(image_ts, 81 | train_args, 82 | [dataloader.INPUT_HEIGHT, dataloader.INPUT_WIDTH], 83 | is_training=False, 84 | reuse=False) 85 | 86 | pd_normlized_depth_ts = tf.nn.sigmoid(logits_ts) 87 | pd_depth_map_ts = dataloader.MAX_DEPTH * pd_normlized_depth_ts 88 | 89 | variables_to_restore = slim.get_variables_to_restore() 90 | restorer = tf.train.Saver(variables_to_restore) 91 | with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess: 92 | sess.run(tf.local_variables_initializer()) 93 | sess.run(tf.global_variables_initializer()) 94 | ckpt = tf.train.get_checkpoint_state(CKPT_PATH + '/') 95 | if ckpt and ckpt.model_checkpoint_path: 96 | restorer.restore(sess, ckpt.model_checkpoint_path) 97 | else: 98 | raise NameError("Trained model with process id " + process_str_id + "may not exist") 99 | print("Model checkpoits for process id " + process_str_id + " restored!") 100 | 101 | times = np.zeros(dataloader.num_test_samples, np.float32) 102 | 103 | ABS_REL = np.zeros(dataloader.num_test_samples, np.float32) 104 | SQ_REL = np.zeros(dataloader.num_test_samples, np.float32) 105 | RMSE = np.zeros(dataloader.num_test_samples, np.float32) 106 | RMSE_log = np.zeros(dataloader.num_test_samples, np.float32) 107 | Log10 = np.zeros(dataloader.num_test_samples, np.float32) 108 | ACC1 = np.zeros(dataloader.num_test_samples, np.float32) 109 | ACC2 = np.zeros(dataloader.num_test_samples, np.float32) 110 | ACC3 = np.zeros(dataloader.num_test_samples, np.float32) 111 | 112 | for step in range(0, dataloader.num_test_samples): 113 | start_time = time.time() 114 | gt_depth, im_size, depth_map = sess.run([depth_ts, 115 | im_size_ts, 116 | pd_depth_map_ts]) 117 | times[step] = time.time() - start_time 118 | 119 | 120 | im_size = im_size[0] 121 | resized_depth_map = cv2.resize(depth_map[0], 122 | (im_size[1], im_size[0]), 123 | interpolation=cv2.INTER_LINEAR) 124 | saved_depth_map = resized_depth_map*256 125 | saved_depth_map = saved_depth_map.astype(np.uint16) 126 | cv2.imwrite(result_path + '/preds/pred_' + 
'%03d'%(step + 1) + '.png', saved_depth_map) 127 | 128 | 129 | ABS_REL[step], SQ_REL[step], RMSE[step], RMSE_log[step], \ 130 | Log10[step], ACC1[step], ACC2[step], ACC3[step] \ 131 | = compute_metrics_for_single_map(gt_depth[0, crop[0]:crop[1], crop[2]:crop[3], 0], 132 | resized_depth_map[crop[0]:crop[1], crop[2]:crop[3]], 133 | cap=10) 134 | 135 | print_string = 's/example: {:2.4f} \n |abs_rel: {:2.3f} |sq_rel: {:2.3f} ' \ 136 | '|rmse: {:2.3f} |rmse_log: {:2.3f} |log10: {:2.3f} ' \ 137 | '|acc1: {:.3f} |acc2: {:.3f} |acc3: {:.3f}' 138 | print(print_string.format(times.mean(), 139 | ABS_REL.mean(), 140 | SQ_REL.mean(), 141 | RMSE.mean(), 142 | RMSE_log.mean(), 143 | Log10.mean(), 144 | ACC1.mean(), 145 | ACC2.mean(), 146 | ACC3.mean())) 147 | 148 | f = open(result_path+'metrics.txt', 'w+') 149 | print(print_string.format(times.mean(), 150 | ABS_REL.mean(), 151 | SQ_REL.mean(), 152 | RMSE.mean(), 153 | RMSE_log.mean(), 154 | Log10.mean(), 155 | ACC1.mean(), 156 | ACC2.mean(), 157 | ACC3.mean()), file=f) 158 | f.close() 159 | 160 | 161 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | # only keep warnings and errors 4 | import os 5 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1' 6 | import json 7 | import numpy as np 8 | import argparse 9 | import time 10 | import tensorflow as tf 11 | slim = tf.contrib.slim 12 | from dataloader import MDEdataloader 13 | from utils import download_cnn_checkpoint_if_necessary, assign_to_device, average_gradients 14 | import network 15 | from losses import compute_depth_loss, compute_gradient_loss 16 | from metrics import compute_metrics_for_multi_maps 17 | 18 | parser = argparse.ArgumentParser(description="Monocular Depth Prediction Using Deeplab V3 +.") 19 | ######################################################################################################################## 20 | # dataset arguments 21 | parser.add_argument("--dataset", 22 | type=str, 23 | default="nyu_depth_v2", 24 | help="dataset to train on nyu_depth_v2.") 25 | parser.add_argument("--train_file", 26 | type=str, 27 | default="./filenames/nyu_depth_v2_train_even.txt", 28 | help="path to the train filenames text file.") 29 | parser.add_argument("--val_file", 30 | type=str, 31 | default="./filenames/nyu_depth_v2_val_2k.txt", 32 | help="path to the validation filenames text file.") 33 | 34 | ######################################################################################################################## 35 | # network settings 36 | parser.add_argument("--cnn_model", 37 | type=str, 38 | default="resnet_v2_50", 39 | choices=["resnet_v2_50", "resnet_v2_101", "vgg_16"], 40 | help="CNN model to use as feature extractor. 
Choose one of resnet or vgg variants.") 41 | parser.add_argument("--decoding_at_image_size", 42 | action='store_true', 43 | default=False, 44 | help="Using original image in decoder.") 45 | parser.add_argument("--output_stride", 46 | type=int, 47 | default=16, 48 | help="Total output stride") 49 | parser.add_argument("--multi_grid", 50 | nargs="+", 51 | type=int, 52 | default=[1, 2, 4], 53 | help="Multi grid atrous rates") 54 | parser.add_argument("--aspp_rates", 55 | nargs="+", 56 | type=int, 57 | default=[4, 8, 12], 58 | help="Atrous Spatial Pyramid Pooling rates.") 59 | 60 | ######################################################################################################################## 61 | # regression loss argument 62 | parser.add_argument("--loss_depth_norm", 63 | type=str, 64 | default="berhu", 65 | help="norm for loss on depth, l1, l2 or berhu.") 66 | parser.add_argument("--using_log_depth", 67 | action='store_true', 68 | default=False, 69 | help="compute depth loss in log space.") 70 | 71 | parser.add_argument("--loss_gradient_magnitude_norm", 72 | type=str, 73 | default="l1", 74 | help="norm for loss on depth gradient magnitude, l1, l2 or berhu.") 75 | 76 | parser.add_argument("--using_log_gradient_magnitude", 77 | action='store_true', 78 | default=False, 79 | help="compute gradient magnitude loss in log space.") 80 | 81 | parser.add_argument("--loss_gradient_magnitude_weight", 82 | type=float, 83 | default=0.0, 84 | help="weight for loss on depth gradient magnitude.") 85 | 86 | parser.add_argument("--loss_gradient_direction_norm", 87 | type=str, 88 | default="l1", 89 | help="norm for loss on depth gradient direction, l1, l2 or berhu.") 90 | 91 | parser.add_argument("--loss_gradient_direction_weight", 92 | type=float, 93 | default=0.0, 94 | help="weight for loss on depth gradient direction.") 95 | 96 | parser.add_argument("--loss_normal_weight", 97 | type=float, 98 | default=0.0, 99 | help="weight for loss on depth normal.") 100 | 101 | 102 | ######################################################################################################################## 103 | # training hyperparameters 104 | parser.add_argument("--batch_size", 105 | type=int, 106 | default=8, 107 | help="batch size per GPU.") 108 | parser.add_argument("--num_epochs", 109 | type=int, 110 | default=20, 111 | help="number of epochs.") 112 | parser.add_argument("--learning_rate", 113 | type=float, 114 | default=1e-4, 115 | help="initial learning rate.") 116 | parser.add_argument("--num_gpus", 117 | type=int, 118 | default=1, 119 | help="number of GPUs to use for training.") 120 | parser.add_argument("--num_threads", 121 | type=int, 122 | default=4, 123 | help='number of threads to use for data loading.') 124 | parser.add_argument("--batch_norm_epsilon", 125 | type=float, 126 | default=1e-5, 127 | help="batch norm epsilon argument for batch normalization.") 128 | parser.add_argument('--batch_norm_decay', 129 | type=float, 130 | default=0.9997, 131 | help='batch norm decay argument for batch normalization.') 132 | parser.add_argument("--l2_regularizer", 133 | type=float, 134 | default=1e-4, 135 | help="l2 regularizer parameter.") 136 | 137 | ######################################################################################################################## 138 | # Imagenet-pretrained model checkpoint path, checkpoint path to save to and restore from, tensorboard log folder and 139 | # process id for further model refining 140 | parser.add_argument("--pretrained_ckpts_path", 141 | type=str, 142 | 
default="./nets/checkpoints/", 143 | help="path to imagenet-pretrained resnet checkpoint to load.") 144 | parser.add_argument("--checkpoint_path", 145 | type=str, 146 | default="./checkpoint/", 147 | help="path to a specific checkpoint to load.") 148 | parser.add_argument("--process_id_for_refining", 149 | type=int, 150 | default=None, 151 | help='process_id_for_further_refining_the_model.') 152 | 153 | args = parser.parse_args() 154 | 155 | 156 | 157 | def main(_): 158 | process_str_id = str(args.process_id_for_refining if args.process_id_for_refining else os.getpid()) 159 | CKPT_PATH = os.path.join(args.checkpoint_path, args.dataset, process_str_id) 160 | if not os.path.exists(CKPT_PATH): 161 | print("Checkpoint folder:", CKPT_PATH) 162 | os.makedirs(CKPT_PATH) 163 | with open(CKPT_PATH + '/args' + str(os.getpid()) +'.json', 'w') as fp: 164 | json.dump(args.__dict__, fp, sort_keys=True, indent=4) 165 | 166 | download_cnn_checkpoint_if_necessary(args.pretrained_ckpts_path, args.cnn_model) 167 | 168 | 169 | with tf.Graph().as_default(), tf.device('/cpu:0'): 170 | 171 | # Dataloader 172 | dataloader = MDEdataloader(args.dataset, 173 | args.num_threads, 174 | args.batch_size * args.num_gpus, 175 | args.num_epochs, 176 | train_file=args.train_file, 177 | val_file=args.val_file) 178 | training_dataset = dataloader.train_dataset 179 | validation_dataset = dataloader.val_dataset 180 | print("total number of samples: {}".format(dataloader.num_train_samples)) 181 | 182 | # A feedable iterator is defined by a handle placeholder and its structure. We could use the `output_types` and 183 | # `output_shapes` properties of either `training_dataset` or `validation_dataset` here, because they have 184 | # identical structure. 185 | handle = tf.placeholder(tf.string, shape=[]) 186 | 187 | iterator = tf.data.Iterator.from_string_handle(handle, 188 | training_dataset.output_types, 189 | training_dataset.output_shapes) 190 | batch_images_ts, batch_depth_ts, max_depth_ts = iterator.get_next() 191 | 192 | # You can use feedable iterators with a variety of different kinds of iterator 193 | # (such as one-shot and initializable iterators). 
194 | training_iterator = training_dataset.make_initializable_iterator() 195 | validation_iterator = validation_dataset.make_initializable_iterator() 196 | 197 | is_training_ts = tf.placeholder(tf.bool, shape=[]) 198 | 199 | 200 | # OPTIMIZER 201 | steps_per_epoch = np.ceil(dataloader.num_train_samples/args.batch_size/args.num_gpus).astype(np.int32) 202 | num_total_steps = args.num_epochs * steps_per_epoch 203 | print("total number of steps: {}".format(num_total_steps)) 204 | start_learning_rate = args.learning_rate 205 | 206 | with tf.variable_scope("optimizer_vars"): 207 | global_step = tf.Variable(0, trainable=False) 208 | learning_rate = tf.train.exponential_decay(start_learning_rate, global_step, 10000, 0.9, staircase=True) 209 | optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate) 210 | 211 | tower_pd_depth_maps = [] 212 | tower_total_losses = [] 213 | tower_depth_losses = [] 214 | tower_grad_magni_losses = [] 215 | tower_grad_direc_losses = [] 216 | tower_normal_losses = [] 217 | 218 | tower_grads = [] 219 | reuse_vars = False 220 | for i in range(args.num_gpus): 221 | #with tf.device('/gpu:%d' % i): 222 | with tf.device(assign_to_device('/gpu:{}'.format(i), ps_device='/cpu:0')): 223 | logits_ts = tf.cond(is_training_ts, 224 | true_fn=lambda: network.mdr_net( 225 | batch_images_ts[i * args.batch_size: (i + 1)*args.batch_size], 226 | args, [dataloader.INPUT_HEIGHT, dataloader.INPUT_WIDTH], 227 | is_training=True, reuse=reuse_vars), 228 | false_fn=lambda: network.mdr_net( 229 | batch_images_ts[i * args.batch_size: (i + 1)*args.batch_size], 230 | args, [dataloader.INPUT_HEIGHT, dataloader.INPUT_WIDTH], 231 | is_training=False, 232 | reuse=True)) 233 | 234 | pd_normlized_depth_ts = tf.nn.sigmoid(logits_ts) 235 | pd_depth_maps = max_depth_ts[i * args.batch_size: (i + 1)*args.batch_size] * pd_normlized_depth_ts 236 | tower_pd_depth_maps.append(pd_depth_maps) 237 | 238 | loss_depth = compute_depth_loss(batch_depth_ts[i * args.batch_size: (i + 1)*args.batch_size], 239 | pd_depth_maps, 240 | args.loss_depth_norm, 241 | args.using_log_depth) 242 | tower_depth_losses.append(loss_depth) 243 | 244 | loss_grad_magni, loss_grad_direc, loss_normal = \ 245 | compute_gradient_loss(batch_depth_ts[i * args.batch_size: (i + 1) * args.batch_size], 246 | pd_depth_maps, 247 | args.using_log_gradient_magnitude, 248 | args.loss_gradient_magnitude_norm, 249 | args.loss_gradient_direction_norm) 250 | tower_grad_magni_losses.append(loss_grad_magni) 251 | tower_grad_direc_losses.append(loss_grad_direc) 252 | tower_normal_losses.append(loss_normal) 253 | 254 | 255 | 256 | regularization_loss = tf.add_n(tf.losses.get_regularization_losses()) 257 | loss = loss_depth + \ 258 | args.loss_gradient_magnitude_weight*loss_grad_magni + \ 259 | args.loss_gradient_direction_weight *loss_grad_direc + \ 260 | args.loss_normal_weight *loss_normal + \ 261 | regularization_loss 262 | tower_total_losses.append(loss) 263 | 264 | reuse_vars = True 265 | 266 | with tf.variable_scope("optimizer_vars"): 267 | grads = optimizer.compute_gradients(loss) 268 | tower_grads.append(grads) 269 | 270 | pd_depth_maps_ts = tf.concat(tower_pd_depth_maps, axis=0) 271 | 272 | 273 | depth_loss = tf.reduce_mean(tower_depth_losses) 274 | grad_magni_loss = tf.reduce_mean(tower_grad_magni_losses) 275 | grad_direc_loss = tf.reduce_mean(tower_grad_direc_losses) 276 | normal_loss = tf.reduce_mean(tower_normal_losses) 277 | 278 | mean_grads = average_gradients(tower_grads) 279 | 280 | update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) 281 | 
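# Batch norm moving-average updates registered by mdr_arg_scope / vgg_arg_scope live in
# tf.GraphKeys.UPDATE_OPS; running them as control dependencies of the train op keeps the
# statistics in sync with training.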
with tf.control_dependencies(update_ops): 282 | with tf.variable_scope("optimizer_vars"): 283 | train_step = optimizer.apply_gradients(mean_grads, global_step=global_step) 284 | 285 | 286 | # make summary 287 | tf.summary.scalar('depth_loss', depth_loss) 288 | tf.summary.scalar('grad_magni_loss', grad_magni_loss) 289 | tf.summary.scalar('grad_direc_loss', grad_direc_loss) 290 | tf.summary.scalar('normal_loss', normal_loss) 291 | 292 | tf.summary.image('image', batch_images_ts) 293 | tf.summary.image("gt_depth", batch_depth_ts) 294 | tf.summary.image("pd_depth", pd_depth_maps_ts) 295 | 296 | 297 | # COUNT PARAMS 298 | total_num_parameters = 0 299 | total_num_trainable_parameters = 0 300 | for variable in tf.global_variables(): 301 | if variable in tf.trainable_variables(): 302 | total_num_trainable_parameters += np.array(variable.get_shape().as_list()).prod() 303 | total_num_parameters += np.array(variable.get_shape().as_list()).prod() 304 | elif variable in tf.global_variables(scope=args.cnn_model) \ 305 | or variable in tf.global_variables(scope="MDR_Net"): 306 | total_num_parameters += np.array(variable.get_shape().as_list()).prod() 307 | print("number of trainable parameters: {}".format(total_num_trainable_parameters)) 308 | print("number of parameters: {}".format(total_num_parameters)) 309 | 310 | # Put all summary ops into one op. Produces string when you run it. 311 | merged_summary_op = tf.summary.merge_all() 312 | 313 | if args.process_id_for_refining: 314 | variables_to_restore = slim.get_variables_to_restore( 315 | exclude=["optimizer_vars"]) 316 | elif "resnet" in args.cnn_model: 317 | variables_to_restore = slim.get_variables_to_restore( 318 | exclude=[args.cnn_model + "/logits", "optimizer_vars", "MDR_Net"]) 319 | elif "vgg" in args.cnn_model: 320 | variables_to_restore = slim.get_variables_to_restore( 321 | exclude=[args.cnn_model + "/fc6", args.cnn_model + "/fc7", args.cnn_model + "/fc8", 322 | args.cnn_model + "/conv1/conv1_1/BatchNorm", 323 | args.cnn_model + "/conv1/conv1_2/BatchNorm", 324 | args.cnn_model + "/conv2/conv2_1/BatchNorm", 325 | args.cnn_model + "/conv2/conv2_2/BatchNorm", 326 | args.cnn_model + "/conv3/conv3_1/BatchNorm", 327 | args.cnn_model + "/conv3/conv3_2/BatchNorm", 328 | args.cnn_model + "/conv3/conv3_3/BatchNorm", 329 | args.cnn_model + "/conv4/conv4_1/BatchNorm", 330 | args.cnn_model + "/conv4/conv4_2/BatchNorm", 331 | args.cnn_model + "/conv4/conv4_3/BatchNorm", 332 | args.cnn_model + "/conv5/conv5_1/BatchNorm", 333 | args.cnn_model + "/conv5/conv5_2/BatchNorm", 334 | args.cnn_model + "/conv5/conv5_3/BatchNorm", 335 | "optimizer_vars", "MDR_Net"]) 336 | else: 337 | raise NameError("Process id of a trained model must be provided or cnn_model_name must contain resnet or vgg!") 338 | 339 | 340 | # Add ops to restore all the variables. 341 | restorer = tf.train.Saver(variables_to_restore) 342 | saver = tf.train.Saver() 343 | 344 | with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess: 345 | # Create the summary writer -- to write all the tboard_log 346 | # into a specified file. This file can be later read by tensorboard. 347 | train_writer = tf.summary.FileWriter(CKPT_PATH + "/train", sess.graph) 348 | validation_writer = tf.summary.FileWriter(CKPT_PATH + "/validation") 349 | # Create a saver. 
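# Initialize all variables first; the restorer below then overwrites either the pretrained
# backbone weights or, when refining, the previously trained weights from the checkpoint.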
350 | sess.run(tf.local_variables_initializer()) 351 | sess.run(tf.global_variables_initializer()) 352 | 353 | if args.process_id_for_refining: 354 | ckpt = tf.train.get_checkpoint_state(CKPT_PATH + '/') 355 | restorer.restore(sess, ckpt.model_checkpoint_path) 356 | print("Model checkpoits for process id " + process_str_id + " restored!") 357 | else: 358 | # load resnet checkpoints 359 | try: 360 | restorer.restore(sess, args.pretrained_ckpts_path + args.cnn_model + '/' + args.cnn_model + ".ckpt") 361 | print("Model checkpoits for " + args.cnn_model + " restored!") 362 | except FileNotFoundError: 363 | print("CNN checkpoints not found. Please download " + args.resnet_model + 364 | " model checkpoints from: https://github.com/tensorflow/models/tree/master/research/slim") 365 | 366 | # The `Iterator.string_handle()` method returns a tensor that can be evaluated 367 | # and used to feed the `handle` placeholder. 368 | training_handle = sess.run(training_iterator.string_handle()) 369 | validation_handle = sess.run(validation_iterator.string_handle()) 370 | 371 | sess.run(training_iterator.initializer) 372 | 373 | 374 | train_steps_before_eval = 1000 375 | validation_steps = 10 376 | saving_steps = 5000 377 | start_step = global_step.eval(session=sess) 378 | start_time = time.time() 379 | before_op_time = start_time 380 | training_average_loss = 0 381 | for step in range(start_step, num_total_steps): 382 | _, global_step_np, train_loss, summary_string = sess.run([train_step, 383 | global_step, 384 | depth_loss, 385 | merged_summary_op], 386 | feed_dict={is_training_ts: True, 387 | handle: training_handle}) 388 | 389 | if step % 10 == 0: 390 | train_writer.add_summary(summary_string, global_step_np) 391 | training_average_loss += train_loss 392 | 393 | 394 | if (step+1)%train_steps_before_eval==0: 395 | training_average_loss /= train_steps_before_eval 396 | duration = time.time() - before_op_time 397 | examples_per_sec = train_steps_before_eval*args.batch_size*args.num_gpus / duration 398 | time_sofar = (time.time() - start_time) / 3600 399 | training_time_left = (num_total_steps / step - 1.0) * time_sofar 400 | 401 | print_string = 'Batch {:>6d}| training loss: {:8.5f} |examples/s: {:4.2f} | time elapsed: {:.2f}h |' \ 402 | ' time left: {:.2f}h' 403 | print(print_string.format(step+1, 404 | training_average_loss, 405 | examples_per_sec, 406 | time_sofar, 407 | training_time_left)) 408 | training_average_loss = 0 409 | 410 | sess.run(validation_iterator.initializer) 411 | validation_average_loss = 0 412 | abs_rel_average = 0 413 | sq_rel_average = 0 414 | rmse_average = 0 415 | rmse_log_average = 0 416 | log10_average = 0 417 | acc1_average = 0 418 | acc2_average = 0 419 | acc3_average = 0 420 | 421 | for i in range(validation_steps): 422 | pd_depth_maps, depths, val_loss, summary_string = sess.run([pd_depth_maps_ts, 423 | batch_depth_ts, 424 | depth_loss, 425 | merged_summary_op], 426 | feed_dict={is_training_ts: False, 427 | handle: validation_handle}) 428 | validation_average_loss += val_loss 429 | 430 | ABS_REL, SQ_REL, RMSE, RMSE_log, Log10, ACC1, ACC2, ACC3 = \ 431 | compute_metrics_for_multi_maps(depths, pd_depth_maps) 432 | abs_rel_average += ABS_REL 433 | sq_rel_average += SQ_REL 434 | rmse_average += RMSE 435 | rmse_log_average += RMSE_log 436 | log10_average += Log10 437 | acc1_average += ACC1 438 | acc2_average += ACC2 439 | acc3_average += ACC3 440 | 441 | validation_average_loss /= validation_steps 442 | abs_rel_average /= validation_steps 443 | sq_rel_average /= validation_steps 
444 | rmse_average /= validation_steps 445 | rmse_log_average /= validation_steps 446 | log10_average /= validation_steps 447 | acc1_average /= validation_steps 448 | acc2_average /= validation_steps 449 | acc3_average /= validation_steps 450 | print_string = 'Batch {:>6d}| validation loss: {:5.3f} | abs_rel: {:2.3f} | sq_rel: {:2.3f} '\ 451 | '| rmse: {:2.3f} | rmse_log: {:2.3f} |log10: {:2.3f} ' \ 452 | '| acc1: {:.3f} | acc2: {:.3f} |acc3: {:.3f}' 453 | print(print_string.format(step+1, 454 | validation_average_loss, 455 | abs_rel_average, 456 | sq_rel_average, 457 | rmse_average, 458 | rmse_log_average, 459 | log10_average, 460 | acc1_average, 461 | acc2_average, 462 | acc3_average)) 463 | validation_writer.add_summary(summary_string, global_step_np) 464 | before_op_time = time.time() 465 | if step and step % saving_steps == 0: 466 | saver.save(sess, CKPT_PATH +"/model.ckpt", global_step=step) 467 | 468 | saver.save(sess, CKPT_PATH +"/model.ckpt", global_step=num_total_steps) 469 | train_writer.close() 470 | 471 | if __name__ == '__main__': 472 | tf.app.run() 473 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | import urllib 3 | import tarfile 4 | import os 5 | import tensorflow as tf 6 | 7 | # credit: tensorflow 8 | 9 | def download_cnn_checkpoint_if_necessary(checkpoints_path, model_name): 10 | """ 11 | Check if the cnn checkpoints are already downloaded, if not download it 12 | :param cnn_checkpoints_path: string: path where the properly cnn checkpoint files should be found 13 | :param model_name: one of resnet_v2_50, resnet_v2_101, resnet_v2_152, vgg_16 or vgg_19 14 | :return: None 15 | """ 16 | resnet_checkpoints_path = checkpoints_path+model_name+'/' 17 | if not os.path.exists(resnet_checkpoints_path): 18 | # create the path and download the resnet checkpoints 19 | os.mkdir(resnet_checkpoints_path) 20 | if "resnet" in model_name: 21 | filename = model_name + "_2017_04_14.tar.gz" 22 | elif "vgg" in model_name: 23 | filename = model_name + "_2016_08_28.tar.gz" 24 | else: 25 | raise NameError("model_name must contain resnet or vgg!") 26 | 27 | url = "http://download.tensorflow.org/models/" + filename 28 | full_file_path = os.path.join(resnet_checkpoints_path, filename) 29 | urllib.request.urlretrieve(url, full_file_path) 30 | thetarfile = tarfile.open(full_file_path, "r:gz") 31 | thetarfile.extractall(path=resnet_checkpoints_path) 32 | thetarfile.close() 33 | print("CNN:", model_name, "successfully downloaded.") 34 | else: 35 | print("CNN checkpoints file successfully found.") 36 | 37 | 38 | 39 | # By default, all variables will be placed on '/gpu:0' 40 | # So we need a custom device function, to assign all variables to '/cpu:0' 41 | # Note: If GPUs are peered, '/gpu:0' can be a faster option 42 | PS_OPS = ['Variable', 'VariableV2', 'AutoReloadVariable'] 43 | 44 | def assign_to_device(device, ps_device='/cpu:0'): 45 | def _assign(op): 46 | node_def = op if isinstance(op, tf.NodeDef) else op.node_def 47 | if node_def.op in PS_OPS: 48 | return "/" + ps_device 49 | else: 50 | return device 51 | 52 | return _assign 53 | 54 | 55 | # Build the function to average the gradients 56 | def average_gradients(tower_grads): 57 | average_grads = [] 58 | for grad_and_vars in zip(*tower_grads): 59 | # Note that each grad_and_vars looks like the following: 60 | # ((grad0_gpu0, var0_gpu0), ... 
, (grad0_gpuN, var0_gpuN)) 61 | grads = [] 62 | for g, _ in grad_and_vars: 63 | # Add 0 dimension to the gradients to represent the tower. 64 | expanded_g = tf.expand_dims(g, 0) 65 | 66 | # Append on a 'tower' dimension which we will average over below. 67 | grads.append(expanded_g) 68 | 69 | # Average over the 'tower' dimension. 70 | grad = tf.concat(grads, 0) 71 | grad = tf.reduce_mean(grad, 0) 72 | 73 | # Keep in mind that the Variables are redundant because they are shared 74 | # across towers. So .. we will just return the first tower's pointer to 75 | # the Variable. 76 | v = grad_and_vars[0][1] 77 | grad_and_var = (grad, v) 78 | average_grads.append(grad_and_var) 79 | return average_grads --------------------------------------------------------------------------------
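A minimal usage sketch for average_gradients (illustrative only; it assumes TensorFlow 1.x and that utils.py is importable as utils, and the gradient values are made up; in train.py the per-tower lists come from optimizer.compute_gradients):

    import tensorflow as tf
    from utils import average_gradients

    # Two towers holding gradients for the same shared variables v0 and v1.
    v0 = tf.Variable(1.0, name='v0')
    v1 = tf.Variable(2.0, name='v1')
    tower_grads = [
        [(tf.constant(0.2), v0), (tf.constant(1.0), v1)],  # tower 0
        [(tf.constant(0.4), v0), (tf.constant(3.0), v1)],  # tower 1
    ]
    mean_grads = average_gradients(tower_grads)  # one (mean_grad, var) pair per variable

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print(sess.run([g for g, _ in mean_grads]))  # approximately [0.3, 2.0]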