├── .gitignore ├── LICENSE ├── LongPresentation.pdf ├── README.md ├── backbone ├── ResNet.py ├── VGG.py └── __init__.py ├── config.py ├── hdfnet.yaml ├── loss ├── HEL.py └── __init__.py ├── module ├── BaseBlocks.py ├── MyModules.py └── __init__.py ├── network ├── HDFNet.py └── __init__.py ├── output └── .gitkeep ├── pyproject.toml ├── test.py ├── test.sh ├── train.py └── utils ├── __init__.py ├── cal_fps.py ├── data ├── __init__.py ├── create_loader_imgs.py ├── create_rgb_datasets_imgs.py ├── create_rgbd_datasets_imgs.py └── data_list │ ├── njud_test_jw.lst │ ├── njud_train_jw.lst │ ├── nlpr_test_jw.lst │ ├── nlpr_train_jw.lst │ └── rgbd_train_jw.lst ├── metric.py ├── misc.py ├── tensor_ops.py └── transforms ├── __init__.py ├── joint_transforms.py └── triple_transforms.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | 3 | /output/* 4 | # except for .gitkeep 5 | !/output/.gitkeep 6 | 7 | # IntelliJ project files 8 | .idea 9 | *.iml 10 | out 11 | gen 12 | ### Python template 13 | # Byte-compiled / optimized / DLL files 14 | __pycache__/ 15 | *.py[cod] 16 | *$py.class 17 | 18 | # C extensions 19 | *.so 20 | 21 | # Distribution / packaging 22 | .Python 23 | build/ 24 | develop-eggs/ 25 | dist/ 26 | downloads/ 27 | eggs/ 28 | .eggs/ 29 | lib/ 30 | lib64/ 31 | parts/ 32 | sdist/ 33 | var/ 34 | wheels/ 35 | pip-wheel-metadata/ 36 | share/python-wheels/ 37 | *.egg-info/ 38 | .installed.cfg 39 | *.egg 40 | MANIFEST 41 | 42 | # PyInstaller 43 | # Usually these files are written by a python script from a template 44 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
45 | *.manifest 46 | *.spec 47 | 48 | # Installer logs 49 | pip-log.txt 50 | pip-delete-this-directory.txt 51 | 52 | # Unit test / coverage reports 53 | htmlcov/ 54 | .tox/ 55 | .nox/ 56 | .coverage 57 | .coverage.* 58 | .cache 59 | nosetests.xml 60 | coverage.xml 61 | *.cover 62 | *.py,cover 63 | .hypothesis/ 64 | .pytest_cache/ 65 | cover/ 66 | 67 | # Translations 68 | *.mo 69 | *.pot 70 | 71 | # Django stuff: 72 | *.log 73 | local_settings.py 74 | db.sqlite3 75 | db.sqlite3-journal 76 | 77 | # Flask stuff: 78 | instance/ 79 | .webassets-cache 80 | 81 | # Scrapy stuff: 82 | .scrapy 83 | 84 | # Sphinx documentation 85 | docs/_build/ 86 | 87 | # PyBuilder 88 | .pybuilder/ 89 | target/ 90 | 91 | # Jupyter Notebook 92 | .ipynb_checkpoints 93 | 94 | # IPython 95 | profile_default/ 96 | ipython_config.py 97 | 98 | # pyenv 99 | # For a library or package, you might want to ignore these files since the code is 100 | # intended to run in multiple environments; otherwise, check them in: 101 | # .python-version 102 | 103 | # pipenv 104 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 105 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 106 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 107 | # install all needed dependencies. 108 | #Pipfile.lock 109 | 110 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 111 | __pypackages__/ 112 | 113 | # Celery stuff 114 | celerybeat-schedule 115 | celerybeat.pid 116 | 117 | # SageMath parsed files 118 | *.sage.py 119 | 120 | # Environments 121 | .env 122 | .venv 123 | env/ 124 | venv/ 125 | ENV/ 126 | env.bak/ 127 | venv.bak/ 128 | 129 | # Spyder project settings 130 | .spyderproject 131 | .spyproject 132 | 133 | # Rope project settings 134 | .ropeproject 135 | 136 | # mkdocs documentation 137 | /site 138 | 139 | # mypy 140 | .mypy_cache/ 141 | .dmypy.json 142 | dmypy.json 143 | 144 | # Pyre type checker 145 | .pyre/ 146 | 147 | # pytype static type analyzer 148 | .pytype/ 149 | 150 | # Cython debug symbols 151 | cython_debug/ 152 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 MY_ 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /LongPresentation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lartpang/HDFNet/455ec8191043ee76e715eb3db6c70949cbeee80f/LongPresentation.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HDFNet 2 | 3 | ![GitHub License](https://img.shields.io/github/license/lartpang/HDFNet?style=flat-square) 4 | ![GitHub last commit](https://img.shields.io/github/last-commit/lartpang/HDFNet?style=flat-square) 5 | ![GitHub issues](https://img.shields.io/github/issues/lartpang/HDFNet?style=flat-square) 6 | ![GitHub stars](https://img.shields.io/github/stars/lartpang/HDFNet?style=flat-square) 7 | [![Arxiv Page](https://img.shields.io/badge/Arxiv-2007.06227-red?style=flat-square)](https://arxiv.org/abs/2007.06227) 8 | 9 | (ECCV 2020) Hierarchical Dynamic Filtering Network for RGB-D Salient Object Detection 10 | 11 | Gitee Mirror: 12 | 13 | > Author: Lart Pang(`lartpang@163.com`) 14 | > 15 | > This is a complete, modular and easily modified code base based on PyTorch, which is suitable for the training and testing of salient object detection models. 
16 | 17 | ```text 18 | @inproceedings{HDFNet-ECCV2020, 19 | author = {Youwei Pang and Lihe Zhang and Xiaoqi Zhao and Huchuan Lu}, 20 | title = {Hierarchical Dynamic Filtering Network for RGB-D Salient Object Detection}, 21 | booktitle = ECCV, 22 | year = {2020} 23 | } 24 | ``` 25 | 26 | **News**: 27 | * The proposed model (HDFNet) is an important baseline of the winning solution in NTIRE 2021 (Depth Guided Image Relighting Challenge) hosted in CVPR 2021 workshop (winner: AICSNTU-MBNet team (Asustek Computer Inc & National Taiwan University)). [[PAPER](https://arxiv.org/pdf/2105.00690.pdf)] [[COOD](https://github.com/weitingchen83/NTIRE2021-Depth-Guided-Image-Relighting-MBNet)] 28 | 29 | **NOTE**: 30 | * In the link below, we provide the results of the two versions (with/without `_STEREO`). 31 | * Specifically, in the file with `_STEREO`, two versions of the STEREO dataset are provided. 32 | * One of them contains 797 pictures, and the other contains 1000 pictures. 33 | * In our paper, the results evaluated on the latter are used, which is exactly what is provided in the file without `_STEREO`. 34 | 35 | [[Results & PretrainedParams (j9qu)](https://pan.baidu.com/s/1hExlf0uZ0kuar99xzpL0Sw)] 36 | 37 | * RGBD-DATASET 38 | * https://github.com/jiwei0921/RGBD-SOD-datasets 39 | * http://dpfan.net/d3netbenchmark/ 40 | 41 | ![image](https://user-images.githubusercontent.com/26847524/87150231-45f15f80-c2e4-11ea-8ce8-fb8588faf5f5.png) 42 | 43 | ![image](https://user-images.githubusercontent.com/26847524/87150259-4e499a80-c2e4-11ea-94d2-1427a7a59bfa.png) 44 | 45 | ![image](https://user-images.githubusercontent.com/26847524/87150301-5acdf300-c2e4-11ea-8bff-2f9178675730.png) 46 | 47 | ![image](https://user-images.githubusercontent.com/26847524/87150362-789b5800-c2e4-11ea-81ea-8c70778efc6a.png) 48 | 49 | ## Repository Details 50 | 51 | * `backbone`: Store some code for backbone networks. 52 | * `loss`: The code of the loss function. 53 | * `module`: The code of important modules. 
54 | * `network`: The code of the network. 55 | * `output`: It saves all results. 56 | * `utils`: Some instrumental code. 57 | * `data/*py`: Some files about creating the dataloader. 58 | * `transforms/*py`: Some operations on data augmentation. 59 | * `metric.py`: max/mean/weighted F-measure, S-measure, E-measure and MAE. (**NOTE: If you find a problem in this part of the code, please notify me in time, thank you.**) 60 | * `misc.py`: Some useful utility functions. 61 | * `tensor_ops.py`: Some operations about tensors. 62 | * `config.py`: Configuration file for model training and testing. 63 | * `train.py`: I think you can understand. 64 | * `test.py` and `test.sh`: These files can evaluate the performance of the model on the specified dataset. And the file `test.sh` is a simple example about how to configure and run `test.py`. 65 | 66 | ## Usage 67 | 68 | ### Environment 69 | 70 | I provided conda environment configuration file (hdfnet.yaml), you can refer to the package version information. 71 | 72 | And you can try `conda env create -f hdfnet.yaml` to create an environment to run our code. 73 | 74 | ### Train your own model 75 | 76 | * Add your own module into the `module`. 77 | * Add your own network into the `network` and import your model in the `network/__init__.py`. 78 | * Modify `config.py`: 79 | * change the dataset path: `datasets_root` 80 | * change items in `arg_config` 81 | * `model` corresponds to the name of the model in `network` 82 | * `suffix`: finally, the form of `_` is used to form the alias of the model of this experiment and all files related to this experiment will be saved to the folder `_` in `output` folder 83 | * `resume`: set it to `False` to train normally 84 | * `data_mode`: set it to `RGBD` or `RGB` for using RGBD SOD datasets or RGB SOD datasets to train mdoel. 85 | * other items, like `lr`, `batch_size` and so on... 
86 | * Run the script: `python train.py` 87 | 88 | If the training process is interrupted, you can use the following strategy to resume the training process. 89 | 90 | * Set `resume` to `True`. 91 | * Run the script `train.py` again. 92 | 93 | ### Evaluate model performance 94 | 95 | There are two ways: 96 | 1. For models that have been trained, you can set `resume` to `True` and run the script `train.py` again. 97 | 2. Use the scripts `test.sh` and `test.py`. The specific method of use can be obtained by executing this command: `python test.py --help`. 98 | 99 | ### Only evaluate generated predictions 100 | 101 | You can use the toolkit released by us: . 102 | 103 | ## Related Works 104 | 105 | * (ECCV 2020 Oral) Suppress and Balance: A Simple Gated Network for Salient Object Detection: https://github.com/Xiaoqi-Zhao-DLUT/GateNet-RGB-Saliency 106 | * (ECCV 2020) A Single Stream Network for Robust and Real-time RGB-D Salient Object Detection: https://github.com/Xiaoqi-Zhao-DLUT/DANet-RGBD-Saliency 107 | * (CVPR 2020) Multi-scale Interactive Network for Salient Object Detection: https://github.com/lartpang/MINet 108 | -------------------------------------------------------------------------------- /backbone/ResNet.py: -------------------------------------------------------------------------------- 1 | # import torchvision.models as models 2 | # import torch.nn as nn 3 | # # https://pytorch.org/docs/stable/torchvision/models.html#id3 4 | # 5 | import torch 6 | import torch.nn as nn 7 | import torch.utils.model_zoo as model_zoo 8 | 9 | model_urls = { 10 | "resnet18": "https://download.pytorch.org/models/resnet18-5c106cde.pth", 11 | "resnet34": "https://download.pytorch.org/models/resnet34-333f7ec4.pth", 12 | "resnet50": "https://download.pytorch.org/models/resnet50-19c8e357.pth", 13 | "resnet101": "https://download.pytorch.org/models/resnet101-5d3b4d8f.pth", 14 | "resnet152": "https://download.pytorch.org/models/resnet152-b121ed2d.pth", 15 | } 16 | 17 | 18 | def 
def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)


def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)


class BasicBlock(nn.Module):
    """Two 3x3 convs with an identity (or 1x1-projected) residual shortcut."""

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        # `downsample` projects the identity when stride/channels change.
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)

        return out


class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck; output channels = planes * expansion (4)."""

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self.conv1 = conv1x1(inplanes, planes)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = conv3x3(planes, planes, stride)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = conv1x1(planes, planes * self.expansion)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)

        return out


class ResNet(nn.Module):
    """Headless ResNet trunk (no avgpool/fc); forward returns the 1/32 feature map.

    Args:
        block: residual block class (BasicBlock or Bottleneck).
        layers: number of blocks per stage, e.g. [3, 4, 6, 3] for ResNet-50.
        zero_init_residual: zero-init the last BN of each residual branch.
    """

    def __init__(self, block, layers, zero_init_residual=False):
        super(ResNet, self).__init__()
        self.inplanes = 64
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)  # 6
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)  # 3

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

        # Zero-initialize the last BN in each residual branch so each block
        # starts as an identity mapping; improves accuracy by 0.2~0.3%
        # according to https://arxiv.org/abs/1706.02677.
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, blocks, stride=1):
        """Build one stage of `blocks` residual blocks; only the first may stride."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                nn.BatchNorm2d(planes * block.expansion),
            )

        layers = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        return x


def _load_pretrained(model, arch):
    """Load the ImageNet checkpoint for `arch` into `model` in place.

    Shared by all resnet* constructors (previously copy-pasted five times).
    Keys absent from this headless trunk (fc, etc.) are filtered out before
    loading. `model_urls` is the module-level URL table defined above.
    """
    pretrained_dict = model_zoo.load_url(model_urls[arch])
    model_dict = model.state_dict()
    # 1. filter out unnecessary keys
    pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
    # 2. overwrite entries in the existing state dict
    model_dict.update(pretrained_dict)
    # 3. load the new state dict
    model.load_state_dict(model_dict)


def resnet18(pretrained=False, **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        _load_pretrained(model, "resnet18")
    return model


def resnet34(pretrained=False, **kwargs):
    """Constructs a ResNet-34 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
    if pretrained:
        _load_pretrained(model, "resnet34")
    return model


def resnet50(pretrained=False, **kwargs):
    """Constructs a ResNet-50 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
    if pretrained:
        _load_pretrained(model, "resnet50")
    return model


def resnet101(pretrained=False, **kwargs):
    """Constructs a ResNet-101 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
    if pretrained:
        _load_pretrained(model, "resnet101")
    return model


def resnet152(pretrained=False, **kwargs):
    """Constructs a ResNet-152 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
    if pretrained:
        _load_pretrained(model, "resnet152")
    return model


def Backbone_ResNet50_in3(pretrained=True):
    """Split an (optionally pretrained) ResNet-50 into five stages at 1/2..1/32 scale."""
    if pretrained:
        print("The backbone model loads the pretrained parameters...")
    net = resnet50(pretrained=pretrained)
    div_2 = nn.Sequential(*list(net.children())[:3])
    div_4 = nn.Sequential(*list(net.children())[3:5])
    div_8 = net.layer2
    div_16 = net.layer3
    div_32 = net.layer4

    return div_2, div_4, div_8, div_16, div_32


def Backbone_ResNet50_in1(pretrained=True):
    """Same as Backbone_ResNet50_in3 but with a 1-channel (depth) stem.

    NOTE: the replacement conv1 is installed AFTER loading pretrained weights,
    so it is randomly initialized by design (the pretrained 3-channel stem
    cannot be reused for single-channel input).
    """
    if pretrained:
        print("The backbone model loads the pretrained parameters...")
    net = resnet50(pretrained=pretrained)
    net.conv1 = nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3, bias=False)
    div_2 = nn.Sequential(*list(net.children())[:3])
    div_4 = nn.Sequential(*list(net.children())[3:5])
    div_8 = net.layer2
    div_16 = net.layer3
    div_32 = net.layer4

    return div_2, div_4, div_8, div_16, div_32


if __name__ == "__main__":
    div_2, div_4, div_8, div_16, div_32 = Backbone_ResNet50_in1()
    indata = torch.rand(4, 1, 320, 320)
    print(div_32)
import torch
import torch.nn as nn

# FIX: `torchvision.models.utils` was removed in torchvision 0.13; the
# canonical location of this helper is `torch.hub`. Keep a fallback for
# very old installations.
try:
    from torch.hub import load_state_dict_from_url
except ImportError:  # ancient torch without torch.hub helper
    from torchvision.models.utils import load_state_dict_from_url

# Export the public constructors defined in this module as well (the original
# list omitted them even though network code imports them directly).
__all__ = [
    "Backbone_VGG_in1",
    "Backbone_VGG_in3",
    "Backbone_VGG_in4",
    "Backbone_VGG19_in1",
    "VGG",
    "make_layers",
    "vgg16",
    "vgg16_bn",
    "vgg19",
    "vgg19_bn",
]

model_urls = {
    "vgg11": "https://download.pytorch.org/models/vgg11-bbd30ac9.pth",
    "vgg13": "https://download.pytorch.org/models/vgg13-c768596a.pth",
    "vgg16": "https://download.pytorch.org/models/vgg16-397923af.pth",
    "vgg19": "https://download.pytorch.org/models/vgg19-dcbb9e9d.pth",
    "vgg11_bn": "https://download.pytorch.org/models/vgg11_bn-6002323d.pth",
    "vgg13_bn": "https://download.pytorch.org/models/vgg13_bn-abd245e5.pth",
    "vgg16_bn": "https://download.pytorch.org/models/vgg16_bn-6c64b313.pth",
    "vgg19_bn": "https://download.pytorch.org/models/vgg19_bn-c79401a0.pth",
}


class VGG(nn.Module):
    """Headless VGG: only the convolutional `features`, no classifier head."""

    def __init__(self, features, num_classes=1000, init_weights=True):
        super(VGG, self).__init__()
        self.features = features

        if init_weights:
            self._initialize_weights()

    def forward(self, x):
        x = self.features(x)
        return x

    def _initialize_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)


def make_layers(cfg, batch_norm=False):
    """Build the VGG feature stack from a config list ("M" = 2x2 max-pool)."""
    layers = []
    in_channels = 3
    for v in cfg:
        if v == "M":
            layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
        else:
            conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
            if batch_norm:
                layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
            else:
                layers += [conv2d, nn.ReLU(inplace=True)]
            in_channels = v
    return nn.Sequential(*layers)


cfgs = {
    "A": [64, "M", 128, "M", 256, 256, "M", 512, 512, "M", 512, 512, "M"],
    "B": [64, 64, "M", 128, 128, "M", 256, 256, "M", 512, 512, "M", 512, 512, "M"],
    "D": [64, 64, "M", 128, 128, "M", 256, 256, 256, "M", 512, 512, 512, "M", 512, 512, 512, "M"],
    "E": [64, 64, "M", 128, 128, "M", 256, 256, 256, 256, "M", 512, 512, 512, 512, "M", 512, 512, 512, 512, "M"],
}


def _vgg(arch, cfg, batch_norm, pretrained, progress, **kwargs):
    """Build a VGG variant and optionally load its ImageNet checkpoint."""
    if pretrained:
        kwargs["init_weights"] = False
    model = VGG(make_layers(cfgs[cfg], batch_norm=batch_norm), **kwargs)
    if pretrained:
        pretrained_dict = load_state_dict_from_url(model_urls[arch], progress=progress)
        model_dict = model.state_dict()
        # 1. filter out unnecessary keys (the classifier head is absent here)
        pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
        # 2. overwrite entries in the existing state dict
        model_dict.update(pretrained_dict)
        # 3. load the new state dict
        model.load_state_dict(model_dict)
    return model


def vgg16(pretrained=False, progress=True, **kwargs):
    r"""VGG 16-layer model (configuration "D")
    `"Very Deep Convolutional Networks For Large-Scale Image Recognition" `_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _vgg("vgg16", "D", False, pretrained, progress, **kwargs)


def vgg16_bn(pretrained=False, progress=True, **kwargs):
    r"""VGG 16-layer model (configuration "D") with batch normalization
    `"Very Deep Convolutional Networks For Large-Scale Image Recognition" `_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _vgg("vgg16_bn", "D", True, pretrained, progress, **kwargs)


def vgg19(pretrained=False, progress=True, **kwargs):
    r"""VGG 19-layer model (configuration "E")
    `"Very Deep Convolutional Networks For Large-Scale Image Recognition" `_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _vgg("vgg19", "E", False, pretrained, progress, **kwargs)


def vgg19_bn(pretrained=False, progress=True, **kwargs):
    r"""VGG 19-layer model (configuration 'E') with batch normalization
    `"Very Deep Convolutional Networks For Large-Scale Image Recognition" `_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _vgg("vgg19_bn", "E", True, pretrained, progress, **kwargs)


def Backbone_VGG_in4():
    """Pretrained VGG-16-BN split into five stages, with a 4-channel (RGB+D) stem.

    NOTE: the replacement first conv is randomly initialized, by design.
    """
    net = vgg16_bn(pretrained=True, progress=True)
    div_1 = nn.Sequential(nn.Conv2d(4, 64, kernel_size=3, padding=1), *list(net.children())[0][1:6])
    div_2 = nn.Sequential(*list(net.children())[0][6:13])
    div_4 = nn.Sequential(*list(net.children())[0][13:23])
    div_8 = nn.Sequential(*list(net.children())[0][23:33])
    div_16 = nn.Sequential(*list(net.children())[0][33:43])
    return div_1, div_2, div_4, div_8, div_16


def Backbone_VGG_in1(pretrained=True):
    """VGG-16-BN split into five stages, with a 1-channel (depth) stem."""
    if pretrained:
        print("The backbone model loads the pretrained parameters...")
    net = vgg16_bn(pretrained=pretrained, progress=True)
    div_1 = nn.Sequential(nn.Conv2d(1, 64, kernel_size=3, padding=1), *list(net.children())[0][1:6])
    div_2 = nn.Sequential(*list(net.children())[0][6:13])
    div_4 = nn.Sequential(*list(net.children())[0][13:23])
    div_8 = nn.Sequential(*list(net.children())[0][23:33])
    div_16 = nn.Sequential(*list(net.children())[0][33:43])
    return div_1, div_2, div_4, div_8, div_16


def Backbone_VGG_in3(pretrained=True):
    """VGG-16-BN split into five stages with the standard 3-channel stem."""
    if pretrained:
        print("The backbone model loads the pretrained parameters...")
    net = vgg16_bn(pretrained=pretrained, progress=True)
    div_1 = nn.Sequential(*list(net.children())[0][0:6])
    div_2 = nn.Sequential(*list(net.children())[0][6:13])
    div_4 = nn.Sequential(*list(net.children())[0][13:23])
    div_8 = nn.Sequential(*list(net.children())[0][23:33])
    div_16 = nn.Sequential(*list(net.children())[0][33:43])
    return div_1, div_2, div_4, div_8, div_16


def Backbone_VGG19_in1(pretrained=True):
    """VGG-19-BN split into five stages, with a 1-channel (depth) stem."""
    if pretrained:
        print("The backbone model loads the pretrained parameters...")
    net = vgg19_bn(pretrained=pretrained, progress=True)
    div_1 = nn.Sequential(nn.Conv2d(1, 64, kernel_size=3, padding=1), *list(net.children())[0][1:6])
    div_2 = nn.Sequential(*list(net.children())[0][6:13])
    div_4 = nn.Sequential(*list(net.children())[0][13:26])
    div_8 = nn.Sequential(*list(net.children())[0][26:39])
    div_16 = nn.Sequential(*list(net.children())[0][39:52])
    return div_1, div_2, div_4, div_8, div_16
# @Time    : 2020/7/8
# @Author  : Lart Pang
# @Email   : lartpang@163.com
# @File    : config.py
# @Project : HDFNet
# @GitHub  : https://github.com/lartpang
import os

__all__ = ["proj_root", "arg_config"]

# FIX: resolve to an absolute path. `os.path.dirname(__file__)` is "" when the
# module is imported from its own directory, which would silently make the
# .lst index paths below relative to the current working directory.
proj_root = os.path.dirname(os.path.abspath(__file__))
datasets_root = "/home/lart/Datasets/"

# RGB SOD datasets
msra10k_path = os.path.join(datasets_root, "Saliency/RGBSOD", "MSRA10K")
ecssd_path = os.path.join(datasets_root, "Saliency/RGBSOD", "ECSSD")
dutomron_path = os.path.join(datasets_root, "Saliency/RGBSOD", "DUT-OMRON")
hkuis_path = os.path.join(datasets_root, "Saliency/RGBSOD", "HKU-IS")
pascals_path = os.path.join(datasets_root, "Saliency/RGBSOD", "PASCAL-S")
dutstr_path = os.path.join(datasets_root, "Saliency/RGBSOD", "DUTS/Train")
dutste_path = os.path.join(datasets_root, "Saliency/RGBSOD", "DUTS/Test")

# RGB-D SOD datasets
lfsd_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "LFSD")
rgbd135_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "RGBD135")
dutrgbdtr_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "DUT-RGBD/Train")
dutrgbdte_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "DUT-RGBD/Test")
sip_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "SIP")
ssd_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "SSD")
stereo797_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "STEREO-797")
stereo1000_path = os.path.join(datasets_root, "Saliency/RGBDSOD", "STEREO-1000")
# Index files shipped with the repo; entries carry no extension (see "prefix").
rgbdtr_path = os.path.join(proj_root, "utils/data/data_list", "rgbd_train_jw.lst")
njudte_path = os.path.join(proj_root, "utils/data/data_list", "njud_test_jw.lst")
nlprte_path = os.path.join(proj_root, "utils/data/data_list", "nlpr_test_jw.lst")

# Configuration ###############################################################
arg_config = {
    # Frequently changed options
    "model": "HDFNet_VGG19",  # name of the model class registered in `network`
    "suffix": "7Datasets",  # joined with `model` to name this experiment's output dir
    "resume": True,  # whether to resume training from a checkpoint
    "use_aux_loss": True,  # whether to use the auxiliary loss
    "save_pre": True,  # whether to keep the final predictions
    "epoch_num": 30,  # number of training epochs
    "lr": 0.005,
    "data_mode": "RGBD",  # 'RGB'/'RGBD': train/test on RGB or RGB-D SOD datasets
    # RGBD
    "rgbd_data": {
        "tr_data_path": rgbdtr_path,
        # "tr_data_path": dutrgbdtr_path,
        "te_data_list": {
            # "dutrgbd": dutrgbdte_path,
            "lfsd": lfsd_path,
            "njud": njudte_path,
            "nlpr": nlprte_path,
            "rgbd135": rgbd135_path,
            "sip": sip_path,
            "ssd": ssd_path,
            "stereo797": stereo797_path,
            "stereo1000": stereo1000_path,
        },
    },
    # RGB
    "rgb_data": {
        "tr_data_path": dutstr_path,
        "te_data_list": {
            "dutomron": dutomron_path,
            "hkuis": hkuis_path,
            "ecssd": ecssd_path,
            "pascals": pascals_path,
            "duts": dutste_path,
        },
    },
    "print_freq": 10,  # >0: log every `print_freq` iterations
    "prefix": (".jpg", ".png"),
    # (img_prefix, gt_prefix): extensions appended to the extension-less
    # entries of the .lst index files.
    "reduction": "mean",  # loss reduction: "mean" or "sum"
    "optim": "sgd_trick",  # custom per-parameter-group learning rates
    "weight_decay": 5e-4,  # use 0.0001 when fine-tuning
    "momentum": 0.9,
    "nesterov": False,
    "lr_type": "poly",
    "lr_decay": 0.9,  # poly
    "batch_size": 4,  # when resuming, keep the same batch size
    "num_workers": 8,  # keep moderate: parallel jobs compete for data-loading bandwidth
    "input_size": 320,
}
import torch.nn.functional as F
from torch import nn


class HEL(nn.Module):
    """Hybrid Enhanced Loss: an edge-aware term plus a region-consistency term.

    Both terms are normalized ratios computed per (N, C) slice; the final loss
    is their sum averaged over batch and channel dimensions.
    """

    def __init__(self):
        super(HEL, self).__init__()
        print("You are using `HEL`!")
        # Guards against division by an all-zero edge/region mask.
        self.eps = 1e-6

    def edge_loss(self, pred, target):
        # A pixel belongs to the edge band iff a 5x5 box filter changes its
        # value, i.e. the local average disagrees with the original target.
        blurred = F.avg_pool2d(target, kernel_size=5, stride=1, padding=2)
        edge = target - blurred
        edge[edge != 0] = 1
        abs_err = (pred - target).abs()
        numerator = (edge * abs_err).sum([2, 3])
        denominator = edge.sum([2, 3]) + self.eps
        return numerator / denominator

    def region_loss(self, pred, target):
        # Encourages predictions near 1 inside the foreground and near 0
        # inside the background, each normalized by the region's area.
        fore_num = (target - target * pred).sum([2, 3])
        fore_den = target.sum([2, 3]) + self.eps
        back_num = ((1 - target) * pred).sum([2, 3])
        back_den = (1 - target).sum([2, 3]) + self.eps
        return fore_num / fore_den + back_num / back_den

    def forward(self, pred, target):
        # Sum per-(N, C) edge and region terms, then reduce to a scalar.
        return (self.edge_loss(pred, target) + self.region_loss(pred, target)).mean()
import torch
from torch import nn

from module.BaseBlocks import BasicConv2d


class DenseLayer(nn.Module):
    def __init__(self, in_C, out_C, down_factor=4, k=4):
        """DenseNet-style block building dense connections inside one feature map.

        Args:
            in_C: channels of the input feature.
            out_C: channels of the fused output feature.
            down_factor: channel-reduction ratio for the narrow dense branch.
            k: number of stacked dense conv stages.
        """
        super(DenseLayer, self).__init__()
        self.k = k
        self.down_factor = down_factor
        mid_C = out_C // self.down_factor

        # 1x1 projection into the narrow dense branch.
        self.down = nn.Conv2d(in_C, mid_C, 1)

        # Stage i consumes the projection plus all i-1 previous stage outputs.
        self.denseblock = nn.ModuleList(
            BasicConv2d(mid_C * i, mid_C, 3, 1, 1) for i in range(1, self.k + 1)
        )

        # Fuses the original input with the final dense-stage output.
        self.fuse = BasicConv2d(in_C + mid_C, out_C, kernel_size=3, stride=1, padding=1)

    def forward(self, in_feat):
        projected = self.down(in_feat)
        stage_outs = []
        for stage in self.denseblock:
            feats = stage(torch.cat((*stage_outs, projected), dim=1))
            stage_outs.append(feats)
        # Skip connection from the unprojected input into the final fusion.
        return self.fuse(torch.cat((in_feat, feats), dim=1))


class DenseTransLayer(nn.Module):
    """Fuses an RGB feature with a same-shaped depth feature and compresses
    the result from in_C to out_C channels via a DenseLayer residual."""

    def __init__(self, in_C, out_C):
        super(DenseTransLayer, self).__init__()
        down_factor = in_C // out_C
        self.fuse_down_mul = BasicConv2d(in_C, in_C, 3, 1, 1)
        self.res_main = DenseLayer(in_C, in_C, down_factor=down_factor)
        self.fuse_main = BasicConv2d(in_C, out_C, kernel_size=3, stride=1, padding=1)

    def forward(self, rgb, depth):
        assert rgb.size() == depth.size()
        fused = self.fuse_down_mul(rgb + depth)
        # Residual refinement before the channel-reducing fusion conv.
        return self.fuse_main(self.res_main(fused) + fused)
class DepthDC3x3_1(nn.Module):
    def __init__(self, in_xC, in_yC, out_C, down_factor=4):
        """Dynamic depth-conditioned 3x3 convolution (dilation 1) via nn.Unfold.

        Args:
            in_xC: channels of the primary input x.
            in_yC: channels of the guidance input y.
            out_C: channels of the final output.
            down_factor: channel-reduction factor inside the kernel generator.
        """
        super(DepthDC3x3_1, self).__init__()
        self.kernel_size = 3
        self.fuse = nn.Conv2d(in_xC, out_C, 3, 1, 1)
        # NOTE: the original (misspelled) attribute name is kept on purpose so
        # existing pretrained state_dicts still load.
        self.gernerate_kernel = nn.Sequential(
            nn.Conv2d(in_yC, in_yC, 3, 1, 1),
            DenseLayer(in_yC, in_yC, k=down_factor),
            nn.Conv2d(in_yC, in_xC * self.kernel_size ** 2, 1),
        )
        # padding == dilation keeps the spatial size unchanged.
        self.unfold = nn.Unfold(kernel_size=3, dilation=1, padding=1, stride=1)

    def forward(self, x, y):
        batch, ch, height, width = x.size()
        # Per-pixel 3x3 kernels predicted from the guidance feature y.
        dyn_kernel = self.gernerate_kernel(y).reshape(
            [batch, ch, self.kernel_size ** 2, height, width]
        )
        # Unfolded x: each spatial site holds its own 3x3 neighborhood.
        patches = self.unfold(x).reshape([batch, ch, -1, height, width])
        # Weighted sum over the neighborhood axis = dynamic convolution.
        return self.fuse((patches * dyn_kernel).sum(2))
class DepthDC3x3_3(nn.Module):
    def __init__(self, in_xC, in_yC, out_C, down_factor=4):
        """Dynamic depth-conditioned 3x3 convolution (dilation 3) via nn.Unfold.

        Args:
            in_xC: channels of the primary input x.
            in_yC: channels of the guidance input y.
            out_C: channels of the final output.
            down_factor: channel-reduction factor inside the kernel generator.
        """
        super(DepthDC3x3_3, self).__init__()
        self.fuse = nn.Conv2d(in_xC, out_C, 3, 1, 1)
        self.kernel_size = 3
        # NOTE: the original (misspelled) attribute name is kept on purpose so
        # existing pretrained state_dicts still load.
        self.gernerate_kernel = nn.Sequential(
            nn.Conv2d(in_yC, in_yC, 3, 1, 1),
            DenseLayer(in_yC, in_yC, k=down_factor),
            nn.Conv2d(in_yC, in_xC * self.kernel_size ** 2, 1),
        )
        # dilation == padding == 3 keeps the spatial size unchanged.
        self.unfold = nn.Unfold(kernel_size=3, dilation=3, padding=3, stride=1)

    def forward(self, x, y):
        batch, ch, height, width = x.size()
        # Per-pixel 3x3 kernels predicted from the guidance feature y.
        dyn_kernel = self.gernerate_kernel(y).reshape(
            [batch, ch, self.kernel_size ** 2, height, width]
        )
        # Unfolded x: each spatial site holds its dilated 3x3 neighborhood.
        patches = self.unfold(x).reshape([batch, ch, -1, height, width])
        return self.fuse((patches * dyn_kernel).sum(2))
import torch
import torch.nn as nn

from module.BaseBlocks import BasicConv2d
from utils.tensor_ops import cus_sample, upsample_add
from backbone.ResNet import Backbone_ResNet50_in1, Backbone_ResNet50_in3
from backbone.VGG import (
    Backbone_VGG19_in1,
    Backbone_VGG19_in3,
    Backbone_VGG_in1,
    Backbone_VGG_in3,
)
from module.MyModules import (
    DDPM,
    DenseTransLayer,
)


class HDFNet_VGG16(nn.Module):
    """HDFNet with VGG-16 RGB and depth encoders.

    Depth features modulate the top-down RGB decoder through DenseTransLayer
    compressors and DDPM dynamic-convolution fusion modules. All module
    attribute names are preserved so pretrained state_dicts load unchanged.
    """

    def __init__(self, pretrained=True):
        super(HDFNet_VGG16, self).__init__()
        self.upsample_add = upsample_add
        self.upsample = cus_sample

        # Five-stage RGB encoder (3-channel input).
        (
            self.encoder1,
            self.encoder2,
            self.encoder4,
            self.encoder8,
            self.encoder16,
        ) = Backbone_VGG_in3(pretrained=pretrained)
        # Five-stage depth encoder (1-channel input).
        (
            self.depth_encoder1,
            self.depth_encoder2,
            self.depth_encoder4,
            self.depth_encoder8,
            self.depth_encoder16,
        ) = Backbone_VGG_in1(pretrained=pretrained)

        # 1x1 convs unifying each RGB stage to a common channel width.
        self.trans16 = nn.Conv2d(512, 64, 1)
        self.trans8 = nn.Conv2d(512, 64, 1)
        self.trans4 = nn.Conv2d(256, 64, 1)
        self.trans2 = nn.Conv2d(128, 64, 1)
        self.trans1 = nn.Conv2d(64, 32, 1)

        # RGB+depth fusion for the three deepest stages.
        self.depth_trans16 = DenseTransLayer(512, 64)
        self.depth_trans8 = DenseTransLayer(512, 64)
        self.depth_trans4 = DenseTransLayer(256, 64)

        self.upconv16 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv8 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv4 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv2 = BasicConv2d(64, 32, kernel_size=3, stride=1, padding=1)
        self.upconv1 = BasicConv2d(32, 32, kernel_size=3, stride=1, padding=1)

        # Depth-guided dynamic convolutions used during decoding.
        self.selfdc_16 = DDPM(64, 64, 64, 3, 4)
        self.selfdc_8 = DDPM(64, 64, 64, 3, 4)
        self.selfdc_4 = DDPM(64, 64, 64, 3, 4)

        self.classifier = nn.Conv2d(32, 1, 1)

    def forward(self, in_data, in_depth):
        # --- encoders (inputs freed as soon as they are consumed) -----------
        rgb_1 = self.encoder1(in_data)
        del in_data
        dep_1 = self.depth_encoder1(in_depth)
        del in_depth

        rgb_2 = self.encoder2(rgb_1)
        dep_2 = self.depth_encoder2(dep_1)
        rgb_4 = self.encoder4(rgb_2)
        dep_4 = self.depth_encoder4(dep_2)
        del dep_2, dep_1

        rgb_8 = self.encoder8(rgb_4)
        dep_8 = self.depth_encoder8(dep_4)
        rgb_16 = self.encoder16(rgb_8)
        dep_16 = self.depth_encoder16(dep_8)

        # --- depth-aware auxiliary features ---------------------------------
        aux_4 = self.depth_trans4(rgb_4, dep_4)
        aux_8 = self.depth_trans8(rgb_8, dep_8)
        aux_16 = self.depth_trans16(rgb_16, dep_16)
        del dep_4, dep_8, dep_16

        # --- channel unification --------------------------------------------
        rgb_1 = self.trans1(rgb_1)
        rgb_2 = self.trans2(rgb_2)
        rgb_4 = self.trans4(rgb_4)
        rgb_8 = self.trans8(rgb_8)
        rgb_16 = self.trans16(rgb_16)

        # --- top-down decoding with dynamic depth guidance ------------------
        out_16 = self.upconv16(rgb_16)
        out_8 = self.upsample_add(self.selfdc_16(out_16, aux_16), rgb_8)
        del out_16, aux_16, rgb_8

        out_8 = self.upconv8(out_8)
        out_4 = self.upsample_add(self.selfdc_8(out_8, aux_8), rgb_4)
        del out_8, aux_8, rgb_4

        out_4 = self.upconv4(out_4)
        out_2 = self.upsample_add(self.selfdc_4(out_4, aux_4), rgb_2)
        del out_4, aux_4, rgb_2

        out_2 = self.upconv2(out_2)
        out_1 = self.upsample_add(out_2, rgb_1)
        del out_2, rgb_1

        out_1 = self.upconv1(out_1)
        # Single-channel saliency map squashed to (0, 1).
        return self.classifier(out_1).sigmoid()
DDPM(64, 64, 64, 3, 4) 152 | 153 | self.classifier = nn.Conv2d(32, 1, 1) 154 | 155 | def forward(self, in_data, in_depth): 156 | in_data_1 = self.encoder1(in_data) 157 | del in_data 158 | in_data_1_d = self.depth_encoder1(in_depth) 159 | del in_depth 160 | 161 | in_data_2 = self.encoder2(in_data_1) 162 | in_data_2_d = self.depth_encoder2(in_data_1_d) 163 | in_data_4 = self.encoder4(in_data_2) 164 | in_data_4_d = self.depth_encoder4(in_data_2_d) 165 | del in_data_2_d, in_data_1_d 166 | 167 | in_data_8 = self.encoder8(in_data_4) 168 | in_data_8_d = self.depth_encoder8(in_data_4_d) 169 | in_data_16 = self.encoder16(in_data_8) 170 | in_data_16_d = self.depth_encoder16(in_data_8_d) 171 | 172 | in_data_4_aux = self.depth_trans4(in_data_4, in_data_4_d) 173 | in_data_8_aux = self.depth_trans8(in_data_8, in_data_8_d) 174 | in_data_16_aux = self.depth_trans16(in_data_16, in_data_16_d) 175 | del in_data_4_d, in_data_8_d, in_data_16_d 176 | 177 | in_data_1 = self.trans1(in_data_1) 178 | in_data_2 = self.trans2(in_data_2) 179 | in_data_4 = self.trans4(in_data_4) 180 | in_data_8 = self.trans8(in_data_8) 181 | in_data_16 = self.trans16(in_data_16) 182 | 183 | out_data_16 = in_data_16 184 | out_data_16 = self.upconv16(out_data_16) # 1024 185 | out_data_8 = self.upsample_add(self.selfdc_16(out_data_16, in_data_16_aux), in_data_8) 186 | del out_data_16, in_data_16_aux, in_data_8 187 | 188 | out_data_8 = self.upconv8(out_data_8) # 512 189 | out_data_4 = self.upsample_add(self.selfdc_8(out_data_8, in_data_8_aux), in_data_4) 190 | del out_data_8, in_data_8_aux, in_data_4 191 | 192 | out_data_4 = self.upconv4(out_data_4) # 256 193 | out_data_2 = self.upsample_add(self.selfdc_4(out_data_4, in_data_4_aux), in_data_2) 194 | del out_data_4, in_data_4_aux, in_data_2 195 | 196 | out_data_2 = self.upconv2(out_data_2) # 64 197 | out_data_1 = self.upsample_add(out_data_2, in_data_1) 198 | del out_data_2, in_data_1 199 | 200 | out_data_1 = self.upconv1(out_data_1) # 32 201 | 202 | out_data = 
class HDFNet_Res50(nn.Module):
    """HDFNet with ResNet-50 RGB and depth encoders.

    Decoding mirrors the VGG variants but starts one stage deeper (stride 32)
    and ends with an explicit 2x upsample back to the input resolution. All
    module attribute names are preserved so pretrained state_dicts load.
    """

    def __init__(self, pretrained=True):
        super(HDFNet_Res50, self).__init__()
        self.upsample_add = upsample_add
        self.upsample = cus_sample

        (
            self.encoder2,
            self.encoder4,
            self.encoder8,
            self.encoder16,
            self.encoder32,
        ) = Backbone_ResNet50_in3(pretrained=pretrained)
        (
            self.depth_encoder2,
            self.depth_encoder4,
            self.depth_encoder8,
            self.depth_encoder16,
            self.depth_encoder32,
        ) = Backbone_ResNet50_in1(pretrained=pretrained)

        # 1x1 convs unifying each stage to a common channel width.
        self.trans32 = nn.Conv2d(2048, 64, kernel_size=1)
        self.trans16 = nn.Conv2d(1024, 64, kernel_size=1)
        self.trans8 = nn.Conv2d(512, 64, kernel_size=1)
        self.trans4 = nn.Conv2d(256, 64, kernel_size=1)
        self.trans2 = nn.Conv2d(64, 64, kernel_size=1)

        # RGB+depth fusion for the three deepest stages.
        self.depth_trans32 = DenseTransLayer(2048, 64)
        self.depth_trans16 = DenseTransLayer(1024, 64)
        self.depth_trans8 = DenseTransLayer(512, 64)

        self.upconv32 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv16 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv8 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv4 = BasicConv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.upconv2 = BasicConv2d(64, 32, kernel_size=3, stride=1, padding=1)
        self.upconv1 = BasicConv2d(32, 32, kernel_size=3, stride=1, padding=1)

        # Depth-guided dynamic convolutions used during decoding.
        self.selfdc_32 = DDPM(64, 64, 64, 3, 4)
        self.selfdc_16 = DDPM(64, 64, 64, 3, 4)
        self.selfdc_8 = DDPM(64, 64, 64, 3, 4)

        self.classifier = nn.Conv2d(32, 1, 1)

    def forward(self, in_data, in_depth):
        # --- encoders (depth features freed as soon as possible) ------------
        rgb_2 = self.encoder2(in_data)
        del in_data
        dep_2 = self.depth_encoder2(in_depth)
        del in_depth
        rgb_4 = self.encoder4(rgb_2)
        dep_4 = self.depth_encoder4(dep_2)
        del dep_2
        rgb_8 = self.encoder8(rgb_4)
        dep_8 = self.depth_encoder8(dep_4)
        del dep_4
        rgb_16 = self.encoder16(rgb_8)
        dep_16 = self.depth_encoder16(dep_8)
        rgb_32 = self.encoder32(rgb_16)
        dep_32 = self.depth_encoder32(dep_16)

        # --- depth-aware auxiliary features ---------------------------------
        aux_8 = self.depth_trans8(rgb_8, dep_8)
        del dep_8
        aux_16 = self.depth_trans16(rgb_16, dep_16)
        del dep_16
        aux_32 = self.depth_trans32(rgb_32, dep_32)
        del dep_32

        # --- channel unification --------------------------------------------
        rgb_2 = self.trans2(rgb_2)
        rgb_4 = self.trans4(rgb_4)
        rgb_8 = self.trans8(rgb_8)
        rgb_16 = self.trans16(rgb_16)
        rgb_32 = self.trans32(rgb_32)

        # --- top-down decoding with dynamic depth guidance ------------------
        out_32 = self.upconv32(rgb_32)
        del rgb_32
        out_16 = self.upsample_add(self.selfdc_32(out_32, aux_32), rgb_16)
        del out_32, aux_32, rgb_16
        out_16 = self.upconv16(out_16)
        out_8 = self.upsample_add(self.selfdc_16(out_16, aux_16), rgb_8)
        del out_16, aux_16, rgb_8
        out_8 = self.upconv8(out_8)
        out_4 = self.upsample_add(self.selfdc_8(out_8, aux_8), rgb_4)
        del out_8, aux_8, rgb_4
        out_4 = self.upconv4(out_4)
        out_2 = self.upsample_add(out_4, rgb_2)
        del out_4, rgb_2
        out_2 = self.upconv2(out_2)
        # The ResNet stem is stride-2, so one final 2x upsample restores the
        # input resolution before classification.
        out_1 = self.upconv1(self.upsample(out_2, scale_factor=2))
        del out_2
        pred = self.classifier(out_1)
        del out_1
        return pred.sigmoid()
"../../HDFFile/output/HDFNet_Ablation/Model12/TestDCV3_SimpleCombineV1_ND_NL2/pth/state_final.pth" 299 | model = HDFNet_VGG16() 300 | model.load_state_dict(torch.load(model_path)) 301 | -------------------------------------------------------------------------------- /network/__init__.py: -------------------------------------------------------------------------------- 1 | from network.HDFNet import * 2 | -------------------------------------------------------------------------------- /output/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lartpang/HDFNet/455ec8191043ee76e715eb3db6c70949cbeee80f/output/.gitkeep -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # https://github.com/LongTengDao/TOML/ 2 | 3 | [tool.black] 4 | line-length = 119 5 | --verbose = true 6 | exclude = ''' 7 | /(\.eggs|\.git|\.hg|\.mypy|_cache|\.nox|\.tox|\.venv|\.svn|\.idea|\.vscode|output|_build|buck-out|build|dist)/ 8 | ''' 9 | -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # @Time : 2020/7/5 3 | # @Author : Lart Pang 4 | # @Email : lartpang@163.com 5 | # @File : test.py 6 | # @Project : HDFNet 7 | # @GitHub : https://github.com/lartpang 8 | import argparse 9 | import os 10 | import os.path as osp 11 | from datetime import datetime 12 | from distutils.util import strtobool 13 | 14 | import numpy as np 15 | import torch 16 | from PIL import Image 17 | from torchvision import transforms 18 | from tqdm import tqdm 19 | 20 | import network 21 | from utils.metric import CalTotalMetric 22 | from utils.misc import check_dir_path_valid 23 | 24 | my_parser = argparse.ArgumentParser( 25 | prog="main script", 26 | description="The code is 
class Tester:
    """Stand-alone evaluator: runs a trained RGB or RGB-D saliency model over
    one test set, optionally saving predictions and computing metrics."""

    def __init__(self, args):
        # Use the GPU only when requested AND actually available.
        if args.use_gpu and torch.cuda.is_available():
            self.dev = torch.device("cuda:0")
        else:
            self.dev = torch.device("cpu")

        self.to_pil = transforms.ToPILImage()
        self.data_mode = args.data_mode
        self.model_name = args.model

        # Expected test-set layout: <root>/Image plus optional Depth and Mask.
        self.te_data_path = args.testset
        self.image_dir = os.path.join(self.te_data_path, "Image")
        if self.data_mode == "RGBD":
            self.depth_dir = os.path.join(self.te_data_path, "Depth")
        else:
            self.depth_dir = ""

        self.has_masks = args.has_masks
        if self.has_masks:
            self.mask_dir = os.path.join(self.te_data_path, "Mask")
        else:
            self.mask_dir = ""
        check_dir_path_valid([self.te_data_path, self.image_dir, self.mask_dir])

        self.save_pre = args.save_pre
        if self.save_pre:
            self.save_path = args.save_path
            if not os.path.exists(self.save_path):
                print(f" ==>> {self.save_path} 不存在, 这里创建一个 <<==")
                os.makedirs(self.save_path)

        # Build the network by name, load the checkpoint, switch to eval mode.
        self.net = getattr(network, self.model_name)(pretrained=False).to(self.dev)
        self.resume_checkpoint(load_path=args.param_path)
        self.net.eval()

        # RGB: resize + ImageNet normalization; depth: resize only.
        self.rgb_transform = transforms.Compose(
            [
                transforms.Resize((320, 320), interpolation=Image.BILINEAR),
                transforms.ToTensor(),
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
            ]
        )
        if self.data_mode == "RGBD":
            self.depth_transform = transforms.Compose(
                [transforms.Resize((320, 320), interpolation=Image.BILINEAR), transforms.ToTensor()]
            )

    def test(self):
        image_names = os.listdir(self.image_dir)
        cal_total_metrics = CalTotalMetric(num=len(image_names), beta_for_wfm=1)

        progress = tqdm(enumerate(image_names), total=len(image_names), leave=False)
        for idx, rgb_name in progress:
            progress.set_description(f"{self.model_name}:te=>{idx + 1}")

            # Depth and mask files share the image's stem with a .png suffix.
            depth_mask_name = rgb_name[:-4] + ".png"

            rgb_pil = Image.open(os.path.join(self.image_dir, rgb_name)).convert("RGB")
            original_size = rgb_pil.size
            rgb_tensor = self.rgb_transform(rgb_pil).unsqueeze(0)
            rgb_tensor = rgb_tensor.to(self.dev, non_blocking=True)

            if self.data_mode == "RGBD":
                depth_pil = Image.open(os.path.join(self.depth_dir, depth_mask_name)).convert("L")
                depth_tensor = self.depth_transform(depth_pil).unsqueeze(0)
                depth_tensor = depth_tensor.to(self.dev, non_blocking=True)
                with torch.no_grad():
                    pred_tensor = self.net(rgb_tensor, depth_tensor)
            else:
                with torch.no_grad():
                    pred_tensor = self.net(rgb_tensor)

            pred_tensor = pred_tensor.squeeze(0).cpu().detach()
            # Back to the original image size for saving and evaluation.
            pred_pil = self.to_pil(pred_tensor).resize(original_size, resample=Image.NEAREST)
            if self.save_pre:
                pred_pil.save(osp.join(self.save_path, depth_mask_name))

            if self.has_masks:
                pred_array = np.asarray(pred_pil)
                hi = pred_array.max()
                lo = pred_array.min()
                # Min-max normalize to [0, 1]; constant maps fall back to /255.
                if hi == lo:
                    pred_array = pred_array / 255
                else:
                    pred_array = (pred_array - lo) / (hi - lo)

                mask_pil = Image.open(os.path.join(self.mask_dir, depth_mask_name)).convert("L")
                mask_array = np.asarray(mask_pil)
                mask_array = mask_array / (mask_array.max() + 1e-8)
                mask_array = np.where(mask_array > 0.5, 1, 0)

                cal_total_metrics.update(pred_array, mask_array)

        if self.has_masks:
            results = cal_total_metrics.show()
            fixed_pre_results = {k: f"{v:.3f}" for k, v in results.items()}
            print(f" ==>> 在{self.te_data_path}上的测试结果\n >> {fixed_pre_results}")

    def resume_checkpoint(self, load_path):
        """Load network weights from `load_path` onto the selected device.

        Args:
            load_path (str): path of the saved state_dict file.
        """
        if os.path.exists(load_path) and os.path.isfile(load_path):
            print(f" =>> loading checkpoint '{load_path}' <<== ")
            checkpoint = torch.load(load_path, map_location=self.dev)
            self.net.load_state_dict(checkpoint)
            print(f" ==> loaded checkpoint '{load_path}' (only has the net's weight params) <<== ")
        else:
            raise Exception(f"{load_path}路径不正常,请检查")
print(f" ===========>> {datetime.now()}: 开始测试 <<=========== ") 182 | tester.test() 183 | print(f" ===========>> {datetime.now()}: 结束测试 <<=========== ") 184 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | python test.py --param_path /home/lart/Coding/HDFFile/output/HDFNet/PretrainedParams/HDFNet_VGG16/HDFNet_VGG16_7Datasets.pth \ 4 | --model HDFNet_VGG16 \ 5 | --testset /home/lart/Datasets/Saliency/RGBDSOD/LFSD/ \ 6 | --has_masks True \ 7 | --save_pre True \ 8 | --save_path output/HDFNet/pre/test \ 9 | --data_mode RGBD \ 10 | --use_gpu True 11 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # @Time : 2020/7/5 3 | # @Author : Lart Pang 4 | # @Email : lartpang@163.com 5 | # @File : main.py 6 | # @Project : HDFNet 7 | # @GitHub : https://github.com/lartpang 8 | import os 9 | import os.path as osp 10 | import shutil 11 | from datetime import datetime 12 | from pprint import pprint 13 | 14 | import numpy as np 15 | import torch 16 | import torch.backends.cudnn as torchcudnn 17 | from PIL import Image 18 | from torch.nn import BCELoss 19 | from torch.optim import SGD 20 | from torchvision import transforms 21 | from tqdm import tqdm 22 | import network 23 | from loss.HEL import HEL 24 | from config import arg_config, proj_root 25 | from utils.data.create_loader_imgs import create_loader 26 | from utils.misc import AvgMeter, construct_path_dict, make_log, pre_mkdir 27 | from utils.metric import CalTotalMetric 28 | 29 | torch.manual_seed(0) 30 | torch.cuda.manual_seed_all(0) 31 | torchcudnn.benchmark = True 32 | torchcudnn.enabled = True 33 | torchcudnn.deterministic = True 34 | 35 | 36 | class Trainer: 37 | def __init__(self, args): 38 | super(Trainer, 
self).__init__() 39 | self.args = args 40 | self.dev = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") 41 | self.to_pil = transforms.ToPILImage() 42 | pprint(self.args) 43 | 44 | self.data_mode = self.args["data_mode"] 45 | if self.args["suffix"]: 46 | self.model_name = self.args["model"] + "_" + self.args["suffix"] 47 | else: 48 | self.model_name = self.args["model"] 49 | self.path = construct_path_dict(proj_root=proj_root, exp_name=self.model_name) 50 | 51 | pre_mkdir(path_config=self.path) 52 | shutil.copy(f"{proj_root}/config.py", self.path["cfg_log"]) 53 | shutil.copy(f"{proj_root}/train.py", self.path["trainer_log"]) 54 | 55 | if self.data_mode == "RGBD": 56 | self.tr_data_path = self.args["rgbd_data"]["tr_data_path"] 57 | self.te_data_list = self.args["rgbd_data"]["te_data_list"] 58 | elif self.data_mode == "RGB": 59 | self.tr_data_path = self.args["rgb_data"]["tr_data_path"] 60 | self.te_data_list = self.args["rgb_data"]["te_data_list"] 61 | else: 62 | raise NotImplementedError 63 | 64 | self.save_path = self.path["save"] 65 | self.save_pre = self.args["save_pre"] 66 | 67 | self.tr_loader = create_loader( 68 | data_path=self.tr_data_path, mode="train", get_length=False, data_mode=self.data_mode, 69 | ) 70 | 71 | self.net = getattr(network, self.args["model"])(pretrained=True).to(self.dev) 72 | 73 | # 损失函数 74 | self.loss_funcs = [BCELoss(reduction=self.args["reduction"]).to(self.dev)] 75 | if self.args["use_aux_loss"]: 76 | self.loss_funcs.append(HEL().to(self.dev)) 77 | 78 | # 设置优化器 79 | self.opti = self.make_optim() 80 | 81 | # 训练相关 82 | self.end_epoch = self.args["epoch_num"] 83 | if self.args["resume"]: 84 | try: 85 | self.resume_checkpoint(load_path=self.path["final_full_net"], mode="all") 86 | except: 87 | print(f"{self.path['final_full_net']} does not exist and we will load {self.path['final_state_net']}") 88 | self.resume_checkpoint(load_path=self.path["final_state_net"], mode="onlynet") 89 | self.start_epoch = self.end_epoch 90 | 
else: 91 | self.start_epoch = 0 92 | self.iter_num = self.end_epoch * len(self.tr_loader) 93 | 94 | def total_loss(self, train_preds, train_alphas): 95 | loss_list = [] 96 | loss_item_list = [] 97 | 98 | assert len(self.loss_funcs) != 0, "请指定损失函数`self.loss_funcs`" 99 | for loss in self.loss_funcs: 100 | loss_out = loss(train_preds, train_alphas) 101 | loss_list.append(loss_out) 102 | loss_item_list.append(f"{loss_out.item():.5f}") 103 | 104 | train_loss = sum(loss_list) 105 | return train_loss, loss_item_list 106 | 107 | def train(self): 108 | for curr_epoch in range(self.start_epoch, self.end_epoch): 109 | train_loss_record = AvgMeter() 110 | 111 | if self.args["lr_type"] == "poly": 112 | self.change_lr(curr_epoch) 113 | else: 114 | raise NotImplementedError 115 | 116 | for train_batch_id, train_data in enumerate(self.tr_loader): 117 | curr_iter = curr_epoch * len(self.tr_loader) + train_batch_id 118 | 119 | self.opti.zero_grad() 120 | train_inputs, train_masks, *train_other_data = train_data 121 | train_inputs = train_inputs.to(self.dev, non_blocking=True) 122 | train_masks = train_masks.to(self.dev, non_blocking=True) 123 | if self.data_mode == "RGBD": 124 | # train_other_data是一个list 125 | train_depths = train_other_data[-1] 126 | train_depths = train_depths.to(self.dev, non_blocking=True) 127 | train_preds = self.net(train_inputs, train_depths) 128 | elif self.data_mode == "RGB": 129 | train_preds = self.net(train_inputs) 130 | else: 131 | raise NotImplementedError 132 | 133 | train_loss, loss_item_list = self.total_loss(train_preds, train_masks) 134 | train_loss.backward() 135 | self.opti.step() 136 | 137 | # 仅在累计的时候使用item()获取数据 138 | train_iter_loss = train_loss.item() 139 | train_batch_size = train_inputs.size(0) 140 | train_loss_record.update(train_iter_loss, train_batch_size) 141 | 142 | # 记录每一次迭代的数据 143 | if self.args["print_freq"] > 0 and (curr_iter + 1) % self.args["print_freq"] == 0: 144 | log = ( 145 | 
f"[I:{curr_iter}/{self.iter_num}][E:{curr_epoch}:{self.end_epoch}]>" 146 | f"[{self.model_name}]" 147 | f"[Lr:{self.opti.param_groups[0]['lr']:.7f}]" 148 | f"[Avg:{train_loss_record.avg:.5f}|Cur:{train_iter_loss:.5f}|" 149 | f"{loss_item_list}]" 150 | ) 151 | print(log) 152 | make_log(self.path["tr_log"], log) 153 | 154 | # 每个周期都进行保存测试,保存的是针对第curr_epoch+1周期的参数 155 | self.save_checkpoint( 156 | curr_epoch + 1, full_net_path=self.path["final_full_net"], state_net_path=self.path["final_state_net"], 157 | ) 158 | 159 | # 进行最终的测试,首先输出验证结果 160 | print(f" ==>> 训练结束 <<== ") 161 | 162 | for data_name, data_path in self.te_data_list.items(): 163 | print(f" ==>> 使用测试集{data_name}测试 <<== ") 164 | self.te_loader, self.te_length = create_loader( 165 | data_path=data_path, mode="test", get_length=True, data_mode=self.data_mode, 166 | ) 167 | self.save_path = os.path.join(self.path["save"], data_name) 168 | if not os.path.exists(self.save_path): 169 | print(f" ==>> {self.save_path} 不存在, 这里创建一个 <<==") 170 | os.makedirs(self.save_path) 171 | results = self.test(save_pre=self.save_pre) 172 | fixed_pre_results = {k: f"{v:.3f}" for k, v in results.items()} 173 | msg = f" ==>> 在{data_name}:'{data_path}'测试集上结果\n >> {fixed_pre_results}" 174 | print(msg) 175 | make_log(self.path["te_log"], msg) 176 | 177 | def test(self, save_pre): 178 | self.net.eval() 179 | 180 | cal_total_metrics = CalTotalMetric(num=self.te_length, beta_for_wfm=1) 181 | 182 | tqdm_iter = tqdm(enumerate(self.te_loader), total=len(self.te_loader), leave=False) 183 | for test_batch_id, test_data in tqdm_iter: 184 | tqdm_iter.set_description(f"{self.model_name}:" f"te=>{test_batch_id + 1}") 185 | with torch.no_grad(): 186 | in_imgs, in_names, in_mask_paths, *in_depths = test_data 187 | in_imgs = in_imgs.to(self.dev, non_blocking=True) 188 | if self.data_mode == "RGBD": 189 | in_depths = in_depths[0] 190 | in_depths = in_depths.to(self.dev, non_blocking=True) 191 | outputs = self.net(in_imgs, in_depths) 192 | elif 
self.data_mode == "RGB": 193 | outputs = self.net(in_imgs) 194 | else: 195 | raise NotImplementedError 196 | 197 | pred_array_tensor = outputs.cpu().detach() 198 | 199 | for item_id, pred_tensor in enumerate(pred_array_tensor): 200 | mask_path = osp.join(in_mask_paths[item_id]) 201 | mask_pil = Image.open(mask_path).convert("L") 202 | original_size = mask_pil.size 203 | mask_array = np.asarray(mask_pil) 204 | mask_array = mask_array / (mask_array.max() + 1e-8) 205 | mask_array = np.where(mask_array > 0.5, 1, 0) 206 | 207 | pred_pil = self.to_pil(pred_tensor).resize(original_size, resample=Image.NEAREST) 208 | if save_pre: 209 | pred_path = osp.join(self.save_path, in_names[item_id] + ".png") 210 | pred_pil.save(pred_path) 211 | 212 | pred_array = np.asarray(pred_pil) 213 | max_pred_array = pred_array.max() 214 | min_pred_array = pred_array.min() 215 | if max_pred_array == min_pred_array: 216 | pred_array = pred_array / 255 217 | else: 218 | pred_array = (pred_array - min_pred_array) / (max_pred_array - min_pred_array) 219 | 220 | cal_total_metrics.update(pred_array, mask_array) 221 | 222 | results = cal_total_metrics.show() 223 | return results 224 | 225 | def change_lr(self, curr): 226 | total_num = self.end_epoch 227 | if self.args["lr_type"] == "poly": 228 | ratio = pow((1 - float(curr) / total_num), self.args["lr_decay"]) 229 | self.opti.param_groups[0]["lr"] = self.opti.param_groups[0]["lr"] * ratio 230 | self.opti.param_groups[1]["lr"] = self.opti.param_groups[0]["lr"] 231 | else: 232 | raise NotImplementedError 233 | 234 | def make_optim(self): 235 | if self.args["optim"] == "sgd_trick": 236 | # https://github.com/implus/PytorchInsight/blob/master/classification/imagenet_tricks.py 237 | params = [ 238 | { 239 | "params": [p for name, p in self.net.named_parameters() if ("bias" in name or "bn" in name)], 240 | "weight_decay": 0, 241 | }, 242 | { 243 | "params": [ 244 | p for name, p in self.net.named_parameters() if ("bias" not in name and "bn" not in name) 
245 | ] 246 | }, 247 | ] 248 | optimizer = SGD( 249 | params, 250 | lr=self.args["lr"], 251 | momentum=self.args["momentum"], 252 | weight_decay=self.args["weight_decay"], 253 | nesterov=self.args["nesterov"], 254 | ) 255 | elif self.args["optim"] == "f3_trick": 256 | backbone, head = [], [] 257 | for name, params_tensor in self.net.named_parameters(): 258 | if "encoder" in name: 259 | backbone.append(params_tensor) 260 | else: 261 | head.append(params_tensor) 262 | params = [ 263 | {"params": backbone, "lr": 0.1 * self.args["lr"]}, 264 | {"params": head, "lr": self.args["lr"]}, 265 | ] 266 | optimizer = SGD( 267 | params=params, 268 | momentum=self.args["momentum"], 269 | weight_decay=self.args["weight_decay"], 270 | nesterov=self.args["nesterov"], 271 | ) 272 | else: 273 | raise NotImplementedError 274 | print("optimizer = ", optimizer) 275 | return optimizer 276 | 277 | def save_checkpoint(self, current_epoch, full_net_path, state_net_path): 278 | """ 279 | 保存完整参数模型(大)和状态参数模型(小) 280 | 281 | Args: 282 | current_epoch (int): 当前周期 283 | full_net_path (str): 保存完整参数模型的路径 284 | state_net_path (str): 保存模型权重参数的路径 285 | """ 286 | state_dict = { 287 | "epoch": current_epoch, 288 | "net_state": self.net.state_dict(), 289 | "opti_state": self.opti.state_dict(), 290 | } 291 | torch.save(state_dict, full_net_path) 292 | torch.save(self.net.state_dict(), state_net_path) 293 | 294 | def resume_checkpoint(self, load_path, mode="all"): 295 | """ 296 | 从保存节点恢复模型 297 | 298 | Args: 299 | load_path (str): 模型存放路径 300 | mode (str): 选择哪种模型恢复模式:'all':回复完整模型,包括训练中的的参数;'onlynet':仅恢复模型权重参数 301 | """ 302 | if os.path.exists(load_path) and os.path.isfile(load_path): 303 | print(f" =>> loading checkpoint '{load_path}' <<== ") 304 | checkpoint = torch.load(load_path, map_location=self.dev) 305 | if mode == "all": 306 | self.start_epoch = checkpoint["epoch"] 307 | self.net.load_state_dict(checkpoint["net_state"]) 308 | self.opti.load_state_dict(checkpoint["opti_state"]) 309 | print(f" ==> loaded 
def check_mkdir(dir_name):
    """Ensure that directory ``dir_name`` (and its parents) exists.

    Uses ``exist_ok=True`` so the operation is atomic and race-free: the
    original exists-then-makedirs pattern could crash if another process
    created the directory between the check and the call.

    Args:
        dir_name (str): directory path to create if missing.
    """
    os.makedirs(dir_name, exist_ok=True)
输入的如果是一个tuple,则按照数据缩放,但是如果是一个数字,则按比例缩放到短边等于该值 38 | transforms.Resize((self.args["new_size"], self.args["new_size"])), 39 | transforms.ToTensor(), 40 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), 41 | ] 42 | ) 43 | self.test_depth_transform = transforms.Compose( 44 | [transforms.Resize((self.args["new_size"], self.args["new_size"])), transforms.ToTensor(),] 45 | ) 46 | 47 | if pth_path != None: 48 | print(f"导入模型...{pth_path}") 49 | checkpoint = torch.load(pth_path) 50 | model_dict = self.net.state_dict() 51 | # 1. filter out unnecessary keys 52 | pretrained_dict = {k: v for k, v in checkpoint.items() if k in model_dict} 53 | # 2. overwrite entries in the existing state dict 54 | model_dict.update(pretrained_dict) 55 | # 3. load the new state dict 56 | self.net.load_state_dict(model_dict) 57 | print("初始化完毕...") 58 | else: 59 | print("不加载权重") 60 | 61 | def test(self, data_path, save_path): 62 | 63 | if save_path: 64 | print(f"保存路径为{save_path}") 65 | check_mkdir(save_path) 66 | 67 | print(f"开始统计...{data_path}") 68 | img_path = os.path.join(data_path, "Image") 69 | dep_path = os.path.join(data_path, "Depth") 70 | img_list = os.listdir(img_path) 71 | total_time = 0 72 | 73 | tqdm_iter = tqdm(enumerate(img_list), total=len(img_list), leave=False) 74 | for idx, img_name in tqdm_iter: 75 | tqdm_iter.set_description(f"{self.proj_name}:te=>{idx + 1}") 76 | 77 | img_fullpath = os.path.join(img_path, img_name) 78 | test_image = Image.open(img_fullpath).convert("RGB") 79 | dep_fullpath = os.path.join(dep_path, img_name[:-3] + "png") 80 | test_depth = Image.open(dep_fullpath).convert("L") 81 | assert test_image.size == test_depth.size 82 | 83 | img_size = test_image.size 84 | test_image = self.test_image_transform(test_image) 85 | test_depth = self.test_depth_transform(test_depth) 86 | test_image = test_image.unsqueeze(0) 87 | test_depth = test_depth.unsqueeze(0) 88 | 89 | test_image = test_image.to(self.dev) 90 | test_depth = test_depth.to(self.dev) 91 | 
with torch.no_grad(): 92 | # https://discuss.pytorch.org/t/how-to-reduce-time-spent-by-torch-cuda-synchronize/29484 93 | # https://blog.csdn.net/u013548568/article/details/81368019 94 | torch.cuda.synchronize() 95 | start_time = time.time() 96 | outputs = self.net(test_image, test_depth) # 按照实际情况改写 97 | torch.cuda.synchronize() 98 | total_time += time.time() - start_time 99 | 100 | if save_path: 101 | outputs_np = outputs.squeeze(0).cpu().detach() 102 | out_img = self.to_pil(outputs_np).resize(img_size) 103 | oimg_path = os.path.join(save_path, img_name[:-4] + ".png") 104 | out_img.save(oimg_path) 105 | 106 | fps = len(img_list) / total_time 107 | return fps 108 | 109 | 110 | class CPUFPSer: 111 | def __init__(self, proj_name, args, pth_path): 112 | super(CPUFPSer, self).__init__() 113 | self.args = args 114 | self.to_pil = transforms.ToPILImage() 115 | self.proj_name = proj_name 116 | self.dev = torch.device("cpu") 117 | self.net = self.args[proj_name]["net"](pretrained=False).to(self.dev) 118 | self.net.eval() 119 | 120 | self.test_image_transform = transforms.Compose( 121 | [ 122 | # 输入的如果是一个tuple,则按照数据缩放,但是如果是一个数字,则按比例缩放到短边等于该值 123 | transforms.Resize((self.args["new_size"], self.args["new_size"])), 124 | transforms.ToTensor(), 125 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), 126 | ] 127 | ) 128 | self.test_depth_transform = transforms.Compose( 129 | [transforms.Resize((self.args["new_size"], self.args["new_size"])), transforms.ToTensor(),] 130 | ) 131 | 132 | if pth_path != None: 133 | print(f"导入模型...{pth_path}") 134 | checkpoint = torch.load(pth_path) 135 | model_dict = self.net.state_dict() 136 | # 1. filter out unnecessary keys 137 | pretrained_dict = {k: v for k, v in checkpoint.items() if k in model_dict} 138 | # 2. overwrite entries in the existing state dict 139 | model_dict.update(pretrained_dict) 140 | # 3. 
load the new state dict 141 | self.net.load_state_dict(model_dict) 142 | print("初始化完毕...") 143 | else: 144 | print("不加载权重") 145 | 146 | def test(self, data_path, save_path): 147 | assert save_path != None 148 | 149 | print(f"保存路径为{save_path}") 150 | check_mkdir(save_path) 151 | 152 | print(f"开始统计...{data_path}") 153 | img_path = os.path.join(data_path, "Image") 154 | dep_path = os.path.join(data_path, "Depth") 155 | img_list = os.listdir(img_path) 156 | total_time = 0 157 | 158 | tqdm_iter = tqdm(enumerate(img_list), total=len(img_list), leave=False) 159 | for idx, img_name in tqdm_iter: 160 | tqdm_iter.set_description(f"{self.proj_name}:te=>{idx + 1}") 161 | 162 | img_fullpath = os.path.join(img_path, img_name) 163 | test_image = Image.open(img_fullpath).convert("RGB") 164 | dep_fullpath = os.path.join(dep_path, img_name[:-3] + "png") 165 | test_depth = Image.open(dep_fullpath).convert("L") 166 | assert test_image.size == test_depth.size 167 | 168 | img_size = test_image.size 169 | test_image = self.test_image_transform(test_image) 170 | test_depth = self.test_depth_transform(test_depth) 171 | test_image = test_image.unsqueeze(0) 172 | test_depth = test_depth.unsqueeze(0) 173 | 174 | test_image = test_image.to(self.dev) 175 | test_depth = test_depth.to(self.dev) 176 | with torch.no_grad(): 177 | start_time = time.time() 178 | outputs = self.net(test_image, test_depth) # 按照实际情况改写 179 | total_time += time.time() - start_time 180 | 181 | outputs_np = outputs.squeeze(0).detach() 182 | 183 | out_img = self.to_pil(outputs_np).resize(img_size) 184 | oimg_path = os.path.join(save_path, img_name[:-4] + ".png") 185 | out_img.save(oimg_path) 186 | 187 | fps = len(img_list) / total_time 188 | return fps 189 | 190 | 191 | if __name__ == "__main__": 192 | proj_list = ["HDFNet_VGG16"] 193 | 194 | arg_dicts = { 195 | "HDFNet_VGG16": {"net": HDFNet_VGG16, "pth_path": None, "save_root": ""}, # 必须有 196 | "new_size": 320, 197 | "test_on_gpu": True, 198 | } 199 | 200 | data_dicts = { 
class DataLoaderX(DataLoader):
    """DataLoader whose iterator prefetches batches in a background thread."""

    def __iter__(self):
        # BackgroundGenerator eagerly pulls the next batch while the consumer
        # is still processing the current one, hiding data-loading latency.
        return BackgroundGenerator(super(DataLoaderX, self).__iter__())
def create_loader(data_path, mode, get_length=False, data_mode="RGBD", prefix=(".jpg", ".png")):
    """Build a prefetching DataLoader for the requested dataset flavour.

    Args:
        data_path (str): dataset root directory or a ``.lst`` file path.
        mode (str): "train" or "test".
        get_length (bool): also return the dataset length when True.
        data_mode (str): "RGB" or "RGBD" dataset variant.
        prefix (tuple): (image extension, mask extension) used for list files.

    Returns:
        DataLoaderX or (DataLoaderX, int): the loader, plus the dataset
        length when ``get_length`` is True.

    Raises:
        NotImplementedError: for unknown ``data_mode`` or ``mode`` values.
    """
    # Import lazily so only the requested dataset module is loaded.
    if data_mode == "RGB":
        from utils.data.create_rgb_datasets_imgs import TestImageFolder, TrainImageFolder
    elif data_mode == "RGBD":
        from utils.data.create_rgbd_datasets_imgs import TestImageFolder, TrainImageFolder
    else:
        raise NotImplementedError

    if mode == "train":
        print(f" ==>> 使用训练集{data_path}训练 <<== ")
        dataset = TrainImageFolder(data_path, in_size=arg_config["input_size"], prefix=prefix)
        loader = _make_loader(dataset, shuffle=True, drop_last=True)
    elif mode == "test":
        print(f" ==>> 使用测试集{data_path}测试 <<== ")
        dataset = TestImageFolder(data_path, in_size=arg_config["input_size"], prefix=prefix)
        loader = _make_loader(dataset, shuffle=False, drop_last=False)
    else:
        raise NotImplementedError

    return (loader, len(dataset)) if get_length else loader
ext_list: 20 | ext = ".png" 21 | elif ".jpg" in ext_list: 22 | ext = ".jpg" 23 | elif ".bmp" in ext_list: 24 | ext = ".bmp" 25 | else: 26 | raise NotImplementedError 27 | print(f" ==>> 数据文件夹中包含多种扩展名,这里仅使用{ext} <<== ") 28 | else: 29 | ext = ext_list[0] 30 | return ext 31 | 32 | 33 | def _make_dataset(root): 34 | img_path = os.path.join(root, "Image") 35 | mask_path = os.path.join(root, "Mask") 36 | 37 | img_list = os.listdir(img_path) 38 | mask_list = os.listdir(mask_path) 39 | 40 | img_ext = _get_ext(img_list) 41 | mask_ext = _get_ext(mask_list) 42 | 43 | img_list = [os.path.splitext(f)[0] for f in mask_list if f.endswith(mask_ext)] 44 | return [ 45 | (os.path.join(img_path, img_name + img_ext), os.path.join(mask_path, img_name + mask_ext)) 46 | for img_name in img_list 47 | ] 48 | 49 | 50 | def _read_list_from_file(list_filepath): 51 | img_list = [] 52 | with open(list_filepath, mode="r", encoding="utf-8") as openedfile: 53 | line = openedfile.readline() 54 | while line: 55 | img_list.append(line.split()[0]) 56 | line = openedfile.readline() 57 | return img_list 58 | 59 | 60 | def _make_test_dataset_from_list(list_filepath, prefix=(".jpg", ".png")): 61 | img_list = _read_list_from_file(list_filepath) 62 | return [ 63 | ( 64 | os.path.join(os.path.join(os.path.dirname(img_path), "Image"), os.path.basename(img_path) + prefix[0]), 65 | os.path.join(os.path.join(os.path.dirname(img_path), "Mask"), os.path.basename(img_path) + prefix[1]), 66 | ) 67 | for img_path in img_list 68 | ] 69 | 70 | 71 | class TestImageFolder(Dataset): 72 | def __init__(self, root, in_size, prefix): 73 | if os.path.isdir(root): 74 | print(f" ==>> {root}是图片文件夹, 将会遍历其中的图片进行测试 <<==") 75 | self.imgs = _make_dataset(root) 76 | elif os.path.isfile(root): 77 | print(f" ==>> {root}是图片地址列表, 将会遍历对应的图片进行测试 <<==") 78 | self.imgs = _make_test_dataset_from_list(root, prefix=prefix) 79 | else: 80 | raise NotImplementedError 81 | self.test_img_trainsform = transforms.Compose( 82 | [ 83 | # 
def _make_train_dataset_from_list(list_filepath, prefix=(".jpg", ".png")):
    """Build (image, mask) path pairs from a ``.lst`` file of sample stems.

    Each entry ``<dir>/<name>`` maps to ``<dir>/Image/<name><prefix[0]>``
    and ``<dir>/Mask/<name><prefix[1]>``.

    Args:
        list_filepath (str): path of the ``.lst`` file.
        prefix (tuple): (image extension, mask extension).

    Returns:
        list[tuple[str, str]]: (image path, mask path) pairs.
    """
    img_list = _read_list_from_file(list_filepath)
    # os.path.join takes multiple components; no need to nest calls.
    return [
        (
            os.path.join(os.path.dirname(img_path), "Image", os.path.basename(img_path) + prefix[0]),
            os.path.join(os.path.dirname(img_path), "Mask", os.path.basename(img_path) + prefix[1]),
        )
        for img_path in img_list
    ]
self.imgs[index] 137 | 138 | img = Image.open(img_path) 139 | mask = Image.open(mask_path) 140 | if len(img.split()) != 3: 141 | img = img.convert("RGB") 142 | if len(mask.split()) == 3: 143 | mask = mask.convert("L") 144 | 145 | img, mask = self.train_joint_transform(img, mask) 146 | mask = self.train_mask_transform(mask) 147 | img = self.train_img_transform(img) 148 | 149 | img_name = (img_path.split(os.sep)[-1]).split(".")[0] 150 | 151 | return img, mask, img_name 152 | 153 | def __len__(self): 154 | return len(self.imgs) 155 | 156 | 157 | if __name__ == "__main__": 158 | img_list = _make_train_dataset_from_list() 159 | print(len(img_list)) 160 | -------------------------------------------------------------------------------- /utils/data/create_rgbd_datasets_imgs.py: -------------------------------------------------------------------------------- 1 | # @Time : 2020/7/8 2 | # @Author : Lart Pang 3 | # @Email : lartpang@163.com 4 | # @File : create_rgbd_datasets_imgs.py 5 | # @Project : HDFNet 6 | # @GitHub : https://github.com/lartpang 7 | import os 8 | 9 | from PIL import Image 10 | from prefetch_generator import BackgroundGenerator 11 | from torch.utils.data import DataLoader, Dataset 12 | from torchvision import transforms 13 | 14 | from utils.transforms.triple_transforms import Compose, JointResize, RandomHorizontallyFlip, RandomRotate 15 | 16 | 17 | def _read_list_from_file(list_filepath): 18 | img_list = [] 19 | with open(list_filepath, mode="r", encoding="utf-8") as openedfile: 20 | line = openedfile.readline() 21 | while line: 22 | img_list.append(line.split()[0]) 23 | line = openedfile.readline() 24 | return img_list 25 | 26 | 27 | def _make_test_dataset(root, prefix=(".jpg", ".png")): 28 | img_path = os.path.join(root, "Image") 29 | mask_path = os.path.join(root, "Mask") 30 | depth_path = os.path.join(root, "Depth") 31 | img_list = [os.path.splitext(f)[0] for f in os.listdir(mask_path) if f.endswith(prefix[1])] 32 | return [ 33 | ( 34 | 
def _make_test_dataset_from_list(list_filepath, prefix=(".jpg", ".png")):
    """Build (image, mask, depth) path triples from a ``.lst`` file.

    Each entry ``<dir>/<name>`` maps to ``<dir>/Image/<name><prefix[0]>``,
    ``<dir>/Mask/<name><prefix[1]>`` and ``<dir>/Depth/<name><prefix[1]>``.

    Args:
        list_filepath (str): path of the ``.lst`` file.
        prefix (tuple): (image extension, mask/depth extension).

    Returns:
        list[tuple[str, str, str]]: (image, mask, depth) path triples.
    """
    img_list = _read_list_from_file(list_filepath)
    # os.path.join takes multiple components; no need to nest calls.
    return [
        (
            os.path.join(os.path.dirname(img_path), "Image", os.path.basename(img_path) + prefix[0]),
            os.path.join(os.path.dirname(img_path), "Mask", os.path.basename(img_path) + prefix[1]),
            os.path.join(os.path.dirname(img_path), "Depth", os.path.basename(img_path) + prefix[1]),
        )
        for img_path in img_list
    ]
self.test_img_trainsform(img) 86 | depth = self.test_depth_transform(depth) 87 | return img, img_name, mask_path, depth 88 | 89 | def __len__(self): 90 | return len(self.imgs) 91 | 92 | 93 | def _make_train_dataset(root, prefix=(".jpg", ".png")): 94 | img_path = os.path.join(root, "Image") 95 | mask_path = os.path.join(root, "Mask") 96 | depth_path = os.path.join(root, "Depth") 97 | img_list = [os.path.splitext(f)[0] for f in os.listdir(mask_path) if f.endswith(prefix[1])] 98 | return [ 99 | ( 100 | os.path.join(img_path, img_name + prefix[0]), 101 | os.path.join(mask_path, img_name + prefix[1]), 102 | os.path.join(depth_path, img_name + prefix[1]), 103 | ) 104 | for img_name in img_list 105 | ] 106 | 107 | 108 | def _make_train_dataset_from_list(list_filepath, prefix=(".jpg", ".png")): 109 | # list_filepath = '/home/lart/Datasets/RGBDSaliency/FinalSet/rgbd_train_jw.lst' 110 | img_list = _read_list_from_file(list_filepath) 111 | return [ 112 | ( 113 | os.path.join(os.path.join(os.path.dirname(img_path), "Image"), os.path.basename(img_path) + prefix[0]), 114 | os.path.join(os.path.join(os.path.dirname(img_path), "Mask"), os.path.basename(img_path) + prefix[1]), 115 | os.path.join(os.path.join(os.path.dirname(img_path), "Depth"), os.path.basename(img_path) + prefix[1]), 116 | ) 117 | for img_path in img_list 118 | ] 119 | 120 | 121 | class TrainImageFolder(Dataset): 122 | def __init__(self, root, in_size, prefix): 123 | if os.path.isdir(root): 124 | print(f" ==>> {root}是图片文件夹, 将会遍历其中的图片进行训练 <<==") 125 | self.imgs = _make_train_dataset(root, prefix=prefix) 126 | elif os.path.isfile(root): 127 | print(f" ==>> {root}是图片地址列表, 将会遍历对应的图片进行训练 <<==") 128 | self.imgs = _make_train_dataset_from_list(root, prefix=prefix) 129 | else: 130 | raise NotImplementedError 131 | self.train_triple_transform = Compose([JointResize(in_size), RandomHorizontallyFlip(), RandomRotate(10)]) 132 | self.train_img_transform = transforms.Compose( 133 | [ 134 | transforms.ColorJitter(0.1, 0.1, 0.1), 
if __name__ == "__main__":
    import sys

    # Smoke test: `_make_train_dataset_from_list` requires a .lst file path,
    # so take it from the command line — the old no-argument call always
    # raised TypeError (missing required positional argument).
    img_list = _make_train_dataset_from_list(sys.argv[1])
    print(len(img_list))
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000273_left 8 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000358_left 9 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000119_left 10 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001966_left 11 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001651_left 12 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000551_left 13 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000223_left 14 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000043_left 15 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000761_left 16 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000219_left 17 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000876_left 18 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000353_left 19 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001406_left 20 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001887_left 21 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000261_left 22 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000715_left 23 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001336_left 24 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001837_left 25 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001535_left 26 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000694_left 27 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000658_left 28 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001972_left 29 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001293_left 30 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001187_left 31 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001711_left 32 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001890_left 33 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001994_left 34 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001252_left 35 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000680_left 36 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001394_left 37 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000547_left 38 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000636_left 39 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001784_left 40 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001932_left 41 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001902_left 42 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000315_left 43 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001292_left 44 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001367_left 45 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000792_left 46 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000831_left 47 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001814_left 48 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000384_left 49 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001901_left 50 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000905_left 51 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000258_left 52 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000344_left 53 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000642_left 54 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000975_left 55 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001160_left 56 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000571_left 57 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000611_left 58 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000157_left 59 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001736_left 60 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000558_left 61 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001125_left 62 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000196_left 63 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000900_left 64 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000697_left 65 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000594_left 66 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001430_left 67 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001603_left 68 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001077_left 69 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001746_left 70 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001935_left 71 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000771_left 72 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000822_left 73 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000545_left 74 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001141_left 75 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000110_left 76 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000929_left 77 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000251_left 78 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000237_left 79 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001727_left 80 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001753_left 81 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000886_left 82 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001450_left 83 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001562_left 84 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000667_left 85 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001356_left 86 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001650_left 87 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001415_left 88 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000566_left 89 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001068_left 90 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001557_left 91 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001598_left 92 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001721_left 93 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000391_left 94 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001653_left 95 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001912_left 96 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001017_left 97 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000779_left 98 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001548_left 99 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001733_left 100 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001969_left 101 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001145_left 102 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000198_left 103 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001748_left 104 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001647_left 105 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000117_left 106 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000023_left 107 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000316_left 108 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000868_left 109 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001879_left 110 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001105_left 111 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001269_left 112 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000820_left 113 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000991_left 114 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001167_left 115 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000754_left 116 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001564_left 117 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/002002_left 118 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000349_left 119 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000631_left 120 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000652_left 121 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001111_left 122 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000586_left 123 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001037_left 124 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000296_left 125 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001682_left 126 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000451_left 127 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000845_left 128 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000090_left 129 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001480_left 130 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001027_left 131 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001900_left 132 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001258_left 133 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000187_left 134 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000027_left 135 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000651_left 136 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001847_left 137 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000410_left 138 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000582_left 139 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001446_left 140 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001775_left 141 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000846_left 142 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001817_left 143 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000356_left 144 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000455_left 145 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001333_left 146 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000819_left 147 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001572_left 148 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001196_left 149 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001583_left 150 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000028_left 151 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001632_left 152 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001816_left 153 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001910_left 154 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000379_left 155 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001952_left 156 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000918_left 157 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000670_left 158 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000132_left 159 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000134_left 160 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001296_left 161 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000802_left 162 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000403_left 163 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000213_left 164 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000848_left 165 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001826_left 166 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001363_left 167 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001906_left 168 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000490_left 169 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001822_left 170 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001352_left 171 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000397_left 172 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001329_left 173 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000320_left 174 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001114_left 175 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001685_left 176 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000749_left 177 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001459_left 178 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001267_left 179 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001977_left 180 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001401_left 181 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001437_left 182 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000958_left 183 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001973_left 184 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000721_left 185 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000645_left 186 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000968_left 187 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001654_left 188 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001519_left 189 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001928_left 190 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000034_left 191 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000624_left 192 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001190_left 193 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000445_left 194 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001940_left 195 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000271_left 196 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001528_left 197 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001080_left 198 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000940_left 199 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001436_left 200 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001591_left 201 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001679_left 202 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000605_left 203 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001839_left 204 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000257_left 205 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001454_left 206 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000824_left 207 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001186_left 208 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001955_left 209 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000469_left 210 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001053_left 211 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000836_left 212 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001151_left 213 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001253_left 214 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000695_left 215 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000122_left 216 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000340_left 217 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000176_left 218 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001588_left 219 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001384_left 220 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001163_left 221 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001301_left 222 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000635_left 223 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001194_left 224 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000781_left 225 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000335_left 226 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000734_left 227 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001485_left 228 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001474_left 229 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000637_left 230 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001170_left 231 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000778_left 232 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001622_left 233 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001400_left 234 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001580_left 235 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001613_left 236 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000430_left 237 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000621_left 238 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001508_left 239 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001452_left 240 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000458_left 241 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001426_left 242 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000492_left 243 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001657_left 244 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001865_left 245 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000912_left 246 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000989_left 247 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001349_left 248 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001071_left 249 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000097_left 250 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001968_left 251 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001433_left 252 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001712_left 253 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001724_left 254 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001766_left 255 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001311_left 256 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000026_left 257 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001526_left 258 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000939_left 259 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001789_left 260 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000156_left 261 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000616_left 262 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001476_left 263 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000911_left 264 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001266_left 265 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001007_left 266 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001503_left 267 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000368_left 268 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000089_left 269 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000364_left 270 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001168_left 271 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001813_left 272 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001399_left 273 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001882_left 274 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001019_left 275 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000620_left 276 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000502_left 277 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001028_left 278 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000377_left 279 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001280_left 280 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000896_left 281 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000239_left 282 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000501_left 283 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000025_left 284 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001908_left 285 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001841_left 286 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001852_left 287 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001897_left 288 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000017_left 289 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001255_left 290 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001569_left 291 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001001_left 292 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001629_left 293 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000949_left 294 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001509_left 295 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000318_left 296 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000959_left 297 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000478_left 298 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001744_left 299 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001717_left 300 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000863_left 301 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001377_left 302 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000486_left 303 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001227_left 304 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001735_left 305 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001388_left 306 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001138_left 307 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001425_left 308 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000452_left 309 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000139_left 310 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001991_left 311 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001628_left 312 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001676_left 313 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000240_left 314 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001985_left 315 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001787_left 316 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000334_left 317 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001840_left 318 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000933_left 319 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000722_left 320 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001078_left 321 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001009_left 322 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000860_left 323 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000428_left 324 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001949_left 325 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000277_left 326 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001342_left 327 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001681_left 328 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000735_left 329 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001046_left 330 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000634_left 331 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001921_left 332 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000703_left 333 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001368_left 334 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000516_left 335 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000795_left 336 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001547_left 337 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001279_left 338 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000448_left 339 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000692_left 340 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000535_left 341 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000643_left 342 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001917_left 343 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001085_left 344 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001691_left 345 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000227_left 346 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000155_left 347 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001857_left 348 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001959_left 349 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000084_left 350 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001997_left 351 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000406_left 352 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001871_left 353 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001992_left 354 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001178_left 355 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001035_left 356 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001731_left 357 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001198_left 358 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000513_left 359 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000592_left 360 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000216_left 361 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/002000_left 362 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000476_left 363 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001214_left 364 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001123_left 365 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001462_left 366 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000087_left 367 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000699_left 368 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000542_left 369 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001264_left 370 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000040_left 371 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001300_left 372 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000887_left 373 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000145_left 374 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001522_left 375 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000957_left 376 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000921_left 377 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000360_left 378 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000333_left 379 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000803_left 380 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001337_left 381 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001345_left 382 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001335_left 383 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000534_left 384 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000655_left 385 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000700_left 386 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000215_left 387 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000526_left 388 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000941_left 389 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001005_left 390 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001081_left 391 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000591_left 392 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000008_left 393 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001594_left 394 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000606_left 395 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001891_left 396 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001530_left 397 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001222_left 398 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001250_left 399 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000463_left 400 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/002003_left 401 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001604_left 402 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001314_left 403 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001713_left 404 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001842_left 405 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000698_left 406 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001322_left 407 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001121_left 408 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000708_left 409 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001797_left 410 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001225_left 411 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001918_left 412 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001192_left 413 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000339_left 414 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001203_left 415 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001285_left 416 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001795_left 417 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001947_left 418 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001970_left 419 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001582_left 420 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000268_left 421 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000654_left 422 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000733_left 423 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000013_left 424 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001103_left 425 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001064_left 426 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000124_left 427 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000162_left 428 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001710_left 429 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001447_left 430 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001677_left 431 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000450_left 432 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000813_left 433 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001984_left 434 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001330_left 435 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000183_left 436 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000581_left 437 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000425_left 438 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000857_left 439 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000544_left 440 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001465_left 441 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001957_left 442 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001776_left 443 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001040_left 444 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001050_left 445 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001608_left 446 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000111_left 447 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000943_left 448 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000404_left 449 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001709_left 450 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001164_left 451 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000764_left 452 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001440_left 453 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001144_left 454 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001386_left 455 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001655_left 456 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/002001_left 457 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000728_left 458 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001359_left 459 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001455_left 460 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001501_left 461 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001294_left 462 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000321_left 463 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001962_left 464 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000498_left 465 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000472_left 466 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000269_left 467 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000987_left 468 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000199_left 469 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000688_left 470 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000382_left 471 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001472_left 472 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001668_left 473 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001843_left 474 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001229_left 475 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001858_left 476 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000396_left 477 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001786_left 478 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001550_left 479 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001211_left 480 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000167_left 481 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001835_left 482 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000709_left 483 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001807_left 484 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000773_left 485 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000790_left 486 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000388_left 487 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000454_left 488 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000766_left 489 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001974_left 490 | 
/home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000919_left 491 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000853_left 492 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001927_left 493 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000208_left 494 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000100_left 495 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001244_left 496 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000274_left 497 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001376_left 498 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000188_left 499 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/000993_left 500 | /home/lart/Datasets/Saliency/RGBDSOD/NJUD_FULL/001091_left 501 | -------------------------------------------------------------------------------- /utils/data/data_list/nlpr_test_jw.lst: -------------------------------------------------------------------------------- 1 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-02-58 2 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-26-52 3 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-31-03 4 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-49-32 5 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-21-41 6 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-29-53 7 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-12-03 8 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-19-52 9 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-58-12 10 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-00-59 11 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-04 12 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-39-33 13 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-09-24 14 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-27 15 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-08-22 16 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-45-08 17 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-32-53 18 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-00 19 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-17-34 20 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-43-10 21 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-54-27 22 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-23-28 23 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-56-52 24 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-09-03 25 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-10-54 26 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-21-12 27 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-11-58 28 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-52-35 29 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-27-16 30 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-33-43 31 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-58-50 32 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-57-38 33 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-08-15 34 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-41 35 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-31-22 36 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-45-46 37 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-14-01 38 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-21-34 39 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-09-57 40 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-02-53 41 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-43-45 42 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-50-01 43 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-30-36 44 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-11-07 45 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-53-04 46 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-50-21 47 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-29-49 48 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-45-03 49 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-23-51 50 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-44-50 51 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-00-16 52 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-04 53 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-26-51 54 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-43-11 55 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-17-25 56 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-43-28 57 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-45-19 58 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-47-17 59 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-30-24 60 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-11-08 61 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-43-43 62 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-36-58 63 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-42-14 64 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_04-58-43 65 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-54-32 66 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-08-48 67 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-27-39 68 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_04-03-52 69 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-44-34 70 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-29 71 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-11-40 72 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-27-10 73 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-47-12 74 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-03-52 75 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-53-50 76 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-05-11 77 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-43-07 78 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_04-12-56 79 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-21-58 80 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-54-51 81 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-49-39 82 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-32-31 83 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-08-01 84 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-10-15 85 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_05-03-01 86 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-40-35 87 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-28-34 88 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-43-24 89 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-31-18 90 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-29-35 91 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-33-09 92 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-34 93 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-21-10 94 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-47-01 95 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-00-46 96 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-24-30 97 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-04-44 98 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-46-16 99 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-25-23 100 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-17-27 101 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_04-26-35 102 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-43-43 103 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-29 104 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-11-17 105 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-54-34 106 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-56-58 107 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-04-46 108 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-24-15 109 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-31-31 110 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-50-18 111 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-59-25 112 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-43-11 113 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-08-35 114 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-44-47 115 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-25-07 116 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-57-26 117 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-02-01 118 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-51-04 119 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-35-57 120 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-48-16 121 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-46-33 122 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-54-18 123 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-40-19 124 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-38-32 125 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-47-23 126 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-11-51 127 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-53-07 128 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-41-17 129 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-27-11 130 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-13-37 131 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-09-52 132 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_04-58-12 133 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-43-18 134 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-12-31 135 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-39-48 136 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-03-36 137 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-44-59 138 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-48-48 139 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-28-09 140 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-52-11 141 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-04-03 142 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-31-34 143 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-12 144 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-54-58 145 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-36 146 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-55-40 147 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-05-19 148 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-45-57 149 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-53-37 150 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-49-15 151 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-49-43 152 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-44-01 153 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-45-27 154 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-31-34 155 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_04-53-56 156 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-06-39 157 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-39-06 158 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-19-22 159 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-47-34 160 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-14-28 161 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-38-55 162 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-59-19 163 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-34-43 164 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-03-44 165 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-50-00 166 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_08-14-28 167 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-49-53 168 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-58-46 169 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-49-44 170 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-36-43 171 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-42-48 172 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-49-50 173 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-38-16 174 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-21 175 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-37-38 176 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-05-53 177 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-03-04 178 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-13-06 179 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-59-20 180 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-10-34 181 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-09-49 182 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-04-17 183 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-39-00 184 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-09-52 185 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_04-14-11 186 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-47-42 187 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-37-52 188 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-45-14 189 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-32-14 190 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-40-51 191 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-20-23 192 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-20-22 193 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-51-06 194 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-50-35 195 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-30-36 196 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_05-33-06 197 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-06-05 198 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-41 199 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-45-58 200 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-05 201 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-31-31 202 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-08-08 203 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-05-48 204 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-44-36 205 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-51-22 206 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-21-21 207 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_08-14-10 208 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-35-19 209 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-29-40 210 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-43-01 211 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-24-52 212 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-41-47 213 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-47-51 214 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-20-50 215 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-06-51 216 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-46-45 217 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-57-56 218 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-49-10 219 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-22-41 220 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-44-14 221 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-48-03 222 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-24-55 223 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-57-45 224 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-28-28 225 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-53-51 226 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-57-31 227 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-40-12 228 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-57-36 229 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-07-46 230 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-06-18 231 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-42-57 232 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-30-59 233 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-51 234 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-47-32 235 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-34-04 236 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-02-04 237 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-58-59 238 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-27-50 239 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-05-48 240 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-00-18 241 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-09-12 242 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-40-14 243 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-32-45 244 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-58-26 245 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_10-10-45 246 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-37-32 247 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-50-56 248 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-23-41 249 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-57-01 250 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-23-20 251 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-46-02 252 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-24-29 253 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-11-44 254 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_10-06-12 255 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-24-01 256 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-07-43 257 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-18-15 258 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-25-10 259 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-24-01 260 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-53-39 261 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-33-10 262 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-56-44 263 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-07-35 264 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-06-36 265 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-32-30 
266 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-47-58 267 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-47-33 268 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-24-53 269 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-10-53 270 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-24 271 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-54-11 272 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-51-21 273 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-29-12 274 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-16-10 275 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-16-46 276 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-00 277 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-49-49 278 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-19-03 279 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-03 280 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-36-33 281 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-22-25 282 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-31-20 283 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-37-52 284 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-31-59 285 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-42-10 286 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-47-38 287 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-17-42 288 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-19-31 289 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-36-08 290 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-21-27 291 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-25-21 292 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-09-03 293 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-18-10 294 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-48-01 295 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-42-39 296 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-46-26 
297 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-47-05 298 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-44-33 299 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-25-17 300 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-29-58 301 | -------------------------------------------------------------------------------- /utils/data/data_list/nlpr_train_jw.lst: -------------------------------------------------------------------------------- 1 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-44-46 2 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-54-21 3 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-27-23 4 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-30-33 5 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-21-43 6 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-38-16 7 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-23-30 8 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-01-03 9 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-27-09 10 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-54-13 11 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-34-08 12 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-27-52 13 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-35-51 14 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-35-39 15 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-25-40 16 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-31-11 17 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-25-18 18 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-53-32 19 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-47-15 20 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-27 21 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-45 22 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-24-55 23 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-44-11 24 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-54-36 25 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-08-35 26 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-25-16 27 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-43-46 28 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-08-15 29 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-44-37 30 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_05-01-08 31 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-59-15 32 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-03-22 33 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-47-07 34 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-19-32 35 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-13-02 36 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-12-50 37 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-24-14 38 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-25-32 39 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-18-11 40 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-47-57 41 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-31-00 42 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-56-18 43 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-55-57 44 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-52-35 45 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-20 46 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_04-44-50 47 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-06-47 48 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-52-22 49 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_04-04-14 50 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-26-56 51 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-38 52 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-26-11 53 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-57-19 54 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-07-13 55 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-51-37 56 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-39-10 57 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-19-53 58 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-09-49 59 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-05-13 60 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-56-10 61 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-55-36 62 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-04-59 63 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-37-05 64 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-00-02 65 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-58-37 66 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-23-35 67 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-25-37 68 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-06-41 69 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-08-35 70 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-31-40 71 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-37-02 72 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-43-02 73 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-56-44 74 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-47-14 75 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-10-33 76 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-06-26 77 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-56-17 78 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-29-25 79 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-25-59 80 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-35-14 81 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-05-13 82 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-55-39 83 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-52 84 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-28-07 85 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-52-20 86 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-05-46 87 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-39-23 88 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-11-32 89 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-54-57 90 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-17-12 91 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-45-32 92 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-46-56 93 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-22-22 94 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-00 95 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-55-12 96 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-18-55 97 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-57-44 98 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-37-31 99 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-59-03 100 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-09-13 101 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-54-53 102 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-44-00 103 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-41-16 104 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-11-20 105 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-08-55 106 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-25-45 107 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-34-29 108 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-48-13 109 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-40-24 110 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-49-08 111 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-48-49 112 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-17-00 113 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-28-02 114 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-46-05 115 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-02-44 116 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-51-48 117 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-46-50 118 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-46-57 119 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-06-00 120 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-42-25 121 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-14 122 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-57-56 123 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-58-12 124 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-27-44 125 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-10-05 126 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-39-00 127 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-00-44 128 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-08-22 129 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-10-31 130 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-48-09 131 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-36-33 132 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-47-15 133 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-55-16 134 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-55-09 135 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-07 136 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-45-44 137 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-59-08 138 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-53-46 139 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-27-00 140 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-29-44 141 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_08-00-38 142 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-48-13 143 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-10-01 144 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-17-07 145 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-12-13 146 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-46 147 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-36-52 148 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-51-52 149 
| /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-55-32 150 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-25-02 151 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-33-21 152 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-31-39 153 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-51-12 154 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-57-40 155 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-56-19 156 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-46-40 157 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-35-38 158 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-46 159 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-30-45 160 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-54-16 161 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-35-18 162 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-47-24 163 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-36-40 164 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-44-26 165 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-48-04 166 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-52-22 167 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-21-58 168 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-42-16 169 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-43-13 170 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-34-35 171 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-42-02 172 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-22-33 173 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-34-03 174 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-04-52 175 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-39-21 176 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-20-09 177 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-45-41 178 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-46-32 179 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-50-20 180 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-02-33 181 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-26-28 182 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-40-44 183 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-37-21 184 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-43-36 185 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-10-24 186 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-41-11 187 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-38-49 188 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-54-57 189 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-56-12 190 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-25-34 191 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-28-22 192 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-43-42 193 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-20-39 194 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-41-25 195 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-42-03 196 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-58-58 197 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-46-30 198 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-37-14 199 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-51-15 200 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-31-53 201 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-09-41 202 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-08-19 203 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-27-41 204 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-38-21 205 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-48-18 206 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-44-57 207 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-09-27 208 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-24-08 209 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-11-55 210 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-03-39 211 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-20-05 212 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-33-28 213 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-24-25 214 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-13 215 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-43-54 216 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-19-16 217 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-24-15 218 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-23-54 219 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-13 220 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-08-24 221 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-34-51 222 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-48-56 223 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-18-50 224 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-18-19 225 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-47-16 226 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_04-13-13 227 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-43-30 228 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-49-39 229 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-13 230 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-28-21 231 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-29-23 232 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-41-44 233 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-12-11 234 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-05-54 235 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-22-34 236 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-13-11 237 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-12-01 238 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-59-53 239 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-29-13 240 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-51-21 241 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_04-27-23 242 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-28-10 243 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-41-44 244 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-36-01 245 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-53-04 246 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-29-04 247 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-19-33 248 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-41-10 249 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-08-50 250 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-11-07 251 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-36-06 252 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-51-59 253 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-46-36 254 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-49-15 255 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-33-16 256 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-50-46 257 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-45-38 258 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-32-15 259 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-26-35 260 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-29-00 261 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-18-38 262 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_04-52-25 263 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-06-31 264 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-24-55 265 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-38-00 266 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-30-21 267 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-46-17 268 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-36-37 269 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-44-36 270 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-39-17 271 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-11-58 272 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-35-36 273 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-10-46 274 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-21-24 275 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-42 276 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-44-24 277 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-10-15 278 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-44-12 279 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-20-35 280 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-16-43 281 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-53-21 282 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-43-31 283 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-10-21 284 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-36-44 285 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-56-46 286 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-28-21 287 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-05-05 288 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-07-36 289 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-45-31 290 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-04-05 291 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-10-41 292 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-55-42 293 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-04-55 294 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-11-09 295 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-30-15 296 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-11 297 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-42-00 298 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-43-04 299 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-17-08 300 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-47-17 301 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-36-19 302 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-48-29 303 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-09-22 304 
| /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-54-12 305 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-33-01 306 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-50-19 307 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-10-12 308 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-07-49 309 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-42-56 310 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-21-50 311 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-07-24 312 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-25-56 313 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-40-22 314 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-36 315 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-26-15 316 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-51-18 317 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-40-46 318 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-30-03 319 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-11-34 320 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_10-02-10 321 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-43-52 322 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-58-46 323 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-33-31 324 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-47-54 325 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-56-13 326 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-42-09 327 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-22-32 328 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-40-06 329 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-25-01 330 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-13-54 331 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-28-48 332 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-33-47 333 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-10-08 334 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_10-03-39 335 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-27-10 336 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-09 337 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-41-10 338 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-40-12 339 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-55-51 340 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-33 341 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-38-58 342 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-02-33 343 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-31-57 344 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-10 345 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-17-54 346 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-14-52 347 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-06-20 348 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-29-03 349 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-29-54 350 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-40-29 351 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-21-57 352 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_05-05-03 353 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-45-49 354 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_05-00-17 355 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-55-56 356 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-48-44 357 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-11-59 358 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-53-30 359 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-47-19 360 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-27-36 361 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-57-48 362 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-02-50 363 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-36-45 364 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-49-51 365 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-24-16 366 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-52-02 367 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-41 368 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-43-13 369 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-16-42 370 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-28-38 371 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-19-36 372 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-29-25 373 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-43-45 374 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-54-54 375 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-16-17 376 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-41-01 377 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-15-26 378 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-19-33 379 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-44-23 380 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-22-43 381 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-44 382 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-25-12 383 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-54-01 384 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-25-10 385 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-50-08 386 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-30-20 387 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-42-16 388 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-48-58 389 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-24-04 390 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-40-05 391 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-49-37 392 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-56-46 393 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-20 394 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-36-06 395 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_10-06-17 396 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-26-30 397 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-40-10 398 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-22-50 399 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-57 400 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-23-35 401 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-10-50 402 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-14-29 403 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-40-52 404 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-59-01 405 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-21-06 406 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-39-53 407 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-13-30 408 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-30-12 409 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-43-22 410 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-29-38 411 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_05-00-48 412 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-15-40 413 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-55 414 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-10-29 415 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-34-07 416 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-38-05 417 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-26-49 418 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-34-27 419 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-47-57 420 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-03-56 421 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-46-41 422 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-00-10 423 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-54-05 424 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-51-42 425 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-34-56 426 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-47-50 427 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-22-52 428 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-46-41 429 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-45-49 430 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-26-09 431 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-06-39 432 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-50-51 433 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-38-11 434 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-24-29 435 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-06-15 436 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-54-24 437 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-26-22 438 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_04-54-15 439 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-27-04 440 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-58-32 441 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-53 442 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-33-06 443 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-21-54 444 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-07-08 445 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-03-30 446 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-14-09 447 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-04-16 448 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-19-02 449 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-07-01 450 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-13-30 451 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-22 452 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-33-40 453 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-08-02 454 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-22-45 455 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-04-07 456 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-18-55 457 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-58-38 458 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-11-03 459 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-35-48 460 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-27-19 461 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-44-05 462 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-44-25 463 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-23-44 464 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-50-05 465 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-09-45 466 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-59-50 467 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-38-09 468 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-54-48 469 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-30-51 470 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-14-07 471 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-44-51 472 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-44-38 473 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-27-43 474 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-38-26 475 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-44-15 476 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-20-48 477 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-18-45 478 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-08 479 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-39-48 480 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-39-25 481 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-05-32 482 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-32-01 483 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-57-15 484 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-02-41 485 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-03-41 486 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-44-08 487 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-47-33 488 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-04-42 489 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-49-25 490 
| /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-20-25 491 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-46-18 492 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-32-57 493 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-48-54 494 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-35-52 495 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-46-20 496 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-11-22 497 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-19-44 498 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-53 499 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-50-40 500 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-43-31 501 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-20-46 502 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-47-50 503 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-18-42 504 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-13-17 505 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-06-21 506 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-48-00 507 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-02-37 508 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_05-31-29 509 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-44-54 510 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-54-05 511 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-14-11 512 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-45-19 513 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_06-08-17 514 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-14-25 515 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-55-06 516 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-28-31 517 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-41-38 518 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-25-18 519 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-48-08 520 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-38-14 521 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-49-49 522 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-55-40 523 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-43-44 524 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-16-57 525 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-06-09 526 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_05-57-24 527 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-42-10 528 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-25-13 529 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-56-23 530 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-03-06 531 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-56-04 532 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-05-13 533 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-37-58 534 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-57-27 535 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-26-07 536 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-56-36 537 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-44-54 538 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-19 539 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-20-01 540 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-43-39 541 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-42-28 542 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-38 543 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-36-52 544 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-52-15 545 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-54-45 546 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-17-02 547 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-54-50 548 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-49-30 549 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-33-15 550 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-42-40 551 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-45-40 552 | 
/home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-46-12 553 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-20-45 554 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-33-33 555 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-05-37 556 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-30-06 557 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-20-35 558 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-58-43 559 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-39-51 560 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-52-04 561 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-24-34 562 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-48-39 563 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-07-55 564 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-24-05 565 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-38-19 566 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-14-30 567 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-52-17 568 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-12-11 569 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-00-02 570 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-20-07 571 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-00-31 572 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-14-31 573 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-07-48 574 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-45-51 575 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-05-25 576 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-11-39 577 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-53-07 578 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_06-59-23 579 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-39-37 580 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-01 581 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-13-29 582 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-54-47 
583 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-33-34 584 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-02 585 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-55-02 586 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-22-58 587 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-55-26 588 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-19-16 589 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-18-22 590 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_10-43-31 591 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-19-04 592 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-13-48 593 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-46-28 594 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-23-20 595 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-01 596 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-38-12 597 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-44-05 598 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-58-28 599 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-49-32 600 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-51-23 601 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-17-45 602 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-22-54 603 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-43-57 604 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-08-31 605 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-33-24 606 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-00-09 607 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-18-37 608 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-09-57 609 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-42-19 610 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-11-11 611 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-46-57 612 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-10-26 613 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_10-10-23 
614 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-10-33 615 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-31-56 616 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_05-58-41 617 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-58-42 618 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_04-25-12 619 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-53-39 620 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-23-55 621 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-23-12 622 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-50-12 623 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-08-19 624 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-21-41 625 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-06-22 626 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-34-01 627 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-34-54 628 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-19-58 629 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-54-37 630 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-04-24 631 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-38-28 632 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-47-47 633 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-02-15 634 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_06-51-43 635 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-39-54 636 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-56-24 637 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_03-32-54 638 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_06-49-36 639 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_08-16-45 640 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-02-40 641 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-11-48 642 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-04-00 643 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_01-26-14 644 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/6_07-47-09 
645 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-32-19 646 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-15-25 647 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_07-17-21 648 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_10-46-24 649 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-46-59 650 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-49-43 651 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_11-07-36 652 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-17-50 653 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_03-26-09 654 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-38-20 655 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-01-57 656 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-47-45 657 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-34-01 658 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-16-50 659 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-36-22 660 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/1_02-56-10 661 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-44-15 662 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/7_07-50-23 663 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-52-32 664 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-00-07 665 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-45-22 666 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_02-59-06 667 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-12-36 668 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_04-24-41 669 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_10-27-45 670 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-38-38 671 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_06-39-53 672 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-53-18 673 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_07-36-59 674 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-41-46 675 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-20-08 
676 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-32-46 677 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-10-10 678 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/4_05-31-35 679 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/3_09-52-49 680 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_03-46-43 681 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-07-42 682 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_09-40-59 683 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-15-39 684 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_02-29-37 685 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-01-44 686 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/12_03-54-37 687 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-40-03 688 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-55-07 689 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/2_07-27-32 690 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_12-48-34 691 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/8_08-41-58 692 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-45-15 693 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-07-36 694 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_04-06-33 695 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/5_07-00-11 696 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/9_12-50-08 697 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-46-15 698 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-11-23 699 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/10_01-20-40 700 | /home/lart/Datasets/Saliency/RGBDSOD/NLPR_FULL/11_03-53-06 701 | -------------------------------------------------------------------------------- /utils/metric.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # @Time : 2020/7/7 3 | # @Author : Lart Pang 4 | # @Email : lartpang@163.com 5 | # @File : metric.py 6 | # @Project : HDFNet 7 | # @GitHub : 
# https://github.com/lartpang

import numpy as np

# Pillow is not used in this excerpt but is kept for the rest of the module;
# the guard keeps the metric classes importable when PIL is absent.
try:
    from PIL import Image  # noqa: F401
except ImportError:  # pragma: no cover
    Image = None
from scipy.ndimage import center_of_mass, convolve, distance_transform_edt as bwdist


class CalFM(object):
    """F-measure: per-threshold max variant plus adaptive (mean) variant.

    Frequency-tuned salient region detection (CVPR 2009).
    """

    def __init__(self, num, thds=255):
        self.num = num
        self.idx = 0
        # One precision/recall curve per image plus one adaptive F per image.
        self.precision = np.zeros((num, thds))
        self.recall = np.zeros((num, thds))
        self.meanF = np.zeros(num)

    def update(self, pred, gt):
        # Empty ground truths are counted but contribute all-zero rows,
        # so show()'s sample-count assertion still holds.
        if gt.max() != 0:
            curve_p, curve_r, adaptive_f = self.cal(pred, gt)
            self.precision[self.idx, :] = curve_p
            self.recall[self.idx, :] = curve_r
            self.meanF[self.idx] = adaptive_f
        self.idx += 1

    def cal(self, pred, gt):
        # ---- adaptive-threshold (mean) F-measure ----
        adaptive_th = 2 * pred.mean()
        if adaptive_th > 1:
            adaptive_th = 1
        binary = np.zeros_like(pred)
        binary[pred >= adaptive_th] = 1
        hard_gt = np.zeros_like(gt)
        hard_gt[gt > 0.5] = 1
        tp = (binary * hard_gt).sum()
        if tp == 0:
            adaptive_f = 0
        else:
            pre = tp / binary.sum()
            rec = tp / hard_gt.sum()
            adaptive_f = 1.3 * pre * rec / (0.3 * pre + rec)

        # ---- 255-threshold sweep for the max F-measure curve ----
        pred = np.uint8(pred * 255)
        fg_hist, _ = np.histogram(pred[gt > 0.5], bins=range(256))
        bg_hist, _ = np.histogram(pred[gt <= 0.5], bins=range(256))
        # Accumulate from the highest threshold downwards.
        fg_hist = np.cumsum(np.flip(fg_hist), axis=0)
        bg_hist = np.cumsum(np.flip(bg_hist), axis=0)
        curve_p = fg_hist / (fg_hist + bg_hist + 1e-8)
        curve_r = fg_hist / np.sum(gt)
        return curve_p, curve_r, adaptive_f

    def show(self):
        """Return (F-curve, max F, mean adaptive F, precision curve, recall curve)."""
        assert self.num == self.idx, f"{self.num}, {self.idx}"
        avg_p = self.precision.mean(axis=0)
        avg_r = self.recall.mean(axis=0)
        fmeasure = 1.3 * avg_p * avg_r / (0.3 * avg_p + avg_r + 1e-8)
        return fmeasure, fmeasure.max(), self.meanF.mean(), avg_p, avg_r


class CalMAE(object):
    """Mean absolute error between prediction and ground truth."""

    def __init__(self, num):
        self.num = num
        self.idx = 0
        self.prediction = np.zeros(num)

    def update(self, pred, gt):
        self.prediction[self.idx] = self.cal(pred, gt)
        self.idx += 1

    def cal(self, pred, gt):
        return np.abs(pred - gt).mean()

    def show(self):
        assert self.num == self.idx, f"{self.num}, {self.idx}"
        return self.prediction.mean()


class CalSM(object):
    """Structure-measure: A new way to evaluate foreground maps (ICCV 2017)."""

    def __init__(self, num, alpha=0.5):
        self.num = num
        self.idx = 0
        self.alpha = alpha  # trade-off between object- and region-aware terms
        self.prediction = np.zeros(num)

    def update(self, pred, gt):
        self.prediction[self.idx] = self.cal(pred, gt > 0.5)
        self.idx += 1

    def show(self):
        assert self.num == self.idx, f"{self.num}, {self.idx}"
        return self.prediction.mean()

    def cal(self, pred, gt):
        y = np.mean(gt)
        if y == 0:  # all-background GT: reward dark predictions
            return 1 - np.mean(pred)
        if y == 1:  # all-foreground GT: reward bright predictions
            return np.mean(pred)
        return self.alpha * self.object(pred, gt) + (1 - self.alpha) * self.region(pred, gt)

    def object(self, pred, gt):
        # Foreground and background similarity, mixed by GT foreground ratio.
        u = np.mean(gt)
        fg_score = self.s_object(pred * gt, gt)
        bg_score = self.s_object((1 - pred) * (1 - gt), np.logical_not(gt))
        return u * fg_score + (1 - u) * bg_score

    def s_object(self, in1, in2):
        vals = in1[in2]
        x = np.mean(vals)
        sigma_x = np.std(vals)
        return 2 * x / (pow(x, 2) + 1 + sigma_x + 1e-8)

    def region(self, pred, gt):
        # Split both maps into four quadrants around the GT centroid and
        # combine per-quadrant SSIM with area weights.
        y, x = center_of_mass(gt)
        y = int(round(y)) + 1
        x = int(round(x)) + 1
        gt1, gt2, gt3, gt4, w1, w2, w3, w4 = self.divideGT(gt, x, y)
        p1, p2, p3, p4 = self.dividePred(pred, x, y)
        return (
            w1 * self.ssim(p1, gt1)
            + w2 * self.ssim(p2, gt2)
            + w3 * self.ssim(p3, gt3)
            + w4 * self.ssim(p4, gt4)
        )

    def divideGT(self, gt, x, y):
        h, w = gt.shape
        area = h * w
        LT, RT = gt[0:y, 0:x], gt[0:y, x:w]
        LB, RB = gt[y:h, 0:x], gt[y:h, x:w]
        w1 = x * y / area
        w2 = y * (w - x) / area
        w3 = (h - y) * x / area
        w4 = (h - y) * (w - x) / area
        return LT, RT, LB, RB, w1, w2, w3, w4

    def dividePred(self, pred, x, y):
        h, w = pred.shape
        return pred[0:y, 0:x], pred[0:y, x:w], pred[y:h, 0:x], pred[y:h, x:w]

    def ssim(self, in1, in2):
        in2 = np.float32(in2)
        h, w = in1.shape
        N = h * w

        x, y = np.mean(in1), np.mean(in2)
        sigma_x, sigma_y = np.var(in1), np.var(in2)
        sigma_xy = np.sum((in1 - x) * (in2 - y)) / (N - 1)

        alpha = 4 * x * y * sigma_xy
        beta = (x * x + y * y) * (sigma_x + sigma_y)

        if alpha != 0:
            return alpha / (beta + 1e-8)
        if beta == 0:  # both maps are constant
            return 1
        return 0


class CalEM(object):
    """Enhanced-alignment Measure for Binary Foreground Map Evaluation (IJCAI 2018)."""

    def __init__(self, num):
        self.num = num
        self.idx = 0
        self.prediction = np.zeros(num)

    def update(self, pred, gt):
        self.prediction[self.idx] = self.cal(pred, gt)
        self.idx += 1

    def cal(self, pred, gt):
        # Binarize the prediction at twice its mean (clamped to 1).
        th = 2 * pred.mean()
        if th > 1:
            th = 1
        FM = np.zeros(gt.shape)
        FM[pred >= th] = 1
        FM = np.array(FM, dtype=bool)
        GT = np.array(gt, dtype=bool)
        dFM = np.double(FM)
        if sum(sum(np.double(GT))) == 0:
            # Degenerate all-background GT.
            enhanced_matrix = 1.0 - dFM
        elif sum(sum(np.double(~GT))) == 0:
            # Degenerate all-foreground GT.
            enhanced_matrix = dFM
        else:
            dGT = np.double(GT)
            align_matrix = self.AlignmentTerm(dFM, dGT)
            enhanced_matrix = self.EnhancedAlignmentTerm(align_matrix)
        w, h = np.shape(GT)
        return sum(sum(enhanced_matrix)) / (w * h - 1 + 1e-8)

    def AlignmentTerm(self, dFM, dGT):
        align_FM = dFM - np.mean(dFM)
        align_GT = dGT - np.mean(dGT)
        return 2.0 * (align_GT * align_FM) / (align_GT * align_GT + align_FM * align_FM + 1e-8)

    def EnhancedAlignmentTerm(self, align_Matrix):
        return np.power(align_Matrix + 1, 2) / 4

    def show(self):
        assert self.num == self.idx, f"{self.num}, {self.idx}"
        return self.prediction.mean()


class CalWFM(object):
    # Weighted F-measure; cal() continues beyond this excerpt and is kept verbatim.
    def __init__(self, num, beta=1):
        self.scores_list = np.zeros(num)
        self.beta = beta
        self.eps = 1e-6
        self.idx = 0
        self.num = num

    def update(self, pred, gt):
        gt = gt > 0.5
        self.scores_list[self.idx] = 0 if gt.max() == 0 else self.cal(pred, gt)
        self.idx += 1

    def matlab_style_gauss2D(self, shape=(7, 7), sigma=5):
        """
        2D gaussian mask - should give the same result as MATLAB's
        fspecial('gaussian',[shape],[sigma])
        """
        m, n = [(ss - 1.0) / 2.0 for ss in shape]
        y, x = np.ogrid[-m : m + 1, -n : n + 1]
        h = np.exp(-(x * x + y * y) / (2.0 * sigma * sigma))
        h[h < np.finfo(h.dtype).eps * h.max()] = 0
        sumh = h.sum()
        if sumh != 0:
            h /= sumh
        return h

    def cal(self, pred, gt):
        # [Dst,IDXT] = bwdist(dGT);
        Dst, Idxt = bwdist(gt == 0, return_indices=True)

        # %Pixel dependency
        # E = abs(FG-dGT);
        E = np.abs(pred - gt)
        # Et = E;
        # Et(~GT)=Et(IDXT(~GT)); %To deal
class AvgMeter(object):
    """Running weighted average of a scalar (e.g. a loss value)."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Discard all recorded statistics."""
        self.val = 0
        self.sum = 0
        self.count = 0
        self.avg = 0

    def update(self, val, n=1):
        """Record `val` with weight `n` and refresh the running mean."""
        self.val = val
        self.sum += n * val
        self.count += n
        self.avg = self.sum / self.count
def check_mkdir(dir_name):
    """Create `dir_name` (and any missing parents) if it does not exist.

    Fix: use `exist_ok=True` instead of the previous exists()-then-makedirs()
    sequence, which raced with concurrent creators of the same directory.
    """
    os.makedirs(dir_name, exist_ok=True)


def make_log(path, context):
    """Append `context` as one line to the log file at `path`.

    Explicit UTF-8 so the log encoding does not depend on the platform default.
    """
    with open(path, "a", encoding="utf-8") as log:
        log.write(f"{context}\n")


def check_dir_path_valid(path: list):
    """Assert that every non-empty entry in `path` is an existing directory."""
    for p in path:
        if p:
            assert os.path.exists(p)
            assert os.path.isdir(p)


def construct_path_dict(proj_root, exp_name):
    """Build the dictionary of output/checkpoint/log paths for one experiment.

    Fix: the date string is computed once, so the four per-day log filenames
    cannot disagree if the call happens to straddle midnight (the original
    called `datetime.now()` four separate times).

    Args:
        proj_root: project root directory containing the `output` folder.
        exp_name: experiment name used as the subfolder under `output`.

    Returns:
        dict mapping symbolic names (e.g. "tb", "pth", "tr_log") to paths.
    """
    ckpt_path = os.path.join(proj_root, "output")

    pth_log_path = os.path.join(ckpt_path, exp_name)

    tb_path = os.path.join(pth_log_path, "tb")
    save_path = os.path.join(pth_log_path, "pre")
    pth_path = os.path.join(pth_log_path, "pth")

    final_full_model_path = os.path.join(pth_path, "checkpoint_final.pth.tar")
    final_state_path = os.path.join(pth_path, "state_final.pth")

    date_str = str(datetime.now())[:10]
    tr_log_path = os.path.join(pth_log_path, f"tr_{date_str}.txt")
    te_log_path = os.path.join(pth_log_path, f"te_{date_str}.txt")
    cfg_log_path = os.path.join(pth_log_path, f"cfg_{date_str}.txt")
    trainer_log_path = os.path.join(pth_log_path, f"trainer_{date_str}.txt")

    path_config = {
        "ckpt_path": ckpt_path,
        "pth_log": pth_log_path,
        "tb": tb_path,
        "save": save_path,
        "pth": pth_path,
        "final_full_net": final_full_model_path,
        "final_state_net": final_state_path,
        "tr_log": tr_log_path,
        "te_log": te_log_path,
        "cfg_log": cfg_log_path,
        "trainer_log": trainer_log_path,
    }

    return path_config
def cus_sample(feat, **kwargs):
    """Bilinear interpolation of `feat`.

    Exactly one keyword argument is required: either `size` or `scale_factor`,
    forwarded directly to F.interpolate.
    """
    keys = tuple(kwargs)
    assert len(keys) == 1 and keys[0] in ("size", "scale_factor")
    return F.interpolate(feat, **kwargs, mode="bilinear", align_corners=True)


def upsample_add(*xs):
    """Resize every tensor but the last to the last one's spatial size and sum them."""
    *others, base = xs
    total = base
    for item in others:
        total = total + F.interpolate(
            item, size=total.size()[2:], mode="bilinear", align_corners=False
        )
    return total


def upsample_cat(*xs):
    """Resize every tensor but the last to the last one's spatial size, then concat on dim 1."""
    base = xs[-1]
    target = base.size()[2:]
    resized = [
        F.interpolate(item, size=target, mode="bilinear", align_corners=False)
        for item in xs[:-1]
    ]
    resized.append(base)
    return torch.cat(resized, dim=1)


def upsample_reduce(b, a):
    """Upsample `b` to `a`'s spatial size and reduce `a`'s channels to `b`'s, then add.

    `a`'s channel count must be a multiple of `b`'s; the extra groups are
    averaged away.
    """
    _, channels, _, _ = b.size()
    batch, _, height, width = a.size()

    b = F.interpolate(b, size=(height, width), mode="bilinear", align_corners=False)
    a = a.reshape(batch, -1, channels, height, width).mean(1)

    return b + a


def shuffle_channels(x, groups):
    """Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,W] -> [N,C,H,W].

    Splits the C channels into `groups` groups and interleaves them, as used
    by ShuffleNet-style blocks.
    """
    batch, channels, height, width = x.size()
    x = x.reshape(batch, groups, channels // groups, height, width).transpose(1, 2)
    return x.reshape(batch, channels, height, width)
class Compose(object):
    """Apply a sequence of paired (image, mask) transforms in order."""

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, img, mask):
        assert img.size == mask.size
        for t in self.transforms:
            img, mask = t(img, mask)
        return img, mask


class RandomHorizontallyFlip(object):
    """Flip image and mask together, left-right, with probability 0.5."""

    def __call__(self, img, mask):
        if random.random() < 0.5:
            return img.transpose(Image.FLIP_LEFT_RIGHT), mask.transpose(Image.FLIP_LEFT_RIGHT)
        return img, mask


class JointResize(object):
    """Resize image and mask to a fixed (w, h) size.

    Fix: the image is resampled bilinearly and the mask with nearest-neighbour.
    Previously both used Pillow's default resize filter, which blurs binary
    masks and was inconsistent with triple_transforms.JointResize.
    """

    def __init__(self, size):
        if isinstance(size, int):
            self.size = (size, size)
        elif isinstance(size, tuple):
            self.size = size
        else:
            raise RuntimeError("size参数请设置为int或者tuple")

    def __call__(self, img, mask):
        img = img.resize(self.size, Image.BILINEAR)
        mask = mask.resize(self.size, Image.NEAREST)  # keep mask labels crisp
        return img, mask


class RandomRotate(object):
    """Rotate image (bilinear) and mask (nearest) by one shared random angle in [-degree, degree]."""

    def __init__(self, degree):
        self.degree = degree

    def __call__(self, img, mask):
        rotate_degree = random.random() * 2 * self.degree - self.degree
        return img.rotate(rotate_degree, Image.BILINEAR), mask.rotate(rotate_degree, Image.NEAREST)
class ScaleCenterCrop(object):
    """Scale so the short edge equals `input_size`, then center-crop a square."""

    def __init__(self, input_size):
        self.input_size = input_size

    def __call__(self, img, mask):
        width, height = img.size
        # Scale so that the shorter edge matches the crop size.
        if width > height:
            new_h = self.input_size
            new_w = int(1.0 * width * new_h / height)
        else:
            new_w = self.input_size
            new_h = int(1.0 * height * new_w / width)
        img = img.resize((new_w, new_h), Image.BILINEAR)
        mask = mask.resize((new_w, new_h), Image.NEAREST)

        # Center-crop a square of side `input_size` from the scaled pair.
        width, height = img.size
        left = int(round((width - self.input_size) / 2.0))
        top = int(round((height - self.input_size) / 2.0))
        box = (left, top, left + self.input_size, top + self.input_size)
        img = img.crop(box)
        mask = mask.crop(box)

        return img, mask


class RandomGaussianBlur(object):
    """With probability 0.5, Gaussian-blur only the image (mask untouched)."""

    def __call__(self, img, mask):
        if random.random() < 0.5:
            img = img.filter(ImageFilter.GaussianBlur(radius=random.random()))

        return img, mask
class Compose(object):
    """Apply a sequence of (img, mask, depth) transforms in order."""

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, img, mask, depth):
        assert img.size == mask.size
        assert img.size == depth.size
        for transform in self.transforms:
            img, mask, depth = transform(img, mask, depth)
        return img, mask, depth


class RandomHorizontallyFlip(object):
    """Flip all three maps left-right together with probability 0.5."""

    def __call__(self, img, mask, depth):
        if random.random() >= 0.5:
            return img, mask, depth
        flipped = tuple(m.transpose(Image.FLIP_LEFT_RIGHT) for m in (img, mask, depth))
        return flipped
class RandomRotate(object):
    """Rotate the triple by one shared random angle drawn from [-degree, degree].

    The image and depth map are rotated bilinearly; the mask uses
    nearest-neighbour so its labels stay crisp.
    """

    def __init__(self, degree):
        self.degree = degree

    def __call__(self, img, mask, depth):
        angle = random.random() * 2 * self.degree - self.degree
        return (
            img.rotate(angle, Image.BILINEAR),
            mask.rotate(angle, Image.NEAREST),
            depth.rotate(angle, Image.BILINEAR),
        )