├── utils ├── __init__.py └── evaluate.py ├── dataset ├── __init__.py ├── datasets.py └── list │ └── cityscapes │ └── val.lst ├── networks ├── __init__.py └── pspnet.py ├── libs ├── src │ ├── bn.o │ ├── common.h │ ├── lib_cffi.h │ ├── bn.h │ ├── lib_cffi.cpp │ └── bn.cu ├── _ext │ ├── __ext.so │ ├── __pycache__ │ │ ├── __init__.cpython-35.pyc │ │ ├── __init__.cpython-36.pyc │ │ └── __init__.cpython-37.pyc │ └── __init__.py ├── __pycache__ │ ├── bn.cpython-35.pyc │ ├── bn.cpython-36.pyc │ ├── bn.cpython-37.pyc │ ├── misc.cpython-35.pyc │ ├── dense.cpython-35.pyc │ ├── __init__.cpython-35.pyc │ ├── __init__.cpython-36.pyc │ ├── __init__.cpython-37.pyc │ ├── functions.cpython-35.pyc │ ├── functions.cpython-36.pyc │ ├── functions.cpython-37.pyc │ └── residual.cpython-35.pyc ├── __init__.py ├── misc.py ├── build.py ├── build.sh ├── dense.py ├── residual.py ├── bn.py └── functions.py ├── LICENSE ├── valandtest.py ├── README.md └── options.py /utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dataset/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /networks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /libs/src/bn.o: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/src/bn.o -------------------------------------------------------------------------------- /libs/_ext/__ext.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/_ext/__ext.so -------------------------------------------------------------------------------- /libs/__pycache__/bn.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/bn.cpython-35.pyc -------------------------------------------------------------------------------- /libs/__pycache__/bn.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/bn.cpython-36.pyc -------------------------------------------------------------------------------- /libs/__pycache__/bn.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/bn.cpython-37.pyc -------------------------------------------------------------------------------- /libs/__pycache__/misc.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/misc.cpython-35.pyc -------------------------------------------------------------------------------- /libs/__pycache__/dense.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/dense.cpython-35.pyc -------------------------------------------------------------------------------- /libs/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /libs/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /libs/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /libs/__pycache__/functions.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/functions.cpython-35.pyc -------------------------------------------------------------------------------- /libs/__pycache__/functions.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/functions.cpython-36.pyc -------------------------------------------------------------------------------- /libs/__pycache__/functions.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/functions.cpython-37.pyc -------------------------------------------------------------------------------- /libs/__pycache__/residual.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/__pycache__/residual.cpython-35.pyc -------------------------------------------------------------------------------- /libs/_ext/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/_ext/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /libs/_ext/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/_ext/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /libs/_ext/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/drilistbox/CWD/HEAD/libs/_ext/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /libs/__init__.py: -------------------------------------------------------------------------------- 1 | from .bn import ABN, InPlaceABN, InPlaceABNWrapper, InPlaceABNSync, InPlaceABNSyncWrapper 2 | from .misc import GlobalAvgPool2d 3 | from .residual import IdentityResidualBlock 4 | from .dense import DenseModule 5 | -------------------------------------------------------------------------------- /libs/misc.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | 3 | 4 | class GlobalAvgPool2d(nn.Module): 5 | def __init__(self): 6 | """Global average pooling over the input's spatial dimensions""" 7 | super(GlobalAvgPool2d, self).__init__() 8 | 9 | def forward(self, inputs): 10 | in_size = inputs.size() 11 | return inputs.view((in_size[0], in_size[1], -1)).mean(dim=2) 12 | -------------------------------------------------------------------------------- /libs/_ext/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from torch.utils.ffi import _wrap_function 3 | from .__ext import lib as _lib, ffi as _ffi 4 | 5 | __all__ = [] 6 | def _import_symbols(locals): 7 | for symbol in dir(_lib): 8 | fn = getattr(_lib, symbol) 9 | if callable(fn): 10 | locals[symbol] = _wrap_function(fn, _ffi) 11 | else: 12 | locals[symbol] = fn 13 | __all__.append(symbol) 14 | 15 | _import_symbols(locals()) 16 | -------------------------------------------------------------------------------- /libs/build.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from torch.utils.ffi import create_extension 4 | 5 | sources = ['src/lib_cffi.cpp'] 6 | headers = ['src/lib_cffi.h'] 7 | extra_objects = ['src/bn.o'] 8 | with_cuda = True 9 | 10 | this_file = os.path.dirname(os.path.realpath(__file__)) 11 | extra_objects = [os.path.join(this_file, fname) for fname in extra_objects] 12 | 13 | ffi = create_extension( 14 | '_ext', 15 | headers=headers, 16 | sources=sources, 17 | relative_to=__file__, 18 | with_cuda=with_cuda, 19 | extra_objects=extra_objects, 20 | extra_compile_args=["-std=c++11"] 21 | ) 22 | 23 | if __name__ == '__main__': 24 | ffi.build() 25 | -------------------------------------------------------------------------------- /libs/src/common.h: -------------------------------------------------------------------------------- 1 | #ifndef __COMMON__ 2 | #define __COMMON__ 3 | #include 4 | 5 | /* 6 | * General settings 7 | */ 8 | const int WARP_SIZE = 32; 9 | const int MAX_BLOCK_SIZE = 512; 10 | 11 | /* 12 | * Utility functions 13 | */ 14 | template 15 | __device__ __forceinline__ T WARP_SHFL_XOR(T value, int laneMask, int width = warpSize, 16 | unsigned int mask = 0xffffffff) { 17 | #if CUDART_VERSION >= 9000 18 | return __shfl_xor_sync(mask, value, laneMask, width); 19 | #else 20 | return __shfl_xor(value, laneMask, width); 21 | #endif 22 | } 23 | 24 | __device__ __forceinline__ int getMSB(int val) { return 31 - __clz(val); } 25 | 26 | static int getNumThreads(int nElem) { 27 | int threadSizes[5] = {32, 64, 128, 256, MAX_BLOCK_SIZE}; 28 | for (int i = 0; i != 5; ++i) { 29 | if (nElem <= threadSizes[i]) { 30 | return threadSizes[i]; 31 | } 32 | } 33 | return MAX_BLOCK_SIZE; 34 | } 35 | 36 | 37 | #endif -------------------------------------------------------------------------------- /libs/src/lib_cffi.h: -------------------------------------------------------------------------------- 1 | int bn_mean_var_cuda(const THCudaTensor *x, THCudaTensor *mean, THCudaTensor *var); 2 | int bn_forward_cuda(const THCudaTensor *x, const THCudaTensor *mean, const THCudaTensor *var, 3 | const THCudaTensor *weight, const THCudaTensor *bias, THCudaTensor *y, THCudaTensor *z, 4 | float eps); 5 | int bn_edz_eydz_cuda(const THCudaTensor *z, const THCudaTensor *dz, const THCudaTensor *weight, 6 | const THCudaTensor *bias, THCudaTensor *edz, THCudaTensor *eydz, float eps); 7 | int bn_backard_cuda(const THCudaTensor *dz, const THCudaTensor *z, const THCudaTensor *var, 8 | const THCudaTensor *weight, const THCudaTensor *bias, const THCudaTensor *edz, 9 | const THCudaTensor *eydz, THCudaTensor *dx, THCudaTensor *dweight, THCudaTensor *dbias, 10 | float eps); 11 | int leaky_relu_cuda(THCudaTensor *x, float slope); 12 | int leaky_relu_backward_cuda(const THCudaTensor *x, THCudaTensor *dx, float slope); 13 | int elu_cuda(THCudaTensor *x); 14 | int elu_backward_cuda(const THCudaTensor *x, THCudaTensor *dx); 15 | int elu_inv_cuda(THCudaTensor *x); -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 2-Clause License 2 | 3 | Copyright (c) 2020, Changyong Shu 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /libs/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Configuration 4 | # -gencode=arch=compute_70,code=sm_70 \ 5 | # CUDA_GENCODE="\ 6 | # -gencode=arch=compute_61,code=sm_61 \ 7 | # -gencode=arch=compute_60,code=sm_60 \ 8 | # -gencode=arch=compute_52,code=sm_52 \ 9 | # -gencode=arch=compute_50,code=sm_50" 10 | 11 | CUDA_GENCODE="-arch=sm_30 \ 12 | -gencode=arch=compute_30,code=sm_30 \ 13 | -gencode=arch=compute_50,code=sm_50 \ 14 | -gencode=arch=compute_52,code=sm_52 \ 15 | -gencode=arch=compute_60,code=sm_60 \ 16 | -gencode=arch=compute_61,code=sm_61 \ 17 | -gencode=arch=compute_62,code=sm_62 \ 18 | -gencode=arch=compute_70,code=sm_70 \ 19 | -gencode=arch=compute_70,code=compute_70" 20 | 21 | # CUDA_GENCODE="-arch=sm_30 \ 22 | # -gencode=arch=compute_30,code=sm_30 \ 23 | # -gencode=arch=compute_50,code=sm_50 \ 24 | # -gencode=arch=compute_52,code=sm_52 \ 25 | # -gencode=arch=compute_60,code=sm_60 \ 26 | # -gencode=arch=compute_61,code=sm_61 \ 27 | # -gencode=arch=compute_62,code=sm_62 " 28 | 29 | # CUDA_GENCODE="-arch=sm_30 \ 30 | # -gencode=arch=compute_30,code=sm_30 \ 31 | # -gencode=arch=compute_50,code=sm_50 \ 32 | # -gencode=arch=compute_52,code=sm_52 \ 33 | # -gencode=arch=compute_60,code=sm_60 \ 34 | # -gencode=arch=compute_61,code=sm_61" 35 | 36 | 37 | cd src 38 | nvcc -I/usr/local/cuda/include --expt-extended-lambda -O3 -c -o bn.o bn.cu -x cu -Xcompiler -fPIC -std=c++11 ${CUDA_GENCODE} 39 | cd .. 40 | -------------------------------------------------------------------------------- /libs/src/bn.h: -------------------------------------------------------------------------------- 1 | #ifndef __BN__ 2 | #define __BN__ 3 | 4 | /* 5 | * Exported functions 6 | */ 7 | extern "C" int _bn_mean_var_cuda(int N, int C, int S, const float *x, float *mean, float *var, cudaStream_t); 8 | extern "C" int _bn_forward_cuda(int N, int C, int S, const float *x, const float *mean, const float *var, 9 | const float *weight, const float *bias, float *y, float *z, float eps, cudaStream_t); 10 | extern "C" int _bn_edz_eydz_cuda(int N, int C, int S, const float *z, const float *dz, const float *weight, 11 | const float *bias, float *edz, float *eydz, float eps, cudaStream_t stream); 12 | extern "C" int _bn_backward_cuda(int N, int C, int S, const float *dz, const float *z, const float *var, 13 | const float *weight, const float *bias, const float *edz, const float *eydz, float *dx, 14 | float *dweight, float *dbias, float eps, cudaStream_t stream); 15 | extern "C" int _leaky_relu_cuda(int N, float *x, float slope, cudaStream_t stream); 16 | extern "C" int _leaky_relu_backward_cuda(int N, const float *x, float *dx, float slope, cudaStream_t stream); 17 | extern "C" int _elu_cuda(int N, float *x, cudaStream_t stream); 18 | extern "C" int _elu_backward_cuda(int N, const float *x, float *dx, cudaStream_t stream); 19 | extern "C" int _elu_inv_cuda(int N, float *x, cudaStream_t stream); 20 | 21 | #endif 22 | -------------------------------------------------------------------------------- /libs/dense.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | 3 | import torch 4 | import torch.nn as nn 5 | 6 | from .bn import ABN 7 | 8 | 9 | class DenseModule(nn.Module): 10 | def __init__(self, in_channels, growth, layers, bottleneck_factor=4, norm_act=ABN, dilation=1): 11 | super(DenseModule, self).__init__() 12 | self.in_channels = in_channels 13 | self.growth = growth 14 | self.layers = layers 15 | 16 | self.convs1 = nn.ModuleList() 17 | self.convs3 = nn.ModuleList() 18 | for i in range(self.layers): 19 | self.convs1.append(nn.Sequential(OrderedDict([ 20 | ("bn", norm_act(in_channels)), 21 | ("conv", nn.Conv2d(in_channels, self.growth * bottleneck_factor, 1, bias=False)) 22 | ]))) 23 | self.convs3.append(nn.Sequential(OrderedDict([ 24 | ("bn", norm_act(self.growth * bottleneck_factor)), 25 | ("conv", nn.Conv2d(self.growth * bottleneck_factor, self.growth, 3, padding=dilation, bias=False, 26 | dilation=dilation)) 27 | ]))) 28 | in_channels += self.growth 29 | 30 | @property 31 | def out_channels(self): 32 | return self.in_channels + self.growth * self.layers 33 | 34 | def forward(self, x): 35 | inputs = [x] 36 | for i in range(self.layers): 37 | x = torch.cat(inputs, dim=1) 38 | x = self.convs1[i](x) 39 | x = self.convs3[i](x) 40 | inputs += [x] 41 | 42 | return torch.cat(inputs, dim=1) -------------------------------------------------------------------------------- /valandtest.py: -------------------------------------------------------------------------------- 1 | ''' 2 | ml load anaconda 3 | conda activate ke_seg 4 | cd /home/nfs/em4/shuchangyong/submit 5 | 6 | python val.py --data-dir /home/nfs/em2/wei_dataset/raw_datasets/CITYSCAPES/data/cityscapes --restore-from new_rn18-cityscape_singleAndWhole_val-75.90_test-74.58.pth --gpu 0 --type val --figsavepath 75.9val 7 | python val.py --data-dir /home/nfs/em2/wei_dataset/raw_datasets/CITYSCAPES/data/cityscapes --restore-from new_rn18-cityscape_singleAndWhole_val-75.90_test-74.58.pth --gpu 2 --type test --figsavepath 74.58test 8 | 9 | python val.py --data-dir /home/nfs/em2/wei_dataset/raw_datasets/CITYSCAPES/data/cityscapes --restore-from new_rn18-cityscape_singleAndWhole_val-75.15_test-74.18.pth --gpu 1 --type val --figsavepath 75.15val 10 | python val.py --data-dir /home/nfs/em2/wei_dataset/raw_datasets/CITYSCAPES/data/cityscapes --restore-from new_rn18-cityscape_singleAndWhole_val-75.15_test-74.18.pth --gpu 3 --type test --figsavepath 74.18test 11 | 12 | python val.py --data-dir /home/nfs/em2/wei_dataset/raw_datasets/CITYSCAPES/data/cityscapes --restore-from new_rn18-cityscape_singleAndWhole_val-75.02_test-00.00.pth --gpu 1 --type val --figsavepath 75.02val 13 | python val.py --data-dir /home/nfs/em2/wei_dataset/raw_datasets/CITYSCAPES/data/cityscapes --restore-from new_rn18-cityscape_singleAndWhole_val-75.02_test-00.00.pth --gpu 2 --type test --figsavepath 00.00test 14 | 15 | ''' 16 | 17 | import os 18 | import torch 19 | from options import ValOptions 20 | from torch.utils import data 21 | from dataset.datasets import CSTrainValSet, CSTestSet 22 | from networks.pspnet import Res_pspnet, BasicBlock, Bottleneck 23 | from utils.evaluate import evaluate_main 24 | import warnings 25 | warnings.filterwarnings("ignore") 26 | 27 | if __name__ == '__main__': 28 | args = ValOptions().initialize() 29 | os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu 30 | 31 | if args.type == 'val': 32 | loader = data.DataLoader(CSTrainValSet(args.data_dir, args.val_data_list, crop_size=(1024, 2048), scale=False, mirror=False), batch_size=1, shuffle=False, pin_memory=True) 33 | elif args.type == 'test': 34 | loader = data.DataLoader(CSTestSet(args.data_dir, args.test_data_list, crop_size=(1024, 2048)), batch_size=1, shuffle=False, pin_memory=True) 35 | 36 | student = Res_pspnet(BasicBlock, [2, 2, 2, 2], num_classes = args.num_classes) 37 | student.load_state_dict(torch.load(args.restore_from)) 38 | print("=> load " + str(args.restore_from)) 39 | 40 | mean_IU, IU_array = evaluate_main(args.figsavepath, student, loader, args.num_classes, args.type) 41 | print('mean_IU: {:.6f} IU_array: \n{}'.format(mean_IU, IU_array)) 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Channel-wise Distillation for Semantic Segmentation 2 | 3 | ## Introduction 4 | 5 | This repository contains the PyTorch test implementation of: Channel-wise Distillation for Semantic Segmentation 6 | 7 | ## Requirements 8 | 9 | All the codes are tested in the following environment: 10 | 11 | * Linux (tested on Ubuntu 16.04 / CentOS 7.6) 12 | * Python 3.6.2 13 | * PyTorch 0.4.1 14 | * Single TITAN Xp GPU 15 | 16 | ## Installation 17 | 18 | * Install PyTorch: ` conda install pytorch=0.4.1 cuda90 torchvision -c pytorch ` 19 | * Install other dependences: ` pip install opencv-python scipy ` 20 | * Install InPlace-ABN: 21 | ```bash 22 | cd libs 23 | sh build.sh 24 | python build.py 25 | ``` 26 | The `build.sh` script assumes that the `nvcc` compiler is available in the current system search path. 27 | The CUDA kernels are compiled for `sm_50`, `sm_52` and `sm_61` by default. 28 | To change this (_e.g._ if you are using a Kepler GPU), please edit the `CUDA_GENCODE` variable in `build.sh`. 29 | 30 | ## Dataset & Models 31 | 32 | * Dataset: [[Cityscapes]](https://www.cityscapes-dataset.com/) 33 | 34 | * After distillation: PSPNet (ResNet-18) 35 | rn18-cityscape_singleAndWhole_val-75.05_test-73.86.pth [[Google Drive]](https://drive.google.com/file/d/1eLOslSm1Clif_PJFTedbmG9fdhhqPSAe/view?usp=sharing) 36 | rn18-cityscape_singleAndWhole_val-75.90_test-74.58.pth [[Google Drive]](https://drive.google.com/file/d/1IWGQvoP8OMcRysHPMPmXAjWi8k7IW3ZZ/view?usp=sharing) 37 | 38 | Please create a new folder `ckpt` and move all downloaded models to it. 39 | 40 | ## Usage 41 | 42 | 43 | #### 1. Inference with the evaluation dataset 44 | 45 | ```bash 46 | python valandtest.py --data-dir path/to/CITYSCAPES/data/cityscapes --restore-from ckpt/new_rn18-cityscape_singleAndWhole_val-75.02_test-73.86.pth --gpu 0 --type val --figsavepath 75.02val 47 | 48 | python valandtest.py --data-dir path/to/CITYSCAPES/data/cityscapes --restore-from ckpt/new_rn18-cityscape_singleAndWhole_val-75.90_test-74.58.pth --gpu 0 --type val --figsavepath 75.90val 49 | ``` 50 | 51 | #### 2. Inference on the test dataset 52 | 53 | ```bash 54 | python valandtest.py --data-dir path/to/CITYSCAPES/data/cityscapes --restore-from ckpt/new_rn18-cityscape_singleAndWhole_val-75.02_test-73.86.pth --gpu 0 --type test --figsavepath 73.86test 55 | 56 | python valandtest.py --data-dir path/to/CITYSCAPES/data/cityscapes --restore-from ckpt/new_rn18-cityscape_singleAndWhole_val-75.90_test-74.58.pth --gpu 0 --type test --figsavepath 74.58test 57 | ``` 58 | 59 | 60 | | Model | Average | roda | sidewalk | building| wall | fence | pole | trafficlight | trafficsign | vegetation | terrain | sky | person | rider | car | truck | bus | train | motorcycle | bicycle | 61 | | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | -- | 62 | | IoU | 73.86 | 97.84 | 81.01 | 91.55 | 48.63 | 53.18 | 61.14 | 70.21 | 74.20 | 92.93 | 70.91 | 94.84 | 83.11 | 62.39 | 94.74 | 54.12 | 66.80 | 70.91 | 61.60 | 73.27 | 63 | | IoU | 74.58 | 97.78 | 80.56 | 91.45 | 52.78 | 52.91 | 59.90 | 70.50 | 73.13 | 92.54 | 70.70 | 94.57 | 82.25 | 63.51 | 94.76 | 59.31 | 73.68 | 73.00 | 61.54 | 72.12 | 64 | 65 | 66 | 67 | ## Citation 68 | 69 | Please consider citing this work if it helps your research: 70 | 71 | ``` 72 | 73 | @inproceedings{shu2020cwd, 74 | title={Channel-wise Distillation for Semantic Segmentation}, 75 | author={Shu, Changyong and Liu, Yifan and Gao, Jianfei and Xu, Lin and Shen, Chunhua}, 76 | } 77 | 78 | ``` 79 | 80 | -------------------------------------------------------------------------------- /libs/residual.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | 3 | import torch.nn as nn 4 | 5 | from .bn import ABN 6 | 7 | 8 | class IdentityResidualBlock(nn.Module): 9 | def __init__(self, 10 | in_channels, 11 | channels, 12 | stride=1, 13 | dilation=1, 14 | groups=1, 15 | norm_act=ABN, 16 | dropout=None): 17 | """Configurable identity-mapping residual block 18 | 19 | Parameters 20 | ---------- 21 | in_channels : int 22 | Number of input channels. 23 | channels : list of int 24 | Number of channels in the internal feature maps. Can either have two or three elements: if three construct 25 | a residual block with two `3 x 3` convolutions, otherwise construct a bottleneck block with `1 x 1`, then 26 | `3 x 3` then `1 x 1` convolutions. 27 | stride : int 28 | Stride of the first `3 x 3` convolution 29 | dilation : int 30 | Dilation to apply to the `3 x 3` convolutions. 31 | groups : int 32 | Number of convolution groups. This is used to create ResNeXt-style blocks and is only compatible with 33 | bottleneck blocks. 34 | norm_act : callable 35 | Function to create normalization / activation Module. 36 | dropout: callable 37 | Function to create Dropout Module. 38 | """ 39 | super(IdentityResidualBlock, self).__init__() 40 | 41 | # Check parameters for inconsistencies 42 | if len(channels) != 2 and len(channels) != 3: 43 | raise ValueError("channels must contain either two or three values") 44 | if len(channels) == 2 and groups != 1: 45 | raise ValueError("groups > 1 are only valid if len(channels) == 3") 46 | 47 | is_bottleneck = len(channels) == 3 48 | need_proj_conv = stride != 1 or in_channels != channels[-1] 49 | 50 | self.bn1 = norm_act(in_channels) 51 | if not is_bottleneck: 52 | layers = [ 53 | ("conv1", nn.Conv2d(in_channels, channels[0], 3, stride=stride, padding=dilation, bias=False, 54 | dilation=dilation)), 55 | ("bn2", norm_act(channels[0])), 56 | ("conv2", nn.Conv2d(channels[0], channels[1], 3, stride=1, padding=dilation, bias=False, 57 | dilation=dilation)) 58 | ] 59 | if dropout is not None: 60 | layers = layers[0:2] + [("dropout", dropout())] + layers[2:] 61 | else: 62 | layers = [ 63 | ("conv1", nn.Conv2d(in_channels, channels[0], 1, stride=stride, padding=0, bias=False)), 64 | ("bn2", norm_act(channels[0])), 65 | ("conv2", nn.Conv2d(channels[0], channels[1], 3, stride=1, padding=dilation, bias=False, 66 | groups=groups, dilation=dilation)), 67 | ("bn3", norm_act(channels[1])), 68 | ("conv3", nn.Conv2d(channels[1], channels[2], 1, stride=1, padding=0, bias=False)) 69 | ] 70 | if dropout is not None: 71 | layers = layers[0:4] + [("dropout", dropout())] + layers[4:] 72 | self.convs = nn.Sequential(OrderedDict(layers)) 73 | 74 | if need_proj_conv: 75 | self.proj_conv = nn.Conv2d(in_channels, channels[-1], 1, stride=stride, padding=0, bias=False) 76 | 77 | def forward(self, x): 78 | if hasattr(self, "proj_conv"): 79 | bn1 = self.bn1(x) 80 | shortcut = self.proj_conv(bn1) 81 | else: 82 | shortcut = x.clone() 83 | bn1 = self.bn1(x) 84 | 85 | out = self.convs(bn1) 86 | out.add_(shortcut) 87 | 88 | return out 89 | -------------------------------------------------------------------------------- /utils/evaluate.py: -------------------------------------------------------------------------------- 1 | import os 2 | import scipy 3 | from scipy import ndimage 4 | import numpy as np 5 | import torch 6 | from torch.autograd import Variable 7 | from torch.utils import data 8 | from math import ceil 9 | from PIL import Image as PILImage 10 | import torch.nn as nn 11 | from torch.nn import functional as F 12 | import matplotlib.pyplot as plt 13 | 14 | def id2trainId(label, id_to_trainid, reverse=False): 15 | label_copy = label.copy() 16 | if reverse: 17 | for v, k in id_to_trainid.items(): 18 | label_copy[label == k] = v 19 | else: 20 | for k, v in id_to_trainid.items(): 21 | label_copy[label == k] = v 22 | return label_copy 23 | 24 | def get_palette(num_cls): 25 | """ Returns the color map for visualizing the segmentation mask. 26 | Args: 27 | num_cls: Number of classes 28 | Returns: 29 | The color map 30 | """ 31 | n = num_cls 32 | palette = [0] * (n * 3) 33 | for j in range(0, n): 34 | lab = j 35 | palette[j * 3 + 0] = 0 36 | palette[j * 3 + 1] = 0 37 | palette[j * 3 + 2] = 0 38 | i = 0 39 | while lab: 40 | palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) 41 | palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) 42 | palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) 43 | i += 1 44 | lab >>= 3 45 | return palette 46 | 47 | def get_confusion_matrix(gt_label, pred_label, class_num): 48 | """ 49 | Calcute the confusion matrix by given label and pred 50 | :param gt_label: the ground truth label 51 | :param pred_label: the pred label 52 | :param class_num: the nunber of class 53 | :return: the confusion matrix 54 | """ 55 | index = (gt_label * class_num + pred_label).astype('int32') 56 | label_count = np.bincount(index) 57 | confusion_matrix = np.zeros((class_num, class_num)) 58 | 59 | for i_label in range(class_num): 60 | for i_pred_label in range(class_num): 61 | cur_index = i_label * class_num + i_pred_label 62 | if cur_index < len(label_count): 63 | confusion_matrix[i_label, i_pred_label] = label_count[cur_index] 64 | 65 | return confusion_matrix 66 | 67 | def evaluate_main(save_path, model, loader, num_classes, type = 'val'): 68 | """Create the model and start the evaluation process.""" 69 | 70 | ignore_label = 255 71 | id_to_trainid = {-1: ignore_label, 0: ignore_label, 1: ignore_label, 2: ignore_label, 72 | 3: ignore_label, 4: ignore_label, 5: ignore_label, 6: ignore_label, 73 | 7: 0, 8: 1, 9: ignore_label, 10: ignore_label, 11: 2, 12: 3, 13: 4, 74 | 14: ignore_label, 15: ignore_label, 16: ignore_label, 17: 5, 75 | 18: ignore_label, 19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11, 25: 12, 26: 13, 27: 14, 76 | 28: 15, 29: ignore_label, 30: ignore_label, 31: 16, 32: 17, 33: 18} 77 | 78 | model.eval() 79 | model.cuda() 80 | 81 | confusion_matrix = np.zeros((num_classes,num_classes)) 82 | palette = get_palette(256) 83 | 84 | if not os.path.exists('outputs'): 85 | os.makedirs('outputs') 86 | 87 | for index, batch in enumerate(loader): 88 | if index % 100 == 0: 89 | print('%d processd'%(index)) 90 | if type == 'val': 91 | image, label, size, name = batch 92 | elif type == 'test': 93 | image, size, name = batch 94 | size = size[0].numpy() 95 | with torch.no_grad(): 96 | image = image.data 97 | output = model(image.cuda()) 98 | if isinstance(output, list): 99 | output = output[0] 100 | interp = nn.Upsample(size=(1024, 2048), mode='bilinear', align_corners=True) 101 | output = interp(output).cpu().data[0].numpy().transpose(1,2,0) 102 | 103 | seg_pred = np.asarray(np.argmax(output, axis=2), dtype=np.uint8) 104 | if type == 'test': seg_pred = id2trainId(seg_pred, id_to_trainid, reverse=True) 105 | 106 | output_im = PILImage.fromarray(seg_pred) 107 | output_im.putpalette(palette) 108 | if os.path.exists(save_path) == False: 109 | os.makedirs(save_path) 110 | output_im.save(save_path+'/'+name[0]+'.png') 111 | 112 | if type == 'val': 113 | seg_gt = np.asarray(label[0].numpy()[:size[0],:size[1]], dtype=np.int) 114 | ignore_index = seg_gt != 255 115 | seg_gt = seg_gt[ignore_index] 116 | seg_pred = seg_pred[ignore_index] 117 | confusion_matrix += get_confusion_matrix(seg_gt, seg_pred, num_classes) 118 | 119 | if type == 'val': 120 | pos = confusion_matrix.sum(1) 121 | res = confusion_matrix.sum(0) 122 | tp = np.diag(confusion_matrix) 123 | IU_array = (tp / np.maximum(1.0, pos + res - tp)) 124 | mean_IU = IU_array.mean() 125 | return mean_IU, IU_array 126 | 127 | -------------------------------------------------------------------------------- /dataset/datasets.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.utils import data 3 | import os.path as osp 4 | import numpy as np 5 | import random 6 | import cv2 7 | 8 | class CSTrainValSet(data.Dataset): 9 | def __init__(self, root, list_path, max_iters=None, crop_size=(321, 321), scale=True, mirror=True, ignore_label=255): 10 | self.root = root 11 | self.list_path = list_path 12 | self.crop_h, self.crop_w = crop_size 13 | self.is_scale = scale 14 | self.is_mirror = mirror 15 | self.ignore_label = ignore_label 16 | self.img_ids = [i_id.strip().split() for i_id in open(list_path)] 17 | if max_iters: 18 | self.img_ids = self.img_ids * int(np.ceil(float(max_iters) / len(self.img_ids))) 19 | self.img_ids = self.img_ids[:max_iters] 20 | self.files = [] 21 | for item in self.img_ids: 22 | image_path, label_path = item 23 | name = osp.splitext(osp.basename(label_path))[0] 24 | img_file = osp.join(self.root, image_path) 25 | label_file = osp.join(self.root, label_path) 26 | self.files.append({ 27 | "img": img_file, 28 | "label": label_file, 29 | "name": name 30 | }) 31 | self.id_to_trainid = {-1: ignore_label, 0: ignore_label, 1: ignore_label, 2: ignore_label, 32 | 3: ignore_label, 4: ignore_label, 5: ignore_label, 6: ignore_label, 33 | 7: 0, 8: 1, 9: ignore_label, 10: ignore_label, 11: 2, 12: 3, 13: 4, 34 | 14: ignore_label, 15: ignore_label, 16: ignore_label, 17: 5, 35 | 18: ignore_label, 19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11, 25: 12, 26: 13, 27: 14, 36 | 28: 15, 29: ignore_label, 30: ignore_label, 31: 16, 32: 17, 33: 18} 37 | print('{} images are loaded!'.format(len(self.img_ids))) 38 | 39 | def __len__(self): 40 | return len(self.files) 41 | 42 | def generate_scale_label(self, image, label): 43 | f_scale = 0.7 + random.randint(0, 14) / 10.0 44 | image = cv2.resize(image, None, fx=f_scale, fy=f_scale, interpolation = cv2.INTER_LINEAR) 45 | label = cv2.resize(label, None, fx=f_scale, fy=f_scale, interpolation = cv2.INTER_NEAREST) 46 | return image, label 47 | 48 | def id2trainId(self, label, reverse=False): 49 | label_copy = label.copy() 50 | if reverse: 51 | for v, k in self.id_to_trainid.items(): 52 | label_copy[label == k] = v 53 | else: 54 | for k, v in self.id_to_trainid.items(): 55 | label_copy[label == k] = v 56 | return label_copy 57 | 58 | def __getitem__(self, index): 59 | datafiles = self.files[index] 60 | image = cv2.imread(datafiles["img"], cv2.IMREAD_COLOR) 61 | label = cv2.imread(datafiles["label"], cv2.IMREAD_GRAYSCALE) 62 | label = self.id2trainId(label) 63 | size = image.shape 64 | name = datafiles["name"] 65 | if self.is_scale: 66 | image, label = self.generate_scale_label(image, label) 67 | image = np.asarray(image, np.float32) 68 | image = image - np.array([104.00698793, 116.66876762, 122.67891434]) 69 | img_h, img_w = label.shape 70 | pad_h = max(self.crop_h - img_h, 0) 71 | pad_w = max(self.crop_w - img_w, 0) 72 | if pad_h > 0 or pad_w > 0: 73 | img_pad = cv2.copyMakeBorder(image, 0, pad_h, 0, 74 | pad_w, cv2.BORDER_CONSTANT, 75 | value=(0.0, 0.0, 0.0)) 76 | label_pad = cv2.copyMakeBorder(label, 0, pad_h, 0, 77 | pad_w, cv2.BORDER_CONSTANT, 78 | value=(self.ignore_label,)) 79 | else: 80 | img_pad, label_pad = image, label 81 | img_h, img_w = label_pad.shape 82 | h_off = random.randint(0, img_h - self.crop_h) 83 | w_off = random.randint(0, img_w - self.crop_w) 84 | image = np.asarray(img_pad[h_off : h_off+self.crop_h, w_off : w_off+self.crop_w], np.float32) 85 | label = np.asarray(label_pad[h_off : h_off+self.crop_h, w_off : w_off+self.crop_w], np.float32) 86 | image = image.transpose((2, 0, 1)) 87 | if self.is_mirror: 88 | flip = np.random.choice(2) * 2 - 1 89 | image = image[:, :, ::flip] 90 | label = label[:, ::flip] 91 | return image.copy(), label.copy(), np.array(size), name 92 | 93 | 94 | class CSTestSet(data.Dataset): 95 | def __init__(self, root, list_path, crop_size=(321, 321)): 96 | self.root = root 97 | self.list_path = list_path 98 | self.crop_h, self.crop_w = crop_size 99 | self.img_ids = [i_id.strip().split() for i_id in open(list_path)] 100 | self.files = [] 101 | for item in self.img_ids: 102 | image_path = item[0] 103 | name = osp.splitext(osp.basename(image_path))[0] 104 | img_file = osp.join(self.root, image_path) 105 | self.files.append({ 106 | "img": img_file 107 | }) 108 | 109 | def __len__(self): 110 | return len(self.files) 111 | 112 | def __getitem__(self, index): 113 | datafiles = self.files[index] 114 | image = cv2.imread(datafiles["img"], cv2.IMREAD_COLOR) 115 | size = image.shape 116 | name = osp.splitext(osp.basename(datafiles["img"]))[0] 117 | image = np.asarray(image, np.float32) 118 | image = image - np.array([104.00698793, 116.66876762, 122.67891434]) 119 | img_h, img_w, _ = image.shape 120 | pad_h = max(self.crop_h - img_h, 0) 121 | pad_w = max(self.crop_w - img_w, 0) 122 | if pad_h > 0 or pad_w > 0: 123 | image = cv2.copyMakeBorder(image, 0, pad_h, 0, 124 | pad_w, cv2.BORDER_CONSTANT, 125 | value=(0.0, 0.0, 0.0)) 126 | image = image.transpose((2, 0, 1)).astype(np.float32) 127 | return image, np.array(size), name 128 | -------------------------------------------------------------------------------- /options.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import torch 3 | import time 4 | import logging 5 | import os 6 | 7 | def str2bool(v): 8 | if v.lower() in ('yes', 'true', 't', 'y', '1'): 9 | return True 10 | elif v.lower() in ('no', 'false', 'f', 'n', '0'): 11 | return False 12 | else: 13 | raise argparse.ArgumentTypeError('Boolean value expected.') 14 | 15 | # dataset 16 | DATASET = 'city' 17 | NUM_CLASSES = 19 18 | DATA_DIRECTORY = '/workdir/cityscapes' 19 | DATA_LIST_TRAIN_PATH = './dataset/list/cityscapes/train.lst' 20 | DATA_LIST_VAL_PATH = './dataset/list/cityscapes/val.lst' 21 | DATA_LIST_TEST_PATH = './dataset/list/cityscapes/test.lst' 22 | IGNORE_LABEL = 255 23 | INPUT_SIZE = '512,512' 24 | 25 | # init params 26 | T_CKPT = './ckpt/teacher_city.pth' 27 | S_CKPT = './ckpt/resnet18-imagenet.pth' 28 | 29 | # training params 30 | BATCH_SIZE = 8 31 | NUM_STEPS = 40000 32 | MOMENTUM = 0.9 33 | POWER = 0.9 34 | LEARNING_RATE = 1e-2 35 | WEIGHT_DECAY = 0.0005 36 | 37 | # save params 38 | SAVE_CKPT_START = 40000 39 | SAVE_CKPT_EVERY = 200 40 | 41 | def log_init(log_dir, name='log'): 42 | time_cur = time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime()) 43 | if os.path.exists(log_dir) == False: 44 | os.makedirs(log_dir) 45 | logging.basicConfig(filename=log_dir + '/' + name + '_' + str(time_cur) + '.log', 46 | format='%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s', 47 | level=logging.DEBUG) 48 | console = logging.StreamHandler() 49 | console.setLevel(logging.INFO) 50 | formatter = logging.Formatter('%(levelname)-8s %(message)s') 51 | console.setFormatter(formatter) 52 | logging.getLogger('').addHandler(console) 53 | 54 | class TrainOptions(): 55 | def initialize(self): 56 | parser = argparse.ArgumentParser(description='train') 57 | # dataset 58 | parser.add_argument('--data_set', type=str, default=DATASET, help='The name of the dataset.') 59 | parser.add_argument('--num_classes', type=int, default=NUM_CLASSES, help='Number of classes to predict.') 60 | parser.add_argument("--data-dir", type=str, default=DATA_DIRECTORY, help="Path to the directory containing the dataset.") 61 | parser.add_argument("--data-list", type=str, default=DATA_LIST_TRAIN_PATH, help="Path to the file listing the images in the dataset.") 62 | parser.add_argument("--data-listval", type=str, default=DATA_LIST_VAL_PATH, help="Path to the file listing the images in the dataset.") 63 | parser.add_argument("--ignore-label", type=int, default=IGNORE_LABEL, help="The index of the label to ignore during the training.") 64 | parser.add_argument("--input-size", type=str, default=INPUT_SIZE, help="Comma-separated string with height and width of images.") 65 | parser.add_argument("--random-mirror", action="store_true", help="Whether to randomly mirror the inputs during the training.") 66 | parser.add_argument("--random-scale", action="store_true", help="Whether to randomly scale the inputs during the training.") 67 | 68 | # init params 69 | parser.add_argument('--T_ckpt_path', type=str, default=T_CKPT, help='teacher ckpt path') 70 | parser.add_argument('--S_resume', type=str2bool, default='False', help='is or not use student ckpt') 71 | parser.add_argument('--S_ckpt_path', type=str, default='', help='student ckpt path') 72 | parser.add_argument('--D_resume', type=str2bool, default='False', help='is or not use discriminator ckpt') 73 | parser.add_argument('--D_ckpt_path', type=str, default='', help='discriminator ckpt path') 74 | parser.add_argument("--is-student-load-imgnet", type=str2bool, default='True', help="is student load imgnet") 75 | parser.add_argument("--student-pretrain-model-imgnet", type=str, default=S_CKPT, help="student pretrain model on imgnet") 76 | 77 | # training params 78 | parser.add_argument("--gpu", type=str, default='None', help="Choose gpu device.") 79 | parser.add_argument("--batch-size", type=int, default=BATCH_SIZE, help="Number of images sent to the network in one step.") 80 | parser.add_argument("--num-steps", type=int, default=NUM_STEPS, help="Number of training steps.") 81 | parser.add_argument("--momentum", type=float, default=MOMENTUM, help="Momentum component of the optimiser.") 82 | parser.add_argument("--power", type=float, default=POWER, help="Decay parameter to compute the learning rate.") 83 | parser.add_argument("--lr-g", type=float, default=LEARNING_RATE, help="learning rate for G") 84 | parser.add_argument("--weight-decay", type=float, default=WEIGHT_DECAY, help="Regularisation parameter for L2-loss.") 85 | parser.add_argument("--last-step", type=int, default=0, help="last train step.") 86 | 87 | # distiller 88 | parser.add_argument("--kd", type=str2bool, default='True') 89 | parser.add_argument("--lambda-kd", type=float, default=10.0, help="lambda_kd") 90 | 91 | parser.add_argument("--adv", type=str2bool, default='True') 92 | parser.add_argument("--lambda-adv", type=float, default=0.001, help="lambda_adv") 93 | parser.add_argument("--preprocess-GAN-mode", type=int, default=1, help="preprocess-GAN-mode should be tanh or bn") 94 | parser.add_argument("--adv-loss-type", type=str, default='wgan-gp', help="adversarial loss setting") 95 | parser.add_argument("--imsize-for-adv", type=int, default=65, help="imsize for addv") 96 | parser.add_argument("--adv-conv-dim", type=int, default=64, help="conv dim in adv") 97 | parser.add_argument("--lambda-gp", type=float, default=10.0, help="lambda_gp") 98 | parser.add_argument("--lambda-d", type=float, default=0.1, help="lambda_d") 99 | parser.add_argument("--lr-d", type=float, default=4e-4, help="learning rate for D") 100 | 101 | parser.add_argument("--ifv", type=str2bool, default='True') 102 | parser.add_argument('--lambda-ifv', type=float, default=200.0, help='lambda_ifv') 103 | 104 | # save params 105 | parser.add_argument("--save-name", type=str, default='exp') 106 | parser.add_argument("--save-dir", type=str, default='ckpt', help="Where to save models.") 107 | parser.add_argument("--save-ckpt-start", type=int, default=SAVE_CKPT_START) 108 | parser.add_argument("--save-ckpt-every", type=int, default=SAVE_CKPT_EVERY) 109 | 110 | args = parser.parse_args() 111 | 112 | if not os.path.exists(args.save_dir): 113 | os.makedirs(args.save_dir) 114 | args.save_path = args.save_dir + '/' + args.save_name 115 | log_init(args.save_path, args.data_set) 116 | 117 | for key, val in args._get_kwargs(): 118 | logging.info(key+' : '+str(val)) 119 | 120 | return args 121 | 122 | class ValOptions(): 123 | def initialize(self): 124 | parser = argparse.ArgumentParser(description='Val') 125 | parser.add_argument("--data-dir", type=str, default=DATA_DIRECTORY) 126 | parser.add_argument("--val-data-list", type=str, default=DATA_LIST_VAL_PATH) 127 | parser.add_argument("--test-data-list", type=str, default=DATA_LIST_TEST_PATH) 128 | parser.add_argument('--num_classes', type=int, default=NUM_CLASSES) 129 | parser.add_argument("--restore-from", type=str, default='') 130 | parser.add_argument("--gpu", type=str, default='None') 131 | parser.add_argument("--figsavepath", type=str, default='outputs', help="") 132 | parser.add_argument("--type", type=str, default='val', help="") 133 | args = parser.parse_args() 134 | 135 | for key, val in args._get_kwargs(): 136 | print(key+' : '+str(val)) 137 | return args 138 | 139 | -------------------------------------------------------------------------------- /libs/src/lib_cffi.cpp: -------------------------------------------------------------------------------- 1 | // All functions assume that input and output tensors are already initialized 2 | // and have the correct dimensions 3 | #include 4 | 5 | // Forward definition of implementation functions 6 | extern "C" { 7 | int _bn_mean_var_cuda(int N, int C, int S, const float *x, float *mean, float *var, cudaStream_t); 8 | int _bn_forward_cuda(int N, int C, int S, const float *x, const float *mean, const float *var, const float *weight, 9 | const float *bias, float *y, float *z, float eps, cudaStream_t); 10 | int _bn_edz_eydz_cuda(int N, int C, int S, const float *z, const float *dz, const float *weight, const float *bias, 11 | float *edz, float *eydz, float eps, cudaStream_t stream); 12 | int _bn_backward_cuda(int N, int C, int S, const float *dz, const float *z, const float *var, const float *weight, 13 | const float *bias, const float *edz, const float *eydz, float *dx, float *dweight, float *dbias, 14 | float eps, cudaStream_t stream); 15 | int _leaky_relu_cuda(int N, float *x, float slope, cudaStream_t stream); 16 | int _leaky_relu_backward_cuda(int N, const float *x, float *dx, float slope, cudaStream_t stream); 17 | int _elu_cuda(int N, float *x, cudaStream_t stream); 18 | int _elu_backward_cuda(int N, const float *x, float *dx, cudaStream_t stream); 19 | int _elu_inv_cuda(int N, float *x, cudaStream_t stream); 20 | } 21 | 22 | extern THCState *state; 23 | 24 | void get_sizes(const THCudaTensor *t, int *N, int *C, int *S){ 25 | // Get sizes 26 | *S = 1; 27 | *N = THCudaTensor_size(state, t, 0); 28 | *C = THCudaTensor_size(state, t, 1); 29 | if (THCudaTensor_nDimension(state, t) > 2) { 30 | for (int i = 2; i < THCudaTensor_nDimension(state, t); ++i) { 31 | *S *= THCudaTensor_size(state, t, i); 32 | } 33 | } 34 | } 35 | 36 | extern "C" int bn_mean_var_cuda(const THCudaTensor *x, THCudaTensor *mean, THCudaTensor *var) { 37 | cudaStream_t stream = THCState_getCurrentStream(state); 38 | 39 | int S, N, C; 40 | get_sizes(x, &N, &C, &S); 41 | 42 | // Get pointers 43 | const float *x_data = THCudaTensor_data(state, x); 44 | float *mean_data = THCudaTensor_data(state, mean); 45 | float *var_data = THCudaTensor_data(state, var); 46 | 47 | return _bn_mean_var_cuda(N, C, S, x_data, mean_data, var_data, stream); 48 | } 49 | 50 | extern "C" int bn_forward_cuda(const THCudaTensor *x, const THCudaTensor *mean, const THCudaTensor *var, 51 | const THCudaTensor *weight, const THCudaTensor *bias, THCudaTensor *y, THCudaTensor *z, 52 | float eps) { 53 | cudaStream_t stream = THCState_getCurrentStream(state); 54 | 55 | int S, N, C; 56 | get_sizes(x, &N, &C, &S); 57 | 58 | // Get pointers 59 | const float *x_data = THCudaTensor_data(state, x); 60 | const float *mean_data = THCudaTensor_data(state, mean); 61 | const float *var_data = THCudaTensor_data(state, var); 62 | const float *weight_data = THCudaTensor_nDimension(state, weight) != 0 ? THCudaTensor_data(state, weight) : 0; 63 | const float *bias_data = THCudaTensor_nDimension(state, bias) != 0 ? THCudaTensor_data(state, bias) : 0; 64 | float *y_data = THCudaTensor_data(state, y); 65 | float *z_data = THCudaTensor_data(state, z); 66 | 67 | return _bn_forward_cuda(N, C, S, x_data, mean_data, var_data, weight_data, bias_data, y_data, z_data, eps, stream); 68 | } 69 | 70 | extern "C" int bn_edz_eydz_cuda(const THCudaTensor *z, const THCudaTensor *dz, const THCudaTensor *weight, 71 | const THCudaTensor *bias, THCudaTensor *edz, THCudaTensor *eydz, float eps) { 72 | cudaStream_t stream = THCState_getCurrentStream(state); 73 | 74 | int S, N, C; 75 | get_sizes(z, &N, &C, &S); 76 | 77 | // Get pointers 78 | const float *z_data = THCudaTensor_data(state, z); 79 | const float *dz_data = THCudaTensor_data(state, dz); 80 | const float *weight_data = THCudaTensor_nDimension(state, weight) != 0 ? THCudaTensor_data(state, weight) : 0; 81 | const float *bias_data = THCudaTensor_nDimension(state, bias) != 0 ? THCudaTensor_data(state, bias) : 0; 82 | float *edz_data = THCudaTensor_data(state, edz); 83 | float *eydz_data = THCudaTensor_data(state, eydz); 84 | 85 | return _bn_edz_eydz_cuda(N, C, S, z_data, dz_data, weight_data, bias_data, edz_data, eydz_data, eps, stream); 86 | } 87 | 88 | extern "C" int bn_backard_cuda(const THCudaTensor *dz, const THCudaTensor *z, const THCudaTensor *var, 89 | const THCudaTensor *weight, const THCudaTensor *bias, const THCudaTensor *edz, 90 | const THCudaTensor *eydz, THCudaTensor *dx, THCudaTensor *dweight, 91 | THCudaTensor *dbias, float eps) { 92 | cudaStream_t stream = THCState_getCurrentStream(state); 93 | 94 | int S, N, C; 95 | get_sizes(dz, &N, &C, &S); 96 | 97 | // Get pointers 98 | const float *dz_data = THCudaTensor_data(state, dz); 99 | const float *z_data = THCudaTensor_data(state, z); 100 | const float *var_data = THCudaTensor_data(state, var); 101 | const float *weight_data = THCudaTensor_nDimension(state, weight) != 0 ? THCudaTensor_data(state, weight) : 0; 102 | const float *bias_data = THCudaTensor_nDimension(state, bias) != 0 ? THCudaTensor_data(state, bias) : 0; 103 | const float *edz_data = THCudaTensor_data(state, edz); 104 | const float *eydz_data = THCudaTensor_data(state, eydz); 105 | float *dx_data = THCudaTensor_nDimension(state, dx) != 0 ? THCudaTensor_data(state, dx) : 0; 106 | float *dweight_data = THCudaTensor_nDimension(state, dweight) != 0 ? THCudaTensor_data(state, dweight) : 0; 107 | float *dbias_data = THCudaTensor_nDimension(state, dbias) != 0 ? THCudaTensor_data(state, dbias) : 0; 108 | 109 | return _bn_backward_cuda(N, C, S, dz_data, z_data, var_data, weight_data, bias_data, edz_data, eydz_data, dx_data, 110 | dweight_data, dbias_data, eps, stream); 111 | } 112 | 113 | extern "C" int leaky_relu_cuda(THCudaTensor *x, float slope) { 114 | cudaStream_t stream = THCState_getCurrentStream(state); 115 | 116 | int N = THCudaTensor_nElement(state, x); 117 | 118 | // Get pointers 119 | float *x_data = THCudaTensor_data(state, x); 120 | 121 | return _leaky_relu_cuda(N, x_data, slope, stream); 122 | } 123 | 124 | extern "C" int leaky_relu_backward_cuda(const THCudaTensor *x, THCudaTensor *dx, float slope) { 125 | cudaStream_t stream = THCState_getCurrentStream(state); 126 | 127 | int N = THCudaTensor_nElement(state, x); 128 | 129 | // Get pointers 130 | const float *x_data = THCudaTensor_data(state, x); 131 | float *dx_data = THCudaTensor_data(state, dx); 132 | 133 | return _leaky_relu_backward_cuda(N, x_data, dx_data, slope, stream); 134 | } 135 | 136 | extern "C" int elu_cuda(THCudaTensor *x) { 137 | cudaStream_t stream = THCState_getCurrentStream(state); 138 | 139 | int N = THCudaTensor_nElement(state, x); 140 | 141 | // Get pointers 142 | float *x_data = THCudaTensor_data(state, x); 143 | 144 | return _elu_cuda(N, x_data, stream); 145 | } 146 | 147 | extern "C" int elu_backward_cuda(const THCudaTensor *x, THCudaTensor *dx) { 148 | cudaStream_t stream = THCState_getCurrentStream(state); 149 | 150 | int N = THCudaTensor_nElement(state, x); 151 | 152 | // Get pointers 153 | const float *x_data = THCudaTensor_data(state, x); 154 | float *dx_data = THCudaTensor_data(state, dx); 155 | 156 | return _elu_backward_cuda(N, x_data, dx_data, stream); 157 | } 158 | 159 | extern "C" int elu_inv_cuda(THCudaTensor *x) { 160 | cudaStream_t stream = THCState_getCurrentStream(state); 161 | 162 | int N = THCudaTensor_nElement(state, x); 163 | 164 | // Get pointers 165 | float *x_data = THCudaTensor_data(state, x); 166 | 167 | return _elu_inv_cuda(N, x_data, stream); 168 | } 169 | -------------------------------------------------------------------------------- /networks/pspnet.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torch.nn import functional as F 4 | affine_par = True 5 | import functools 6 | # from inplace_abn import InPlaceABN, InPlaceABNSync 7 | from libs import InPlaceABN, InPlaceABNSync 8 | # BatchNorm2d = functools.partial(InPlaceABN, activation='identity') 9 | BatchNorm2d = functools.partial(InPlaceABNSync, activation='none') 10 | 11 | def conv3x3(in_planes, out_planes, stride=1): 12 | "3x3 convolution with padding" 13 | return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, 14 | padding=1, bias=False) 15 | 16 | class BasicBlock(nn.Module): 17 | expansion = 1 18 | 19 | def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, multi_grid=1): 20 | super(BasicBlock, self).__init__() 21 | dilation = dilation*multi_grid 22 | self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, bias=False) 23 | self.bn1 = BatchNorm2d(planes) 24 | self.relu = nn.ReLU(inplace=False) 25 | self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=dilation, dilation=dilation, bias=False) 26 | self.bn2 = BatchNorm2d(planes) 27 | self.relu_inplace = nn.ReLU(inplace=True) 28 | self.downsample = downsample 29 | 30 | def forward(self, x): 31 | residual = x 32 | 33 | out = self.relu(self.bn1(self.conv1(x))) 34 | out = self.bn2(self.conv2(out)) 35 | 36 | if self.downsample: 37 | residual = self.downsample(x) 38 | 39 | out = out + residual 40 | out = self.relu_inplace(out) 41 | 42 | return out 43 | 44 | class Bottleneck(nn.Module): 45 | expansion = 4 46 | def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, fist_dilation=1, multi_grid=1): 47 | super(Bottleneck, self).__init__() 48 | self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) 49 | self.bn1 = BatchNorm2d(planes) 50 | self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, 51 | padding=dilation*multi_grid, dilation=dilation*multi_grid, bias=False) 52 | self.bn2 = BatchNorm2d(planes) 53 | self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) 54 | self.bn3 = BatchNorm2d(planes * 4) 55 | self.relu = nn.ReLU(inplace=False) 56 | self.relu_inplace = nn.ReLU(inplace=True) 57 | self.downsample = downsample 58 | self.dilation = dilation 59 | self.stride = stride 60 | 61 | def forward(self, x): 62 | residual = x 63 | 64 | out = self.conv1(x) 65 | out = self.bn1(out) 66 | out = self.relu(out) 67 | 68 | out = self.conv2(out) 69 | out = self.bn2(out) 70 | out = self.relu(out) 71 | 72 | out = self.conv3(out) 73 | out = self.bn3(out) 74 | 75 | if self.downsample is not None: 76 | residual = self.downsample(x) 77 | 78 | out = out + residual 79 | out = self.relu_inplace(out) 80 | 81 | return out 82 | 83 | class PSPModule(nn.Module): 84 | """ 85 | Reference: 86 | Zhao, Hengshuang, et al. *"Pyramid scene parsing network."* 87 | """ 88 | def __init__(self, features, out_features=512, sizes=(1, 2, 3, 6)): 89 | super(PSPModule, self).__init__() 90 | 91 | self.stages = [] 92 | self.stages = nn.ModuleList([self._make_stage(features, out_features, size) for size in sizes]) 93 | self.bottleneck = nn.Sequential( 94 | nn.Conv2d(features+len(sizes)*out_features, out_features, kernel_size=3, padding=1, dilation=1, bias=False), 95 | InPlaceABNSync(out_features), 96 | nn.Dropout2d(0.1) 97 | ) 98 | 99 | def _make_stage(self, features, out_features, size): 100 | prior = nn.AdaptiveAvgPool2d(output_size=(size, size)) 101 | conv = nn.Conv2d(features, out_features, kernel_size=1, bias=False) 102 | bn = InPlaceABNSync(out_features) 103 | return nn.Sequential(prior, conv, bn) 104 | 105 | def forward(self, feats): 106 | h, w = feats.size(2), feats.size(3) 107 | priors = [F.upsample(input=stage(feats), size=(h, w), mode='bilinear', align_corners=True) for stage in self.stages] + [feats] 108 | bottle = self.bottleneck(torch.cat(priors, 1)) 109 | return bottle 110 | 111 | class ResNet(nn.Module): 112 | def __init__(self, block, layers, num_classes): 113 | self.inplanes = 128 114 | super(ResNet, self).__init__() 115 | self.conv1 = conv3x3(3, 64, stride=2) 116 | self.bn1 = BatchNorm2d(64) 117 | self.relu1 = nn.ReLU(inplace=False) 118 | self.conv2 = conv3x3(64, 64) 119 | self.bn2 = BatchNorm2d(64) 120 | self.relu2 = nn.ReLU(inplace=False) 121 | self.conv3 = conv3x3(64, 128) 122 | self.bn3 = BatchNorm2d(128) 123 | self.relu3 = nn.ReLU(inplace=False) 124 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) 125 | 126 | self.relu = nn.ReLU(inplace=False) 127 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1, ceil_mode=True) # change 128 | self.layer1 = self._make_layer(block, 64, layers[0]) 129 | self.layer2 = self._make_layer(block, 128, layers[1], stride=2) 130 | self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2) 131 | self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4, multi_grid=(1,1,1)) 132 | 133 | if layers == [3, 4, 23, 3]: 134 | self.pspmodule = PSPModule(2048, 512) 135 | self.head = nn.Conv2d(512, num_classes, kernel_size=1, stride=1, padding=0, bias=True) 136 | 137 | self.dsn = nn.Sequential( 138 | nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), 139 | InPlaceABNSync(512), 140 | nn.Dropout2d(0.1), 141 | nn.Conv2d(512, num_classes, kernel_size=1, stride=1, padding=0, bias=True) 142 | ) 143 | elif layers == [2, 2, 2, 2]: 144 | self.pspmodule = PSPModule(512, 128) 145 | self.head = nn.Conv2d(128, num_classes, kernel_size=1, stride=1, padding=0, bias=True) 146 | 147 | self.dsn = nn.Sequential( 148 | nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), 149 | InPlaceABNSync(128), 150 | nn.Dropout2d(0.1), 151 | nn.Conv2d(128, num_classes, kernel_size=1, stride=1, padding=0, bias=True) 152 | ) 153 | else: 154 | raise ValueError('layers should be [3, 4, 23, 3] or [2, 2, 2, 2]') 155 | 156 | def _make_layer(self, block, planes, blocks, stride=1, dilation=1, multi_grid=1): 157 | downsample = None 158 | if stride != 1 or self.inplanes != planes * block.expansion: 159 | downsample = nn.Sequential( 160 | nn.Conv2d(self.inplanes, planes * block.expansion, 161 | kernel_size=1, stride=stride, bias=False), 162 | BatchNorm2d(planes * block.expansion,affine = affine_par)) 163 | 164 | layers = [] 165 | generate_multi_grid = lambda index, grids: grids[index%len(grids)] if isinstance(grids, tuple) else 1 166 | layers.append(block(self.inplanes, planes, stride,dilation=dilation, downsample=downsample, multi_grid=generate_multi_grid(0, multi_grid))) 167 | self.inplanes = planes * block.expansion 168 | for i in range(1, blocks): 169 | layers.append(block(self.inplanes, planes, dilation=dilation, multi_grid=generate_multi_grid(i, multi_grid))) 170 | 171 | return nn.Sequential(*layers) 172 | 173 | def forward(self, x): 174 | x = self.relu1(self.bn1(self.conv1(x))) 175 | x = self.relu2(self.bn2(self.conv2(x))) 176 | x = self.relu3(self.bn3(self.conv3(x))) 177 | x = self.maxpool(x) 178 | x = self.layer1(x) 179 | x = self.layer2(x) 180 | x = self.layer3(x) 181 | x_dsn = self.dsn(x) 182 | x = self.layer4(x) 183 | x_feat_after_psp = self.pspmodule(x) 184 | x = self.head(x_feat_after_psp) 185 | return [x, x_dsn, x_feat_after_psp] 186 | 187 | def Res_pspnet(block = Bottleneck, layers = [3, 4, 23, 3], num_classes=21): 188 | ''' 189 | ResNet(Bottleneck, [3, 4, 23, 3], num_classes) 190 | ResNet(BasicBlock, [2, 2, 2, 2], num_classes) 191 | ''' 192 | model = ResNet(block, layers, num_classes) 193 | return model 194 | -------------------------------------------------------------------------------- /libs/bn.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict, Iterable 2 | from itertools import repeat 3 | 4 | try: 5 | # python 3 6 | from queue import Queue 7 | except ImportError: 8 | # python 2 9 | from Queue import Queue 10 | 11 | import torch 12 | import torch.nn as nn 13 | import torch.autograd as autograd 14 | 15 | from .functions import inplace_abn, inplace_abn_sync 16 | 17 | 18 | def _pair(x): 19 | if isinstance(x, Iterable): 20 | return x 21 | return tuple(repeat(x, 2)) 22 | 23 | 24 | class ABN(nn.Sequential): 25 | """Activated Batch Normalization 26 | 27 | This gathers a `BatchNorm2d` and an activation function in a single module 28 | """ 29 | 30 | def __init__(self, num_features, activation=nn.ReLU(inplace=True), **kwargs): 31 | """Creates an Activated Batch Normalization module 32 | 33 | Parameters 34 | ---------- 35 | num_features : int 36 | Number of feature channels in the input and output. 37 | activation : nn.Module 38 | Module used as an activation function. 39 | kwargs 40 | All other arguments are forwarded to the `BatchNorm2d` constructor. 41 | """ 42 | super(ABN, self).__init__(OrderedDict([ 43 | ("bn", nn.BatchNorm2d(num_features, **kwargs)), 44 | ("act", activation) 45 | ])) 46 | 47 | 48 | class InPlaceABN(nn.Module): 49 | """InPlace Activated Batch Normalization""" 50 | 51 | def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, activation="leaky_relu", slope=0.01): 52 | """Creates an InPlace Activated Batch Normalization module 53 | 54 | Parameters 55 | ---------- 56 | num_features : int 57 | Number of feature channels in the input and output. 58 | eps : float 59 | Small constant to prevent numerical issues. 60 | momentum : float 61 | Momentum factor applied to compute running statistics as. 62 | affine : bool 63 | If `True` apply learned scale and shift transformation after normalization. 64 | activation : str 65 | Name of the activation functions, one of: `leaky_relu`, `elu` or `none`. 66 | slope : float 67 | Negative slope for the `leaky_relu` activation. 68 | """ 69 | super(InPlaceABN, self).__init__() 70 | self.num_features = num_features 71 | self.affine = affine 72 | self.eps = eps 73 | self.momentum = momentum 74 | self.activation = activation 75 | self.slope = slope 76 | if self.affine: 77 | self.weight = nn.Parameter(torch.Tensor(num_features)) 78 | self.bias = nn.Parameter(torch.Tensor(num_features)) 79 | else: 80 | self.register_parameter('weight', None) 81 | self.register_parameter('bias', None) 82 | self.register_buffer('running_mean', torch.zeros(num_features)) 83 | self.register_buffer('running_var', torch.ones(num_features)) 84 | self.reset_parameters() 85 | 86 | def reset_parameters(self): 87 | self.running_mean.zero_() 88 | self.running_var.fill_(1) 89 | if self.affine: 90 | self.weight.data.fill_(1) 91 | self.bias.data.zero_() 92 | 93 | def forward(self, x): 94 | return inplace_abn(x, self.weight, self.bias, autograd.Variable(self.running_mean), 95 | autograd.Variable(self.running_var), self.training, self.momentum, self.eps, 96 | self.activation, self.slope) 97 | 98 | def __repr__(self): 99 | rep = '{name}({num_features}, eps={eps}, momentum={momentum},' \ 100 | ' affine={affine}, activation={activation}' 101 | if self.activation == "leaky_relu": 102 | rep += ' slope={slope})' 103 | else: 104 | rep += ')' 105 | return rep.format(name=self.__class__.__name__, **self.__dict__) 106 | 107 | 108 | class InPlaceABNSync(nn.Module): 109 | """InPlace Activated Batch Normalization with cross-GPU synchronization 110 | 111 | This assumes that it will be replicated across GPUs using the same mechanism as in `nn.DataParallel`. 112 | """ 113 | 114 | def __init__(self, num_features, devices=None, eps=1e-5, momentum=0.1, affine=True, activation="leaky_relu", 115 | slope=0.01): 116 | """Creates a synchronized, InPlace Activated Batch Normalization module 117 | 118 | Parameters 119 | ---------- 120 | num_features : int 121 | Number of feature channels in the input and output. 122 | devices : list of int or None 123 | IDs of the GPUs that will run the replicas of this module. 124 | eps : float 125 | Small constant to prevent numerical issues. 126 | momentum : float 127 | Momentum factor applied to compute running statistics as. 128 | affine : bool 129 | If `True` apply learned scale and shift transformation after normalization. 130 | activation : str 131 | Name of the activation functions, one of: `leaky_relu`, `elu` or `none`. 132 | slope : float 133 | Negative slope for the `leaky_relu` activation. 134 | """ 135 | super(InPlaceABNSync, self).__init__() 136 | self.num_features = num_features 137 | self.devices = devices if devices else list(range(torch.cuda.device_count())) 138 | self.affine = affine 139 | self.eps = eps 140 | self.momentum = momentum 141 | self.activation = activation 142 | self.slope = slope 143 | if self.affine: 144 | self.weight = nn.Parameter(torch.Tensor(num_features)) 145 | self.bias = nn.Parameter(torch.Tensor(num_features)) 146 | else: 147 | self.register_parameter('weight', None) 148 | self.register_parameter('bias', None) 149 | self.register_buffer('running_mean', torch.zeros(num_features)) 150 | self.register_buffer('running_var', torch.ones(num_features)) 151 | self.reset_parameters() 152 | 153 | # Initialize queues 154 | self.worker_ids = self.devices[1:] 155 | self.master_queue = Queue(len(self.worker_ids)) 156 | self.worker_queues = [Queue(1) for _ in self.worker_ids] 157 | 158 | def reset_parameters(self): 159 | self.running_mean.zero_() 160 | self.running_var.fill_(1) 161 | if self.affine: 162 | self.weight.data.fill_(1) 163 | self.bias.data.zero_() 164 | 165 | def forward(self, x): 166 | if x.get_device() == self.devices[0]: 167 | # Master mode 168 | extra = { 169 | "is_master": True, 170 | "master_queue": self.master_queue, 171 | "worker_queues": self.worker_queues, 172 | "worker_ids": self.worker_ids 173 | } 174 | else: 175 | # Worker mode 176 | extra = { 177 | "is_master": False, 178 | "master_queue": self.master_queue, 179 | "worker_queue": self.worker_queues[self.worker_ids.index(x.get_device())] 180 | } 181 | 182 | return inplace_abn_sync(x, self.weight, self.bias, autograd.Variable(self.running_mean), 183 | autograd.Variable(self.running_var), extra, self.training, self.momentum, self.eps, 184 | self.activation, self.slope) 185 | 186 | def __repr__(self): 187 | rep = '{name}({num_features}, eps={eps}, momentum={momentum},' \ 188 | ' affine={affine}, devices={devices}, activation={activation}' 189 | if self.activation == "leaky_relu": 190 | rep += ' slope={slope})' 191 | else: 192 | rep += ')' 193 | return rep.format(name=self.__class__.__name__, **self.__dict__) 194 | 195 | 196 | class InPlaceABNWrapper(nn.Module): 197 | """Wrapper module to make `InPlaceABN` compatible with `ABN`""" 198 | 199 | def __init__(self, *args, **kwargs): 200 | super(InPlaceABNWrapper, self).__init__() 201 | self.bn = InPlaceABN(*args, **kwargs) 202 | 203 | def forward(self, input): 204 | return self.bn(input) 205 | 206 | 207 | class InPlaceABNSyncWrapper(nn.Module): 208 | """Wrapper module to make `InPlaceABNSync` compatible with `ABN`""" 209 | 210 | def __init__(self, *args, **kwargs): 211 | super(InPlaceABNSyncWrapper, self).__init__() 212 | self.bn = InPlaceABNSync(*args, **kwargs) 213 | 214 | def forward(self, input): 215 | return self.bn(input) 216 | -------------------------------------------------------------------------------- /libs/functions.py: -------------------------------------------------------------------------------- 1 | import torch.autograd as autograd 2 | import torch.cuda.comm as comm 3 | from torch.autograd.function import once_differentiable 4 | 5 | from . import _ext 6 | 7 | # Activation names 8 | ACT_LEAKY_RELU = "leaky_relu" 9 | ACT_ELU = "elu" 10 | ACT_NONE = "none" 11 | 12 | 13 | def _check(fn, *args, **kwargs): 14 | success = fn(*args, **kwargs) 15 | if not success: 16 | raise RuntimeError("CUDA Error encountered in {}".format(fn)) 17 | 18 | 19 | def _broadcast_shape(x): 20 | out_size = [] 21 | for i, s in enumerate(x.size()): 22 | if i != 1: 23 | out_size.append(1) 24 | else: 25 | out_size.append(s) 26 | return out_size 27 | 28 | 29 | def _reduce(x): 30 | if len(x.size()) == 2: 31 | return x.sum(dim=0) 32 | else: 33 | n, c = x.size()[0:2] 34 | return x.contiguous().view((n, c, -1)).sum(2).sum(0) 35 | 36 | 37 | def _count_samples(x): 38 | count = 1 39 | for i, s in enumerate(x.size()): 40 | if i != 1: 41 | count *= s 42 | return count 43 | 44 | 45 | def _act_forward(ctx, x): 46 | if ctx.activation == ACT_LEAKY_RELU: 47 | _check(_ext.leaky_relu_cuda, x, ctx.slope) 48 | elif ctx.activation == ACT_ELU: 49 | _check(_ext.elu_cuda, x) 50 | elif ctx.activation == ACT_NONE: 51 | pass 52 | 53 | 54 | def _act_backward(ctx, x, dx): 55 | if ctx.activation == ACT_LEAKY_RELU: 56 | _check(_ext.leaky_relu_backward_cuda, x, dx, ctx.slope) 57 | _check(_ext.leaky_relu_cuda, x, 1. / ctx.slope) 58 | elif ctx.activation == ACT_ELU: 59 | _check(_ext.elu_backward_cuda, x, dx) 60 | _check(_ext.elu_inv_cuda, x) 61 | elif ctx.activation == ACT_NONE: 62 | pass 63 | 64 | 65 | def _check_contiguous(*args): 66 | if not all([mod is None or mod.is_contiguous() for mod in args]): 67 | raise ValueError("Non-contiguous input") 68 | 69 | 70 | class InPlaceABN(autograd.Function): 71 | @staticmethod 72 | def forward(ctx, x, weight, bias, running_mean, running_var, 73 | training=True, momentum=0.1, eps=1e-05, activation=ACT_LEAKY_RELU, slope=0.01): 74 | # Save context 75 | ctx.training = training 76 | ctx.momentum = momentum 77 | ctx.eps = eps 78 | ctx.activation = activation 79 | ctx.slope = slope 80 | 81 | n = _count_samples(x) 82 | 83 | if ctx.training: 84 | mean = x.new().resize_as_(running_mean) 85 | var = x.new().resize_as_(running_var) 86 | _check_contiguous(x, mean, var) 87 | _check(_ext.bn_mean_var_cuda, x, mean, var) 88 | 89 | # Update running stats 90 | running_mean.mul_((1 - ctx.momentum)).add_(ctx.momentum * mean) 91 | running_var.mul_((1 - ctx.momentum)).add_(ctx.momentum * var * n / (n - 1)) 92 | else: 93 | mean, var = running_mean, running_var 94 | 95 | _check_contiguous(x, mean, var, weight, bias) 96 | _check(_ext.bn_forward_cuda, 97 | x, mean, var, 98 | weight if weight is not None else x.new(), 99 | bias if bias is not None else x.new(), 100 | x, x, ctx.eps) 101 | 102 | # Activation 103 | _act_forward(ctx, x) 104 | 105 | # Output 106 | ctx.var = var 107 | ctx.save_for_backward(x, weight, bias, running_mean, running_var) 108 | ctx.mark_dirty(x) 109 | return x 110 | 111 | @staticmethod 112 | @once_differentiable 113 | def backward(ctx, dz): 114 | z, weight, bias, running_mean, running_var = ctx.saved_tensors 115 | dz = dz.contiguous() 116 | 117 | # Undo activation 118 | _act_backward(ctx, z, dz) 119 | 120 | if ctx.needs_input_grad[0]: 121 | dx = dz.new().resize_as_(dz) 122 | else: 123 | dx = None 124 | 125 | if ctx.needs_input_grad[1]: 126 | dweight = dz.new().resize_as_(running_mean).zero_() 127 | else: 128 | dweight = None 129 | 130 | if ctx.needs_input_grad[2]: 131 | dbias = dz.new().resize_as_(running_mean).zero_() 132 | else: 133 | dbias = None 134 | 135 | if ctx.training: 136 | edz = dz.new().resize_as_(running_mean) 137 | eydz = dz.new().resize_as_(running_mean) 138 | _check_contiguous(z, dz, weight, bias, edz, eydz) 139 | _check(_ext.bn_edz_eydz_cuda, 140 | z, dz, 141 | weight if weight is not None else dz.new(), 142 | bias if bias is not None else dz.new(), 143 | edz, eydz, ctx.eps) 144 | else: 145 | # TODO: implement CUDA backward for inference mode 146 | edz = dz.new().resize_as_(running_mean).zero_() 147 | eydz = dz.new().resize_as_(running_mean).zero_() 148 | 149 | _check_contiguous(dz, z, ctx.var, weight, bias, edz, eydz, dx, dweight, dbias) 150 | _check(_ext.bn_backard_cuda, 151 | dz, z, ctx.var, 152 | weight if weight is not None else dz.new(), 153 | bias if bias is not None else dz.new(), 154 | edz, eydz, 155 | dx if dx is not None else dz.new(), 156 | dweight if dweight is not None else dz.new(), 157 | dbias if dbias is not None else dz.new(), 158 | ctx.eps) 159 | 160 | del ctx.var 161 | 162 | return dx, dweight, dbias, None, None, None, None, None, None, None 163 | 164 | 165 | class InPlaceABNSync(autograd.Function): 166 | @classmethod 167 | def forward(cls, ctx, x, weight, bias, running_mean, running_var, 168 | extra, training=True, momentum=0.1, eps=1e-05, activation=ACT_LEAKY_RELU, slope=0.01): 169 | # Save context 170 | cls._parse_extra(ctx, extra) 171 | ctx.training = training 172 | ctx.momentum = momentum 173 | ctx.eps = eps 174 | ctx.activation = activation 175 | ctx.slope = slope 176 | 177 | n = _count_samples(x) * (ctx.master_queue.maxsize + 1) 178 | 179 | if ctx.training: 180 | mean = x.new().resize_(1, running_mean.size(0)) 181 | var = x.new().resize_(1, running_var.size(0)) 182 | _check_contiguous(x, mean, var) 183 | _check(_ext.bn_mean_var_cuda, x, mean, var) 184 | 185 | if ctx.is_master: 186 | means, vars = [mean], [var] 187 | for _ in range(ctx.master_queue.maxsize): 188 | mean_w, var_w = ctx.master_queue.get() 189 | ctx.master_queue.task_done() 190 | means.append(mean_w) 191 | vars.append(var_w) 192 | 193 | means = comm.gather(means) 194 | vars = comm.gather(vars) 195 | 196 | mean = means.mean(0) 197 | var = (vars + (mean - means) ** 2).mean(0) 198 | 199 | tensors = comm.broadcast_coalesced((mean, var), [mean.get_device()] + ctx.worker_ids) 200 | for ts, queue in zip(tensors[1:], ctx.worker_queues): 201 | queue.put(ts) 202 | else: 203 | ctx.master_queue.put((mean, var)) 204 | mean, var = ctx.worker_queue.get() 205 | ctx.worker_queue.task_done() 206 | 207 | # Update running stats 208 | running_mean.mul_((1 - ctx.momentum)).add_(ctx.momentum * mean) 209 | running_var.mul_((1 - ctx.momentum)).add_(ctx.momentum * var * n / (n - 1)) 210 | else: 211 | mean, var = running_mean, running_var 212 | 213 | _check_contiguous(x, mean, var, weight, bias) 214 | _check(_ext.bn_forward_cuda, 215 | x, mean, var, 216 | weight if weight is not None else x.new(), 217 | bias if bias is not None else x.new(), 218 | x, x, ctx.eps) 219 | 220 | # Activation 221 | _act_forward(ctx, x) 222 | 223 | # Output 224 | ctx.var = var 225 | ctx.save_for_backward(x, weight, bias, running_mean, running_var) 226 | ctx.mark_dirty(x) 227 | return x 228 | 229 | @staticmethod 230 | @once_differentiable 231 | def backward(ctx, dz): 232 | z, weight, bias, running_mean, running_var = ctx.saved_tensors 233 | dz = dz.contiguous() 234 | 235 | # Undo activation 236 | _act_backward(ctx, z, dz) 237 | 238 | if ctx.needs_input_grad[0]: 239 | dx = dz.new().resize_as_(dz) 240 | else: 241 | dx = None 242 | 243 | if ctx.needs_input_grad[1]: 244 | dweight = dz.new().resize_as_(running_mean).zero_() 245 | else: 246 | dweight = None 247 | 248 | if ctx.needs_input_grad[2]: 249 | dbias = dz.new().resize_as_(running_mean).zero_() 250 | else: 251 | dbias = None 252 | 253 | if ctx.training: 254 | edz = dz.new().resize_as_(running_mean) 255 | eydz = dz.new().resize_as_(running_mean) 256 | _check_contiguous(z, dz, weight, bias, edz, eydz) 257 | _check(_ext.bn_edz_eydz_cuda, 258 | z, dz, 259 | weight if weight is not None else dz.new(), 260 | bias if bias is not None else dz.new(), 261 | edz, eydz, ctx.eps) 262 | 263 | if ctx.is_master: 264 | edzs, eydzs = [edz], [eydz] 265 | for _ in range(len(ctx.worker_queues)): 266 | edz_w, eydz_w = ctx.master_queue.get() 267 | ctx.master_queue.task_done() 268 | edzs.append(edz_w) 269 | eydzs.append(eydz_w) 270 | 271 | edz = comm.reduce_add(edzs) / (ctx.master_queue.maxsize + 1) 272 | eydz = comm.reduce_add(eydzs) / (ctx.master_queue.maxsize + 1) 273 | 274 | tensors = comm.broadcast_coalesced((edz, eydz), [edz.get_device()] + ctx.worker_ids) 275 | for ts, queue in zip(tensors[1:], ctx.worker_queues): 276 | queue.put(ts) 277 | else: 278 | ctx.master_queue.put((edz, eydz)) 279 | edz, eydz = ctx.worker_queue.get() 280 | ctx.worker_queue.task_done() 281 | else: 282 | edz = dz.new().resize_as_(running_mean).zero_() 283 | eydz = dz.new().resize_as_(running_mean).zero_() 284 | 285 | _check_contiguous(dz, z, ctx.var, weight, bias, edz, eydz, dx, dweight, dbias) 286 | _check(_ext.bn_backard_cuda, 287 | dz, z, ctx.var, 288 | weight if weight is not None else dz.new(), 289 | bias if bias is not None else dz.new(), 290 | edz, eydz, 291 | dx if dx is not None else dz.new(), 292 | dweight if dweight is not None else dz.new(), 293 | dbias if dbias is not None else dz.new(), 294 | ctx.eps) 295 | 296 | del ctx.var 297 | 298 | return dx, dweight, dbias, None, None, None, None, None, None, None, None 299 | 300 | @staticmethod 301 | def _parse_extra(ctx, extra): 302 | ctx.is_master = extra["is_master"] 303 | if ctx.is_master: 304 | ctx.master_queue = extra["master_queue"] 305 | ctx.worker_queues = extra["worker_queues"] 306 | ctx.worker_ids = extra["worker_ids"] 307 | else: 308 | ctx.master_queue = extra["master_queue"] 309 | ctx.worker_queue = extra["worker_queue"] 310 | 311 | 312 | inplace_abn = InPlaceABN.apply 313 | inplace_abn_sync = InPlaceABNSync.apply 314 | 315 | __all__ = ["inplace_abn", "inplace_abn_sync"] 316 | -------------------------------------------------------------------------------- /libs/src/bn.cu: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | #include "common.h" 6 | #include "bn.h" 7 | 8 | /* 9 | * Device functions and data structures 10 | */ 11 | struct Float2 { 12 | float v1, v2; 13 | __device__ Float2() {} 14 | __device__ Float2(float _v1, float _v2) : v1(_v1), v2(_v2) {} 15 | __device__ Float2(float v) : v1(v), v2(v) {} 16 | __device__ Float2(int v) : v1(v), v2(v) {} 17 | __device__ Float2 &operator+=(const Float2 &a) { 18 | v1 += a.v1; 19 | v2 += a.v2; 20 | return *this; 21 | } 22 | }; 23 | 24 | struct SumOp { 25 | __device__ SumOp(const float *t, int c, int s) 26 | : tensor(t), C(c), S(s) {} 27 | __device__ __forceinline__ float operator()(int batch, int plane, int n) { 28 | return tensor[(batch * C + plane) * S + n]; 29 | } 30 | const float *tensor; 31 | const int C; 32 | const int S; 33 | }; 34 | 35 | struct VarOp { 36 | __device__ VarOp(float m, const float *t, int c, int s) 37 | : mean(m), tensor(t), C(c), S(s) {} 38 | __device__ __forceinline__ float operator()(int batch, int plane, int n) { 39 | float val = tensor[(batch * C + plane) * S + n]; 40 | return (val - mean) * (val - mean); 41 | } 42 | const float mean; 43 | const float *tensor; 44 | const int C; 45 | const int S; 46 | }; 47 | 48 | struct GradOp { 49 | __device__ GradOp(float _gamma, float _beta, const float *_z, const float *_dz, int c, int s) 50 | : gamma(_gamma), beta(_beta), z(_z), dz(_dz), C(c), S(s) {} 51 | __device__ __forceinline__ Float2 operator()(int batch, int plane, int n) { 52 | float _y = (z[(batch * C + plane) * S + n] - beta) / gamma; 53 | float _dz = dz[(batch * C + plane) * S + n]; 54 | return Float2(_dz, _y * _dz); 55 | } 56 | const float gamma; 57 | const float beta; 58 | const float *z; 59 | const float *dz; 60 | const int C; 61 | const int S; 62 | }; 63 | 64 | static __device__ __forceinline__ float warpSum(float val) { 65 | #if __CUDA_ARCH__ >= 300 66 | for (int i = 0; i < getMSB(WARP_SIZE); ++i) { 67 | val += WARP_SHFL_XOR(val, 1 << i, WARP_SIZE); 68 | } 69 | #else 70 | __shared__ float values[MAX_BLOCK_SIZE]; 71 | values[threadIdx.x] = val; 72 | __threadfence_block(); 73 | const int base = (threadIdx.x / WARP_SIZE) * WARP_SIZE; 74 | for (int i = 1; i < WARP_SIZE; i++) { 75 | val += values[base + ((i + threadIdx.x) % WARP_SIZE)]; 76 | } 77 | #endif 78 | return val; 79 | } 80 | 81 | static __device__ __forceinline__ Float2 warpSum(Float2 value) { 82 | value.v1 = warpSum(value.v1); 83 | value.v2 = warpSum(value.v2); 84 | return value; 85 | } 86 | 87 | template 88 | __device__ T reduce(Op op, int plane, int N, int C, int S) { 89 | T sum = (T)0; 90 | for (int batch = 0; batch < N; ++batch) { 91 | for (int x = threadIdx.x; x < S; x += blockDim.x) { 92 | sum += op(batch, plane, x); 93 | } 94 | } 95 | 96 | // sum over NumThreads within a warp 97 | sum = warpSum(sum); 98 | 99 | // 'transpose', and reduce within warp again 100 | __shared__ T shared[32]; 101 | __syncthreads(); 102 | if (threadIdx.x % WARP_SIZE == 0) { 103 | shared[threadIdx.x / WARP_SIZE] = sum; 104 | } 105 | if (threadIdx.x >= blockDim.x / WARP_SIZE && threadIdx.x < WARP_SIZE) { 106 | // zero out the other entries in shared 107 | shared[threadIdx.x] = (T)0; 108 | } 109 | __syncthreads(); 110 | if (threadIdx.x / WARP_SIZE == 0) { 111 | sum = warpSum(shared[threadIdx.x]); 112 | if (threadIdx.x == 0) { 113 | shared[0] = sum; 114 | } 115 | } 116 | __syncthreads(); 117 | 118 | // Everyone picks it up, should be broadcast into the whole gradInput 119 | return shared[0]; 120 | } 121 | 122 | /* 123 | * Kernels 124 | */ 125 | __global__ void mean_var_kernel(const float *x, float *mean, float *var, int N, 126 | int C, int S) { 127 | int plane = blockIdx.x; 128 | float norm = 1.f / (N * S); 129 | 130 | float _mean = reduce(SumOp(x, C, S), plane, N, C, S) * norm; 131 | __syncthreads(); 132 | float _var = reduce(VarOp(_mean, x, C, S), plane, N, C, S) * norm; 133 | 134 | if (threadIdx.x == 0) { 135 | mean[plane] = _mean; 136 | var[plane] = _var; 137 | } 138 | } 139 | 140 | __global__ void forward_kernel(const float *x, const float *mean, 141 | const float *var, const float *weight, 142 | const float *bias, float *y, float *z, float eps, 143 | int N, int C, int S) { 144 | int plane = blockIdx.x; 145 | 146 | float _mean = mean[plane]; 147 | float _var = var[plane]; 148 | float invStd = 0; 149 | if (_var != 0.f || eps != 0.f) { 150 | invStd = 1 / sqrt(_var + eps); 151 | } 152 | 153 | float gamma = weight != 0 ? abs(weight[plane]) + eps : 1.f; 154 | float beta = bias != 0 ? bias[plane] : 0.f; 155 | for (int batch = 0; batch < N; ++batch) { 156 | for (int n = threadIdx.x; n < S; n += blockDim.x) { 157 | float _x = x[(batch * C + plane) * S + n]; 158 | float _y = (_x - _mean) * invStd; 159 | float _z = _y * gamma + beta; 160 | 161 | y[(batch * C + plane) * S + n] = _y; 162 | z[(batch * C + plane) * S + n] = _z; 163 | } 164 | } 165 | } 166 | 167 | __global__ void edz_eydz_kernel(const float *z, const float *dz, const float *weight, const float *bias, 168 | float *edz, float *eydz, float eps, int N, int C, int S) { 169 | int plane = blockIdx.x; 170 | float norm = 1.f / (N * S); 171 | 172 | float gamma = weight != 0 ? abs(weight[plane]) + eps : 1.f; 173 | float beta = bias != 0 ? bias[plane] : 0.f; 174 | 175 | Float2 res = reduce(GradOp(gamma, beta, z, dz, C, S), plane, N, C, S); 176 | float _edz = res.v1 * norm; 177 | float _eydz = res.v2 * norm; 178 | __syncthreads(); 179 | 180 | if (threadIdx.x == 0) { 181 | edz[plane] = _edz; 182 | eydz[plane] = _eydz; 183 | } 184 | } 185 | 186 | __global__ void backward_kernel(const float *dz, const float *z, const float *var, const float *weight, 187 | const float *bias, const float *edz, const float *eydz, float *dx, float *dweight, 188 | float *dbias, float eps, int N, int C, int S) { 189 | int plane = blockIdx.x; 190 | float _edz = edz[plane]; 191 | float _eydz = eydz[plane]; 192 | 193 | float gamma = weight != 0 ? abs(weight[plane]) + eps : 1.f; 194 | float beta = bias != 0 ? bias[plane] : 0.f; 195 | 196 | if (dx != 0) { 197 | float _var = var[plane]; 198 | float invStd = 0; 199 | if (_var != 0.f || eps != 0.f) { 200 | invStd = 1 / sqrt(_var + eps); 201 | } 202 | 203 | float mul = gamma * invStd; 204 | 205 | for (int batch = 0; batch < N; ++batch) { 206 | for (int n = threadIdx.x; n < S; n += blockDim.x) { 207 | float _dz = dz[(batch * C + plane) * S + n]; 208 | float _y = (z[(batch * C + plane) * S + n] - beta) / gamma; 209 | dx[(batch * C + plane) * S + n] = (_dz - _edz - _y * _eydz) * mul; 210 | } 211 | } 212 | } 213 | 214 | if (dweight != 0 || dbias != 0) { 215 | float norm = N * S; 216 | 217 | if (dweight != 0) { 218 | if (threadIdx.x == 0) { 219 | if (weight[plane] > 0) 220 | dweight[plane] += _eydz * norm; 221 | else if (weight[plane] < 0) 222 | dweight[plane] -= _eydz * norm; 223 | } 224 | } 225 | 226 | if (dbias != 0) { 227 | if (threadIdx.x == 0) { 228 | dbias[plane] += _edz * norm; 229 | } 230 | } 231 | } 232 | } 233 | 234 | /* 235 | * Implementations 236 | */ 237 | extern "C" int _bn_mean_var_cuda(int N, int C, int S, const float *x, float *mean, 238 | float *var, cudaStream_t stream) { 239 | // Run kernel 240 | dim3 blocks(C); 241 | dim3 threads(getNumThreads(S)); 242 | mean_var_kernel<<>>(x, mean, var, N, C, S); 243 | 244 | // Check for errors 245 | cudaError_t err = cudaGetLastError(); 246 | if (err != cudaSuccess) 247 | return 0; 248 | else 249 | return 1; 250 | } 251 | 252 | extern "C" int _bn_forward_cuda(int N, int C, int S, const float *x, 253 | const float *mean, const float *var, 254 | const float *weight, const float *bias, float *y, 255 | float *z, float eps, cudaStream_t stream) { 256 | // Run kernel 257 | dim3 blocks(C); 258 | dim3 threads(getNumThreads(S)); 259 | forward_kernel<<>>(x, mean, var, weight, bias, y, 260 | z, eps, N, C, S); 261 | 262 | // Check for errors 263 | cudaError_t err = cudaGetLastError(); 264 | if (err != cudaSuccess) 265 | return 0; 266 | else 267 | return 1; 268 | } 269 | 270 | extern "C" int _bn_edz_eydz_cuda(int N, int C, int S, const float *z, const float *dz, const float *weight, 271 | const float *bias, float *edz, float *eydz, float eps, cudaStream_t stream) { 272 | // Run kernel 273 | dim3 blocks(C); 274 | dim3 threads(getNumThreads(S)); 275 | edz_eydz_kernel<<>>(z, dz, weight, bias, edz, eydz, eps, N, C, S); 276 | 277 | // Check for errors 278 | cudaError_t err = cudaGetLastError(); 279 | if (err != cudaSuccess) 280 | return 0; 281 | else 282 | return 1; 283 | } 284 | 285 | extern "C" int _bn_backward_cuda(int N, int C, int S, const float *dz, const float *z, const float *var, 286 | const float *weight, const float *bias, const float *edz, const float *eydz, 287 | float *dx, float *dweight, float *dbias, float eps, cudaStream_t stream) { 288 | // Run kernel 289 | dim3 blocks(C); 290 | dim3 threads(getNumThreads(S)); 291 | backward_kernel<<>>(dz, z, var, weight, bias, edz, eydz, dx, dweight, dbias, 292 | eps, N, C, S); 293 | 294 | // Check for errors 295 | cudaError_t err = cudaGetLastError(); 296 | if (err != cudaSuccess) 297 | return 0; 298 | else 299 | return 1; 300 | } 301 | 302 | extern "C" int _leaky_relu_cuda(int N, float *x, float slope, cudaStream_t stream) { 303 | // Run using thrust 304 | thrust::device_ptr th_x = thrust::device_pointer_cast(x); 305 | thrust::transform_if(thrust::cuda::par.on(stream), th_x, th_x + N, th_x, 306 | [slope] __device__ (const float& x) { return x * slope; }, 307 | [] __device__ (const float& x) { return x < 0; }); 308 | 309 | // Check for errors 310 | cudaError_t err = cudaGetLastError(); 311 | if (err != cudaSuccess) 312 | return 0; 313 | else 314 | return 1; 315 | } 316 | 317 | extern "C" int _leaky_relu_backward_cuda(int N, const float *x, float *dx, float slope, cudaStream_t stream) { 318 | // Run using thrust 319 | thrust::device_ptr th_x = thrust::device_pointer_cast(x); 320 | thrust::device_ptr th_dx = thrust::device_pointer_cast(dx); 321 | thrust::transform_if(thrust::cuda::par.on(stream), th_dx, th_dx + N, th_x, th_dx, 322 | [slope] __device__ (const float& dx) { return dx * slope; }, 323 | [] __device__ (const float& x) { return x < 0; }); 324 | 325 | // Check for errors 326 | cudaError_t err = cudaGetLastError(); 327 | if (err != cudaSuccess) 328 | return 0; 329 | else 330 | return 1; 331 | } 332 | 333 | extern "C" int _elu_cuda(int N, float *x, cudaStream_t stream) { 334 | // Run using thrust 335 | thrust::device_ptr th_x = thrust::device_pointer_cast(x); 336 | thrust::transform_if(thrust::cuda::par.on(stream), th_x, th_x + N, th_x, 337 | [] __device__ (const float& x) { return exp(x) - 1.f; }, 338 | [] __device__ (const float& x) { return x < 0; }); 339 | 340 | // Check for errors 341 | cudaError_t err = cudaGetLastError(); 342 | if (err != cudaSuccess) 343 | return 0; 344 | else 345 | return 1; 346 | } 347 | 348 | extern "C" int _elu_backward_cuda(int N, const float *x, float *dx, cudaStream_t stream) { 349 | // Run using thrust 350 | thrust::device_ptr th_x = thrust::device_pointer_cast(x); 351 | thrust::device_ptr th_dx = thrust::device_pointer_cast(dx); 352 | thrust::transform_if(thrust::cuda::par.on(stream), th_dx, th_dx + N, th_x, th_x, th_dx, 353 | [] __device__ (const float& dx, const float& x) { return dx * (x + 1.f); }, 354 | [] __device__ (const float& x) { return x < 0; }); 355 | 356 | // Check for errors 357 | cudaError_t err = cudaGetLastError(); 358 | if (err != cudaSuccess) 359 | return 0; 360 | else 361 | return 1; 362 | } 363 | 364 | extern "C" int _elu_inv_cuda(int N, float *x, cudaStream_t stream) { 365 | // Run using thrust 366 | thrust::device_ptr th_x = thrust::device_pointer_cast(x); 367 | thrust::transform_if(thrust::cuda::par.on(stream), th_x, th_x + N, th_x, 368 | [] __device__ (const float& x) { return log1p(x); }, 369 | [] __device__ (const float& x) { return x < 0; }); 370 | 371 | // Check for errors 372 | cudaError_t err = cudaGetLastError(); 373 | if (err != cudaSuccess) 374 | return 0; 375 | else 376 | return 1; 377 | } 378 | -------------------------------------------------------------------------------- /dataset/list/cityscapes/val.lst: -------------------------------------------------------------------------------- 1 | leftImg8bit/val/frankfurt/frankfurt_000000_000294_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_000294_gtFine_labelIds.png 2 | leftImg8bit/val/frankfurt/frankfurt_000000_000576_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_000576_gtFine_labelIds.png 3 | leftImg8bit/val/frankfurt/frankfurt_000000_001016_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_001016_gtFine_labelIds.png 4 | leftImg8bit/val/frankfurt/frankfurt_000000_001236_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_001236_gtFine_labelIds.png 5 | leftImg8bit/val/frankfurt/frankfurt_000000_001751_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_001751_gtFine_labelIds.png 6 | leftImg8bit/val/frankfurt/frankfurt_000000_002196_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_002196_gtFine_labelIds.png 7 | leftImg8bit/val/frankfurt/frankfurt_000000_002963_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_002963_gtFine_labelIds.png 8 | leftImg8bit/val/frankfurt/frankfurt_000000_003025_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_003025_gtFine_labelIds.png 9 | leftImg8bit/val/frankfurt/frankfurt_000000_003357_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_003357_gtFine_labelIds.png 10 | leftImg8bit/val/frankfurt/frankfurt_000000_003920_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_003920_gtFine_labelIds.png 11 | leftImg8bit/val/frankfurt/frankfurt_000000_004617_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_004617_gtFine_labelIds.png 12 | leftImg8bit/val/frankfurt/frankfurt_000000_005543_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_005543_gtFine_labelIds.png 13 | leftImg8bit/val/frankfurt/frankfurt_000000_005898_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_005898_gtFine_labelIds.png 14 | leftImg8bit/val/frankfurt/frankfurt_000000_006589_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_006589_gtFine_labelIds.png 15 | leftImg8bit/val/frankfurt/frankfurt_000000_007365_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_007365_gtFine_labelIds.png 16 | leftImg8bit/val/frankfurt/frankfurt_000000_008206_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_008206_gtFine_labelIds.png 17 | leftImg8bit/val/frankfurt/frankfurt_000000_008451_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_008451_gtFine_labelIds.png 18 | leftImg8bit/val/frankfurt/frankfurt_000000_009291_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_009291_gtFine_labelIds.png 19 | leftImg8bit/val/frankfurt/frankfurt_000000_009561_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_009561_gtFine_labelIds.png 20 | leftImg8bit/val/frankfurt/frankfurt_000000_009688_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_009688_gtFine_labelIds.png 21 | leftImg8bit/val/frankfurt/frankfurt_000000_009969_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_009969_gtFine_labelIds.png 22 | leftImg8bit/val/frankfurt/frankfurt_000000_010351_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_010351_gtFine_labelIds.png 23 | leftImg8bit/val/frankfurt/frankfurt_000000_010763_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_010763_gtFine_labelIds.png 24 | leftImg8bit/val/frankfurt/frankfurt_000000_011007_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_011007_gtFine_labelIds.png 25 | leftImg8bit/val/frankfurt/frankfurt_000000_011074_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_011074_gtFine_labelIds.png 26 | leftImg8bit/val/frankfurt/frankfurt_000000_011461_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_011461_gtFine_labelIds.png 27 | leftImg8bit/val/frankfurt/frankfurt_000000_011810_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_011810_gtFine_labelIds.png 28 | leftImg8bit/val/frankfurt/frankfurt_000000_012009_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_012009_gtFine_labelIds.png 29 | leftImg8bit/val/frankfurt/frankfurt_000000_012121_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_012121_gtFine_labelIds.png 30 | leftImg8bit/val/frankfurt/frankfurt_000000_012868_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_012868_gtFine_labelIds.png 31 | leftImg8bit/val/frankfurt/frankfurt_000000_013067_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_013067_gtFine_labelIds.png 32 | leftImg8bit/val/frankfurt/frankfurt_000000_013240_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_013240_gtFine_labelIds.png 33 | leftImg8bit/val/frankfurt/frankfurt_000000_013382_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_013382_gtFine_labelIds.png 34 | leftImg8bit/val/frankfurt/frankfurt_000000_013942_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_013942_gtFine_labelIds.png 35 | leftImg8bit/val/frankfurt/frankfurt_000000_014480_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_014480_gtFine_labelIds.png 36 | leftImg8bit/val/frankfurt/frankfurt_000000_015389_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_015389_gtFine_labelIds.png 37 | leftImg8bit/val/frankfurt/frankfurt_000000_015676_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_015676_gtFine_labelIds.png 38 | leftImg8bit/val/frankfurt/frankfurt_000000_016005_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_016005_gtFine_labelIds.png 39 | leftImg8bit/val/frankfurt/frankfurt_000000_016286_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_016286_gtFine_labelIds.png 40 | leftImg8bit/val/frankfurt/frankfurt_000000_017228_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_017228_gtFine_labelIds.png 41 | leftImg8bit/val/frankfurt/frankfurt_000000_017476_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_017476_gtFine_labelIds.png 42 | leftImg8bit/val/frankfurt/frankfurt_000000_018797_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_018797_gtFine_labelIds.png 43 | leftImg8bit/val/frankfurt/frankfurt_000000_019607_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_019607_gtFine_labelIds.png 44 | leftImg8bit/val/frankfurt/frankfurt_000000_020215_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_020215_gtFine_labelIds.png 45 | leftImg8bit/val/frankfurt/frankfurt_000000_020321_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_020321_gtFine_labelIds.png 46 | leftImg8bit/val/frankfurt/frankfurt_000000_020880_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_020880_gtFine_labelIds.png 47 | leftImg8bit/val/frankfurt/frankfurt_000000_021667_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_021667_gtFine_labelIds.png 48 | leftImg8bit/val/frankfurt/frankfurt_000000_021879_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_021879_gtFine_labelIds.png 49 | leftImg8bit/val/frankfurt/frankfurt_000000_022254_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_022254_gtFine_labelIds.png 50 | leftImg8bit/val/frankfurt/frankfurt_000000_022797_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000000_022797_gtFine_labelIds.png 51 | leftImg8bit/val/frankfurt/frankfurt_000001_000538_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_000538_gtFine_labelIds.png 52 | leftImg8bit/val/frankfurt/frankfurt_000001_001464_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_001464_gtFine_labelIds.png 53 | leftImg8bit/val/frankfurt/frankfurt_000001_002512_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_002512_gtFine_labelIds.png 54 | leftImg8bit/val/frankfurt/frankfurt_000001_002646_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_002646_gtFine_labelIds.png 55 | leftImg8bit/val/frankfurt/frankfurt_000001_002759_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_002759_gtFine_labelIds.png 56 | leftImg8bit/val/frankfurt/frankfurt_000001_003056_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_003056_gtFine_labelIds.png 57 | leftImg8bit/val/frankfurt/frankfurt_000001_003588_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_003588_gtFine_labelIds.png 58 | leftImg8bit/val/frankfurt/frankfurt_000001_004327_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_004327_gtFine_labelIds.png 59 | leftImg8bit/val/frankfurt/frankfurt_000001_004736_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_004736_gtFine_labelIds.png 60 | leftImg8bit/val/frankfurt/frankfurt_000001_004859_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_004859_gtFine_labelIds.png 61 | leftImg8bit/val/frankfurt/frankfurt_000001_005184_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_005184_gtFine_labelIds.png 62 | leftImg8bit/val/frankfurt/frankfurt_000001_005410_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_005410_gtFine_labelIds.png 63 | leftImg8bit/val/frankfurt/frankfurt_000001_005703_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_005703_gtFine_labelIds.png 64 | leftImg8bit/val/frankfurt/frankfurt_000001_005898_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_005898_gtFine_labelIds.png 65 | leftImg8bit/val/frankfurt/frankfurt_000001_007285_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_007285_gtFine_labelIds.png 66 | leftImg8bit/val/frankfurt/frankfurt_000001_007407_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_007407_gtFine_labelIds.png 67 | leftImg8bit/val/frankfurt/frankfurt_000001_007622_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_007622_gtFine_labelIds.png 68 | leftImg8bit/val/frankfurt/frankfurt_000001_007857_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_007857_gtFine_labelIds.png 69 | leftImg8bit/val/frankfurt/frankfurt_000001_007973_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_007973_gtFine_labelIds.png 70 | leftImg8bit/val/frankfurt/frankfurt_000001_008200_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_008200_gtFine_labelIds.png 71 | leftImg8bit/val/frankfurt/frankfurt_000001_008688_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_008688_gtFine_labelIds.png 72 | leftImg8bit/val/frankfurt/frankfurt_000001_009058_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_009058_gtFine_labelIds.png 73 | leftImg8bit/val/frankfurt/frankfurt_000001_009504_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_009504_gtFine_labelIds.png 74 | leftImg8bit/val/frankfurt/frankfurt_000001_009854_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_009854_gtFine_labelIds.png 75 | leftImg8bit/val/frankfurt/frankfurt_000001_010156_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_010156_gtFine_labelIds.png 76 | leftImg8bit/val/frankfurt/frankfurt_000001_010444_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_010444_gtFine_labelIds.png 77 | leftImg8bit/val/frankfurt/frankfurt_000001_010600_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_010600_gtFine_labelIds.png 78 | leftImg8bit/val/frankfurt/frankfurt_000001_010830_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_010830_gtFine_labelIds.png 79 | leftImg8bit/val/frankfurt/frankfurt_000001_011162_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_011162_gtFine_labelIds.png 80 | leftImg8bit/val/frankfurt/frankfurt_000001_011715_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_011715_gtFine_labelIds.png 81 | leftImg8bit/val/frankfurt/frankfurt_000001_011835_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_011835_gtFine_labelIds.png 82 | leftImg8bit/val/frankfurt/frankfurt_000001_012038_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_012038_gtFine_labelIds.png 83 | leftImg8bit/val/frankfurt/frankfurt_000001_012519_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_012519_gtFine_labelIds.png 84 | leftImg8bit/val/frankfurt/frankfurt_000001_012699_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_012699_gtFine_labelIds.png 85 | leftImg8bit/val/frankfurt/frankfurt_000001_012738_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_012738_gtFine_labelIds.png 86 | leftImg8bit/val/frankfurt/frankfurt_000001_012870_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_012870_gtFine_labelIds.png 87 | leftImg8bit/val/frankfurt/frankfurt_000001_013016_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_013016_gtFine_labelIds.png 88 | leftImg8bit/val/frankfurt/frankfurt_000001_013496_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_013496_gtFine_labelIds.png 89 | leftImg8bit/val/frankfurt/frankfurt_000001_013710_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_013710_gtFine_labelIds.png 90 | leftImg8bit/val/frankfurt/frankfurt_000001_014221_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_014221_gtFine_labelIds.png 91 | leftImg8bit/val/frankfurt/frankfurt_000001_014406_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_014406_gtFine_labelIds.png 92 | leftImg8bit/val/frankfurt/frankfurt_000001_014565_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_014565_gtFine_labelIds.png 93 | leftImg8bit/val/frankfurt/frankfurt_000001_014741_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_014741_gtFine_labelIds.png 94 | leftImg8bit/val/frankfurt/frankfurt_000001_015091_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_015091_gtFine_labelIds.png 95 | leftImg8bit/val/frankfurt/frankfurt_000001_015328_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_015328_gtFine_labelIds.png 96 | leftImg8bit/val/frankfurt/frankfurt_000001_015768_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_015768_gtFine_labelIds.png 97 | leftImg8bit/val/frankfurt/frankfurt_000001_016029_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_016029_gtFine_labelIds.png 98 | leftImg8bit/val/frankfurt/frankfurt_000001_016273_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_016273_gtFine_labelIds.png 99 | leftImg8bit/val/frankfurt/frankfurt_000001_016462_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_016462_gtFine_labelIds.png 100 | leftImg8bit/val/frankfurt/frankfurt_000001_017101_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_017101_gtFine_labelIds.png 101 | leftImg8bit/val/frankfurt/frankfurt_000001_017459_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_017459_gtFine_labelIds.png 102 | leftImg8bit/val/frankfurt/frankfurt_000001_017842_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_017842_gtFine_labelIds.png 103 | leftImg8bit/val/frankfurt/frankfurt_000001_018113_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_018113_gtFine_labelIds.png 104 | leftImg8bit/val/frankfurt/frankfurt_000001_019698_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_019698_gtFine_labelIds.png 105 | leftImg8bit/val/frankfurt/frankfurt_000001_019854_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_019854_gtFine_labelIds.png 106 | leftImg8bit/val/frankfurt/frankfurt_000001_019969_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_019969_gtFine_labelIds.png 107 | leftImg8bit/val/frankfurt/frankfurt_000001_020046_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_020046_gtFine_labelIds.png 108 | leftImg8bit/val/frankfurt/frankfurt_000001_020287_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_020287_gtFine_labelIds.png 109 | leftImg8bit/val/frankfurt/frankfurt_000001_020693_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_020693_gtFine_labelIds.png 110 | leftImg8bit/val/frankfurt/frankfurt_000001_021406_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_021406_gtFine_labelIds.png 111 | leftImg8bit/val/frankfurt/frankfurt_000001_021825_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_021825_gtFine_labelIds.png 112 | leftImg8bit/val/frankfurt/frankfurt_000001_023235_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_023235_gtFine_labelIds.png 113 | leftImg8bit/val/frankfurt/frankfurt_000001_023369_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_023369_gtFine_labelIds.png 114 | leftImg8bit/val/frankfurt/frankfurt_000001_023769_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_023769_gtFine_labelIds.png 115 | leftImg8bit/val/frankfurt/frankfurt_000001_024927_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_024927_gtFine_labelIds.png 116 | leftImg8bit/val/frankfurt/frankfurt_000001_025512_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_025512_gtFine_labelIds.png 117 | leftImg8bit/val/frankfurt/frankfurt_000001_025713_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_025713_gtFine_labelIds.png 118 | leftImg8bit/val/frankfurt/frankfurt_000001_025921_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_025921_gtFine_labelIds.png 119 | leftImg8bit/val/frankfurt/frankfurt_000001_027325_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_027325_gtFine_labelIds.png 120 | leftImg8bit/val/frankfurt/frankfurt_000001_028232_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_028232_gtFine_labelIds.png 121 | leftImg8bit/val/frankfurt/frankfurt_000001_028335_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_028335_gtFine_labelIds.png 122 | leftImg8bit/val/frankfurt/frankfurt_000001_028590_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_028590_gtFine_labelIds.png 123 | leftImg8bit/val/frankfurt/frankfurt_000001_028854_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_028854_gtFine_labelIds.png 124 | leftImg8bit/val/frankfurt/frankfurt_000001_029086_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_029086_gtFine_labelIds.png 125 | leftImg8bit/val/frankfurt/frankfurt_000001_029236_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_029236_gtFine_labelIds.png 126 | leftImg8bit/val/frankfurt/frankfurt_000001_029600_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_029600_gtFine_labelIds.png 127 | leftImg8bit/val/frankfurt/frankfurt_000001_030067_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_030067_gtFine_labelIds.png 128 | leftImg8bit/val/frankfurt/frankfurt_000001_030310_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_030310_gtFine_labelIds.png 129 | leftImg8bit/val/frankfurt/frankfurt_000001_030669_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_030669_gtFine_labelIds.png 130 | leftImg8bit/val/frankfurt/frankfurt_000001_031266_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_031266_gtFine_labelIds.png 131 | leftImg8bit/val/frankfurt/frankfurt_000001_031416_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_031416_gtFine_labelIds.png 132 | leftImg8bit/val/frankfurt/frankfurt_000001_032018_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_032018_gtFine_labelIds.png 133 | leftImg8bit/val/frankfurt/frankfurt_000001_032556_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_032556_gtFine_labelIds.png 134 | leftImg8bit/val/frankfurt/frankfurt_000001_032711_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_032711_gtFine_labelIds.png 135 | leftImg8bit/val/frankfurt/frankfurt_000001_032942_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_032942_gtFine_labelIds.png 136 | leftImg8bit/val/frankfurt/frankfurt_000001_033655_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_033655_gtFine_labelIds.png 137 | leftImg8bit/val/frankfurt/frankfurt_000001_034047_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_034047_gtFine_labelIds.png 138 | leftImg8bit/val/frankfurt/frankfurt_000001_034816_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_034816_gtFine_labelIds.png 139 | leftImg8bit/val/frankfurt/frankfurt_000001_035144_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_035144_gtFine_labelIds.png 140 | leftImg8bit/val/frankfurt/frankfurt_000001_035864_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_035864_gtFine_labelIds.png 141 | leftImg8bit/val/frankfurt/frankfurt_000001_037705_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_037705_gtFine_labelIds.png 142 | leftImg8bit/val/frankfurt/frankfurt_000001_038245_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_038245_gtFine_labelIds.png 143 | leftImg8bit/val/frankfurt/frankfurt_000001_038418_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_038418_gtFine_labelIds.png 144 | leftImg8bit/val/frankfurt/frankfurt_000001_038645_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_038645_gtFine_labelIds.png 145 | leftImg8bit/val/frankfurt/frankfurt_000001_038844_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_038844_gtFine_labelIds.png 146 | leftImg8bit/val/frankfurt/frankfurt_000001_039895_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_039895_gtFine_labelIds.png 147 | leftImg8bit/val/frankfurt/frankfurt_000001_040575_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_040575_gtFine_labelIds.png 148 | leftImg8bit/val/frankfurt/frankfurt_000001_040732_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_040732_gtFine_labelIds.png 149 | leftImg8bit/val/frankfurt/frankfurt_000001_041074_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_041074_gtFine_labelIds.png 150 | leftImg8bit/val/frankfurt/frankfurt_000001_041354_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_041354_gtFine_labelIds.png 151 | leftImg8bit/val/frankfurt/frankfurt_000001_041517_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_041517_gtFine_labelIds.png 152 | leftImg8bit/val/frankfurt/frankfurt_000001_041664_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_041664_gtFine_labelIds.png 153 | leftImg8bit/val/frankfurt/frankfurt_000001_042098_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_042098_gtFine_labelIds.png 154 | leftImg8bit/val/frankfurt/frankfurt_000001_042384_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_042384_gtFine_labelIds.png 155 | leftImg8bit/val/frankfurt/frankfurt_000001_042733_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_042733_gtFine_labelIds.png 156 | leftImg8bit/val/frankfurt/frankfurt_000001_043395_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_043395_gtFine_labelIds.png 157 | leftImg8bit/val/frankfurt/frankfurt_000001_043564_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_043564_gtFine_labelIds.png 158 | leftImg8bit/val/frankfurt/frankfurt_000001_044227_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_044227_gtFine_labelIds.png 159 | leftImg8bit/val/frankfurt/frankfurt_000001_044413_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_044413_gtFine_labelIds.png 160 | leftImg8bit/val/frankfurt/frankfurt_000001_044525_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_044525_gtFine_labelIds.png 161 | leftImg8bit/val/frankfurt/frankfurt_000001_044658_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_044658_gtFine_labelIds.png 162 | leftImg8bit/val/frankfurt/frankfurt_000001_044787_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_044787_gtFine_labelIds.png 163 | leftImg8bit/val/frankfurt/frankfurt_000001_046126_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_046126_gtFine_labelIds.png 164 | leftImg8bit/val/frankfurt/frankfurt_000001_046272_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_046272_gtFine_labelIds.png 165 | leftImg8bit/val/frankfurt/frankfurt_000001_046504_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_046504_gtFine_labelIds.png 166 | leftImg8bit/val/frankfurt/frankfurt_000001_046779_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_046779_gtFine_labelIds.png 167 | leftImg8bit/val/frankfurt/frankfurt_000001_047178_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_047178_gtFine_labelIds.png 168 | leftImg8bit/val/frankfurt/frankfurt_000001_047552_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_047552_gtFine_labelIds.png 169 | leftImg8bit/val/frankfurt/frankfurt_000001_048196_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_048196_gtFine_labelIds.png 170 | leftImg8bit/val/frankfurt/frankfurt_000001_048355_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_048355_gtFine_labelIds.png 171 | leftImg8bit/val/frankfurt/frankfurt_000001_048654_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_048654_gtFine_labelIds.png 172 | leftImg8bit/val/frankfurt/frankfurt_000001_049078_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_049078_gtFine_labelIds.png 173 | leftImg8bit/val/frankfurt/frankfurt_000001_049209_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_049209_gtFine_labelIds.png 174 | leftImg8bit/val/frankfurt/frankfurt_000001_049298_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_049298_gtFine_labelIds.png 175 | leftImg8bit/val/frankfurt/frankfurt_000001_049698_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_049698_gtFine_labelIds.png 176 | leftImg8bit/val/frankfurt/frankfurt_000001_049770_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_049770_gtFine_labelIds.png 177 | leftImg8bit/val/frankfurt/frankfurt_000001_050149_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_050149_gtFine_labelIds.png 178 | leftImg8bit/val/frankfurt/frankfurt_000001_050686_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_050686_gtFine_labelIds.png 179 | leftImg8bit/val/frankfurt/frankfurt_000001_051516_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_051516_gtFine_labelIds.png 180 | leftImg8bit/val/frankfurt/frankfurt_000001_051737_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_051737_gtFine_labelIds.png 181 | leftImg8bit/val/frankfurt/frankfurt_000001_051807_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_051807_gtFine_labelIds.png 182 | leftImg8bit/val/frankfurt/frankfurt_000001_052120_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_052120_gtFine_labelIds.png 183 | leftImg8bit/val/frankfurt/frankfurt_000001_052594_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_052594_gtFine_labelIds.png 184 | leftImg8bit/val/frankfurt/frankfurt_000001_053102_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_053102_gtFine_labelIds.png 185 | leftImg8bit/val/frankfurt/frankfurt_000001_054077_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_054077_gtFine_labelIds.png 186 | leftImg8bit/val/frankfurt/frankfurt_000001_054219_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_054219_gtFine_labelIds.png 187 | leftImg8bit/val/frankfurt/frankfurt_000001_054415_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_054415_gtFine_labelIds.png 188 | leftImg8bit/val/frankfurt/frankfurt_000001_054640_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_054640_gtFine_labelIds.png 189 | leftImg8bit/val/frankfurt/frankfurt_000001_054884_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_054884_gtFine_labelIds.png 190 | leftImg8bit/val/frankfurt/frankfurt_000001_055062_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055062_gtFine_labelIds.png 191 | leftImg8bit/val/frankfurt/frankfurt_000001_055172_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055172_gtFine_labelIds.png 192 | leftImg8bit/val/frankfurt/frankfurt_000001_055306_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055306_gtFine_labelIds.png 193 | leftImg8bit/val/frankfurt/frankfurt_000001_055387_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055387_gtFine_labelIds.png 194 | leftImg8bit/val/frankfurt/frankfurt_000001_055538_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055538_gtFine_labelIds.png 195 | leftImg8bit/val/frankfurt/frankfurt_000001_055603_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055603_gtFine_labelIds.png 196 | leftImg8bit/val/frankfurt/frankfurt_000001_055709_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_055709_gtFine_labelIds.png 197 | leftImg8bit/val/frankfurt/frankfurt_000001_056580_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_056580_gtFine_labelIds.png 198 | leftImg8bit/val/frankfurt/frankfurt_000001_057181_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_057181_gtFine_labelIds.png 199 | leftImg8bit/val/frankfurt/frankfurt_000001_057478_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_057478_gtFine_labelIds.png 200 | leftImg8bit/val/frankfurt/frankfurt_000001_057954_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_057954_gtFine_labelIds.png 201 | leftImg8bit/val/frankfurt/frankfurt_000001_058057_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_058057_gtFine_labelIds.png 202 | leftImg8bit/val/frankfurt/frankfurt_000001_058176_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_058176_gtFine_labelIds.png 203 | leftImg8bit/val/frankfurt/frankfurt_000001_058504_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_058504_gtFine_labelIds.png 204 | leftImg8bit/val/frankfurt/frankfurt_000001_058914_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_058914_gtFine_labelIds.png 205 | leftImg8bit/val/frankfurt/frankfurt_000001_059119_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_059119_gtFine_labelIds.png 206 | leftImg8bit/val/frankfurt/frankfurt_000001_059642_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_059642_gtFine_labelIds.png 207 | leftImg8bit/val/frankfurt/frankfurt_000001_059789_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_059789_gtFine_labelIds.png 208 | leftImg8bit/val/frankfurt/frankfurt_000001_060135_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_060135_gtFine_labelIds.png 209 | leftImg8bit/val/frankfurt/frankfurt_000001_060422_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_060422_gtFine_labelIds.png 210 | leftImg8bit/val/frankfurt/frankfurt_000001_060545_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_060545_gtFine_labelIds.png 211 | leftImg8bit/val/frankfurt/frankfurt_000001_060906_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_060906_gtFine_labelIds.png 212 | leftImg8bit/val/frankfurt/frankfurt_000001_061682_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_061682_gtFine_labelIds.png 213 | leftImg8bit/val/frankfurt/frankfurt_000001_061763_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_061763_gtFine_labelIds.png 214 | leftImg8bit/val/frankfurt/frankfurt_000001_062016_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_062016_gtFine_labelIds.png 215 | leftImg8bit/val/frankfurt/frankfurt_000001_062250_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_062250_gtFine_labelIds.png 216 | leftImg8bit/val/frankfurt/frankfurt_000001_062396_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_062396_gtFine_labelIds.png 217 | leftImg8bit/val/frankfurt/frankfurt_000001_062509_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_062509_gtFine_labelIds.png 218 | leftImg8bit/val/frankfurt/frankfurt_000001_062653_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_062653_gtFine_labelIds.png 219 | leftImg8bit/val/frankfurt/frankfurt_000001_062793_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_062793_gtFine_labelIds.png 220 | leftImg8bit/val/frankfurt/frankfurt_000001_063045_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_063045_gtFine_labelIds.png 221 | leftImg8bit/val/frankfurt/frankfurt_000001_064130_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_064130_gtFine_labelIds.png 222 | leftImg8bit/val/frankfurt/frankfurt_000001_064305_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_064305_gtFine_labelIds.png 223 | leftImg8bit/val/frankfurt/frankfurt_000001_064651_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_064651_gtFine_labelIds.png 224 | leftImg8bit/val/frankfurt/frankfurt_000001_064798_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_064798_gtFine_labelIds.png 225 | leftImg8bit/val/frankfurt/frankfurt_000001_064925_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_064925_gtFine_labelIds.png 226 | leftImg8bit/val/frankfurt/frankfurt_000001_065160_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_065160_gtFine_labelIds.png 227 | leftImg8bit/val/frankfurt/frankfurt_000001_065617_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_065617_gtFine_labelIds.png 228 | leftImg8bit/val/frankfurt/frankfurt_000001_065850_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_065850_gtFine_labelIds.png 229 | leftImg8bit/val/frankfurt/frankfurt_000001_066092_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_066092_gtFine_labelIds.png 230 | leftImg8bit/val/frankfurt/frankfurt_000001_066438_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_066438_gtFine_labelIds.png 231 | leftImg8bit/val/frankfurt/frankfurt_000001_066574_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_066574_gtFine_labelIds.png 232 | leftImg8bit/val/frankfurt/frankfurt_000001_066832_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_066832_gtFine_labelIds.png 233 | leftImg8bit/val/frankfurt/frankfurt_000001_067092_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_067092_gtFine_labelIds.png 234 | leftImg8bit/val/frankfurt/frankfurt_000001_067178_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_067178_gtFine_labelIds.png 235 | leftImg8bit/val/frankfurt/frankfurt_000001_067295_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_067295_gtFine_labelIds.png 236 | leftImg8bit/val/frankfurt/frankfurt_000001_067474_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_067474_gtFine_labelIds.png 237 | leftImg8bit/val/frankfurt/frankfurt_000001_067735_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_067735_gtFine_labelIds.png 238 | leftImg8bit/val/frankfurt/frankfurt_000001_068063_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_068063_gtFine_labelIds.png 239 | leftImg8bit/val/frankfurt/frankfurt_000001_068208_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_068208_gtFine_labelIds.png 240 | leftImg8bit/val/frankfurt/frankfurt_000001_068682_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_068682_gtFine_labelIds.png 241 | leftImg8bit/val/frankfurt/frankfurt_000001_068772_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_068772_gtFine_labelIds.png 242 | leftImg8bit/val/frankfurt/frankfurt_000001_069633_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_069633_gtFine_labelIds.png 243 | leftImg8bit/val/frankfurt/frankfurt_000001_070099_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_070099_gtFine_labelIds.png 244 | leftImg8bit/val/frankfurt/frankfurt_000001_071288_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_071288_gtFine_labelIds.png 245 | leftImg8bit/val/frankfurt/frankfurt_000001_071781_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_071781_gtFine_labelIds.png 246 | leftImg8bit/val/frankfurt/frankfurt_000001_072155_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_072155_gtFine_labelIds.png 247 | leftImg8bit/val/frankfurt/frankfurt_000001_072295_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_072295_gtFine_labelIds.png 248 | leftImg8bit/val/frankfurt/frankfurt_000001_073088_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_073088_gtFine_labelIds.png 249 | leftImg8bit/val/frankfurt/frankfurt_000001_073243_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_073243_gtFine_labelIds.png 250 | leftImg8bit/val/frankfurt/frankfurt_000001_073464_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_073464_gtFine_labelIds.png 251 | leftImg8bit/val/frankfurt/frankfurt_000001_073911_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_073911_gtFine_labelIds.png 252 | leftImg8bit/val/frankfurt/frankfurt_000001_075296_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_075296_gtFine_labelIds.png 253 | leftImg8bit/val/frankfurt/frankfurt_000001_075984_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_075984_gtFine_labelIds.png 254 | leftImg8bit/val/frankfurt/frankfurt_000001_076502_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_076502_gtFine_labelIds.png 255 | leftImg8bit/val/frankfurt/frankfurt_000001_077092_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_077092_gtFine_labelIds.png 256 | leftImg8bit/val/frankfurt/frankfurt_000001_077233_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_077233_gtFine_labelIds.png 257 | leftImg8bit/val/frankfurt/frankfurt_000001_077434_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_077434_gtFine_labelIds.png 258 | leftImg8bit/val/frankfurt/frankfurt_000001_078803_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_078803_gtFine_labelIds.png 259 | leftImg8bit/val/frankfurt/frankfurt_000001_079206_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_079206_gtFine_labelIds.png 260 | leftImg8bit/val/frankfurt/frankfurt_000001_080091_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_080091_gtFine_labelIds.png 261 | leftImg8bit/val/frankfurt/frankfurt_000001_080391_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_080391_gtFine_labelIds.png 262 | leftImg8bit/val/frankfurt/frankfurt_000001_080830_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_080830_gtFine_labelIds.png 263 | leftImg8bit/val/frankfurt/frankfurt_000001_082087_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_082087_gtFine_labelIds.png 264 | leftImg8bit/val/frankfurt/frankfurt_000001_082466_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_082466_gtFine_labelIds.png 265 | leftImg8bit/val/frankfurt/frankfurt_000001_083029_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_083029_gtFine_labelIds.png 266 | leftImg8bit/val/frankfurt/frankfurt_000001_083199_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_083199_gtFine_labelIds.png 267 | leftImg8bit/val/frankfurt/frankfurt_000001_083852_leftImg8bit.png gtFine/val/frankfurt/frankfurt_000001_083852_gtFine_labelIds.png 268 | leftImg8bit/val/lindau/lindau_000000_000019_leftImg8bit.png gtFine/val/lindau/lindau_000000_000019_gtFine_labelIds.png 269 | leftImg8bit/val/lindau/lindau_000001_000019_leftImg8bit.png gtFine/val/lindau/lindau_000001_000019_gtFine_labelIds.png 270 | leftImg8bit/val/lindau/lindau_000002_000019_leftImg8bit.png gtFine/val/lindau/lindau_000002_000019_gtFine_labelIds.png 271 | leftImg8bit/val/lindau/lindau_000003_000019_leftImg8bit.png gtFine/val/lindau/lindau_000003_000019_gtFine_labelIds.png 272 | leftImg8bit/val/lindau/lindau_000004_000019_leftImg8bit.png gtFine/val/lindau/lindau_000004_000019_gtFine_labelIds.png 273 | leftImg8bit/val/lindau/lindau_000005_000019_leftImg8bit.png gtFine/val/lindau/lindau_000005_000019_gtFine_labelIds.png 274 | leftImg8bit/val/lindau/lindau_000006_000019_leftImg8bit.png gtFine/val/lindau/lindau_000006_000019_gtFine_labelIds.png 275 | leftImg8bit/val/lindau/lindau_000007_000019_leftImg8bit.png gtFine/val/lindau/lindau_000007_000019_gtFine_labelIds.png 276 | leftImg8bit/val/lindau/lindau_000008_000019_leftImg8bit.png gtFine/val/lindau/lindau_000008_000019_gtFine_labelIds.png 277 | leftImg8bit/val/lindau/lindau_000009_000019_leftImg8bit.png gtFine/val/lindau/lindau_000009_000019_gtFine_labelIds.png 278 | leftImg8bit/val/lindau/lindau_000010_000019_leftImg8bit.png gtFine/val/lindau/lindau_000010_000019_gtFine_labelIds.png 279 | leftImg8bit/val/lindau/lindau_000011_000019_leftImg8bit.png gtFine/val/lindau/lindau_000011_000019_gtFine_labelIds.png 280 | leftImg8bit/val/lindau/lindau_000012_000019_leftImg8bit.png gtFine/val/lindau/lindau_000012_000019_gtFine_labelIds.png 281 | leftImg8bit/val/lindau/lindau_000013_000019_leftImg8bit.png gtFine/val/lindau/lindau_000013_000019_gtFine_labelIds.png 282 | leftImg8bit/val/lindau/lindau_000014_000019_leftImg8bit.png gtFine/val/lindau/lindau_000014_000019_gtFine_labelIds.png 283 | leftImg8bit/val/lindau/lindau_000015_000019_leftImg8bit.png gtFine/val/lindau/lindau_000015_000019_gtFine_labelIds.png 284 | leftImg8bit/val/lindau/lindau_000016_000019_leftImg8bit.png gtFine/val/lindau/lindau_000016_000019_gtFine_labelIds.png 285 | leftImg8bit/val/lindau/lindau_000017_000019_leftImg8bit.png gtFine/val/lindau/lindau_000017_000019_gtFine_labelIds.png 286 | leftImg8bit/val/lindau/lindau_000018_000019_leftImg8bit.png gtFine/val/lindau/lindau_000018_000019_gtFine_labelIds.png 287 | leftImg8bit/val/lindau/lindau_000019_000019_leftImg8bit.png gtFine/val/lindau/lindau_000019_000019_gtFine_labelIds.png 288 | leftImg8bit/val/lindau/lindau_000020_000019_leftImg8bit.png gtFine/val/lindau/lindau_000020_000019_gtFine_labelIds.png 289 | leftImg8bit/val/lindau/lindau_000021_000019_leftImg8bit.png gtFine/val/lindau/lindau_000021_000019_gtFine_labelIds.png 290 | leftImg8bit/val/lindau/lindau_000022_000019_leftImg8bit.png gtFine/val/lindau/lindau_000022_000019_gtFine_labelIds.png 291 | leftImg8bit/val/lindau/lindau_000023_000019_leftImg8bit.png gtFine/val/lindau/lindau_000023_000019_gtFine_labelIds.png 292 | leftImg8bit/val/lindau/lindau_000024_000019_leftImg8bit.png gtFine/val/lindau/lindau_000024_000019_gtFine_labelIds.png 293 | leftImg8bit/val/lindau/lindau_000025_000019_leftImg8bit.png gtFine/val/lindau/lindau_000025_000019_gtFine_labelIds.png 294 | leftImg8bit/val/lindau/lindau_000026_000019_leftImg8bit.png gtFine/val/lindau/lindau_000026_000019_gtFine_labelIds.png 295 | leftImg8bit/val/lindau/lindau_000027_000019_leftImg8bit.png gtFine/val/lindau/lindau_000027_000019_gtFine_labelIds.png 296 | leftImg8bit/val/lindau/lindau_000028_000019_leftImg8bit.png gtFine/val/lindau/lindau_000028_000019_gtFine_labelIds.png 297 | leftImg8bit/val/lindau/lindau_000029_000019_leftImg8bit.png gtFine/val/lindau/lindau_000029_000019_gtFine_labelIds.png 298 | leftImg8bit/val/lindau/lindau_000030_000019_leftImg8bit.png gtFine/val/lindau/lindau_000030_000019_gtFine_labelIds.png 299 | leftImg8bit/val/lindau/lindau_000031_000019_leftImg8bit.png gtFine/val/lindau/lindau_000031_000019_gtFine_labelIds.png 300 | leftImg8bit/val/lindau/lindau_000032_000019_leftImg8bit.png gtFine/val/lindau/lindau_000032_000019_gtFine_labelIds.png 301 | leftImg8bit/val/lindau/lindau_000033_000019_leftImg8bit.png gtFine/val/lindau/lindau_000033_000019_gtFine_labelIds.png 302 | leftImg8bit/val/lindau/lindau_000034_000019_leftImg8bit.png gtFine/val/lindau/lindau_000034_000019_gtFine_labelIds.png 303 | leftImg8bit/val/lindau/lindau_000035_000019_leftImg8bit.png gtFine/val/lindau/lindau_000035_000019_gtFine_labelIds.png 304 | leftImg8bit/val/lindau/lindau_000036_000019_leftImg8bit.png gtFine/val/lindau/lindau_000036_000019_gtFine_labelIds.png 305 | leftImg8bit/val/lindau/lindau_000037_000019_leftImg8bit.png gtFine/val/lindau/lindau_000037_000019_gtFine_labelIds.png 306 | leftImg8bit/val/lindau/lindau_000038_000019_leftImg8bit.png gtFine/val/lindau/lindau_000038_000019_gtFine_labelIds.png 307 | leftImg8bit/val/lindau/lindau_000039_000019_leftImg8bit.png gtFine/val/lindau/lindau_000039_000019_gtFine_labelIds.png 308 | leftImg8bit/val/lindau/lindau_000040_000019_leftImg8bit.png gtFine/val/lindau/lindau_000040_000019_gtFine_labelIds.png 309 | leftImg8bit/val/lindau/lindau_000041_000019_leftImg8bit.png gtFine/val/lindau/lindau_000041_000019_gtFine_labelIds.png 310 | leftImg8bit/val/lindau/lindau_000042_000019_leftImg8bit.png gtFine/val/lindau/lindau_000042_000019_gtFine_labelIds.png 311 | leftImg8bit/val/lindau/lindau_000043_000019_leftImg8bit.png gtFine/val/lindau/lindau_000043_000019_gtFine_labelIds.png 312 | leftImg8bit/val/lindau/lindau_000044_000019_leftImg8bit.png gtFine/val/lindau/lindau_000044_000019_gtFine_labelIds.png 313 | leftImg8bit/val/lindau/lindau_000045_000019_leftImg8bit.png gtFine/val/lindau/lindau_000045_000019_gtFine_labelIds.png 314 | leftImg8bit/val/lindau/lindau_000046_000019_leftImg8bit.png gtFine/val/lindau/lindau_000046_000019_gtFine_labelIds.png 315 | leftImg8bit/val/lindau/lindau_000047_000019_leftImg8bit.png gtFine/val/lindau/lindau_000047_000019_gtFine_labelIds.png 316 | leftImg8bit/val/lindau/lindau_000048_000019_leftImg8bit.png gtFine/val/lindau/lindau_000048_000019_gtFine_labelIds.png 317 | leftImg8bit/val/lindau/lindau_000049_000019_leftImg8bit.png gtFine/val/lindau/lindau_000049_000019_gtFine_labelIds.png 318 | leftImg8bit/val/lindau/lindau_000050_000019_leftImg8bit.png gtFine/val/lindau/lindau_000050_000019_gtFine_labelIds.png 319 | leftImg8bit/val/lindau/lindau_000051_000019_leftImg8bit.png gtFine/val/lindau/lindau_000051_000019_gtFine_labelIds.png 320 | leftImg8bit/val/lindau/lindau_000052_000019_leftImg8bit.png gtFine/val/lindau/lindau_000052_000019_gtFine_labelIds.png 321 | leftImg8bit/val/lindau/lindau_000053_000019_leftImg8bit.png gtFine/val/lindau/lindau_000053_000019_gtFine_labelIds.png 322 | leftImg8bit/val/lindau/lindau_000054_000019_leftImg8bit.png gtFine/val/lindau/lindau_000054_000019_gtFine_labelIds.png 323 | leftImg8bit/val/lindau/lindau_000055_000019_leftImg8bit.png gtFine/val/lindau/lindau_000055_000019_gtFine_labelIds.png 324 | leftImg8bit/val/lindau/lindau_000056_000019_leftImg8bit.png gtFine/val/lindau/lindau_000056_000019_gtFine_labelIds.png 325 | leftImg8bit/val/lindau/lindau_000057_000019_leftImg8bit.png gtFine/val/lindau/lindau_000057_000019_gtFine_labelIds.png 326 | leftImg8bit/val/lindau/lindau_000058_000019_leftImg8bit.png gtFine/val/lindau/lindau_000058_000019_gtFine_labelIds.png 327 | leftImg8bit/val/munster/munster_000000_000019_leftImg8bit.png gtFine/val/munster/munster_000000_000019_gtFine_labelIds.png 328 | leftImg8bit/val/munster/munster_000001_000019_leftImg8bit.png gtFine/val/munster/munster_000001_000019_gtFine_labelIds.png 329 | leftImg8bit/val/munster/munster_000002_000019_leftImg8bit.png gtFine/val/munster/munster_000002_000019_gtFine_labelIds.png 330 | leftImg8bit/val/munster/munster_000003_000019_leftImg8bit.png gtFine/val/munster/munster_000003_000019_gtFine_labelIds.png 331 | leftImg8bit/val/munster/munster_000004_000019_leftImg8bit.png gtFine/val/munster/munster_000004_000019_gtFine_labelIds.png 332 | leftImg8bit/val/munster/munster_000005_000019_leftImg8bit.png gtFine/val/munster/munster_000005_000019_gtFine_labelIds.png 333 | leftImg8bit/val/munster/munster_000006_000019_leftImg8bit.png gtFine/val/munster/munster_000006_000019_gtFine_labelIds.png 334 | leftImg8bit/val/munster/munster_000007_000019_leftImg8bit.png gtFine/val/munster/munster_000007_000019_gtFine_labelIds.png 335 | leftImg8bit/val/munster/munster_000008_000019_leftImg8bit.png gtFine/val/munster/munster_000008_000019_gtFine_labelIds.png 336 | leftImg8bit/val/munster/munster_000009_000019_leftImg8bit.png gtFine/val/munster/munster_000009_000019_gtFine_labelIds.png 337 | leftImg8bit/val/munster/munster_000010_000019_leftImg8bit.png gtFine/val/munster/munster_000010_000019_gtFine_labelIds.png 338 | leftImg8bit/val/munster/munster_000011_000019_leftImg8bit.png gtFine/val/munster/munster_000011_000019_gtFine_labelIds.png 339 | leftImg8bit/val/munster/munster_000012_000019_leftImg8bit.png gtFine/val/munster/munster_000012_000019_gtFine_labelIds.png 340 | leftImg8bit/val/munster/munster_000013_000019_leftImg8bit.png gtFine/val/munster/munster_000013_000019_gtFine_labelIds.png 341 | leftImg8bit/val/munster/munster_000014_000019_leftImg8bit.png gtFine/val/munster/munster_000014_000019_gtFine_labelIds.png 342 | leftImg8bit/val/munster/munster_000015_000019_leftImg8bit.png gtFine/val/munster/munster_000015_000019_gtFine_labelIds.png 343 | leftImg8bit/val/munster/munster_000016_000019_leftImg8bit.png gtFine/val/munster/munster_000016_000019_gtFine_labelIds.png 344 | leftImg8bit/val/munster/munster_000017_000019_leftImg8bit.png gtFine/val/munster/munster_000017_000019_gtFine_labelIds.png 345 | leftImg8bit/val/munster/munster_000018_000019_leftImg8bit.png gtFine/val/munster/munster_000018_000019_gtFine_labelIds.png 346 | leftImg8bit/val/munster/munster_000019_000019_leftImg8bit.png gtFine/val/munster/munster_000019_000019_gtFine_labelIds.png 347 | leftImg8bit/val/munster/munster_000020_000019_leftImg8bit.png gtFine/val/munster/munster_000020_000019_gtFine_labelIds.png 348 | leftImg8bit/val/munster/munster_000021_000019_leftImg8bit.png gtFine/val/munster/munster_000021_000019_gtFine_labelIds.png 349 | leftImg8bit/val/munster/munster_000022_000019_leftImg8bit.png gtFine/val/munster/munster_000022_000019_gtFine_labelIds.png 350 | leftImg8bit/val/munster/munster_000023_000019_leftImg8bit.png gtFine/val/munster/munster_000023_000019_gtFine_labelIds.png 351 | leftImg8bit/val/munster/munster_000024_000019_leftImg8bit.png gtFine/val/munster/munster_000024_000019_gtFine_labelIds.png 352 | leftImg8bit/val/munster/munster_000025_000019_leftImg8bit.png gtFine/val/munster/munster_000025_000019_gtFine_labelIds.png 353 | leftImg8bit/val/munster/munster_000026_000019_leftImg8bit.png gtFine/val/munster/munster_000026_000019_gtFine_labelIds.png 354 | leftImg8bit/val/munster/munster_000027_000019_leftImg8bit.png gtFine/val/munster/munster_000027_000019_gtFine_labelIds.png 355 | leftImg8bit/val/munster/munster_000028_000019_leftImg8bit.png gtFine/val/munster/munster_000028_000019_gtFine_labelIds.png 356 | leftImg8bit/val/munster/munster_000029_000019_leftImg8bit.png gtFine/val/munster/munster_000029_000019_gtFine_labelIds.png 357 | leftImg8bit/val/munster/munster_000030_000019_leftImg8bit.png gtFine/val/munster/munster_000030_000019_gtFine_labelIds.png 358 | leftImg8bit/val/munster/munster_000031_000019_leftImg8bit.png gtFine/val/munster/munster_000031_000019_gtFine_labelIds.png 359 | leftImg8bit/val/munster/munster_000032_000019_leftImg8bit.png gtFine/val/munster/munster_000032_000019_gtFine_labelIds.png 360 | leftImg8bit/val/munster/munster_000033_000019_leftImg8bit.png gtFine/val/munster/munster_000033_000019_gtFine_labelIds.png 361 | leftImg8bit/val/munster/munster_000034_000019_leftImg8bit.png gtFine/val/munster/munster_000034_000019_gtFine_labelIds.png 362 | leftImg8bit/val/munster/munster_000035_000019_leftImg8bit.png gtFine/val/munster/munster_000035_000019_gtFine_labelIds.png 363 | leftImg8bit/val/munster/munster_000036_000019_leftImg8bit.png gtFine/val/munster/munster_000036_000019_gtFine_labelIds.png 364 | leftImg8bit/val/munster/munster_000037_000019_leftImg8bit.png gtFine/val/munster/munster_000037_000019_gtFine_labelIds.png 365 | leftImg8bit/val/munster/munster_000038_000019_leftImg8bit.png gtFine/val/munster/munster_000038_000019_gtFine_labelIds.png 366 | leftImg8bit/val/munster/munster_000039_000019_leftImg8bit.png gtFine/val/munster/munster_000039_000019_gtFine_labelIds.png 367 | leftImg8bit/val/munster/munster_000040_000019_leftImg8bit.png gtFine/val/munster/munster_000040_000019_gtFine_labelIds.png 368 | leftImg8bit/val/munster/munster_000041_000019_leftImg8bit.png gtFine/val/munster/munster_000041_000019_gtFine_labelIds.png 369 | leftImg8bit/val/munster/munster_000042_000019_leftImg8bit.png gtFine/val/munster/munster_000042_000019_gtFine_labelIds.png 370 | leftImg8bit/val/munster/munster_000043_000019_leftImg8bit.png gtFine/val/munster/munster_000043_000019_gtFine_labelIds.png 371 | leftImg8bit/val/munster/munster_000044_000019_leftImg8bit.png gtFine/val/munster/munster_000044_000019_gtFine_labelIds.png 372 | leftImg8bit/val/munster/munster_000045_000019_leftImg8bit.png gtFine/val/munster/munster_000045_000019_gtFine_labelIds.png 373 | leftImg8bit/val/munster/munster_000046_000019_leftImg8bit.png gtFine/val/munster/munster_000046_000019_gtFine_labelIds.png 374 | leftImg8bit/val/munster/munster_000047_000019_leftImg8bit.png gtFine/val/munster/munster_000047_000019_gtFine_labelIds.png 375 | leftImg8bit/val/munster/munster_000048_000019_leftImg8bit.png gtFine/val/munster/munster_000048_000019_gtFine_labelIds.png 376 | leftImg8bit/val/munster/munster_000049_000019_leftImg8bit.png gtFine/val/munster/munster_000049_000019_gtFine_labelIds.png 377 | leftImg8bit/val/munster/munster_000050_000019_leftImg8bit.png gtFine/val/munster/munster_000050_000019_gtFine_labelIds.png 378 | leftImg8bit/val/munster/munster_000051_000019_leftImg8bit.png gtFine/val/munster/munster_000051_000019_gtFine_labelIds.png 379 | leftImg8bit/val/munster/munster_000052_000019_leftImg8bit.png gtFine/val/munster/munster_000052_000019_gtFine_labelIds.png 380 | leftImg8bit/val/munster/munster_000053_000019_leftImg8bit.png gtFine/val/munster/munster_000053_000019_gtFine_labelIds.png 381 | leftImg8bit/val/munster/munster_000054_000019_leftImg8bit.png gtFine/val/munster/munster_000054_000019_gtFine_labelIds.png 382 | leftImg8bit/val/munster/munster_000055_000019_leftImg8bit.png gtFine/val/munster/munster_000055_000019_gtFine_labelIds.png 383 | leftImg8bit/val/munster/munster_000056_000019_leftImg8bit.png gtFine/val/munster/munster_000056_000019_gtFine_labelIds.png 384 | leftImg8bit/val/munster/munster_000057_000019_leftImg8bit.png gtFine/val/munster/munster_000057_000019_gtFine_labelIds.png 385 | leftImg8bit/val/munster/munster_000058_000019_leftImg8bit.png gtFine/val/munster/munster_000058_000019_gtFine_labelIds.png 386 | leftImg8bit/val/munster/munster_000059_000019_leftImg8bit.png gtFine/val/munster/munster_000059_000019_gtFine_labelIds.png 387 | leftImg8bit/val/munster/munster_000060_000019_leftImg8bit.png gtFine/val/munster/munster_000060_000019_gtFine_labelIds.png 388 | leftImg8bit/val/munster/munster_000061_000019_leftImg8bit.png gtFine/val/munster/munster_000061_000019_gtFine_labelIds.png 389 | leftImg8bit/val/munster/munster_000062_000019_leftImg8bit.png gtFine/val/munster/munster_000062_000019_gtFine_labelIds.png 390 | leftImg8bit/val/munster/munster_000063_000019_leftImg8bit.png gtFine/val/munster/munster_000063_000019_gtFine_labelIds.png 391 | leftImg8bit/val/munster/munster_000064_000019_leftImg8bit.png gtFine/val/munster/munster_000064_000019_gtFine_labelIds.png 392 | leftImg8bit/val/munster/munster_000065_000019_leftImg8bit.png gtFine/val/munster/munster_000065_000019_gtFine_labelIds.png 393 | leftImg8bit/val/munster/munster_000066_000019_leftImg8bit.png gtFine/val/munster/munster_000066_000019_gtFine_labelIds.png 394 | leftImg8bit/val/munster/munster_000067_000019_leftImg8bit.png gtFine/val/munster/munster_000067_000019_gtFine_labelIds.png 395 | leftImg8bit/val/munster/munster_000068_000019_leftImg8bit.png gtFine/val/munster/munster_000068_000019_gtFine_labelIds.png 396 | leftImg8bit/val/munster/munster_000069_000019_leftImg8bit.png gtFine/val/munster/munster_000069_000019_gtFine_labelIds.png 397 | leftImg8bit/val/munster/munster_000070_000019_leftImg8bit.png gtFine/val/munster/munster_000070_000019_gtFine_labelIds.png 398 | leftImg8bit/val/munster/munster_000071_000019_leftImg8bit.png gtFine/val/munster/munster_000071_000019_gtFine_labelIds.png 399 | leftImg8bit/val/munster/munster_000072_000019_leftImg8bit.png gtFine/val/munster/munster_000072_000019_gtFine_labelIds.png 400 | leftImg8bit/val/munster/munster_000073_000019_leftImg8bit.png gtFine/val/munster/munster_000073_000019_gtFine_labelIds.png 401 | leftImg8bit/val/munster/munster_000074_000019_leftImg8bit.png gtFine/val/munster/munster_000074_000019_gtFine_labelIds.png 402 | leftImg8bit/val/munster/munster_000075_000019_leftImg8bit.png gtFine/val/munster/munster_000075_000019_gtFine_labelIds.png 403 | leftImg8bit/val/munster/munster_000076_000019_leftImg8bit.png gtFine/val/munster/munster_000076_000019_gtFine_labelIds.png 404 | leftImg8bit/val/munster/munster_000077_000019_leftImg8bit.png gtFine/val/munster/munster_000077_000019_gtFine_labelIds.png 405 | leftImg8bit/val/munster/munster_000078_000019_leftImg8bit.png gtFine/val/munster/munster_000078_000019_gtFine_labelIds.png 406 | leftImg8bit/val/munster/munster_000079_000019_leftImg8bit.png gtFine/val/munster/munster_000079_000019_gtFine_labelIds.png 407 | leftImg8bit/val/munster/munster_000080_000019_leftImg8bit.png gtFine/val/munster/munster_000080_000019_gtFine_labelIds.png 408 | leftImg8bit/val/munster/munster_000081_000019_leftImg8bit.png gtFine/val/munster/munster_000081_000019_gtFine_labelIds.png 409 | leftImg8bit/val/munster/munster_000082_000019_leftImg8bit.png gtFine/val/munster/munster_000082_000019_gtFine_labelIds.png 410 | leftImg8bit/val/munster/munster_000083_000019_leftImg8bit.png gtFine/val/munster/munster_000083_000019_gtFine_labelIds.png 411 | leftImg8bit/val/munster/munster_000084_000019_leftImg8bit.png gtFine/val/munster/munster_000084_000019_gtFine_labelIds.png 412 | leftImg8bit/val/munster/munster_000085_000019_leftImg8bit.png gtFine/val/munster/munster_000085_000019_gtFine_labelIds.png 413 | leftImg8bit/val/munster/munster_000086_000019_leftImg8bit.png gtFine/val/munster/munster_000086_000019_gtFine_labelIds.png 414 | leftImg8bit/val/munster/munster_000087_000019_leftImg8bit.png gtFine/val/munster/munster_000087_000019_gtFine_labelIds.png 415 | leftImg8bit/val/munster/munster_000088_000019_leftImg8bit.png gtFine/val/munster/munster_000088_000019_gtFine_labelIds.png 416 | leftImg8bit/val/munster/munster_000089_000019_leftImg8bit.png gtFine/val/munster/munster_000089_000019_gtFine_labelIds.png 417 | leftImg8bit/val/munster/munster_000090_000019_leftImg8bit.png gtFine/val/munster/munster_000090_000019_gtFine_labelIds.png 418 | leftImg8bit/val/munster/munster_000091_000019_leftImg8bit.png gtFine/val/munster/munster_000091_000019_gtFine_labelIds.png 419 | leftImg8bit/val/munster/munster_000092_000019_leftImg8bit.png gtFine/val/munster/munster_000092_000019_gtFine_labelIds.png 420 | leftImg8bit/val/munster/munster_000093_000019_leftImg8bit.png gtFine/val/munster/munster_000093_000019_gtFine_labelIds.png 421 | leftImg8bit/val/munster/munster_000094_000019_leftImg8bit.png gtFine/val/munster/munster_000094_000019_gtFine_labelIds.png 422 | leftImg8bit/val/munster/munster_000095_000019_leftImg8bit.png gtFine/val/munster/munster_000095_000019_gtFine_labelIds.png 423 | leftImg8bit/val/munster/munster_000096_000019_leftImg8bit.png gtFine/val/munster/munster_000096_000019_gtFine_labelIds.png 424 | leftImg8bit/val/munster/munster_000097_000019_leftImg8bit.png gtFine/val/munster/munster_000097_000019_gtFine_labelIds.png 425 | leftImg8bit/val/munster/munster_000098_000019_leftImg8bit.png gtFine/val/munster/munster_000098_000019_gtFine_labelIds.png 426 | leftImg8bit/val/munster/munster_000099_000019_leftImg8bit.png gtFine/val/munster/munster_000099_000019_gtFine_labelIds.png 427 | leftImg8bit/val/munster/munster_000100_000019_leftImg8bit.png gtFine/val/munster/munster_000100_000019_gtFine_labelIds.png 428 | leftImg8bit/val/munster/munster_000101_000019_leftImg8bit.png gtFine/val/munster/munster_000101_000019_gtFine_labelIds.png 429 | leftImg8bit/val/munster/munster_000102_000019_leftImg8bit.png gtFine/val/munster/munster_000102_000019_gtFine_labelIds.png 430 | leftImg8bit/val/munster/munster_000103_000019_leftImg8bit.png gtFine/val/munster/munster_000103_000019_gtFine_labelIds.png 431 | leftImg8bit/val/munster/munster_000104_000019_leftImg8bit.png gtFine/val/munster/munster_000104_000019_gtFine_labelIds.png 432 | leftImg8bit/val/munster/munster_000105_000019_leftImg8bit.png gtFine/val/munster/munster_000105_000019_gtFine_labelIds.png 433 | leftImg8bit/val/munster/munster_000106_000019_leftImg8bit.png gtFine/val/munster/munster_000106_000019_gtFine_labelIds.png 434 | leftImg8bit/val/munster/munster_000107_000019_leftImg8bit.png gtFine/val/munster/munster_000107_000019_gtFine_labelIds.png 435 | leftImg8bit/val/munster/munster_000108_000019_leftImg8bit.png gtFine/val/munster/munster_000108_000019_gtFine_labelIds.png 436 | leftImg8bit/val/munster/munster_000109_000019_leftImg8bit.png gtFine/val/munster/munster_000109_000019_gtFine_labelIds.png 437 | leftImg8bit/val/munster/munster_000110_000019_leftImg8bit.png gtFine/val/munster/munster_000110_000019_gtFine_labelIds.png 438 | leftImg8bit/val/munster/munster_000111_000019_leftImg8bit.png gtFine/val/munster/munster_000111_000019_gtFine_labelIds.png 439 | leftImg8bit/val/munster/munster_000112_000019_leftImg8bit.png gtFine/val/munster/munster_000112_000019_gtFine_labelIds.png 440 | leftImg8bit/val/munster/munster_000113_000019_leftImg8bit.png gtFine/val/munster/munster_000113_000019_gtFine_labelIds.png 441 | leftImg8bit/val/munster/munster_000114_000019_leftImg8bit.png gtFine/val/munster/munster_000114_000019_gtFine_labelIds.png 442 | leftImg8bit/val/munster/munster_000115_000019_leftImg8bit.png gtFine/val/munster/munster_000115_000019_gtFine_labelIds.png 443 | leftImg8bit/val/munster/munster_000116_000019_leftImg8bit.png gtFine/val/munster/munster_000116_000019_gtFine_labelIds.png 444 | leftImg8bit/val/munster/munster_000117_000019_leftImg8bit.png gtFine/val/munster/munster_000117_000019_gtFine_labelIds.png 445 | leftImg8bit/val/munster/munster_000118_000019_leftImg8bit.png gtFine/val/munster/munster_000118_000019_gtFine_labelIds.png 446 | leftImg8bit/val/munster/munster_000119_000019_leftImg8bit.png gtFine/val/munster/munster_000119_000019_gtFine_labelIds.png 447 | leftImg8bit/val/munster/munster_000120_000019_leftImg8bit.png gtFine/val/munster/munster_000120_000019_gtFine_labelIds.png 448 | leftImg8bit/val/munster/munster_000121_000019_leftImg8bit.png gtFine/val/munster/munster_000121_000019_gtFine_labelIds.png 449 | leftImg8bit/val/munster/munster_000122_000019_leftImg8bit.png gtFine/val/munster/munster_000122_000019_gtFine_labelIds.png 450 | leftImg8bit/val/munster/munster_000123_000019_leftImg8bit.png gtFine/val/munster/munster_000123_000019_gtFine_labelIds.png 451 | leftImg8bit/val/munster/munster_000124_000019_leftImg8bit.png gtFine/val/munster/munster_000124_000019_gtFine_labelIds.png 452 | leftImg8bit/val/munster/munster_000125_000019_leftImg8bit.png gtFine/val/munster/munster_000125_000019_gtFine_labelIds.png 453 | leftImg8bit/val/munster/munster_000126_000019_leftImg8bit.png gtFine/val/munster/munster_000126_000019_gtFine_labelIds.png 454 | leftImg8bit/val/munster/munster_000127_000019_leftImg8bit.png gtFine/val/munster/munster_000127_000019_gtFine_labelIds.png 455 | leftImg8bit/val/munster/munster_000128_000019_leftImg8bit.png gtFine/val/munster/munster_000128_000019_gtFine_labelIds.png 456 | leftImg8bit/val/munster/munster_000129_000019_leftImg8bit.png gtFine/val/munster/munster_000129_000019_gtFine_labelIds.png 457 | leftImg8bit/val/munster/munster_000130_000019_leftImg8bit.png gtFine/val/munster/munster_000130_000019_gtFine_labelIds.png 458 | leftImg8bit/val/munster/munster_000131_000019_leftImg8bit.png gtFine/val/munster/munster_000131_000019_gtFine_labelIds.png 459 | leftImg8bit/val/munster/munster_000132_000019_leftImg8bit.png gtFine/val/munster/munster_000132_000019_gtFine_labelIds.png 460 | leftImg8bit/val/munster/munster_000133_000019_leftImg8bit.png gtFine/val/munster/munster_000133_000019_gtFine_labelIds.png 461 | leftImg8bit/val/munster/munster_000134_000019_leftImg8bit.png gtFine/val/munster/munster_000134_000019_gtFine_labelIds.png 462 | leftImg8bit/val/munster/munster_000135_000019_leftImg8bit.png gtFine/val/munster/munster_000135_000019_gtFine_labelIds.png 463 | leftImg8bit/val/munster/munster_000136_000019_leftImg8bit.png gtFine/val/munster/munster_000136_000019_gtFine_labelIds.png 464 | leftImg8bit/val/munster/munster_000137_000019_leftImg8bit.png gtFine/val/munster/munster_000137_000019_gtFine_labelIds.png 465 | leftImg8bit/val/munster/munster_000138_000019_leftImg8bit.png gtFine/val/munster/munster_000138_000019_gtFine_labelIds.png 466 | leftImg8bit/val/munster/munster_000139_000019_leftImg8bit.png gtFine/val/munster/munster_000139_000019_gtFine_labelIds.png 467 | leftImg8bit/val/munster/munster_000140_000019_leftImg8bit.png gtFine/val/munster/munster_000140_000019_gtFine_labelIds.png 468 | leftImg8bit/val/munster/munster_000141_000019_leftImg8bit.png gtFine/val/munster/munster_000141_000019_gtFine_labelIds.png 469 | leftImg8bit/val/munster/munster_000142_000019_leftImg8bit.png gtFine/val/munster/munster_000142_000019_gtFine_labelIds.png 470 | leftImg8bit/val/munster/munster_000143_000019_leftImg8bit.png gtFine/val/munster/munster_000143_000019_gtFine_labelIds.png 471 | leftImg8bit/val/munster/munster_000144_000019_leftImg8bit.png gtFine/val/munster/munster_000144_000019_gtFine_labelIds.png 472 | leftImg8bit/val/munster/munster_000145_000019_leftImg8bit.png gtFine/val/munster/munster_000145_000019_gtFine_labelIds.png 473 | leftImg8bit/val/munster/munster_000146_000019_leftImg8bit.png gtFine/val/munster/munster_000146_000019_gtFine_labelIds.png 474 | leftImg8bit/val/munster/munster_000147_000019_leftImg8bit.png gtFine/val/munster/munster_000147_000019_gtFine_labelIds.png 475 | leftImg8bit/val/munster/munster_000148_000019_leftImg8bit.png gtFine/val/munster/munster_000148_000019_gtFine_labelIds.png 476 | leftImg8bit/val/munster/munster_000149_000019_leftImg8bit.png gtFine/val/munster/munster_000149_000019_gtFine_labelIds.png 477 | leftImg8bit/val/munster/munster_000150_000019_leftImg8bit.png gtFine/val/munster/munster_000150_000019_gtFine_labelIds.png 478 | leftImg8bit/val/munster/munster_000151_000019_leftImg8bit.png gtFine/val/munster/munster_000151_000019_gtFine_labelIds.png 479 | leftImg8bit/val/munster/munster_000152_000019_leftImg8bit.png gtFine/val/munster/munster_000152_000019_gtFine_labelIds.png 480 | leftImg8bit/val/munster/munster_000153_000019_leftImg8bit.png gtFine/val/munster/munster_000153_000019_gtFine_labelIds.png 481 | leftImg8bit/val/munster/munster_000154_000019_leftImg8bit.png gtFine/val/munster/munster_000154_000019_gtFine_labelIds.png 482 | leftImg8bit/val/munster/munster_000155_000019_leftImg8bit.png gtFine/val/munster/munster_000155_000019_gtFine_labelIds.png 483 | leftImg8bit/val/munster/munster_000156_000019_leftImg8bit.png gtFine/val/munster/munster_000156_000019_gtFine_labelIds.png 484 | leftImg8bit/val/munster/munster_000157_000019_leftImg8bit.png gtFine/val/munster/munster_000157_000019_gtFine_labelIds.png 485 | leftImg8bit/val/munster/munster_000158_000019_leftImg8bit.png gtFine/val/munster/munster_000158_000019_gtFine_labelIds.png 486 | leftImg8bit/val/munster/munster_000159_000019_leftImg8bit.png gtFine/val/munster/munster_000159_000019_gtFine_labelIds.png 487 | leftImg8bit/val/munster/munster_000160_000019_leftImg8bit.png gtFine/val/munster/munster_000160_000019_gtFine_labelIds.png 488 | leftImg8bit/val/munster/munster_000161_000019_leftImg8bit.png gtFine/val/munster/munster_000161_000019_gtFine_labelIds.png 489 | leftImg8bit/val/munster/munster_000162_000019_leftImg8bit.png gtFine/val/munster/munster_000162_000019_gtFine_labelIds.png 490 | leftImg8bit/val/munster/munster_000163_000019_leftImg8bit.png gtFine/val/munster/munster_000163_000019_gtFine_labelIds.png 491 | leftImg8bit/val/munster/munster_000164_000019_leftImg8bit.png gtFine/val/munster/munster_000164_000019_gtFine_labelIds.png 492 | leftImg8bit/val/munster/munster_000165_000019_leftImg8bit.png gtFine/val/munster/munster_000165_000019_gtFine_labelIds.png 493 | leftImg8bit/val/munster/munster_000166_000019_leftImg8bit.png gtFine/val/munster/munster_000166_000019_gtFine_labelIds.png 494 | leftImg8bit/val/munster/munster_000167_000019_leftImg8bit.png gtFine/val/munster/munster_000167_000019_gtFine_labelIds.png 495 | leftImg8bit/val/munster/munster_000168_000019_leftImg8bit.png gtFine/val/munster/munster_000168_000019_gtFine_labelIds.png 496 | leftImg8bit/val/munster/munster_000169_000019_leftImg8bit.png gtFine/val/munster/munster_000169_000019_gtFine_labelIds.png 497 | leftImg8bit/val/munster/munster_000170_000019_leftImg8bit.png gtFine/val/munster/munster_000170_000019_gtFine_labelIds.png 498 | leftImg8bit/val/munster/munster_000171_000019_leftImg8bit.png gtFine/val/munster/munster_000171_000019_gtFine_labelIds.png 499 | leftImg8bit/val/munster/munster_000172_000019_leftImg8bit.png gtFine/val/munster/munster_000172_000019_gtFine_labelIds.png 500 | leftImg8bit/val/munster/munster_000173_000019_leftImg8bit.png gtFine/val/munster/munster_000173_000019_gtFine_labelIds.png 501 | --------------------------------------------------------------------------------