├── ENet-segnet └── README.md ├── Enet_lapa_camvid_pytorch ├── datasets │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-35.pyc │ │ ├── __init__.cpython-37.pyc │ │ ├── camvid_loader.cpython-35.pyc │ │ ├── data_io.cpython-35.pyc │ │ ├── data_io.cpython-37.pyc │ │ ├── lapa_loader.cpython-35.pyc │ │ ├── npynpynpy_dataset.cpython-35.pyc │ │ ├── npynpynpy_dataset.cpython-37.pyc │ │ ├── npynpynpy_train.cpython-35.pyc │ │ ├── npynpynpy_train.cpython-37.pyc │ │ ├── npypngnpy_dataset.cpython-35.pyc │ │ ├── npypngnpy_dataset.cpython-37.pyc │ │ ├── pngpng_test.cpython-35.pyc │ │ ├── pngpng_test.cpython-37.pyc │ │ ├── pngpng_test_cv2.cpython-35.pyc │ │ ├── pngpng_test_cv2.cpython-37.pyc │ │ ├── pngpng_test_cv2_remap.cpython-35.pyc │ │ ├── pngpng_test_cv2_remap.cpython-37.pyc │ │ ├── pngpng_train.cpython-35.pyc │ │ ├── pngpngnpy_dataset.cpython-35.pyc │ │ ├── pngpngnpy_dataset.cpython-37.pyc │ │ ├── pngpngnpy_train.cpython-35.pyc │ │ ├── pngpngnpy_train.cpython-37.pyc │ │ ├── pngpngnpypng_train_1080.cpython-35.pyc │ │ ├── pngpngnpypng_train_1080.cpython-37.pyc │ │ ├── pngpngnpypng_train_1080_7.cpython-35.pyc │ │ ├── pngpngnpypng_train_1080_7.cpython-37.pyc │ │ ├── pngpngnpypng_train_600w.cpython-35.pyc │ │ ├── pngpngnpypng_train_600w.cpython-37.pyc │ │ ├── pngpngpngpng_train.cpython-35.pyc │ │ └── pngpngpngpng_train.cpython-37.pyc │ ├── camvid_loader.py │ ├── data_io.py │ ├── lapa_loader.py │ └── sample.png ├── main_test.py ├── main_train.py ├── models │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-35.pyc │ │ └── enet_ori.cpython-35.pyc │ └── enet_ori.py ├── test │ ├── camvid_test_model_297_8000 │ │ ├── checkpoint_297_0008000.tar │ │ ├── result_297_8000_down2 │ │ │ ├── 0001TP_008580_gt_color.png │ │ │ ├── 0001TP_008580_pre_color.png │ │ │ ├── 0001TP_008640_gt_color.png │ │ │ ├── 0001TP_008640_pre_color.png │ │ │ ├── 0001TP_008700_gt_color.png │ │ │ ├── 0001TP_008700_pre_color.png │ │ │ ├── 0001TP_009900_gt_color.png │ │ │ ├── 
0001TP_009900_pre_color.png │ │ │ ├── 0001TP_009930_gt_color.png │ │ │ ├── 0001TP_009930_pre_color.png │ │ │ ├── 0001TP_009990_gt_color.png │ │ │ ├── 0001TP_009990_pre_color.png │ │ │ ├── 0001TP_010020_gt_color.png │ │ │ ├── 0001TP_010020_pre_color.png │ │ │ ├── 0001TP_010110_gt_color.png │ │ │ ├── 0001TP_010110_pre_color.png │ │ │ ├── 0001TP_010170_gt_color.png │ │ │ ├── 0001TP_010170_pre_color.png │ │ │ ├── 0001TP_010320_gt_color.png │ │ │ ├── 0001TP_010320_pre_color.png │ │ │ ├── 0001TP_010380_gt_color.png │ │ │ ├── 0001TP_010380_pre_color.png │ │ │ ├── Seq05VD_f00000_gt_color.png │ │ │ ├── Seq05VD_f00000_pre_color.png │ │ │ ├── Seq05VD_f00150_gt_color.png │ │ │ ├── Seq05VD_f00150_pre_color.png │ │ │ ├── Seq05VD_f00240_gt_color.png │ │ │ ├── Seq05VD_f00240_pre_color.png │ │ │ ├── Seq05VD_f00330_gt_color.png │ │ │ ├── Seq05VD_f00330_pre_color.png │ │ │ ├── Seq05VD_f00630_gt_color.png │ │ │ ├── Seq05VD_f00630_pre_color.png │ │ │ ├── Seq05VD_f00810_gt_color.png │ │ │ ├── Seq05VD_f00810_pre_color.png │ │ │ ├── Seq05VD_f00960_gt_color.png │ │ │ ├── Seq05VD_f00960_pre_color.png │ │ │ ├── Seq05VD_f01020_gt_color.png │ │ │ ├── Seq05VD_f01020_pre_color.png │ │ │ ├── Seq05VD_f01080_gt_color.png │ │ │ ├── Seq05VD_f01080_pre_color.png │ │ │ ├── Seq05VD_f01200_gt_color.png │ │ │ ├── Seq05VD_f01200_pre_color.png │ │ │ ├── Seq05VD_f01350_gt_color.png │ │ │ ├── Seq05VD_f01350_pre_color.png │ │ │ ├── Seq05VD_f01440_gt_color.png │ │ │ ├── Seq05VD_f01440_pre_color.png │ │ │ ├── Seq05VD_f01770_gt_color.png │ │ │ ├── Seq05VD_f01770_pre_color.png │ │ │ ├── Seq05VD_f01980_gt_color.png │ │ │ ├── Seq05VD_f01980_pre_color.png │ │ │ ├── Seq05VD_f02220_gt_color.png │ │ │ ├── Seq05VD_f02220_pre_color.png │ │ │ ├── Seq05VD_f02250_gt_color.png │ │ │ ├── Seq05VD_f02250_pre_color.png │ │ │ ├── Seq05VD_f02430_gt_color.png │ │ │ ├── Seq05VD_f02430_pre_color.png │ │ │ ├── Seq05VD_f02910_gt_color.png │ │ │ ├── Seq05VD_f02910_pre_color.png │ │ │ ├── Seq05VD_f03030_gt_color.png │ │ │ ├── 
Seq05VD_f03030_pre_color.png │ │ │ ├── Seq05VD_f03120_gt_color.png │ │ │ ├── Seq05VD_f03120_pre_color.png │ │ │ ├── Seq05VD_f03180_gt_color.png │ │ │ ├── Seq05VD_f03180_pre_color.png │ │ │ ├── Seq05VD_f03210_gt_color.png │ │ │ ├── Seq05VD_f03210_pre_color.png │ │ │ ├── Seq05VD_f03240_gt_color.png │ │ │ ├── Seq05VD_f03240_pre_color.png │ │ │ ├── Seq05VD_f03300_gt_color.png │ │ │ ├── Seq05VD_f03300_pre_color.png │ │ │ ├── Seq05VD_f03390_gt_color.png │ │ │ ├── Seq05VD_f03390_pre_color.png │ │ │ ├── Seq05VD_f03450_gt_color.png │ │ │ ├── Seq05VD_f03450_pre_color.png │ │ │ ├── Seq05VD_f03570_gt_color.png │ │ │ ├── Seq05VD_f03570_pre_color.png │ │ │ ├── Seq05VD_f03660_gt_color.png │ │ │ ├── Seq05VD_f03660_pre_color.png │ │ │ ├── Seq05VD_f03750_gt_color.png │ │ │ ├── Seq05VD_f03750_pre_color.png │ │ │ ├── Seq05VD_f03990_gt_color.png │ │ │ ├── Seq05VD_f03990_pre_color.png │ │ │ ├── Seq05VD_f04110_gt_color.png │ │ │ ├── Seq05VD_f04110_pre_color.png │ │ │ ├── Seq05VD_f04320_gt_color.png │ │ │ ├── Seq05VD_f04320_pre_color.png │ │ │ ├── Seq05VD_f04380_gt_color.png │ │ │ ├── Seq05VD_f04380_pre_color.png │ │ │ ├── Seq05VD_f04590_gt_color.png │ │ │ ├── Seq05VD_f04590_pre_color.png │ │ │ ├── Seq05VD_f04620_gt_color.png │ │ │ ├── Seq05VD_f04620_pre_color.png │ │ │ ├── Seq05VD_f04770_gt_color.png │ │ │ ├── Seq05VD_f04770_pre_color.png │ │ │ ├── Seq05VD_f04890_gt_color.png │ │ │ ├── Seq05VD_f04890_pre_color.png │ │ │ ├── Seq05VD_f05040_gt_color.png │ │ │ ├── Seq05VD_f05040_pre_color.png │ │ │ ├── Seq05VD_f05100_gt_color.png │ │ │ └── Seq05VD_f05100_pre_color.png │ │ └── result_297_8000_randomcrop │ │ │ ├── 0001TP_008580_gt_color.png │ │ │ ├── 0001TP_008580_pre_color.png │ │ │ ├── 0001TP_008640_gt_color.png │ │ │ ├── 0001TP_008640_pre_color.png │ │ │ ├── 0001TP_008700_gt_color.png │ │ │ ├── 0001TP_008700_pre_color.png │ │ │ ├── 0001TP_009900_gt_color.png │ │ │ ├── 0001TP_009900_pre_color.png │ │ │ ├── 0001TP_009930_gt_color.png │ │ │ ├── 0001TP_009930_pre_color.png │ │ │ ├── 
0001TP_009990_gt_color.png │ │ │ ├── 0001TP_009990_pre_color.png │ │ │ ├── 0001TP_010020_gt_color.png │ │ │ ├── 0001TP_010020_pre_color.png │ │ │ ├── 0001TP_010110_gt_color.png │ │ │ ├── 0001TP_010110_pre_color.png │ │ │ ├── 0001TP_010170_gt_color.png │ │ │ ├── 0001TP_010170_pre_color.png │ │ │ ├── 0001TP_010320_gt_color.png │ │ │ ├── 0001TP_010320_pre_color.png │ │ │ ├── 0001TP_010380_gt_color.png │ │ │ ├── 0001TP_010380_pre_color.png │ │ │ ├── Seq05VD_f00000_gt_color.png │ │ │ ├── Seq05VD_f00000_pre_color.png │ │ │ ├── Seq05VD_f00150_gt_color.png │ │ │ ├── Seq05VD_f00150_pre_color.png │ │ │ ├── Seq05VD_f00240_gt_color.png │ │ │ ├── Seq05VD_f00240_pre_color.png │ │ │ ├── Seq05VD_f00330_gt_color.png │ │ │ ├── Seq05VD_f00330_pre_color.png │ │ │ ├── Seq05VD_f00630_gt_color.png │ │ │ ├── Seq05VD_f00630_pre_color.png │ │ │ ├── Seq05VD_f00810_gt_color.png │ │ │ ├── Seq05VD_f00810_pre_color.png │ │ │ ├── Seq05VD_f00960_gt_color.png │ │ │ ├── Seq05VD_f00960_pre_color.png │ │ │ ├── Seq05VD_f01020_gt_color.png │ │ │ ├── Seq05VD_f01020_pre_color.png │ │ │ ├── Seq05VD_f01080_gt_color.png │ │ │ ├── Seq05VD_f01080_pre_color.png │ │ │ ├── Seq05VD_f01200_gt_color.png │ │ │ ├── Seq05VD_f01200_pre_color.png │ │ │ ├── Seq05VD_f01350_gt_color.png │ │ │ ├── Seq05VD_f01350_pre_color.png │ │ │ ├── Seq05VD_f01440_gt_color.png │ │ │ ├── Seq05VD_f01440_pre_color.png │ │ │ ├── Seq05VD_f01770_gt_color.png │ │ │ ├── Seq05VD_f01770_pre_color.png │ │ │ ├── Seq05VD_f01980_gt_color.png │ │ │ ├── Seq05VD_f01980_pre_color.png │ │ │ ├── Seq05VD_f02220_gt_color.png │ │ │ ├── Seq05VD_f02220_pre_color.png │ │ │ ├── Seq05VD_f02250_gt_color.png │ │ │ ├── Seq05VD_f02250_pre_color.png │ │ │ ├── Seq05VD_f02430_gt_color.png │ │ │ ├── Seq05VD_f02430_pre_color.png │ │ │ ├── Seq05VD_f02910_gt_color.png │ │ │ ├── Seq05VD_f02910_pre_color.png │ │ │ ├── Seq05VD_f03030_gt_color.png │ │ │ ├── Seq05VD_f03030_pre_color.png │ │ │ ├── Seq05VD_f03120_gt_color.png │ │ │ ├── Seq05VD_f03120_pre_color.png │ │ │ ├── 
Seq05VD_f03180_gt_color.png │ │ │ ├── Seq05VD_f03180_pre_color.png │ │ │ ├── Seq05VD_f03210_gt_color.png │ │ │ ├── Seq05VD_f03210_pre_color.png │ │ │ ├── Seq05VD_f03240_gt_color.png │ │ │ ├── Seq05VD_f03240_pre_color.png │ │ │ ├── Seq05VD_f03300_gt_color.png │ │ │ ├── Seq05VD_f03300_pre_color.png │ │ │ ├── Seq05VD_f03390_gt_color.png │ │ │ ├── Seq05VD_f03390_pre_color.png │ │ │ ├── Seq05VD_f03450_gt_color.png │ │ │ ├── Seq05VD_f03450_pre_color.png │ │ │ ├── Seq05VD_f03570_gt_color.png │ │ │ ├── Seq05VD_f03570_pre_color.png │ │ │ ├── Seq05VD_f03660_gt_color.png │ │ │ ├── Seq05VD_f03660_pre_color.png │ │ │ ├── Seq05VD_f03750_gt_color.png │ │ │ ├── Seq05VD_f03750_pre_color.png │ │ │ ├── Seq05VD_f03990_gt_color.png │ │ │ ├── Seq05VD_f03990_pre_color.png │ │ │ ├── Seq05VD_f04110_gt_color.png │ │ │ ├── Seq05VD_f04110_pre_color.png │ │ │ ├── Seq05VD_f04320_gt_color.png │ │ │ ├── Seq05VD_f04320_pre_color.png │ │ │ ├── Seq05VD_f04380_gt_color.png │ │ │ ├── Seq05VD_f04380_pre_color.png │ │ │ ├── Seq05VD_f04590_gt_color.png │ │ │ ├── Seq05VD_f04590_pre_color.png │ │ │ ├── Seq05VD_f04620_gt_color.png │ │ │ ├── Seq05VD_f04620_pre_color.png │ │ │ ├── Seq05VD_f04770_gt_color.png │ │ │ ├── Seq05VD_f04770_pre_color.png │ │ │ ├── Seq05VD_f04890_gt_color.png │ │ │ ├── Seq05VD_f04890_pre_color.png │ │ │ ├── Seq05VD_f05040_gt_color.png │ │ │ ├── Seq05VD_f05040_pre_color.png │ │ │ ├── Seq05VD_f05100_gt_color.png │ │ │ └── Seq05VD_f05100_pre_color.png │ └── lapa_test_model_23_51000 │ │ ├── checkpoint_23_0051000.tar │ │ └── result │ │ ├── 10009865324_0_gt_color.png │ │ ├── 10009865324_0_pre_color.png │ │ ├── 10012551673_5_gt_color.png │ │ ├── 10012551673_5_pre_color.png │ │ ├── 10012551673_8_gt_color.png │ │ ├── 10012551673_8_pre_color.png │ │ ├── 10014368575_1_gt_color.png │ │ ├── 10014368575_1_pre_color.png │ │ ├── 10014990385_0_gt_color.png │ │ ├── 10014990385_0_pre_color.png │ │ ├── 10020518914_0_gt_color.png │ │ ├── 10020518914_0_pre_color.png │ │ ├── 10023065833_0_gt_color.png │ │ 
├── 10023065833_0_pre_color.png │ │ ├── 10023946615_0_gt_color.png │ │ ├── 10023946615_0_pre_color.png │ │ ├── 10036490423_1_gt_color.png │ │ ├── 10036490423_1_pre_color.png │ │ ├── 10036529425_0_gt_color.png │ │ ├── 10036529425_0_pre_color.png │ │ ├── 10037648863_0_gt_color.png │ │ ├── 10037648863_0_pre_color.png │ │ ├── 10041043896_2_gt_color.png │ │ ├── 10041043896_2_pre_color.png │ │ ├── 10044526923_0_gt_color.png │ │ ├── 10044526923_0_pre_color.png │ │ ├── 10044601603_0_gt_color.png │ │ ├── 10044601603_0_pre_color.png │ │ ├── 10050349593_2_gt_color.png │ │ ├── 10050349593_2_pre_color.png │ │ ├── 10053066166_1_gt_color.png │ │ ├── 10053066166_1_pre_color.png │ │ ├── 10056694475_15_gt_color.png │ │ ├── 10056694475_15_pre_color.png │ │ ├── 10056694475_7_gt_color.png │ │ ├── 10056694475_7_pre_color.png │ │ ├── 1007017481_0_gt_color.png │ │ ├── 1007017481_0_pre_color.png │ │ ├── 10071381506_3_gt_color.png │ │ ├── 10071381506_3_pre_color.png │ │ ├── 10072705803_0_gt_color.png │ │ └── 10072705803_0_pre_color.png ├── test_camvid_enet.sh ├── test_lapa_enet.sh ├── train_camvid_enet.sh ├── train_camvid_enet_resume.sh ├── train_lapa_enet.sh ├── train_lapa_enet_resume.sh └── utils │ ├── __init__.py │ ├── __pycache__ │ ├── __init__.cpython-35.pyc │ ├── color_map.cpython-35.pyc │ ├── experiment.cpython-35.pyc │ ├── metrics.cpython-35.pyc │ └── visualization.cpython-35.pyc │ ├── color_map.py │ ├── experiment.py │ ├── metrics.py │ └── visualization.py ├── Enet_lapa_softKD_pytorch ├── datasets │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-35.pyc │ │ ├── __init__.cpython-37.pyc │ │ ├── data_io.cpython-35.pyc │ │ ├── data_io.cpython-37.pyc │ │ ├── lapa_loader.cpython-35.pyc │ │ ├── lapa_loader.cpython-37.pyc │ │ ├── npynpynpy_dataset.cpython-35.pyc │ │ ├── npynpynpy_dataset.cpython-37.pyc │ │ ├── npynpynpy_train.cpython-35.pyc │ │ ├── npynpynpy_train.cpython-37.pyc │ │ ├── npypngnpy_dataset.cpython-35.pyc │ │ ├── npypngnpy_dataset.cpython-37.pyc │ │ ├── 
pngpng_test.cpython-35.pyc │ │ ├── pngpng_test.cpython-37.pyc │ │ ├── pngpng_test_cv2.cpython-35.pyc │ │ ├── pngpng_test_cv2.cpython-37.pyc │ │ ├── pngpng_test_cv2_remap.cpython-35.pyc │ │ ├── pngpng_test_cv2_remap.cpython-37.pyc │ │ ├── pngpng_train.cpython-35.pyc │ │ ├── pngpng_train.cpython-37.pyc │ │ ├── pngpngnpy_dataset.cpython-35.pyc │ │ ├── pngpngnpy_dataset.cpython-37.pyc │ │ ├── pngpngnpy_train.cpython-35.pyc │ │ ├── pngpngnpy_train.cpython-37.pyc │ │ ├── pngpngnpypng_train_1080.cpython-35.pyc │ │ ├── pngpngnpypng_train_1080.cpython-37.pyc │ │ ├── pngpngnpypng_train_1080_7.cpython-35.pyc │ │ ├── pngpngnpypng_train_1080_7.cpython-37.pyc │ │ ├── pngpngnpypng_train_600w.cpython-35.pyc │ │ ├── pngpngnpypng_train_600w.cpython-37.pyc │ │ ├── pngpngpngpng_train.cpython-35.pyc │ │ └── pngpngpngpng_train.cpython-37.pyc │ ├── data_io.py │ ├── lapa │ │ └── lapa_color_map.txt │ └── lapa_loader.py ├── kd │ ├── resume_s_model_enet │ │ └── checkpoint_59_0041000.tar │ └── t_model_unet │ │ └── checkpoint_60_0181000.tar ├── main_test.py ├── main_train_softKD.py ├── models │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-35.pyc │ │ ├── __init__.cpython-37.pyc │ │ ├── basic_module.cpython-35.pyc │ │ ├── basic_module.cpython-37.pyc │ │ ├── enet_ori.cpython-35.pyc │ │ ├── enet_ori.cpython-37.pyc │ │ ├── loss.cpython-35.pyc │ │ ├── mobilenetV2.cpython-35.pyc │ │ ├── mobilenetV2.cpython-37.pyc │ │ ├── unet_seg.cpython-35.pyc │ │ └── unet_seg.cpython-37.pyc │ ├── basic_module.py │ ├── densenet.py │ ├── enet_ori.py │ ├── loss.py │ └── unet_seg.py ├── results │ ├── enet_resume_afterKD_result │ │ ├── 10009865324_0_gt_color.png │ │ ├── 10009865324_0_pre_color.png │ │ ├── 10012551673_5_gt_color.png │ │ ├── 10012551673_5_pre_color.png │ │ ├── 10012551673_8_gt_color.png │ │ ├── 10012551673_8_pre_color.png │ │ ├── 10014368575_1_gt_color.png │ │ ├── 10014368575_1_pre_color.png │ │ ├── 10014368575_2_gt_color.png │ │ ├── 10014368575_2_pre_color.png │ │ ├── 
10014990385_0_gt_color.png │ │ ├── 10014990385_0_pre_color.png │ │ ├── 10014990385_1_gt_color.png │ │ ├── 10014990385_1_pre_color.png │ │ ├── 10020518914_0_gt_color.png │ │ ├── 10020518914_0_pre_color.png │ │ ├── 10022989606_0_gt_color.png │ │ ├── 10022989606_0_pre_color.png │ │ ├── 10023065833_0_gt_color.png │ │ ├── 10023065833_0_pre_color.png │ │ ├── 10023946615_0_gt_color.png │ │ ├── 10023946615_0_pre_color.png │ │ ├── 10036490423_1_gt_color.png │ │ ├── 10036490423_1_pre_color.png │ │ ├── 10036529425_0_gt_color.png │ │ ├── 10036529425_0_pre_color.png │ │ ├── 10037648863_0_gt_color.png │ │ ├── 10037648863_0_pre_color.png │ │ ├── 10041043896_2_gt_color.png │ │ ├── 10041043896_2_pre_color.png │ │ ├── 10044526923_0_gt_color.png │ │ ├── 10044526923_0_pre_color.png │ │ ├── 10044601603_0_gt_color.png │ │ ├── 10044601603_0_pre_color.png │ │ ├── 10049319155_0_gt_color.png │ │ ├── 10049319155_0_pre_color.png │ │ ├── 10050349593_2_gt_color.png │ │ ├── 10050349593_2_pre_color.png │ │ ├── 10053066166_1_gt_color.png │ │ ├── 10053066166_1_pre_color.png │ │ ├── 10056137585_4_gt_color.png │ │ ├── 10056137585_4_pre_color.png │ │ ├── 10056694475_15_gt_color.png │ │ ├── 10056694475_15_pre_color.png │ │ ├── 10056694475_7_gt_color.png │ │ ├── 10056694475_7_pre_color.png │ │ ├── 1007017481_0_gt_color.png │ │ ├── 1007017481_0_pre_color.png │ │ ├── 10071381506_3_gt_color.png │ │ ├── 10071381506_3_pre_color.png │ │ ├── 10072705803_0_gt_color.png │ │ ├── 10072705803_0_pre_color.png │ │ └── checkpoint_100_0090000.tar │ └── enet_resume_beforeKD_result │ │ ├── 10009865324_0_gt_color.png │ │ ├── 10009865324_0_pre_color.png │ │ ├── 10012551673_5_gt_color.png │ │ ├── 10012551673_5_pre_color.png │ │ ├── 10012551673_8_gt_color.png │ │ ├── 10012551673_8_pre_color.png │ │ ├── 10014368575_1_gt_color.png │ │ ├── 10014368575_1_pre_color.png │ │ ├── 10014368575_2_gt_color.png │ │ ├── 10014368575_2_pre_color.png │ │ ├── 10014990385_0_gt_color.png │ │ ├── 10014990385_0_pre_color.png │ │ ├── 
10014990385_1_gt_color.png │ │ ├── 10014990385_1_pre_color.png │ │ ├── 10020518914_0_gt_color.png │ │ ├── 10020518914_0_pre_color.png │ │ ├── 10022989606_0_gt_color.png │ │ ├── 10022989606_0_pre_color.png │ │ ├── 10023065833_0_gt_color.png │ │ ├── 10023065833_0_pre_color.png │ │ ├── 10023946615_0_gt_color.png │ │ ├── 10023946615_0_pre_color.png │ │ ├── 10036490423_1_gt_color.png │ │ ├── 10036490423_1_pre_color.png │ │ ├── 10036529425_0_gt_color.png │ │ ├── 10036529425_0_pre_color.png │ │ ├── 10037648863_0_gt_color.png │ │ ├── 10037648863_0_pre_color.png │ │ ├── 10041043896_2_gt_color.png │ │ ├── 10041043896_2_pre_color.png │ │ ├── 10044526923_0_gt_color.png │ │ ├── 10044526923_0_pre_color.png │ │ ├── 10044601603_0_gt_color.png │ │ ├── 10044601603_0_pre_color.png │ │ ├── 10049319155_0_gt_color.png │ │ ├── 10049319155_0_pre_color.png │ │ ├── 10050349593_2_gt_color.png │ │ ├── 10050349593_2_pre_color.png │ │ ├── 10053066166_1_gt_color.png │ │ ├── 10053066166_1_pre_color.png │ │ ├── 10056137585_4_gt_color.png │ │ ├── 10056137585_4_pre_color.png │ │ ├── 10056694475_15_gt_color.png │ │ ├── 10056694475_15_pre_color.png │ │ ├── 10056694475_7_gt_color.png │ │ ├── 10056694475_7_pre_color.png │ │ ├── 1007017481_0_gt_color.png │ │ ├── 1007017481_0_pre_color.png │ │ ├── 10071381506_3_gt_color.png │ │ ├── 10071381506_3_pre_color.png │ │ ├── 10072705803_0_gt_color.png │ │ ├── 10072705803_0_pre_color.png │ │ └── checkpoint_59_0041000.tar ├── src │ ├── Lapa_test.txt │ ├── Lapa_train.txt │ ├── Lapa_train_new.txt │ ├── Lapa_val.txt │ └── make_list.py ├── test_lapa_enet.sh ├── train_lapa_enet_resume_softKD.sh └── utils │ ├── __init__.py │ ├── __pycache__ │ ├── __init__.cpython-35.pyc │ ├── __init__.cpython-37.pyc │ ├── color_map.cpython-35.pyc │ ├── color_map.cpython-37.pyc │ ├── experiment.cpython-35.pyc │ ├── experiment.cpython-37.pyc │ ├── metrics.cpython-35.pyc │ ├── metrics.cpython-37.pyc │ ├── visualization.cpython-35.pyc │ └── visualization.cpython-37.pyc │ ├── color_map.py │ 
├── experiment.py │ ├── metrics.py │ └── visualization.py ├── LICENSE ├── Liver-CT-Segmentation ├── README.md ├── datasets │ ├── data_io.py │ ├── pngpng_test.py │ └── pngpng_train.py ├── liver_ct_data │ ├── png_24bitsTo8bits_batch_pil.py │ ├── test.png │ └── train.png ├── main_test.py ├── main_train.py └── models │ └── model_seg.py └── README.md /ENet-segnet/README.md: -------------------------------------------------------------------------------- 1 | # ENet-Segmentation 2 | > This work is mainly applied for segmentation using ENet. Our goal is to verify the performance of the **[ENet](https://arxiv.org/abs/1606.02147)** model on different datasets. 3 | 4 | 5 | ## Dependencies 6 | + pytorch (1.4.0) 7 | + python (3.5.0) 8 | + cudatoolkit(10.0) 9 | + torchvision (0.2.0) 10 | + tensorboard (1.6.0) 11 | + pillow 12 | + matplotlib 13 | + skimage 14 | 15 | ## Datasets 16 | 1、Camvid-Tutorial 17 | The ori **[Camvid](http://mi.eng.cam.ac.uk/research/projects/VideoRec/CamVid/)** data with the size of `960*720`. In this work, we used the camvid dataset in **[SegNet-Tutorial](https://github.com/alexgkendall/SegNet-Tutorial)** which named as `Camvid-Tutorial` in `12` classes with `480*360` size. 18 | 19 | ## ENet 20 | 21 | 22 | ## Results 23 | 24 | 25 | 26 | ## Acknowledgement 27 | This work is mainly inspired by **[ENet](https://arxiv.org/abs/1606.02147)**, **[SegNet-Tutorial](https://github.com/alexgkendall/SegNet-Tutorial)**. 
28 | -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__init__.py: -------------------------------------------------------------------------------- 1 | from .lapa_loader import LapaPngPng 2 | from .camvid_loader import CamvidPngPng 3 | 4 | __datasets__ = { 5 | "lapa": LapaPngPng, 6 | "camvid": CamvidPngPng 7 | } 8 | -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/camvid_loader.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/camvid_loader.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/data_io.cpython-35.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/data_io.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/data_io.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/data_io.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/lapa_loader.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/lapa_loader.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_train.cpython-35.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_train.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_train.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/npynpynpy_train.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test.cpython-35.pyc 
-------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_train.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpng_train.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-35.pyc -------------------------------------------------------------------------------- 
/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngpngpng_train.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngpngpng_train.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngpngpng_train.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/__pycache__/pngpngpngpng_train.cpython-37.pyc -------------------------------------------------------------------------------- 
/Enet_lapa_camvid_pytorch/datasets/data_io.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import re 3 | import torchvision.transforms as transforms 4 | 5 | 6 | mean = [0.485, 0.456, 0.406] 7 | std = [0.229, 0.224, 0.225] 8 | get_transform = transforms.Compose([ 9 | transforms.ColorJitter(brightness=0.5, contrast=0.5), 10 | transforms.ToTensor(), # [0,255] ---> [0.0,1.0] 11 | transforms.Normalize(mean=mean, std=std), 12 | ]) 13 | 14 | # read all lines in a file 15 | def read_all_lines(filename): 16 | with open(filename) as f: 17 | lines = [line.rstrip() for line in f.readlines()] 18 | return lines 19 | 20 | 21 | # read an .pfm file into numpy array, used to load SceneFlow disparity files 22 | def pfm_imread(filename): 23 | file = open(filename, 'rb') 24 | color = None 25 | width = None 26 | height = None 27 | scale = None 28 | endian = None 29 | 30 | header = file.readline().decode('utf-8').rstrip() 31 | if header == 'PF': 32 | color = True 33 | elif header == 'Pf': 34 | color = False 35 | else: 36 | raise Exception('Not a PFM file.') 37 | 38 | dim_match = re.match(r'^(\d+)\s(\d+)\s$', file.readline().decode('utf-8')) 39 | if dim_match: 40 | width, height = map(int, dim_match.groups()) 41 | else: 42 | raise Exception('Malformed PFM header.') 43 | 44 | scale = float(file.readline().rstrip()) 45 | if scale < 0: # little-endian 46 | endian = '<' 47 | scale = -scale 48 | else: 49 | endian = '>' # big-endian 50 | 51 | data = np.fromfile(file, endian + 'f') 52 | shape = (height, width, 3) if color else (height, width) 53 | 54 | data = np.reshape(data, shape) 55 | data = np.flipud(data) 56 | return data, scale 57 | -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/datasets/sample.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/datasets/sample.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/models/__init__.py: -------------------------------------------------------------------------------- 1 | from models.enet_ori import enet_ori_model 2 | 3 | __models__ = { 4 | "enet": enet_ori_model 5 | } 6 | -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/models/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/models/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/models/__pycache__/enet_ori.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/models/__pycache__/enet_ori.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/checkpoint_297_0008000.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/checkpoint_297_0008000.tar -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008580_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008580_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008580_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008580_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008640_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008640_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008640_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008640_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008700_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008700_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008700_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_008700_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009900_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009900_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009900_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009900_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009930_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009930_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009930_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009930_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009990_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009990_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009990_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_009990_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010020_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010020_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010020_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010020_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010110_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010110_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010110_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010110_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010170_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010170_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010170_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010170_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010320_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010320_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010320_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010320_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010380_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010380_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010380_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/0001TP_010380_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00000_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00000_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00000_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00000_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00150_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00150_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00150_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00150_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00240_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00240_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00240_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00240_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00330_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00330_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00330_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00330_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00630_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00630_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00630_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00630_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00810_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00810_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00810_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00810_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00960_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00960_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00960_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f00960_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01020_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01020_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01020_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01020_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01080_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01080_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01080_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01080_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01200_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01200_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01200_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01200_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01350_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01350_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01350_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01350_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01440_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01440_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01440_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01440_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01770_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01770_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01770_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01770_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01980_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01980_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01980_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f01980_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02220_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02220_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02220_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02220_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02250_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02250_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02250_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02250_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02430_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02430_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02430_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02430_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02910_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02910_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02910_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f02910_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03030_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03030_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03030_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03030_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03120_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03120_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03120_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03120_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03180_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03180_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03180_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03180_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03210_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03210_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03210_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03210_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03240_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03240_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03240_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03240_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03300_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03300_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03300_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03300_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03390_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03390_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03390_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03390_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03450_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03450_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03450_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03450_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03570_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03570_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03570_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03570_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03660_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03660_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03660_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03660_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03750_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03750_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03750_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03750_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03990_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03990_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03990_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f03990_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04110_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04110_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04110_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04110_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04320_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04320_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04320_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04320_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04380_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04380_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04380_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04380_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04590_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04590_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04590_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04590_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04620_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04620_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04620_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04620_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04770_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04770_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04770_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04770_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04890_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04890_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04890_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f04890_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05040_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05040_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05040_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05040_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05100_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05100_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05100_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_down2/Seq05VD_f05100_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008580_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008580_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008580_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008580_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008640_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008640_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008640_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008640_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008700_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008700_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008700_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_008700_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009900_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009900_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009900_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009900_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009930_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009930_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009930_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009930_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009990_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009990_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009990_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_009990_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010020_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010020_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010020_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010020_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010110_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010110_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010110_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010110_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010170_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010170_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010170_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010170_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010320_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010320_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010320_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010320_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010380_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010380_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010380_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/0001TP_010380_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00000_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00000_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00000_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00000_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00150_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00150_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00150_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00150_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00240_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00240_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00240_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00240_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00330_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00330_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00330_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00330_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00630_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00630_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00630_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00630_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00810_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00810_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00810_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00810_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00960_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00960_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00960_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f00960_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01020_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01020_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01020_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01020_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01080_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01080_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01080_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01080_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01200_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01200_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01200_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01200_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01350_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01350_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01350_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01350_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01440_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01440_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01440_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01440_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01770_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01770_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01770_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01770_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01980_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01980_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01980_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f01980_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02220_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02220_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02220_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02220_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02250_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02250_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02250_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02250_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02430_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02430_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02430_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02430_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02910_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02910_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02910_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f02910_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03030_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03030_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03030_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03030_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03120_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03120_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03120_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03120_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03180_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03180_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03180_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03180_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03210_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03210_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03210_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03210_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03240_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03240_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03240_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03240_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03300_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03300_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03300_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03300_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03390_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03390_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03390_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03390_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03450_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03450_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03450_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03450_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03570_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03570_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03570_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03570_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03660_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03660_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03660_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03660_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03750_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03750_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03750_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03750_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03990_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03990_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03990_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f03990_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04110_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04110_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04110_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04110_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04320_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04320_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04320_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04320_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04380_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04380_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04380_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04380_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04590_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04590_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04590_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04590_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04620_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04620_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04620_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04620_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04770_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04770_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04770_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04770_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04890_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04890_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04890_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f04890_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05040_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05040_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05040_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05040_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05100_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05100_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05100_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/camvid_test_model_297_8000/result_297_8000_randomcrop/Seq05VD_f05100_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/checkpoint_23_0051000.tar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/checkpoint_23_0051000.tar -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10009865324_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10009865324_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10009865324_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10009865324_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_5_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_5_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_5_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_5_pre_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_8_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_8_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_8_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10012551673_8_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014368575_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014368575_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014368575_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014368575_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014990385_0_gt_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014990385_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014990385_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10014990385_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10020518914_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10020518914_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10020518914_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10020518914_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023065833_0_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023065833_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023065833_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023065833_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023946615_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023946615_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023946615_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10023946615_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036490423_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036490423_1_gt_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036490423_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036490423_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036529425_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036529425_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036529425_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10036529425_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10037648863_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10037648863_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10037648863_0_pre_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10037648863_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10041043896_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10041043896_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10041043896_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10041043896_2_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044526923_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044526923_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044526923_0_pre_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044526923_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044601603_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044601603_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044601603_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10044601603_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10050349593_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10050349593_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10050349593_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10050349593_2_pre_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10053066166_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10053066166_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10053066166_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10053066166_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_15_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_15_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_15_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_15_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_7_gt_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_7_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_7_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10056694475_7_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/1007017481_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/1007017481_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/1007017481_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/1007017481_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10071381506_3_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10071381506_3_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10071381506_3_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10071381506_3_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10072705803_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10072705803_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10072705803_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/test/lapa_test_model_23_51000/result/10072705803_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test_camvid_enet.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=2 python main_test.py --mode test \ 2 | --dataset camvid \ 3 | --model enet \ 4 | --channels 3 \ 5 | --out_channels 13 \ 6 | --logdir ./test/test_model \ 7 | --datapath /data/segmentation/camvid-segnet/test-data-rgb/ \ 8 | --testlist 
/data/segmentation/camvid-segnet/camSegnet-test-data-50-label.txt \ 9 | --test_batch_size 1 \ 10 | --test_crop_height 360 --test_crop_width 720 \ 11 | --saveresult ./test/result/ -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/test_lapa_enet.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=0 python main_test.py --mode test \ 2 | --dataset lapa \ 3 | --model enet \ 4 | --channels 3 \ 5 | --out_channels 11 \ 6 | --logdir ./test/test_model_23_51000 \ 7 | --datapath /data/LaPa/ \ 8 | --testlist lapa_test_30.txt \ 9 | --test_batch_size 1 \ 10 | --test_crop_height 512 --test_crop_width 512 \ 11 | --saveresult ./test/test_model_23_51000/result/ -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/train_camvid_enet.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=5,6,7 python main_train.py \ 2 | --mode train \ 3 | --in_channels 3 \ 4 | --out_channels 13 \ 5 | --epochs 600 \ 6 | --lrepochs 300,400,460,500:2 \ 7 | --dataset camvid \ 8 | --model enet \ 9 | --datapath /data/segmentation/camvid-segnet/ \ 10 | --trainlist /data/segmentation/camvid-segnet/camSegnet-train-data-651.txt \ 11 | --batch_size 30 \ 12 | --train_crop_height 360 --train_crop_width 480 \ 13 | --logdir ./trained/trained_enet_ori_camseg-new-2 \ 14 | --save_freq 1000 \ 15 | --summary_freq 20 -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/train_camvid_enet_resume.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=5,6,7 python main_train.py \ 2 | --resume \ 3 | --mode train \ 4 | --in_channels 3 \ 5 | --out_channels 13 \ 6 | --epochs 600 \ 7 | --lrepochs 350,400,460,500:2 \ 8 | --dataset camvid \ 9 | --model enet \ 10 | --datapath /data/segmentation/camvid-segnet/ 
\ 11 | --trainlist /data/segmentation/camvid-segnet/camSegnet-train-data-651.txt \ 12 | --batch_size 30 \ 13 | --train_crop_height 360 --train_crop_width 480 \ 14 | --logdir ./trained/trained_enet_ori_camseg-new-1 \ 15 | --checkpoint_path ./trained/trained_enet_ori_camseg-new-1/checkpoint_297_0008000.tar \ 16 | --save_freq 500 \ 17 | --summary_freq 20 -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/train_lapa_enet.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=2,3 python main_train.py \ 2 | --mode train \ 3 | --in_channels 3 \ 4 | --out_channels 11 \ 5 | --epochs 60 \ 6 | --lrepochs 30,38,47,55:2 \ 7 | --dataset lapa \ 8 | --model enet \ 9 | --datapath /data/LaPa/ \ 10 | --trainlist ./src/Lapa_train.txt \ 11 | --batch_size 8 \ 12 | --train_crop_height 512 --train_crop_width 512 \ 13 | --logdir ./trained/trained_enet_Lapa-2 \ 14 | --save_freq 2000 \ 15 | --summary_freq 20 -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/train_lapa_enet_resume.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=6 python main_train.py \ 2 | --resume \ 3 | --mode train \ 4 | --in_channels 3 \ 5 | --out_channels 11 \ 6 | --epochs 60 \ 7 | --lrepochs 25,30,34,40:2 \ 8 | --dataset lapa \ 9 | --model enet \ 10 | --datapath /data/LaPa/ \ 11 | --trainlist ./src/Lapa_train.txt \ 12 | --batch_size 2 \ 13 | --train_crop_height 512 --train_crop_width 512 \ 14 | --logdir ./trained/trained_enet_Lapa-2 \ 15 | --checkpoint_path ./trained/trained_enet_Lapa-2/old-2/checkpoint_23_0051000.tar \ 16 | --save_freq 500 \ 17 | --summary_freq 20 -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from utils.experiment 
import * 2 | from utils.visualization import * 3 | from utils.metrics import D1_metric, Thres_metric, EPE_metric -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/utils/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/__pycache__/color_map.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/utils/__pycache__/color_map.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/__pycache__/experiment.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/utils/__pycache__/experiment.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/__pycache__/metrics.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/utils/__pycache__/metrics.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/__pycache__/visualization.cpython-35.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_camvid_pytorch/utils/__pycache__/visualization.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/color_map.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import numpy as np 3 | 4 | ''' 5 | lapa: 11 class 6 | label class 7 | 0 background 8 | 1 skin 9 | 2 left eyebrow 10 | 3 right eyebrow 11 | 4 left eye 12 | 5 right eye 13 | 6 nose 14 | 7 upper lip 15 | 8 inner mouth 16 | 9 lower lip 17 | 10 hair 18 | label color 19 | 0 [0, 0, 0] 20 | 1 [0, 153, 255] 21 | 2 [102, 255, 153] 22 | 3 [0, 204, 153] 23 | 4 [255, 255, 102] 24 | 5 [255, 255, 204] 25 | 6 [255, 153, 0] 26 | 7 [255, 102, 255] 27 | 8 [102, 0, 51] 28 | 9 [255, 204, 255] 29 | 10 [255, 0, 102] 30 | 31 | ''' 32 | 33 | 34 | lapa_map = [[0, 0, 0], [0, 153, 255], [102, 255, 153], [0, 204, 153], [255, 255, 102], [255, 255, 204], [255, 153, 0], 35 | [255, 102, 255], [102, 0, 51], [255, 204, 255], [255, 0, 102]] 36 | lapa_cm = np.array(lapa_map).astype("uint8") 37 | 38 | 39 | 40 | 41 | # VOC数据集中对应的标签 voc-32 class 42 | voc_classes = ["Animal", "Archway","Bicyclist","Bridge","Building","Car","CartLuggagePram", 43 | "Child","Column_Pole", "Fence", "LaneMkgsDriv", "LaneMkgsNonDriv", "Misc_Text", 44 | "MotorcycleScooter", "OtherMoving", "ParkingBlock", "Pedestrian", "Road", "RoadShoulder", 45 | "Sidewalk", "SignSymbol", "Sky", "SUVPickupTruck", "TrafficCone", "TrafficLight", 46 | "Train", "Tree", "Truck_Bus", "Tunnel", "VegetationMisc", "Void", "Wall"] 47 | # 各种标签所对应的颜色 48 | voc_colormap = [[64,128,64],[192,0,128],[0,128,192],[0,128,64],[128,0,0],[64,0,128], 49 | [64,0,192],[192,128,64],[192,192,128],[64,64,128],[128,0,192],[192,0,64], 50 | [128,128,64],[192,0,192],[128,64,64],[64,192,128],[64,64,0],[128,64,128], 51 | 
[128,128,192],[0,0,192],[192,128,128],[128,128,128],[64,128,192],[0,0,64], 52 | [0,64,64],[192,64,128],[128,128,0],[192,128,192],[64,0,64],[192,192,0], 53 | [0,0,0],[64,192,0]] 54 | voc_cm = np.array(voc_colormap).astype("uint8") 55 | 56 | # camvid-13 class 57 | # color_encoding = OrderedDict([ 58 | # ('sky', (128, 128, 128)), 59 | # ('building', (128, 0, 0)), 60 | # ('pole', (192, 192, 128)), 61 | # ('road_marking', (255, 69, 0)), 62 | # ('road', (128, 64, 128)), 63 | # ('pavement', (60, 40, 222)), 64 | # ('tree', (128, 128, 0)), 65 | # ('sign_symbol', (192, 128, 128)), 66 | # ('fence', (64, 64, 128)), 67 | # ('car', (64, 0, 128)), 68 | # ('pedestrian', (64, 64, 0)), 69 | # ('bicyclist', (0, 128, 192)), 70 | # ('unlabeled', (0, 0, 0)) 71 | # ]) -------------------------------------------------------------------------------- /Enet_lapa_camvid_pytorch/utils/metrics.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | from utils.experiment import make_nograd_func 4 | from torch.autograd import Variable 5 | from torch import Tensor 6 | 7 | 8 | # Update D1 from >3px to >=3px & >5% 9 | # matlab code: 10 | # E = abs(D_gt - D_est); 11 | # n_err = length(find(D_gt > 0 & E > tau(1) & E. 
# Update D1 from >3px to >=3px & >5%
# matlab code:
# E = abs(D_gt - D_est);
# n_err = length(find(D_gt > 0 & E > tau(1) & E ./ abs(D_gt) > tau(2)));
# n_total = length(find(D_gt > 0));
# d_err = n_err / n_total;

def check_shape_for_metric_computation(*vars):
    """Assert every input is a 3-D tensor (B, H, W) and all share one shape."""
    assert isinstance(vars, tuple)
    for var in vars:
        assert len(var.size()) == 3
        assert var.size() == vars[0].size()


# a wrapper to compute metrics for each image individually
def compute_metric_for_each_image(metric_func):
    """Decorator: evaluate `metric_func` per image and average the results.

    Images whose valid-mask coverage is below 10% of the ground-truth
    coverage are skipped; if every image in the batch is skipped, a scalar
    0 tensor (on D_gts' device) is returned instead.
    """
    def wrapper(D_ests, D_gts, masks, *nargs):
        check_shape_for_metric_computation(D_ests, D_gts, masks)
        bn = D_gts.shape[0]  # batch size
        results = []  # a list to store results for each image
        # compute result one by one
        for idx in range(bn):
            # if tensor, then pick idx, else pass the same value
            cur_nargs = [x[idx] if isinstance(x, (Tensor, Variable)) else x for x in nargs]
            if masks[idx].float().mean() / (D_gts[idx] > 0).float().mean() < 0.1:
                print("masks[idx].float().mean() too small, skip")
            else:
                ret = metric_func(D_ests[idx], D_gts[idx], masks[idx], *cur_nargs)
                results.append(ret)
        if len(results) == 0:
            print("masks[idx].float().mean() too small for all images in this batch, return 0")
            return torch.tensor(0, dtype=torch.float32, device=D_gts.device)
        else:
            return torch.stack(results).mean()
    return wrapper


@make_nograd_func
@compute_metric_for_each_image
def D1_metric(D_est, D_gt, mask):
    """KITTI D1: fraction of valid pixels with error > 3 px AND > 5% of |gt|."""
    D_est, D_gt = D_est[mask], D_gt[mask]
    E = torch.abs(D_gt - D_est)
    err_mask = (E > 3) & (E / D_gt.abs() > 0.05)
    return torch.mean(err_mask.float())


@make_nograd_func
@compute_metric_for_each_image
def Thres_metric(D_est, D_gt, mask, thres):
    """Fraction of valid pixels whose absolute error exceeds `thres`."""
    assert isinstance(thres, (int, float))
    D_est, D_gt = D_est[mask], D_gt[mask]
    E = torch.abs(D_gt - D_est)
    err_mask = E > thres
    return torch.mean(err_mask.float())


# NOTE: please do not use this to build up training loss
@make_nograd_func
@compute_metric_for_each_image
def EPE_metric(D_est, D_gt, mask):
    """Mean absolute end-point error over valid (masked) pixels."""
    D_est, D_gt = D_est[mask], D_gt[mask]
    # BUG FIX: `size_average` was deprecated and later removed from
    # F.l1_loss; reduction='mean' is the equivalent modern spelling.
    return F.l1_loss(D_est, D_gt, reduction='mean')
# colorize a segmentation map
def colorEncode(labelmap, colors, mode='RGB'):
    """Map an integer label map (H, W) to a uint8 color image (H, W, 3).

    `colors` is indexed by label id and yields one RGB triplet per label
    (a uint8 array such as the module's colormaps). Negative labels are
    left black. mode='BGR' returns the channels reversed.
    """
    labelmap = labelmap.astype('int')
    labelmap_rgb = np.zeros((labelmap.shape[0], labelmap.shape[1], 3),
                            dtype=np.uint8)
    # BUG FIX: the original called bare `unique(...)`, which is not defined
    # in this module and raises NameError; the intended call is np.unique.
    for label in np.unique(labelmap):
        if label < 0:
            continue
        labelmap_rgb += (labelmap == label)[:, :, np.newaxis] * \
            np.tile(colors[label],
                    (labelmap.shape[0], labelmap.shape[1], 1))

    if mode == 'BGR':
        return labelmap_rgb[:, :, ::-1]
    else:
        return labelmap_rgb
/Enet_lapa_softKD_pytorch/datasets/__pycache__/data_io.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/data_io.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/data_io.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/data_io.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/lapa_loader.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/lapa_loader.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/lapa_loader.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/lapa_loader.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-35.pyc 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_dataset.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_train.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_train.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_train.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/npynpynpy_train.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/npypngnpy_dataset.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2.cpython-37.pyc -------------------------------------------------------------------------------- 
/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_test_cv2_remap.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_train.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_train.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_train.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpng_train.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-35.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_dataset.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpy_train.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-35.pyc -------------------------------------------------------------------------------- 
/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_1080_7.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/datasets/__pycache__/pngpngnpypng_train_600w.cpython-37.pyc: -------------------------------------------------------------------------------- 
# read all lines in a file
def read_all_lines(filename):
    """Return the lines of `filename` with trailing whitespace stripped."""
    with open(filename) as f:
        lines = [line.rstrip() for line in f.readlines()]
    return lines


# read a .pfm file into a numpy array, used to load SceneFlow disparity files
def pfm_imread(filename):
    """Parse a PFM (Portable FloatMap) image.

    Returns (data, scale): `data` is float32 with shape (H, W, 3) for a
    color 'PF' file or (H, W) for a grayscale 'Pf' file, flipped
    vertically so row 0 is the top row; `scale` is the positive PFM scale
    factor. Raises Exception on a malformed header.
    """
    # BUG FIX: the original opened the file and never closed it, leaking
    # the handle on every call and on every raised exception; `with`
    # guarantees release on all paths.
    with open(filename, 'rb') as file:
        header = file.readline().decode('utf-8').rstrip()
        if header == 'PF':
            color = True
        elif header == 'Pf':
            color = False
        else:
            raise Exception('Not a PFM file.')

        dim_match = re.match(r'^(\d+)\s(\d+)\s$', file.readline().decode('utf-8'))
        if dim_match:
            width, height = map(int, dim_match.groups())
        else:
            raise Exception('Malformed PFM header.')

        scale = float(file.readline().rstrip())
        if scale < 0:  # a negative scale marks little-endian data
            endian = '<'
            scale = -scale
        else:
            endian = '>'  # big-endian

        data = np.fromfile(file, endian + 'f')

    shape = (height, width, 3) if color else (height, width)
    data = np.reshape(data, shape)
    data = np.flipud(data)  # PFM stores rows bottom-to-top
    return data, scale
/Enet_lapa_softKD_pytorch/kd/t_model_unet/checkpoint_60_0181000.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/kd/t_model_unet/checkpoint_60_0181000.tar -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__init__.py: -------------------------------------------------------------------------------- 1 | from models.enet_ori import enet_ori_model 2 | from models.unet_seg import unet_model 3 | from models.mobilenetV2 import mobileV2_model 4 | 5 | __models__ = { 6 | "enet": enet_ori_model, 7 | "unet": unet_model, 8 | "mobilev2": mobileV2_model 9 | } 10 | -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/basic_module.cpython-35.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/basic_module.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/basic_module.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/basic_module.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/enet_ori.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/enet_ori.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/enet_ori.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/enet_ori.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/loss.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/models/__pycache__/loss.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/models/__pycache__/mobilenetV2.cpython-35.pyc: 
# pixel-wise distillation loss
# input shape: [N, C, H, W] ---> output: scalar tensor
def pixelWiseLoss(preds_S, preds_T):
    """Cross-entropy between teacher soft targets and student logits,
    averaged over batch and spatial positions."""
    preds_T = preds_T.detach()  # teacher supplies targets only, no grads
    assert preds_S.shape == preds_T.shape, 'the output dim of teacher and student differ'
    N, C, H, W = preds_S.shape
    softmax_pred_T = F.softmax(preds_T.permute(0, 2, 3, 1).contiguous().view(-1, C), dim=1)
    logsoftmax = nn.LogSoftmax(dim=1)
    loss = (torch.sum(- softmax_pred_T * logsoftmax(preds_S.permute(0, 2, 3, 1).contiguous().view(-1, C)))) / W / H
    return loss / N


# KL-divergence variant of the pixel-wise loss
# input shape: [N, C, H, W] ---> output: scalar tensor
def KLpixelWiseLoss(preds_S, preds_T):
    """KL divergence between softened teacher and student distributions."""
    preds_T = preds_T.detach()  # the derivative for 'target' is not implemented
    preds_S_logsoft = nn.functional.log_softmax(preds_S, dim=1)
    preds_T_soft = nn.functional.softmax(preds_T, dim=1)
    # reduction='mean' made explicit (it was the implicit default). NOTE:
    # torch recommends 'batchmean' for a mathematically correct KL, but
    # switching would change the loss scale for existing checkpoints.
    loss = nn.functional.kl_div(preds_S_logsoft, preds_T_soft, reduction='mean')
    return loss


# feature similarity helpers
def L2(f_):
    """Per-position channel L2 norm, shape (N, 1, H, W); +1e-8 avoids /0."""
    return (((f_**2).sum(dim=1))**0.5).reshape(f_.shape[0], 1, f_.shape[2], f_.shape[3]) + 1e-8

def similarity(feat):
    """Cosine-similarity matrix between all spatial positions: (N, HW, HW)."""
    feat = feat.float()
    tmp = L2(feat).detach()
    feat = feat / tmp
    feat = feat.reshape(feat.shape[0], feat.shape[1], -1)
    return torch.einsum('icm,icn->imn', [feat, feat])

def sim_dis_compute(f_S, f_T):
    """Normalized squared distance between the two similarity matrices."""
    sim_err = ((similarity(f_T) - similarity(f_S))**2)/((f_T.shape[-1]*f_T.shape[-2])**2)/f_T.shape[0]
    sim_dis = sim_err.sum()
    return sim_dis

# pair-wise distillation loss
def pairWiseLoss(preds_S, preds_T, feat_ind=0):
    """Pair-wise similarity distillation on one student/teacher feature map.

    BUG FIX: the original indexed with `self.feat_ind` inside a plain
    function (NameError — there is no `self`) and discarded the result of
    `feat_T.detach()` (a no-op, since detach() is not in-place). The index
    is now the `feat_ind` parameter (default 0) and the teacher map is
    actually detached. NOTE(review): confirm 0 matches the intended
    feat_ind used by callers.
    """
    scale = 0.5
    feat_S = preds_S[feat_ind]  # student feature map
    feat_T = preds_T[feat_ind]  # teacher feature map
    feat_T = feat_T.detach()

    total_w, total_h = feat_T.shape[2], feat_T.shape[3]
    # pool groups of nodes into cluster-nodes (paper's beta parameter)
    patch_w, patch_h = int(total_w * scale), int(total_h * scale)
    # parameter-free pooling
    maxpool = nn.MaxPool2d(kernel_size=(patch_w, patch_h), stride=(patch_w, patch_h), padding=0, ceil_mode=True)
    loss = sim_dis_compute(maxpool(feat_S), maxpool(feat_T))
    return loss
# unet_model
class unet_model(nn.Module):
    """U-Net-style segmentation network emitting `out_ch` logit channels at
    input resolution.

    Encoder: one convbn stem plus four DoubleConv/MaxPool stages down to
    h/16; decoder: four ConvTranspose2d upsampling stages, each
    concatenated with the matching encoder feature (skip connection).
    Relies on the project helpers `convbn` and `DoubleConv` from
    basic_module.
    """

    def __init__(self, out_ch):
        super(unet_model, self).__init__()
        # encoder
        # self.conv1 = DoubleConv(3, 32)
        self.conv1 = convbn(3, 32, 3, 1, 1, 1)   # (32, h, w)
        self.pool1 = nn.MaxPool2d(2)             # (32, h/2, w/2)
        self.conv2 = DoubleConv(32, 64)          # (64, h/2, w/2)
        self.pool2 = nn.MaxPool2d(2)             # (64, h/4, w/4)
        self.conv3 = DoubleConv(64, 128)         # (128, h/4, w/4)
        self.pool3 = nn.MaxPool2d(2)             # (128, h/8, w/8)
        self.conv4 = DoubleConv(128, 256)        # (256, h/8, w/8)
        self.pool4 = nn.MaxPool2d(2)             # (256, h/16, w/16)
        self.conv5 = DoubleConv(256, 128)        # (128, h/16, w/16)

        # decoder (skip concatenations set the in-channel counts)
        self.up6 = nn.ConvTranspose2d(128, 128, 2, stride=2)  # (128, h/8, w/8)
        # self.conv6 = DoubleConv(384, 128)  # up6+c4
        self.conv6 = convbn(384, 128, 3, 1, 1, 1)  # 128 (up6) + 256 (c4)
        self.up7 = nn.ConvTranspose2d(128, 128, 2, stride=2)  # (128, h/4, w/4)
        self.conv7 = DoubleConv(256, 128)          # 128 (up7) + 128 (c3)

        self.up8 = nn.ConvTranspose2d(128, 64, 2, stride=2)   # (64, h/2, w/2)
        self.conv8 = DoubleConv(128, 64)           # 64 (up8) + 64 (c2)

        self.up9 = nn.ConvTranspose2d(64, 64, 2, stride=2)    # (64, h, w)

        # self.conv9 = DoubleConv(96, 32)  # up9+c1
        self.conv9 = convbn(96, 32, 3, 1, 1, 1)    # 64 (up9) + 32 (c1)
        self.conv10 = nn.Conv2d(32, out_ch, 1)     # (out_ch, h, w)

        # Kaiming-style weight init.
        # BUG FIX: the original called math.sqrt(), but `math` is not among
        # this file's visible imports (NameError at construction unless
        # basic_module's star import happens to re-export it); ** 0.5 is
        # identical and needs no import.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, (2. / n) ** 0.5)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()

    def forward(self, x):
        """Return per-pixel logits of shape (N, out_ch, H, W)."""
        c1 = self.conv1(x)
        p1 = self.pool1(c1)
        c2 = self.conv2(p1)
        p2 = self.pool2(c2)
        c3 = self.conv3(p2)
        p3 = self.pool3(c3)
        c4 = self.conv4(p3)
        p4 = self.pool4(c4)
        c5 = self.conv5(p4)
        up_6 = self.up6(c5)
        merge6 = torch.cat([up_6, c4], dim=1)
        c6 = self.conv6(merge6)
        up_7 = self.up7(c6)
        merge7 = torch.cat([up_7, c3], dim=1)
        c7 = self.conv7(merge7)
        up_8 = self.up8(c7)
        merge8 = torch.cat([up_8, c2], dim=1)
        c8 = self.conv8(merge8)
        up_9 = self.up9(c8)
        merge9 = torch.cat([up_9, c1], dim=1)
        c9 = self.conv9(merge9)
        c9 = F.relu(c9)
        c10 = self.conv10(c9)
        # raw logits; callers apply softmax/sigmoid as needed
        return c10
/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_5_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_5_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_5_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_5_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_8_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_8_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_8_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10012551673_8_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_1_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014368575_2_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_0_gt_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10014990385_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10020518914_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10020518914_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10020518914_0_pre_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10020518914_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10022989606_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10022989606_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10022989606_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10022989606_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023065833_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023065833_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023065833_0_pre_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023065833_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023946615_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023946615_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023946615_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10023946615_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036490423_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036490423_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036490423_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036490423_1_pre_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036529425_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036529425_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036529425_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10036529425_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10037648863_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10037648863_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10037648863_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10037648863_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10041043896_2_gt_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10041043896_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10041043896_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10041043896_2_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044526923_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044526923_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044526923_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044526923_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044601603_0_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044601603_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044601603_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10044601603_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10049319155_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10049319155_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10049319155_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10049319155_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10050349593_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10050349593_2_gt_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10050349593_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10050349593_2_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10053066166_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10053066166_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10053066166_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10053066166_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056137585_4_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056137585_4_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056137585_4_pre_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056137585_4_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_15_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_15_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_15_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_15_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_7_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_7_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_7_pre_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10056694475_7_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/1007017481_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/1007017481_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/1007017481_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/1007017481_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10071381506_3_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10071381506_3_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10071381506_3_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10071381506_3_pre_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10072705803_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10072705803_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10072705803_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/10072705803_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/checkpoint_100_0090000.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_afterKD_result/checkpoint_100_0090000.tar -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10009865324_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10009865324_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10009865324_0_pre_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10009865324_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_5_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_5_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_5_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_5_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_8_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_8_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_8_pre_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10012551673_8_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014368575_2_pre_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10014990385_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10020518914_0_gt_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10020518914_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10020518914_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10020518914_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10022989606_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10022989606_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10022989606_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10022989606_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023065833_0_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023065833_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023065833_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023065833_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023946615_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023946615_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023946615_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10023946615_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036490423_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036490423_1_gt_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036490423_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036490423_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036529425_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036529425_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036529425_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10036529425_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10037648863_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10037648863_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10037648863_0_pre_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10037648863_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10041043896_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10041043896_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10041043896_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10041043896_2_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044526923_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044526923_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044526923_0_pre_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044526923_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044601603_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044601603_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044601603_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10044601603_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10049319155_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10049319155_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10049319155_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10049319155_0_pre_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10050349593_2_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10050349593_2_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10050349593_2_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10050349593_2_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10053066166_1_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10053066166_1_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10053066166_1_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10053066166_1_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056137585_4_gt_color.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056137585_4_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056137585_4_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056137585_4_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_15_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_15_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_15_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_15_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_7_gt_color.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_7_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_7_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10056694475_7_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/1007017481_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/1007017481_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/1007017481_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/1007017481_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10071381506_3_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10071381506_3_gt_color.png 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10071381506_3_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10071381506_3_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10072705803_0_gt_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10072705803_0_gt_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10072705803_0_pre_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/10072705803_0_pre_color.png -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/checkpoint_59_0041000.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/results/enet_resume_beforeKD_result/checkpoint_59_0041000.tar -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/src/make_list.py: -------------------------------------------------------------------------------- 1 | # -*- coding: 
utf-8 -*- 2 | from pathlib import Path 3 | import os 4 | import random 5 | 6 | 7 | def read_img_path(img_path): 8 | data_pathes = sorted(Path(img_path).glob('*.jpg')) # list 9 | 10 | for path in data_pathes: 11 | color_path = str(path) 12 | line = color_path.split('/') 13 | img_name = line[-1][:-4] 14 | label_path = '/'.join(line[:-2]) + '/labels/' + img_name + '.png' 15 | item = color_path + ' ' + label_path 16 | print(item) 17 | # 写入txt文本 ##### 18 | File = open('Lapa_test.txt', 'a+') 19 | File.write(item + '\n') 20 | File.flush() 21 | # ##### 22 | 23 | # def make_(list_path): 24 | # fopen1 = open(list_path) 25 | # lines1 = fopen1.readlines() 26 | # b1 = [] 27 | # for line1 in lines1: 28 | # line1 = line1.replace('\n', '') 29 | 30 | if __name__ == '__main__': 31 | root_path = '/LaPa/test/images/' 32 | read_img_path(root_path) 33 | -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/test_lapa_enet.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=1 python main_test.py --mode test \ 2 | --dataset lapa \ 3 | --model enet \ 4 | --channels 3 \ 5 | --out_channels 11 \ 6 | --logdir ./kd/resume_s_model_enet \ 7 | --datapath ./LaPa/ \ 8 | --testlist lapa_test_30.txt \ 9 | --test_batch_size 1 \ 10 | --test_crop_height 512 --test_crop_width 512 \ 11 | --saveresult ./enet_59_41000-2/ -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/train_lapa_enet_resume_softKD.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=0,1 python main_train_softKD.py \ 2 | --resume \ 3 | --checkpoint_path ./kd/resume_s_model_enet/checkpoint_59_0041000.tar \ 4 | --loadteacher \ 5 | --ckpt_path_teacher ./kd/t_model_unet/checkpoint_60_0181000.tar \ 6 | --mode train \ 7 | --in_channels 3 \ 8 | --out_channels 11 \ 9 | --epochs 100 \ 10 | --lrepochs 80,85,90,95:2 \ 11 | 
--dataset lapa \ 12 | --w_pi 10.0 \ 13 | --model_teacher unet \ 14 | --model enet \ 15 | --datapath ./LaPa/ \ 16 | --trainlist ./src/Lapa_train_new.txt \ 17 | --batch_size 20 \ 18 | --train_crop_height 320 --train_crop_width 320 \ 19 | --logdir ./kd/trained/unet_resume_enet_softkd \ 20 | --save_freq 500 \ 21 | --summary_freq 20 -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from utils.experiment import * 2 | from utils.visualization import * 3 | from utils.metrics import D1_metric, Thres_metric, EPE_metric -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/color_map.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/color_map.cpython-35.pyc -------------------------------------------------------------------------------- 
/Enet_lapa_softKD_pytorch/utils/__pycache__/color_map.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/color_map.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/experiment.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/experiment.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/experiment.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/experiment.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/metrics.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/metrics.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/metrics.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/metrics.cpython-37.pyc 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/visualization.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/visualization.cpython-35.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/__pycache__/visualization.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Enet_lapa_softKD_pytorch/utils/__pycache__/visualization.cpython-37.pyc -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/color_map.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import numpy as np 3 | 4 | 5 | 6 | 7 | 8 | ''' 9 | lapa: 11 class 10 | label class 11 | 0 background 12 | 1 skin 13 | 2 left eyebrow 14 | 3 right eyebrow 15 | 4 left eye 16 | 5 right eye 17 | 6 nose 18 | 7 upper lip 19 | 8 inner mouth 20 | 9 lower lip 21 | 10 hair 22 | label color 23 | 0 [0, 0, 0] 24 | 1 [0, 153, 255] 25 | 2 [102, 255, 153] 26 | 3 [0, 204, 153] 27 | 4 [255, 255, 102] 28 | 5 [255, 255, 204] 29 | 6 [255, 153, 0] 30 | 7 [255, 102, 255] 31 | 8 [102, 0, 51] 32 | 9 [255, 204, 255] 33 | 10 [255, 0, 102] 34 | 35 | ''' 36 | 37 | 38 | lapa_map = [[0, 0, 0], [0, 153, 255], [102, 255, 153], [0, 204, 153], [255, 255, 102], [255, 255, 204], [255, 153, 0], 39 | [255, 102, 255], [102, 0, 51], [255, 204, 255], [255, 0, 102]] 40 | lapa_cm = np.array(lapa_map).astype("uint8") 41 | 42 | # lapa_cm[pred[0].cpu().detach().numpy()] 43 | 44 | 45 | 46 | 47 | 
-------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/metrics.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | from utils.experiment import make_nograd_func 4 | from torch.autograd import Variable 5 | from torch import Tensor 6 | 7 | 8 | # Update D1 from >3px to >=3px & >5% 9 | # matlab code: 10 | # E = abs(D_gt - D_est); 11 | # n_err = length(find(D_gt > 0 & E > tau(1) & E. / abs(D_gt) > tau(2))); 12 | # n_total = length(find(D_gt > 0)); 13 | # d_err = n_err / n_total; 14 | 15 | def check_shape_for_metric_computation(*vars): 16 | assert isinstance(vars, tuple) 17 | for var in vars: 18 | assert len(var.size()) == 3 19 | assert var.size() == vars[0].size() 20 | 21 | # a wrapper to compute metrics for each image individually 22 | def compute_metric_for_each_image(metric_func): 23 | def wrapper(D_ests, D_gts, masks, *nargs): 24 | check_shape_for_metric_computation(D_ests, D_gts, masks) 25 | bn = D_gts.shape[0] # batch size 26 | results = [] # a list to store results for each image 27 | # compute result one by one 28 | for idx in range(bn): 29 | # if tensor, then pick idx, else pass the same value 30 | cur_nargs = [x[idx] if isinstance(x, (Tensor, Variable)) else x for x in nargs] 31 | if masks[idx].float().mean() / (D_gts[idx] > 0).float().mean() < 0.1: 32 | print("masks[idx].float().mean() too small, skip") 33 | else: 34 | ret = metric_func(D_ests[idx], D_gts[idx], masks[idx], *cur_nargs) 35 | results.append(ret) 36 | if len(results) == 0: 37 | print("masks[idx].float().mean() too small for all images in this batch, return 0") 38 | return torch.tensor(0, dtype=torch.float32, device=D_gts.device) 39 | else: 40 | return torch.stack(results).mean() 41 | return wrapper 42 | 43 | @make_nograd_func 44 | @compute_metric_for_each_image 45 | def D1_metric(D_est, D_gt, mask): 46 | D_est, D_gt = D_est[mask], D_gt[mask] 47 | E = 
torch.abs(D_gt - D_est) 48 | err_mask = (E > 3) & (E / D_gt.abs() > 0.05) 49 | return torch.mean(err_mask.float()) 50 | 51 | @make_nograd_func 52 | @compute_metric_for_each_image 53 | def Thres_metric(D_est, D_gt, mask, thres): 54 | assert isinstance(thres, (int, float)) 55 | D_est, D_gt = D_est[mask], D_gt[mask] 56 | E = torch.abs(D_gt - D_est) 57 | err_mask = E > thres 58 | return torch.mean(err_mask.float()) 59 | 60 | # NOTE: please do not use this to build up training loss 61 | @make_nograd_func 62 | @compute_metric_for_each_image 63 | def EPE_metric(D_est, D_gt, mask): 64 | D_est, D_gt = D_est[mask], D_gt[mask] 65 | return F.l1_loss(D_est, D_gt, size_average=True) 66 | -------------------------------------------------------------------------------- /Enet_lapa_softKD_pytorch/utils/visualization.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import print_function 3 | import torch 4 | import torch.nn as nn 5 | import torch.utils.data 6 | from torch.autograd import Variable, Function 7 | import torch.nn.functional as F 8 | import math 9 | import numpy as np 10 | 11 | 12 | def gen_error_colormap(): 13 | cols = np.array( 14 | [[0 / 3.0, 0.1875 / 3.0, 49, 54, 149], 15 | [0.1875 / 3.0, 0.375 / 3.0, 69, 117, 180], 16 | [0.375 / 3.0, 0.75 / 3.0, 116, 173, 209], 17 | [0.75 / 3.0, 1.5 / 3.0, 171, 217, 233], 18 | [1.5 / 3.0, 3 / 3.0, 224, 243, 248], 19 | [3 / 3.0, 6 / 3.0, 254, 224, 144], 20 | [6 / 3.0, 12 / 3.0, 253, 174, 97], 21 | [12 / 3.0, 24 / 3.0, 244, 109, 67], 22 | [24 / 3.0, 48 / 3.0, 215, 48, 39], 23 | [48 / 3.0, np.inf, 165, 0, 38]], dtype=np.float32) 24 | cols[:, 2: 5] /= 255. 
25 | return cols 26 | 27 | 28 | error_colormap = gen_error_colormap() 29 | 30 | 31 | class disp_error_image_func(Function): 32 | def forward(self, D_est_tensor, D_gt_tensor, abs_thres=3., rel_thres=0.05, dilate_radius=1): 33 | D_gt_np = D_gt_tensor.detach().cpu().numpy() 34 | D_est_np = D_est_tensor.detach().cpu().numpy() 35 | B, H, W = D_gt_np.shape 36 | # valid mask 37 | mask = D_gt_np > 0 38 | # error in percentage. When error <= 1, the pixel is valid since <= 3px & 5% 39 | error = np.abs(D_gt_np - D_est_np) 40 | error[np.logical_not(mask)] = 0 41 | error[mask] = np.minimum(error[mask] / abs_thres, (error[mask] / D_gt_np[mask]) / rel_thres) 42 | # get colormap 43 | cols = error_colormap 44 | # create error image 45 | error_image = np.zeros([B, H, W, 3], dtype=np.float32) 46 | for i in range(cols.shape[0]): 47 | error_image[np.logical_and(error >= cols[i][0], error < cols[i][1])] = cols[i, 2:] 48 | # TODO: imdilate 49 | # error_image = cv2.imdilate(D_err, strel('disk', dilate_radius)); 50 | error_image[np.logical_not(mask)] = 0. 
51 | # show color tag in the top-left cornor of the image 52 | for i in range(cols.shape[0]): 53 | distance = 20 54 | error_image[:, :10, i * distance:(i + 1) * distance, :] = cols[i, 2:] 55 | 56 | return torch.from_numpy(np.ascontiguousarray(error_image.transpose([0, 3, 1, 2]))) 57 | 58 | def backward(self, grad_output): 59 | return None 60 | 61 | # segmentation上色 62 | def colorEncode(labelmap, colors, mode='RGB'): 63 | labelmap = labelmap.astype('int') 64 | labelmap_rgb = np.zeros((labelmap.shape[0], labelmap.shape[1], 3), 65 | dtype=np.uint8) 66 | for label in unique(labelmap): 67 | if label < 0: 68 | continue 69 | labelmap_rgb += (labelmap == label)[:, :, np.newaxis] * \ 70 | np.tile(colors[label], 71 | (labelmap.shape[0], labelmap.shape[1], 1)) 72 | 73 | if mode == 'BGR': 74 | return labelmap_rgb[:, :, ::-1] 75 | else: 76 | return labelmap_rgb 77 | -------------------------------------------------------------------------------- /Liver-CT-Segmentation/README.md: -------------------------------------------------------------------------------- 1 | # Liver segmentation 2 | > This project mainly introduces the liver CT segmentation. 3 | 4 | 5 | ## Dependencies 6 | + pytorch (1.4.0) 7 | + python (3.7.0+) 8 | + cuda-toolkit(10.0+) 9 | + torchvision (0.2.0+) 10 | + tensorboard (1.6.0) 11 | 12 | ## UNET-like-model 13 | The proposed segmentation network is a `unet-like` cnn model. Seeing `models/model_seg.py` for specific network architecture.The loss function used in the project is `nn.BCEWithLogitsLoss` based on `pytorch`. 
14 | 15 | ## Results 16 | In `liver_ct_data` directory, I provide you `train` and `test` samples: 17 | ![train](liver_ct_data/train.png) 18 | ![test](liver_ct_data/test.png) 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /Liver-CT-Segmentation/datasets/data_io.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import re 3 | import torchvision.transforms as transforms 4 | 5 | 6 | def get_transform(): 7 | mean = [0.485, 0.456, 0.406] 8 | std = [0.229, 0.224, 0.225] 9 | 10 | return transforms.Compose([ 11 | transforms.ToTensor(), # [0,255] ---> [0.0,1.0] 12 | transforms.Normalize(mean=mean, std=std), 13 | ]) 14 | 15 | 16 | # read all lines in a file 17 | def read_all_lines(filename): 18 | with open(filename) as f: 19 | lines = [line.rstrip() for line in f.readlines()] 20 | return lines 21 | 22 | 23 | # read an .pfm file into numpy array, used to load SceneFlow disparity files 24 | def pfm_imread(filename): 25 | file = open(filename, 'rb') 26 | color = None 27 | width = None 28 | height = None 29 | scale = None 30 | endian = None 31 | 32 | header = file.readline().decode('utf-8').rstrip() 33 | if header == 'PF': 34 | color = True 35 | elif header == 'Pf': 36 | color = False 37 | else: 38 | raise Exception('Not a PFM file.') 39 | 40 | dim_match = re.match(r'^(\d+)\s(\d+)\s$', file.readline().decode('utf-8')) 41 | if dim_match: 42 | width, height = map(int, dim_match.groups()) 43 | else: 44 | raise Exception('Malformed PFM header.') 45 | 46 | scale = float(file.readline().rstrip()) 47 | if scale < 0: # little-endian 48 | endian = '<' 49 | scale = -scale 50 | else: 51 | endian = '>' # big-endian 52 | 53 | data = np.fromfile(file, endian + 'f') 54 | shape = (height, width, 3) if color else (height, width) 55 | 56 | data = np.reshape(data, shape) 57 | data = np.flipud(data) 58 | return data, scale 59 | 
-------------------------------------------------------------------------------- /Liver-CT-Segmentation/datasets/pngpng_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import random 4 | from torch.utils.data import Dataset 5 | from PIL import Image 6 | import numpy as np 7 | from datasets.data_io import get_transform, read_all_lines 8 | import torchvision.transforms as transforms 9 | import torch 10 | import torch.nn as nn 11 | import cv2 12 | 13 | processed = transforms.Compose([ 14 | transforms.ToTensor(), 15 | ] 16 | ) 17 | 18 | class PngPng(Dataset): 19 | def __init__(self, datapath, list_filename, training, crop_h, crop_w, channel): 20 | self.datapath = datapath 21 | 22 | self.left_filenames, self.right_filenames = self.load_path(list_filename) 23 | self.training = training 24 | self.crop_h = crop_h 25 | self.crop_w = crop_w 26 | self.channel = channel 27 | 28 | def load_path(self, list_filename): 29 | lines = read_all_lines(list_filename) 30 | splits = [line.split() for line in lines] 31 | left_images = [x[0] for x in splits] 32 | right_images = [x[1] for x in splits] 33 | 34 | return left_images, right_images 35 | 36 | def load_image(self, filename): 37 | if self.channel == 3: 38 | return Image.open(filename).convert('RGB') 39 | elif self.channel == 1: 40 | return Image.open(filename).convert('L') 41 | 42 | def __len__(self): 43 | return len(self.left_filenames) 44 | 45 | def __getitem__(self, index): 46 | left_img = self.load_image(os.path.join(self.datapath, self.left_filenames[index])) 47 | right_img = self.load_image(os.path.join(self.datapath, self.right_filenames[index])) 48 | 49 | # add left_png.name 50 | left_pathname = self.left_filenames[index] 51 | 52 | if self.training: 53 | return {"ori": left_img, 54 | "gt": right_img} 55 | else: 56 | w, h = left_img.size 57 | x1 = (w - self.crop_w)/2 58 | y1 = (h - self.crop_h)/2 59 | 60 | # randomly png crop 61 | left_img 
= left_img.crop((x1, y1, x1 + self.crop_w, y1 + self.crop_h)) 62 | right_img = right_img.crop((x1, y1, x1 + self.crop_w, y1 + self.crop_h)) 63 | 64 | left_img = processed(left_img) 65 | right_img = processed(right_img) 66 | 67 | return {"ori": left_img, 68 | "gt": right_img, 69 | "left_name": left_pathname} -------------------------------------------------------------------------------- /Liver-CT-Segmentation/liver_ct_data/png_24bitsTo8bits_batch_pil.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import numpy as np 4 | import sys 5 | from PIL import Image 6 | # convert 24bits.png to 8bits.png 7 | path = "/**path**/" 8 | for filename in os.listdir(path): 9 | if os.path.splitext(filename)[1] == '.png': 10 | img = Image.open(path + filename).convert('L') 11 | filename = filename[:-4] 12 | img.save(path + filename + '.png') 13 | print('All png have been convert to npy in the folder of {}'.format(path)) 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /Liver-CT-Segmentation/liver_ct_data/test.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Liver-CT-Segmentation/liver_ct_data/test.png -------------------------------------------------------------------------------- /Liver-CT-Segmentation/liver_ct_data/train.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ssssober/Image-Segmentation/36a3b0764aa9a0d2f100b5eefd755352635b698d/Liver-CT-Segmentation/liver_ct_data/train.png -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # Image-Segmentation 2 | This project is mainly used to explore image or video segmentation task. The research objects include: 3 | outdoor landscapes, indoor scenes, common objects, human face and body, medical images and so on. 4 | The methods used mainly include the classical CNN models. We will continue to explore it. 5 | --------------------------------------------------------------------------------