├── DeepVO
│   ├── ConvLSTM_Cell.py
│   ├── Test_ConvLSTM_Cell.py
│   └── train.py
├── Deeplab-v2
│   ├── README.md
│   ├── cityscapes
│   │   ├── generate_dataset_txt.py
│   │   ├── test.txt
│   │   ├── train.txt
│   │   ├── train_coarse.txt
│   │   ├── train_extra.txt
│   │   ├── train_fine.txt
│   │   ├── val.txt
│   │   ├── val_coarse.txt
│   │   └── val_fine.txt
│   ├── dataset_voc2012
│   │   ├── test.txt
│   │   ├── train.txt
│   │   └── val.txt
│   ├── evaluate.py
│   ├── inference.py
│   ├── log.txt
│   ├── main.py
│   ├── main_msc.py
│   ├── mode
│   │   ├── __init__.py
│   │   ├── model.py
│   │   ├── model_msc.py
│   │   └── network.py
│   ├── my-scripts
│   │   ├── Convert_data.ipynb
│   │   ├── colours.ipynb
│   │   ├── colours.txt
│   │   ├── creat_image_txt.ipynb
│   │   ├── createTfRecord_pascal_SBD.py
│   │   ├── rename_mat_file.ipynb
│   │   └── save_colour.txt
│   ├── nyu
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── config
│   │   │   └── deeplab_largeFOV
│   │   │       ├── solver.prototxt
│   │   │       ├── solver2.prototxt
│   │   │       ├── test.prototxt
│   │   │       └── train.prototxt
│   │   ├── create_labels_249.py
│   │   ├── features
│   │   │   └── deeplab_largeFOV
│   │   │       └── val
│   │   │           └── fc8
│   │   │               └── labels
│   │   │                   └── README.md
│   │   ├── features2
│   │   │   └── README.md
│   │   ├── list
│   │   │   ├── creat_diff.sh
│   │   │   ├── train_all.txt
│   │   │   ├── train_aug.txt
│   │   │   ├── val.txt
│   │   │   └── val_id.txt
│   │   ├── log
│   │   │   └── README.md
│   │   ├── mat_image.py
│   │   ├── mat_label.py
│   │   ├── model
│   │   │   └── deeplab_largeFOV
│   │   │       └── model_download.txt
│   │   ├── res
│   │   │   └── README.md
│   │   ├── run_densecrf.sh
│   │   ├── run_densecrf_grid_search.sh
│   │   ├── run_pascal.sh
│   │   └── sub.sed
│   ├── pascal-context
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── config
│   │   │   └── deeplab_largeFOV
│   │   │       ├── solver.prototxt
│   │   │       ├── solver2.prototxt
│   │   │       ├── test.prototxt
│   │   │       └── train.prototxt
│   │   ├── convert_rawmat_labels.py
│   │   ├── create_labels_249.py
│   │   ├── features
│   │   │   └── deeplab_largeFOV
│   │   │       └── val
│   │   │           └── fc8
│   │   │               └── labels
│   │   │                   └── README.md
│   │   ├── features2
│   │   │   └── README.md
│   │   ├── list
│   │   │   ├── train_all.txt
│   │   │   ├── train_aug.txt
│   │   │   ├── val.txt
│   │   │   └── val_id.txt
│   │   ├── model
│   │   │   └── deeplab_largeFOV
│   │   │       └── model_download.txt
│   │   ├── res
│   │   │   └── README.md
│   │   ├── run_densecrf.sh
│   │   ├── run_densecrf_grid_search.sh
│   │   ├── run_pascal.sh
│   │   └── sub.sed
│   ├── utils
│   │   ├── __init__.py
│   │   ├── __init__.pyc
│   │   ├── convert_labels.py
│   │   ├── image_reader.py
│   │   ├── image_reader.pyc
│   │   ├── label_utils.py
│   │   ├── label_utils.pyc
│   │   ├── mat2png.py
│   │   ├── plot_training_curve.py
│   │   ├── write_to_log.py
│   │   └── write_to_log.pyc
│   └── voc2012
│       ├── .gitignore
│       ├── README.md
│       ├── config
│       │   └── deeplab_largeFOV
│       │       ├── solver.prototxt
│       │       ├── solver2.prototxt
│       │       ├── test.prototxt
│       │       └── train.prototxt
│       ├── convert_labels.py
│       ├── create_labels_21.py
│       ├── features
│       │   └── deeplab_largeFOV
│       │       └── val
│       │           └── fc8
│       │               └── labels
│       │                   └── README.md
│       ├── features2
│       │   └── README.md
│       ├── jpg2ppm.sh
│       ├── list
│       │   ├── test.txt
│       │   ├── test_id.txt
│       │   ├── train.txt
│       │   ├── train_aug.txt
│       │   ├── trainval.txt
│       │   ├── trainval_aug.txt
│       │   ├── val.txt
│       │   └── val_id.txt
│       ├── log
│       │   └── README.md
│       ├── mat2png.py
│       ├── model
│       │   └── deeplab_largeFOV
│       │       └── model_download.txt
│       ├── res
│       │   └── README.md
│       ├── run_densecrf.sh
│       ├── run_densecrf_grid_search.sh
│       ├── run_pascal.sh
│       ├── sub.sed
│       └── utils.py
├── README.md
├── U-net
│   ├── BSDDataLoader.py
│   ├── README.md
│   ├── train.py
│   └── unet.py
└── tensorflow_codes
    ├── .gitignore
    ├── README.md
    ├── inference.py
    ├── input
    │   ├── ade20k.jpg
    │   ├── indoor_1.jpg
    │   ├── indoor_2.jpg
    │   ├── outdoor_1.png
    │   ├── pascal_voc.jpg
    │   └── test_pycaffe.jpg
    ├── model.py
    ├── model
    │   └── cityscapes
    │       ├── enet.ckpt.data-00000-of-00001
    │       ├── enet.ckpt.index
    │       └── enet.ckpt.meta
    ├── model_train_op.py
    ├── network.py
    ├── output
    │   ├── enet_outdoor_1.png
    │   ├── fcn_indoor_1.jpg
    │   ├── fcn_indoor_2.jpg
    │   ├── icnet_outdoor_1.png
    │   ├── pspnet_indoor_1.jpg
    │   └── pspnet_indoor_2.jpg
    ├── tools.py
    ├── train.py
    └── utils
        └── color150.mat

/DeepVO/ConvLSTM_Cell.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/DeepVO/ConvLSTM_Cell.py
/DeepVO/Test_ConvLSTM_Cell.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/DeepVO/Test_ConvLSTM_Cell.py
/DeepVO/train.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/DeepVO/train.py
/Deeplab-v2/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/README.md
/Deeplab-v2/cityscapes/generate_dataset_txt.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/generate_dataset_txt.py
/Deeplab-v2/cityscapes/test.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/test.txt
/Deeplab-v2/cityscapes/train.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/train.txt
/Deeplab-v2/cityscapes/train_coarse.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/train_coarse.txt
/Deeplab-v2/cityscapes/train_extra.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/train_extra.txt
/Deeplab-v2/cityscapes/train_fine.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/train_fine.txt
/Deeplab-v2/cityscapes/val.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/val.txt
/Deeplab-v2/cityscapes/val_coarse.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/val_coarse.txt
/Deeplab-v2/cityscapes/val_fine.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/cityscapes/val_fine.txt
/Deeplab-v2/dataset_voc2012/test.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/dataset_voc2012/test.txt
/Deeplab-v2/dataset_voc2012/train.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/dataset_voc2012/train.txt
/Deeplab-v2/dataset_voc2012/val.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/dataset_voc2012/val.txt
/Deeplab-v2/evaluate.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/evaluate.py
/Deeplab-v2/inference.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/inference.py
/Deeplab-v2/log.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/log.txt
/Deeplab-v2/main.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/main.py
/Deeplab-v2/main_msc.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/main_msc.py
/Deeplab-v2/mode/__init__.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/mode/__init__.py
/Deeplab-v2/mode/model.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/mode/model.py
/Deeplab-v2/mode/model_msc.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/mode/model_msc.py
/Deeplab-v2/mode/network.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/mode/network.py
/Deeplab-v2/my-scripts/Convert_data.ipynb: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/Convert_data.ipynb
/Deeplab-v2/my-scripts/colours.ipynb: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/colours.ipynb
/Deeplab-v2/my-scripts/colours.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/colours.txt
/Deeplab-v2/my-scripts/creat_image_txt.ipynb: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/creat_image_txt.ipynb
/Deeplab-v2/my-scripts/createTfRecord_pascal_SBD.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/createTfRecord_pascal_SBD.py
/Deeplab-v2/my-scripts/rename_mat_file.ipynb: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/rename_mat_file.ipynb
/Deeplab-v2/my-scripts/save_colour.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/my-scripts/save_colour.txt
/Deeplab-v2/nyu/.gitignore: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/.gitignore
/Deeplab-v2/nyu/README.md: (empty)
/Deeplab-v2/nyu/config/deeplab_largeFOV/solver.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/config/deeplab_largeFOV/solver.prototxt
/Deeplab-v2/nyu/config/deeplab_largeFOV/solver2.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/config/deeplab_largeFOV/solver2.prototxt
/Deeplab-v2/nyu/config/deeplab_largeFOV/test.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/config/deeplab_largeFOV/test.prototxt
/Deeplab-v2/nyu/config/deeplab_largeFOV/train.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/config/deeplab_largeFOV/train.prototxt
/Deeplab-v2/nyu/create_labels_249.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/create_labels_249.py
/Deeplab-v2/nyu/features/deeplab_largeFOV/val/fc8/labels/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/features/deeplab_largeFOV/val/fc8/labels/README.md
/Deeplab-v2/nyu/features2/README.md: Just a file folder
/Deeplab-v2/nyu/list/creat_diff.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/list/creat_diff.sh
/Deeplab-v2/nyu/list/train_all.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/list/train_all.txt
/Deeplab-v2/nyu/list/train_aug.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/list/train_aug.txt
/Deeplab-v2/nyu/list/val.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/list/val.txt
/Deeplab-v2/nyu/list/val_id.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/list/val_id.txt
/Deeplab-v2/nyu/log/README.md: Just a file folder
/Deeplab-v2/nyu/mat_image.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/mat_image.py
/Deeplab-v2/nyu/mat_label.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/mat_label.py
/Deeplab-v2/nyu/model/deeplab_largeFOV/model_download.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/model/deeplab_largeFOV/model_download.txt
/Deeplab-v2/nyu/res/README.md: Just a file folder
/Deeplab-v2/nyu/run_densecrf.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/run_densecrf.sh
/Deeplab-v2/nyu/run_densecrf_grid_search.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/run_densecrf_grid_search.sh
/Deeplab-v2/nyu/run_pascal.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/run_pascal.sh
/Deeplab-v2/nyu/sub.sed: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/nyu/sub.sed
/Deeplab-v2/pascal-context/.gitignore: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/.gitignore
/Deeplab-v2/pascal-context/README.md: (empty)
/Deeplab-v2/pascal-context/config/deeplab_largeFOV/solver.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/config/deeplab_largeFOV/solver.prototxt
/Deeplab-v2/pascal-context/config/deeplab_largeFOV/solver2.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/config/deeplab_largeFOV/solver2.prototxt
/Deeplab-v2/pascal-context/config/deeplab_largeFOV/test.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/config/deeplab_largeFOV/test.prototxt
/Deeplab-v2/pascal-context/config/deeplab_largeFOV/train.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/config/deeplab_largeFOV/train.prototxt
/Deeplab-v2/pascal-context/convert_rawmat_labels.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/convert_rawmat_labels.py
/Deeplab-v2/pascal-context/create_labels_249.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/create_labels_249.py
/Deeplab-v2/pascal-context/features/deeplab_largeFOV/val/fc8/labels/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/features/deeplab_largeFOV/val/fc8/labels/README.md
/Deeplab-v2/pascal-context/features2/README.md: Just a file folder
/Deeplab-v2/pascal-context/list/train_all.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/list/train_all.txt
/Deeplab-v2/pascal-context/list/train_aug.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/list/train_aug.txt
/Deeplab-v2/pascal-context/list/val.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/list/val.txt
/Deeplab-v2/pascal-context/list/val_id.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/list/val_id.txt
/Deeplab-v2/pascal-context/model/deeplab_largeFOV/model_download.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/model/deeplab_largeFOV/model_download.txt
/Deeplab-v2/pascal-context/res/README.md: Just a file folder
/Deeplab-v2/pascal-context/run_densecrf.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/run_densecrf.sh
/Deeplab-v2/pascal-context/run_densecrf_grid_search.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/run_densecrf_grid_search.sh
/Deeplab-v2/pascal-context/run_pascal.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/run_pascal.sh
/Deeplab-v2/pascal-context/sub.sed: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/pascal-context/sub.sed
/Deeplab-v2/utils/__init__.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/__init__.py
/Deeplab-v2/utils/__init__.pyc: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/__init__.pyc
/Deeplab-v2/utils/convert_labels.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/convert_labels.py
/Deeplab-v2/utils/image_reader.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/image_reader.py
/Deeplab-v2/utils/image_reader.pyc: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/image_reader.pyc
/Deeplab-v2/utils/label_utils.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/label_utils.py
/Deeplab-v2/utils/label_utils.pyc: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/label_utils.pyc
/Deeplab-v2/utils/mat2png.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/mat2png.py
/Deeplab-v2/utils/plot_training_curve.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/plot_training_curve.py
/Deeplab-v2/utils/write_to_log.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/write_to_log.py
/Deeplab-v2/utils/write_to_log.pyc: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/utils/write_to_log.pyc
/Deeplab-v2/voc2012/.gitignore: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/.gitignore
/Deeplab-v2/voc2012/README.md: (empty)
/Deeplab-v2/voc2012/config/deeplab_largeFOV/solver.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/config/deeplab_largeFOV/solver.prototxt
/Deeplab-v2/voc2012/config/deeplab_largeFOV/solver2.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/config/deeplab_largeFOV/solver2.prototxt
/Deeplab-v2/voc2012/config/deeplab_largeFOV/test.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/config/deeplab_largeFOV/test.prototxt
/Deeplab-v2/voc2012/config/deeplab_largeFOV/train.prototxt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/config/deeplab_largeFOV/train.prototxt
/Deeplab-v2/voc2012/convert_labels.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/convert_labels.py
/Deeplab-v2/voc2012/create_labels_21.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/create_labels_21.py
/Deeplab-v2/voc2012/features/deeplab_largeFOV/val/fc8/labels/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/features/deeplab_largeFOV/val/fc8/labels/README.md
/Deeplab-v2/voc2012/features2/README.md: Just a file folder
/Deeplab-v2/voc2012/jpg2ppm.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/jpg2ppm.sh
/Deeplab-v2/voc2012/list/test.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/test.txt
/Deeplab-v2/voc2012/list/test_id.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/test_id.txt
/Deeplab-v2/voc2012/list/train.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/train.txt
/Deeplab-v2/voc2012/list/train_aug.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/train_aug.txt
/Deeplab-v2/voc2012/list/trainval.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/trainval.txt
/Deeplab-v2/voc2012/list/trainval_aug.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/trainval_aug.txt
/Deeplab-v2/voc2012/list/val.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/val.txt
/Deeplab-v2/voc2012/list/val_id.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/list/val_id.txt
/Deeplab-v2/voc2012/log/README.md: Just a file folder
/Deeplab-v2/voc2012/mat2png.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/mat2png.py
/Deeplab-v2/voc2012/model/deeplab_largeFOV/model_download.txt: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/model/deeplab_largeFOV/model_download.txt
/Deeplab-v2/voc2012/res/README.md: Just a file folder
/Deeplab-v2/voc2012/run_densecrf.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/run_densecrf.sh
/Deeplab-v2/voc2012/run_densecrf_grid_search.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/run_densecrf_grid_search.sh
/Deeplab-v2/voc2012/run_pascal.sh: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/run_pascal.sh
/Deeplab-v2/voc2012/sub.sed: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/sub.sed
/Deeplab-v2/voc2012/utils.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/Deeplab-v2/voc2012/utils.py
/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/README.md
/U-net/BSDDataLoader.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/U-net/BSDDataLoader.py
/U-net/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/U-net/README.md
/U-net/train.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/U-net/train.py
/U-net/unet.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/U-net/unet.py
/tensorflow_codes/.gitignore: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/.gitignore
/tensorflow_codes/README.md: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/README.md
/tensorflow_codes/inference.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/inference.py
/tensorflow_codes/input/ade20k.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/input/ade20k.jpg
/tensorflow_codes/input/indoor_1.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/input/indoor_1.jpg
/tensorflow_codes/input/indoor_2.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/input/indoor_2.jpg
/tensorflow_codes/input/outdoor_1.png: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/input/outdoor_1.png
/tensorflow_codes/input/pascal_voc.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/input/pascal_voc.jpg
/tensorflow_codes/input/test_pycaffe.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/input/test_pycaffe.jpg
/tensorflow_codes/model.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/model.py
/tensorflow_codes/model/cityscapes/enet.ckpt.data-00000-of-00001: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/model/cityscapes/enet.ckpt.data-00000-of-00001
/tensorflow_codes/model/cityscapes/enet.ckpt.index: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/model/cityscapes/enet.ckpt.index
/tensorflow_codes/model/cityscapes/enet.ckpt.meta: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/model/cityscapes/enet.ckpt.meta
/tensorflow_codes/model_train_op.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/model_train_op.py
/tensorflow_codes/network.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/network.py
/tensorflow_codes/output/enet_outdoor_1.png: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/output/enet_outdoor_1.png
/tensorflow_codes/output/fcn_indoor_1.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/output/fcn_indoor_1.jpg
/tensorflow_codes/output/fcn_indoor_2.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/output/fcn_indoor_2.jpg
/tensorflow_codes/output/icnet_outdoor_1.png: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/output/icnet_outdoor_1.png
/tensorflow_codes/output/pspnet_indoor_1.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/output/pspnet_indoor_1.jpg
/tensorflow_codes/output/pspnet_indoor_2.jpg: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/output/pspnet_indoor_2.jpg
/tensorflow_codes/tools.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/tools.py
/tensorflow_codes/train.py: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/train.py
/tensorflow_codes/utils/color150.mat: https://raw.githubusercontent.com/tangzhenyu/SemanticSegmentation_DL/HEAD/tensorflow_codes/utils/color150.mat