├── architecture.png ├── spectral ├── README.md ├── extract_utils.py └── extract.py ├── common ├── utils.py ├── my_optim.py ├── evaluation.py ├── vis.py └── logger.py ├── model ├── base │ ├── correlation.py │ ├── feature.py │ └── conv4d.py ├── learner.py └── sccnet.py ├── data ├── dataset.py ├── dlrsd.py ├── isaid.py ├── fss.py ├── coco.py ├── pascal.py └── splits │ ├── pascal │ └── val │ │ ├── fold0.txt │ │ ├── fold3.txt │ │ ├── fold1.txt │ │ └── fold2.txt │ └── DLRSD │ ├── trn │ └── fold1.txt │ └── val │ └── fold1.txt ├── README.md ├── .gitignore ├── train.py └── test.py /architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/linhanwang/SCCNet/HEAD/architecture.png -------------------------------------------------------------------------------- /spectral/README.md: -------------------------------------------------------------------------------- 1 | # This document shows how to calculate eigen vectors 2 | 3 | 1. Extract features 4 | ------------ 5 | ``` 6 | python extract.py extract_features --images_list "${DataRoot}/images.txt" --images_root "${DataRoot}/images" --output_dir "${DataRoot}/features/resnet50" --model_name resnet50 --batch_size 1 7 | ``` 8 | 9 | 2. Calculate eigen vectors 10 | ------------ 11 | ``` 12 | python extract.py extract_eigs --images_root "${DataRoot}/images" --features_dir "${DataRoot}/features/resnet101" --which_matrix "laplacian" --output_dir "${DataRoot}/eigs/resnet101/laplacian_top5_c5_l26" --K 5 --image_downsample_factor 4 --image_color_lambda 5 13 | ``` 14 | -------------------------------------------------------------------------------- /common/utils.py: -------------------------------------------------------------------------------- 1 | r""" Helper functions """ 2 | import random 3 | 4 | import torch 5 | import numpy as np 6 | 7 | 8 | def fix_randseed(seed): 9 | r""" Set random seeds for reproducibility """ 10 | if seed is None: 11 | seed = int(random.random() * 1e5) 12 | np.random.seed(seed) 13 | torch.manual_seed(seed) 14 | torch.cuda.manual_seed(seed) 15 | torch.cuda.manual_seed_all(seed) 16 | torch.backends.cudnn.benchmark = False 17 | torch.backends.cudnn.deterministic = True 18 | 19 | 20 | def mean(x): 21 | return sum(x) / len(x) if len(x) > 0 else 0.0 22 | 23 | 24 | def to_cuda(batch): 25 | for key, value in batch.items(): 26 | if isinstance(value, torch.Tensor): 27 | batch[key] = value.cuda() 28 | return batch 29 | 30 | 31 | def to_cpu(tensor): 32 | return tensor.detach().clone().cpu() 33 | -------------------------------------------------------------------------------- /model/base/correlation.py: -------------------------------------------------------------------------------- 1 | r""" Provides functions that builds/manipulates correlation tensors """ 2 | import torch 3 | 4 | 5 | class Correlation: 6 | 7 | @classmethod 8 | def multilayer_correlation(cls, query_feats, support_feats, stack_ids): 9 | eps = 1e-5 10 | 11 | corrs = [] 12 | for idx, (query_feat, support_feat) in enumerate(zip(query_feats, support_feats)): 13 | bsz, ch, hb, wb = support_feat.size() 14 | support_feat = support_feat.view(bsz, ch, -1) 15 | support_feat = support_feat / (support_feat.norm(dim=1, p=2, keepdim=True) + eps) 16 | 17 | bsz, ch, ha, wa = query_feat.size() 18 | query_feat = query_feat.view(bsz, ch, -1) 19 | query_feat = query_feat / (query_feat.norm(dim=1, p=2, keepdim=True) + eps) 20 | 21 | corr = torch.bmm(query_feat.transpose(1, 2), support_feat).view(bsz, ha, wa, hb, wb) 22 | corr = 
corr.clamp(min=0) 23 | corrs.append(corr) 24 | 25 | corr_l4 = torch.stack(corrs[-stack_ids[0]:]).transpose(0, 1).contiguous() 26 | corr_l3 = torch.stack(corrs[-stack_ids[1]:-stack_ids[0]]).transpose(0, 1).contiguous() 27 | corr_l2 = torch.stack(corrs[-stack_ids[2]:-stack_ids[1]]).transpose(0, 1).contiguous() 28 | 29 | return [corr_l4, corr_l3, corr_l2] 30 | -------------------------------------------------------------------------------- /common/my_optim.py: -------------------------------------------------------------------------------- 1 | import torch.optim as optim 2 | 3 | def get_finetune_optimizer(args, model): 4 | lr = args.lr 5 | weight_list = [] 6 | bias_list = [] 7 | pretrain_weight_list = [] 8 | pretrain_bias_list =[] 9 | for name,value in model.named_parameters(): 10 | if 'model_res' in name or 'model_backbone' in name: 11 | if 'weight' in name: 12 | pretrain_weight_list.append(value) 13 | elif 'bias' in name: 14 | pretrain_bias_list.append(value) 15 | else: 16 | if 'weight' in name: 17 | weight_list.append(value) 18 | elif 'bias' in name: 19 | bias_list.append(value) 20 | 21 | opt = optim.SGD([{'params': pretrain_weight_list, 'lr':lr}, 22 | {'params': pretrain_bias_list, 'lr':lr*2}, 23 | {'params': weight_list, 'lr':lr*10}, 24 | {'params': bias_list, 'lr':lr*20}], momentum=0.90, weight_decay=0.0005) # momentum = 0.99 25 | # opt = optim.Adam([{'params': pretrain_weight_list, 'lr':lr}, 26 | # {'params': pretrain_bias_list, 'lr':lr*2}, 27 | # {'params': weight_list, 'lr':lr*10}, 28 | # {'params': bias_list, 'lr':lr*20}], weight_decay=0.0005) 29 | return opt 30 | 31 | def adjust_learning_rate_poly(args, optimizer, iter): 32 | base_lr = args.lr 33 | max_iter = args.max_steps 34 | reduce = ((1-float(iter)/max_iter)**(args.power)) 35 | lr = base_lr * reduce 36 | optimizer.param_groups[0]['lr'] = lr * 1 37 | optimizer.param_groups[1]['lr'] = lr * 2 38 | optimizer.param_groups[2]['lr'] = lr * 10 39 | optimizer.param_groups[3]['lr'] = lr * 20 40 | -------------------------------------------------------------------------------- /data/dataset.py: -------------------------------------------------------------------------------- 1 | r""" Dataloader builder for few-shot semantic segmentation dataset """ 2 | from torchvision import transforms 3 | from torch.utils.data import DataLoader 4 | 5 | from data.pascal import DatasetPASCAL 6 | from data.coco import DatasetCOCO 7 | from data.fss import DatasetFSS 8 | from data.isaid import DatasetISAID 9 | from data.dlrsd import DatasetDLRSD 10 | 11 | 12 | class FSSDataset: 13 | 14 | @classmethod 15 | def initialize(cls, img_size, datapath, use_original_imgsize): 16 | 17 | cls.datasets = { 18 | 'pascal': DatasetPASCAL, 19 | 'coco': DatasetCOCO, 20 | 'fss': DatasetFSS, 21 | 'isaid': DatasetISAID, 22 | 'dlrsd': DatasetDLRSD, 23 | } 24 | 25 | cls.img_mean = [0.485, 0.456, 0.406] 26 | cls.img_std = [0.229, 0.224, 0.225] 27 | cls.datapath = datapath 28 | cls.use_original_imgsize = use_original_imgsize 29 | 30 | cls.transform = transforms.Compose([transforms.Resize(size=(img_size, img_size)), 31 | transforms.ToTensor(), 32 | transforms.Normalize(cls.img_mean, cls.img_std)]) 33 | 34 | @classmethod 35 | def build_dataloader(cls, benchmark, bsz, nworker, fold, split, aug=False, shot=1): 36 | # Force randomness during training for diverse episode combinations 37 | # Freeze randomness during testing for reproducibility 38 | shuffle = split == 'trn' 39 | nworker = nworker if split == 'trn' else 0 40 | 41 | dataset = cls.datasets[benchmark](cls.datapath, fold=fold, 
transform=cls.transform, split=split, shot=shot, use_original_imgsize=cls.use_original_imgsize, aug=aug) 42 | dataloader = DataLoader(dataset, batch_size=bsz, shuffle=shuffle, num_workers=nworker) 43 | 44 | return dataloader 45 | -------------------------------------------------------------------------------- /common/evaluation.py: -------------------------------------------------------------------------------- 1 | r""" Evaluate mask prediction """ 2 | import torch 3 | 4 | 5 | class Evaluator: 6 | r""" Computes intersection and union between prediction and ground-truth """ 7 | @classmethod 8 | def initialize(cls): 9 | cls.ignore_index = 255 10 | 11 | @classmethod 12 | def classify_prediction(cls, pred_mask, batch): 13 | gt_mask = batch.get('query_mask') 14 | 15 | # Apply ignore_index in PASCAL-5i masks (following evaluation scheme in PFE-Net (TPAMI 2020)) 16 | query_ignore_idx = batch.get('query_ignore_idx') 17 | if query_ignore_idx is not None: 18 | assert torch.logical_and(query_ignore_idx, gt_mask).sum() == 0 19 | query_ignore_idx *= cls.ignore_index 20 | gt_mask = gt_mask + query_ignore_idx 21 | pred_mask[gt_mask == cls.ignore_index] = cls.ignore_index 22 | 23 | # compute intersection and union of each episode in a batch 24 | area_inter, area_pred, area_gt = [], [], [] 25 | for _pred_mask, _gt_mask in zip(pred_mask, gt_mask): 26 | _inter = _pred_mask[_pred_mask == _gt_mask] 27 | if _inter.size(0) == 0: # as torch.histc returns error if it gets empty tensor (pytorch 1.5.1) 28 | _area_inter = torch.tensor([0, 0], device=_pred_mask.device) 29 | else: 30 | _area_inter = torch.histc(_inter, bins=2, min=0, max=1) 31 | area_inter.append(_area_inter) 32 | area_pred.append(torch.histc(_pred_mask, bins=2, min=0, max=1)) 33 | area_gt.append(torch.histc(_gt_mask, bins=2, min=0, max=1)) 34 | area_inter = torch.stack(area_inter).t() 35 | area_pred = torch.stack(area_pred).t() 36 | area_gt = torch.stack(area_gt).t() 37 | area_union = area_pred + area_gt - area_inter 38 | 39 | return area_inter, area_union 40 | -------------------------------------------------------------------------------- /model/base/feature.py: -------------------------------------------------------------------------------- 1 | r""" Extracts intermediate features from given backbone network & layer ids """ 2 | 3 | 4 | def extract_feat_vgg(img, backbone, feat_ids, bottleneck_ids=None, lids=None): 5 | r""" Extract intermediate features from VGG """ 6 | feats = [] 7 | feat = img 8 | for lid, module in enumerate(backbone.features): 9 | feat = module(feat) 10 | if lid in feat_ids: 11 | feats.append(feat.clone()) 12 | return feats 13 | 14 | 15 | def extract_feat_res(img, backbone, feat_ids, bottleneck_ids, lids): 16 | r""" Extract intermediate features from ResNet""" 17 | feats = [] 18 | 19 | # Layer 0 20 | feat = backbone.conv1.forward(img) 21 | feat = backbone.bn1.forward(feat) 22 | feat = backbone.relu.forward(feat) 23 | feat = backbone.maxpool.forward(feat) 24 | 25 | # Layer 1-4 26 | for hid, (bid, lid) in enumerate(zip(bottleneck_ids, lids)): 27 | res = feat 28 | feat = backbone.__getattr__('layer%d' % lid)[bid].conv1.forward(feat) 29 | feat = backbone.__getattr__('layer%d' % lid)[bid].bn1.forward(feat) 30 | feat = backbone.__getattr__('layer%d' % lid)[bid].relu.forward(feat) 31 | feat = backbone.__getattr__('layer%d' % lid)[bid].conv2.forward(feat) 32 | feat = backbone.__getattr__('layer%d' % lid)[bid].bn2.forward(feat) 33 | feat = backbone.__getattr__('layer%d' % lid)[bid].relu.forward(feat) 34 | feat = 
backbone.__getattr__('layer%d' % lid)[bid].conv3.forward(feat) 35 | feat = backbone.__getattr__('layer%d' % lid)[bid].bn3.forward(feat) 36 | 37 | if bid == 0: 38 | res = backbone.__getattr__('layer%d' % lid)[bid].downsample.forward(res) 39 | 40 | feat += res 41 | 42 | if hid + 1 in feat_ids: 43 | feats.append(feat.clone()) 44 | 45 | feat = backbone.__getattr__('layer%d' % lid)[bid].relu.forward(feat) 46 | 47 | return feats -------------------------------------------------------------------------------- /model/base/conv4d.py: -------------------------------------------------------------------------------- 1 | r""" Implementation of center-pivot 4D convolution """ 2 | 3 | import torch 4 | import torch.nn as nn 5 | 6 | 7 | class CenterPivotConv4d(nn.Module): 8 | r""" CenterPivot 4D conv""" 9 | def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=True): 10 | super(CenterPivotConv4d, self).__init__() 11 | 12 | self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size[:2], stride=stride[:2], 13 | bias=bias, padding=padding[:2]) 14 | self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size[2:], stride=stride[2:], 15 | bias=bias, padding=padding[2:]) 16 | 17 | self.stride34 = stride[2:] 18 | self.kernel_size = kernel_size 19 | self.stride = stride 20 | self.padding = padding 21 | self.idx_initialized = False 22 | 23 | def prune(self, ct): 24 | bsz, ch, ha, wa, hb, wb = ct.size() 25 | if not self.idx_initialized: 26 | idxh = torch.arange(start=0, end=hb, step=self.stride[2:][0], device=ct.device) 27 | idxw = torch.arange(start=0, end=wb, step=self.stride[2:][1], device=ct.device) 28 | self.len_h = len(idxh) 29 | self.len_w = len(idxw) 30 | self.idx = (idxw.repeat(self.len_h, 1) + idxh.repeat(self.len_w, 1).t() * wb).view(-1) 31 | self.idx_initialized = True 32 | ct_pruned = ct.view(bsz, ch, ha, wa, -1).index_select(4, self.idx).view(bsz, ch, ha, wa, self.len_h, self.len_w) 33 | 34 | return ct_pruned 35 | 36 | def forward(self, x): 37 | if self.stride[2:][-1] > 1: 38 | out1 = self.prune(x) 39 | else: 40 | out1 = x 41 | bsz, inch, ha, wa, hb, wb = out1.size() 42 | out1 = out1.permute(0, 4, 5, 1, 2, 3).contiguous().view(-1, inch, ha, wa) 43 | out1 = self.conv1(out1) 44 | outch, o_ha, o_wa = out1.size(-3), out1.size(-2), out1.size(-1) 45 | out1 = out1.view(bsz, hb, wb, outch, o_ha, o_wa).permute(0, 3, 4, 5, 1, 2).contiguous() 46 | 47 | bsz, inch, ha, wa, hb, wb = x.size() 48 | out2 = x.permute(0, 2, 3, 1, 4, 5).contiguous().view(-1, inch, hb, wb) 49 | out2 = self.conv2(out2) 50 | outch, o_hb, o_wb = out2.size(-3), out2.size(-2), out2.size(-1) 51 | out2 = out2.view(bsz, ha, wa, outch, o_hb, o_wb).permute(0, 3, 1, 2, 4, 5).contiguous() 52 | 53 | if out1.size()[-2:] != out2.size()[-2:] and self.padding[-2:] == (0, 0): 54 | out1 = out1.view(bsz, outch, o_ha, o_wa, -1).sum(dim=-1) 55 | out2 = out2.squeeze() 56 | 57 | y = out1 + out2 58 | return y 59 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Self-Correlation and Cross-Correlation Learning for Few-Shot Remote Sensing Image Semantic Segmentation 2 | 3 | 4 | Introduction 5 | ------------ 6 | This is the source code for our paper [Self-Correlation and Cross-Correlation Learning for Few-Shot Remote Sensing Image Semantic Segmentation](http://arxiv.org/abs/2309.05840), which is accepted to Sigspatial 2023. 
7 | 
8 | Network Architecture
9 | ------------
10 | ![network](architecture.png)
11 | 
12 | ### Installation
13 | * Install PyTorch 2.0.1 and other dependencies
14 | * Clone this repo
15 | 
16 | ```
17 | git clone https://github.com/linhanwang/SCCNet.git
18 | ```
19 | 
20 | ### Data Preparation
21 | 
22 | Download remote_sensing.tar.gz from [huggingface](https://huggingface.co/datasets/LinhanWang/SCCNet/tree/main), unzip it, and put it under your 'SCCNet' directory.
23 | 
24 | 
25 | ### Train
26 | 
27 | ```
28 | python train.py --max_steps 200000 --freeze True --datapath './remote_sensing/iSAID_patches' --img_size 256 --backbone resnet50 --fold 0 --benchmark isaid --lr 9e-4 --bsz 32 --logpath exp_name
29 | ```
30 | 
31 | The logs and checkpoints are stored under the 'logs' directory.
32 | 
33 | ### Test
34 | 
35 | ```
36 | python test.py --datapath './remote_sensing/iSAID_patches' --img_size 256 --backbone resnet50 --fold 0 --benchmark isaid --bsz 64 --nshot 1 --load './logs/exp_name/best_model.pt'
37 | ```
38 | 
39 | ~~We provide a pretrained model for the above setting. You can download it from [here](https://drive.google.com/drive/folders/1IU3m_0qTgIzmz6mc_0J1b-dMGBZMnelg?usp=drive_link).~~ Apologies: the pretrained model was lost during the data migration.
40 | 
41 | The fusion process is implemented in test.py; you can enable it in test.sh by setting fuse=True. The eigen_path option points to the directory of precomputed eigenvectors; the procedure for generating them is described in [spectral](spectral/README.md). An example fusion-enabled command is sketched at the end of this README.
42 | 
43 | ### Citation
44 | 
45 | If you find SCCNet useful in your research or applications, please cite using this BibTeX:
46 | 
47 | ```
48 | @inproceedings{wang2023self,
49 |   title={Self-Correlation and Cross-Correlation Learning for Few-Shot Remote Sensing Image Semantic Segmentation},
50 |   author={Wang, Linhan and Lei, Shuo and He, Jianfeng and Wang, Shengkun and Zhang, Min and Lu, Chang-Tien},
51 |   booktitle={Proceedings of the 31st ACM International Conference on Advances in Geographic Information Systems},
52 |   pages={1--10},
53 |   year={2023}
54 | }
55 | ```
56 | 
57 | ### Acknowledgements
58 | 
59 | We borrow code from public projects [SDM](https://github.com/caoql98/SDM), [HSNet](https://github.com/juhongm999/hsnet) and [dss](https://github.com/lukemelas/deep-spectral-segmentation).
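
As referenced in the Test section, fusion can be enabled at test time. The command below is only a sketch: the fuse and eigen_path option names come from the description above, but their exact command-line form and the eigenvector directory shown are assumptions (the path mirrors the resnet101 example in [spectral](spectral/README.md)); check the argument parser in test.py / test.sh for the authoritative names.

```
python test.py --datapath './remote_sensing/iSAID_patches' --img_size 256 --backbone resnet50 --fold 0 --benchmark isaid --bsz 64 --nshot 1 --load './logs/exp_name/best_model.pt' --fuse True --eigen_path './remote_sensing/iSAID_patches/eigs/resnet101/laplacian_top5_c5_l26'
```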
60 | -------------------------------------------------------------------------------- /data/dlrsd.py: -------------------------------------------------------------------------------- 1 | r""" iSAID-5i few-shot semantic segmentation dataset """ 2 | import os 3 | from typing_extensions import override 4 | 5 | import torch 6 | import PIL.Image as Image 7 | import numpy as np 8 | import torchvision.transforms.v2 as transforms2 9 | from .pascal import DatasetPASCAL 10 | 11 | 12 | class DatasetDLRSD(DatasetPASCAL): 13 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize, aug) -> None: 14 | self.split = 'val' if split in ['val', 'test'] else 'trn' 15 | self.fold = fold 16 | self.nfolds = 3 17 | self.nclass = 15 18 | self.benchmark = 'dlrsd' 19 | self.shot = shot 20 | self.use_original_imgsize = use_original_imgsize 21 | 22 | self.img_path = os.path.join(datapath, 'UCMerced_LandUse/Images') 23 | self.ann_path = os.path.join(datapath, 'DLRSD/Images') 24 | 25 | self.aug = aug and (self.split == 'trn') 26 | if self.aug: 27 | self.tv2 = transforms2.Compose([ 28 | transforms2.RandomHorizontalFlip(), 29 | transforms2.RandomRotation(30), 30 | # transforms2.RandomResizedCrop(size=256, scale=(0.5, 1.0)) 31 | ]) 32 | 33 | self.transform = transform 34 | 35 | self.class_ids = self.build_class_ids() 36 | self.img_metadata = self.build_img_metadata() 37 | self.img_metadata_classwise = self.build_img_metadata_classwise() 38 | 39 | @override 40 | def __len__(self): 41 | return len(self.img_metadata) # TODO: why hsnet use 100 for val 42 | 43 | @override 44 | def read_mask(self, img_name): 45 | r"""Return segmentation mask in PIL Image""" 46 | # mask = torch.tensor(np.array(Image.open(os.path.join(self.ann_path, img_name) + '_instance_color_RGB.png'))) 47 | mask = Image.open(os.path.join(self.ann_path, img_name[:-2], img_name + '.png')) 48 | return mask 49 | 50 | @override 51 | def read_img(self, img_name): 52 | r"""Return RGB image in PIL Image""" 53 | return Image.open(os.path.join(self.img_path, img_name[:-2], img_name) + '.tif') 54 | 55 | @override 56 | def build_img_metadata(self): 57 | 58 | def read_metadata(split, fold_id): 59 | fold_n_metadata = os.path.join('data/splits/DLRSD/%s/fold%d.txt' % (split, fold_id)) 60 | with open(fold_n_metadata, 'r') as f: 61 | fold_n_metadata = f.read().split('\n')[:-1] 62 | fold_n_metadata = [[data.split('__')[0], int(data.split('__')[1]) - 1] for data in fold_n_metadata] 63 | return fold_n_metadata 64 | 65 | img_metadata = [] 66 | if self.split == 'trn': # For training, read image-metadata of "the other" folds 67 | for fold_id in range(self.nfolds): 68 | if fold_id == self.fold: # Skip validation fold 69 | continue 70 | img_metadata += read_metadata(self.split, fold_id) 71 | elif self.split == 'val': # For validation, read image-metadata of "current" fold 72 | img_metadata = read_metadata(self.split, self.fold) 73 | else: 74 | raise Exception('Undefined split %s: ' % self.split) 75 | 76 | print('Total (%s) images are : %d' % (self.split, len(img_metadata))) 77 | 78 | return img_metadata 79 | -------------------------------------------------------------------------------- /data/isaid.py: -------------------------------------------------------------------------------- 1 | r""" iSAID-5i few-shot semantic segmentation dataset """ 2 | import os 3 | from typing_extensions import override 4 | 5 | import torch 6 | import PIL.Image as Image 7 | import numpy as np 8 | import torchvision.transforms.v2 as transforms2 9 | from .pascal import DatasetPASCAL 
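# Note: DatasetISAID (like DatasetDLRSD) subclasses DatasetPASCAL and reuses its episode
# sampling and __getitem__ logic; only the image/annotation paths, fold/class counts,
# mask and image reading, and split-file parsing are overridden below.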
10 | 11 | 12 | class DatasetISAID(DatasetPASCAL): 13 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize, aug) -> None: 14 | self.split = 'val' if split in ['val', 'test'] else 'trn' 15 | self.fold = fold 16 | self.nfolds = 3 17 | self.nclass = 15 18 | self.benchmark = 'isaid' 19 | self.shot = shot 20 | self.use_original_imgsize = use_original_imgsize 21 | 22 | if self.split == 'trn': 23 | self.img_path = os.path.join(datapath, 'train/images') 24 | self.ann_path = os.path.join(datapath, 'train/semantic_png') 25 | else: 26 | self.img_path = os.path.join(datapath, 'val/images') 27 | self.ann_path = os.path.join(datapath, 'val/semantic_png') 28 | 29 | self.aug = aug and (self.split == 'trn') 30 | if self.aug: 31 | self.tv2 = transforms2.Compose([ 32 | transforms2.RandomHorizontalFlip(), 33 | transforms2.RandomRotation(30), 34 | # transforms2.RandomResizedCrop(size=256, scale=(0.5, 1.0)) 35 | ]) 36 | 37 | self.transform = transform 38 | 39 | self.class_ids = self.build_class_ids() 40 | self.img_metadata = self.build_img_metadata() 41 | self.img_metadata_classwise = self.build_img_metadata_classwise() 42 | 43 | @override 44 | def __len__(self): 45 | return len(self.img_metadata) # TODO: why hsnet use 100 for val 46 | 47 | @override 48 | def read_mask(self, img_name): 49 | r"""Return segmentation mask in PIL Image""" 50 | # mask = torch.tensor(np.array(Image.open(os.path.join(self.ann_path, img_name) + '_instance_color_RGB.png'))) 51 | mask = Image.open(os.path.join(self.ann_path, img_name) + '_instance_color_RGB.png') 52 | return mask 53 | 54 | @override 55 | def read_img(self, img_name): 56 | r"""Return RGB image in PIL Image""" 57 | return Image.open(os.path.join(self.img_path, img_name) + '.png') 58 | 59 | @override 60 | def build_img_metadata(self): 61 | 62 | def read_metadata(split, fold_id): 63 | fold_n_metadata = os.path.join('data/splits/isaid/%s/fold%d.txt' % (split, fold_id)) 64 | with open(fold_n_metadata, 'r') as f: 65 | fold_n_metadata = f.read().split('\n')[:-1] 66 | fold_n_metadata = [[data.split('__')[0], int(data.split('__')[1]) - 1] for data in fold_n_metadata] 67 | return fold_n_metadata 68 | 69 | img_metadata = [] 70 | if self.split == 'trn': # For training, read image-metadata of "the other" folds 71 | for fold_id in range(self.nfolds): 72 | if fold_id == self.fold: # Skip validation fold 73 | continue 74 | img_metadata += read_metadata(self.split, fold_id) 75 | elif self.split == 'val': # For validation, read image-metadata of "current" fold 76 | img_metadata = read_metadata(self.split, self.fold) 77 | else: 78 | raise Exception('Undefined split %s: ' % self.split) 79 | 80 | print('Total (%s) images are : %d' % (self.split, len(img_metadata))) 81 | 82 | return img_metadata 83 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | -------------------------------------------------------------------------------- /model/learner.py: -------------------------------------------------------------------------------- 1 | 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | from .base.conv4d import CenterPivotConv4d as Conv4d 6 | 7 | 8 | class HPNLearner(nn.Module): 9 | def __init__(self, inch): 10 | super(HPNLearner, self).__init__() 11 | 12 | def make_building_block(in_channel, out_channels, kernel_sizes, spt_strides, group=4): 13 | assert len(out_channels) == len(kernel_sizes) == len(spt_strides) 14 | 15 | building_block_layers = [] 16 | for idx, (outch, ksz, stride) in enumerate(zip(out_channels, kernel_sizes, spt_strides)): 17 | inch = in_channel if idx == 0 else out_channels[idx - 1] 18 | ksz4d = (ksz,) * 4 19 | str4d = (1, 1) + (stride,) * 2 20 | pad4d = (ksz // 2,) * 4 21 | 22 | building_block_layers.append(Conv4d(inch, outch, ksz4d, str4d, pad4d)) 23 | building_block_layers.append(nn.GroupNorm(group, outch)) 24 | building_block_layers.append(nn.ReLU(inplace=True)) 25 | 26 | return nn.Sequential(*building_block_layers) 27 | 28 | outch1, outch2, outch3 = 16, 64, 128 29 | 30 | # Squeezing building blocks 31 | self.encoder_layer4 = make_building_block(inch[0], [outch1, outch2, outch3], [3, 3, 3], [2, 2, 2]) 32 | self.encoder_layer3 = make_building_block(inch[1], [outch1, outch2, outch3], [5, 3, 3], [4, 2, 2]) 33 | self.encoder_layer2 = make_building_block(inch[2], [outch1, outch2, outch3], [5, 5, 3], [4, 4, 2]) 34 | 35 | # Mixing building blocks 36 | self.encoder_layer4to3 = make_building_block(outch3, [outch3, outch3, outch3], [3, 3, 3], [1, 1, 1]) 37 | self.encoder_layer3to2 = make_building_block(outch3, [outch3, outch3, outch3], [3, 3, 3], [1, 1, 1]) 38 | 39 | # Decoder layers 40 | self.decoder1 = nn.Sequential(nn.Conv2d(outch3, outch3, (3, 3), padding=(1, 1), bias=True), 41 | nn.ReLU(), 42 | nn.Conv2d(outch3, outch2, (3, 3), padding=(1, 1), bias=True), 43 | nn.ReLU()) 44 | 45 | self.decoder2 = nn.Sequential(nn.Conv2d(outch2, outch2, (3, 3), padding=(1, 1), bias=True), 46 | nn.ReLU(), 47 | nn.Conv2d(outch2, 2, (3, 3), padding=(1, 1), bias=True)) 48 | 49 | def interpolate_support_dims(self, hypercorr, spatial_size=None): 50 | bsz, ch, ha, wa, hb, wb = hypercorr.size() 51 | hypercorr = hypercorr.permute(0, 4, 5, 1, 2, 3).contiguous().view(bsz * hb * wb, ch, ha, wa) 52 | hypercorr = F.interpolate(hypercorr, spatial_size, mode='bilinear', align_corners=True) 53 | o_hb, o_wb = spatial_size 54 | hypercorr = hypercorr.view(bsz, hb, wb, ch, o_hb, o_wb).permute(0, 3, 4, 5, 1, 2).contiguous() 55 | return hypercorr 56 | 57 | def forward(self, hypercorr_pyramid): 58 | 59 | # Encode hypercorrelations from each layer (Squeezing building blocks) 60 | hypercorr_sqz4 = self.encoder_layer4(hypercorr_pyramid[0]) 61 | hypercorr_sqz3 = self.encoder_layer3(hypercorr_pyramid[1]) 62 | hypercorr_sqz2 = self.encoder_layer2(hypercorr_pyramid[2]) 63 | 64 | # Propagate encoded 4D-tensor (Mixing building blocks) 65 | hypercorr_sqz4 = self.interpolate_support_dims(hypercorr_sqz4, hypercorr_sqz3.size()[-4:-2]) 66 | hypercorr_mix43 = hypercorr_sqz4 + hypercorr_sqz3 67 | hypercorr_mix43 = self.encoder_layer4to3(hypercorr_mix43) 68 | 69 | hypercorr_mix43 = self.interpolate_support_dims(hypercorr_mix43, hypercorr_sqz2.size()[-4:-2]) 70 | hypercorr_mix432 = hypercorr_mix43 + hypercorr_sqz2 71 | hypercorr_mix432 = self.encoder_layer3to2(hypercorr_mix432) 72 | 73 | bsz, ch, ha, wa, hb, wb = hypercorr_mix432.size() 74 | 
hypercorr_encoded = hypercorr_mix432.view(bsz, ch, ha, wa, -1).mean(dim=-1) 75 | 76 | # Decode the encoded 4D-tensor 77 | hypercorr_decoded = self.decoder1(hypercorr_encoded) 78 | upsample_size = (hypercorr_decoded.size(-1) * 2,) * 2 79 | hypercorr_decoded = F.interpolate(hypercorr_decoded, upsample_size, mode='bilinear', align_corners=True) 80 | logit_mask = self.decoder2(hypercorr_decoded) 81 | 82 | return logit_mask 83 | -------------------------------------------------------------------------------- /common/vis.py: -------------------------------------------------------------------------------- 1 | r""" Visualize model predictions """ 2 | import os 3 | 4 | from PIL import Image 5 | import numpy as np 6 | import torchvision.transforms as transforms 7 | 8 | from . import utils 9 | 10 | 11 | class Visualizer: 12 | 13 | @classmethod 14 | def initialize(cls, visualize): 15 | cls.visualize = visualize 16 | if not visualize: 17 | return 18 | 19 | cls.colors = {'red': (255, 50, 50), 'blue': (102, 140, 255)} 20 | for key, value in cls.colors.items(): 21 | cls.colors[key] = tuple([c / 255 for c in cls.colors[key]]) 22 | 23 | cls.mean_img = [0.485, 0.456, 0.406] 24 | cls.std_img = [0.229, 0.224, 0.225] 25 | cls.to_pil = transforms.ToPILImage() 26 | cls.vis_path = './vis/' 27 | if not os.path.exists(cls.vis_path): os.makedirs(cls.vis_path) 28 | 29 | @classmethod 30 | def visualize_prediction_batch(cls, spt_img_b, spt_mask_b, qry_img_b, qry_mask_b, pred_mask_b, cls_id_b, batch_idx, iou_b=None): 31 | spt_img_b = utils.to_cpu(spt_img_b) 32 | spt_mask_b = utils.to_cpu(spt_mask_b) 33 | qry_img_b = utils.to_cpu(qry_img_b) 34 | qry_mask_b = utils.to_cpu(qry_mask_b) 35 | pred_mask_b = utils.to_cpu(pred_mask_b) 36 | cls_id_b = utils.to_cpu(cls_id_b) 37 | 38 | for sample_idx, (spt_img, spt_mask, qry_img, qry_mask, pred_mask, cls_id) in \ 39 | enumerate(zip(spt_img_b, spt_mask_b, qry_img_b, qry_mask_b, pred_mask_b, cls_id_b)): 40 | iou = iou_b[sample_idx] if iou_b is not None else None 41 | cls.visualize_prediction(spt_img, spt_mask, qry_img, qry_mask, pred_mask, cls_id, batch_idx, sample_idx, True, iou) 42 | 43 | @classmethod 44 | def to_numpy(cls, tensor, type): 45 | if type == 'img': 46 | return np.array(cls.to_pil(cls.unnormalize(tensor))).astype(np.uint8) 47 | elif type == 'mask': 48 | return np.array(tensor).astype(np.uint8) 49 | else: 50 | raise Exception('Undefined tensor type: %s' % type) 51 | 52 | @classmethod 53 | def visualize_prediction(cls, spt_imgs, spt_masks, qry_img, qry_mask, pred_mask, cls_id, batch_idx, sample_idx, label, iou=None): 54 | 55 | spt_color = cls.colors['blue'] 56 | qry_color = cls.colors['red'] 57 | pred_color = cls.colors['red'] 58 | 59 | spt_imgs = [cls.to_numpy(spt_img, 'img') for spt_img in spt_imgs] 60 | spt_pils = [cls.to_pil(spt_img) for spt_img in spt_imgs] 61 | spt_masks = [cls.to_numpy(spt_mask, 'mask') for spt_mask in spt_masks] 62 | spt_masked_pils = [Image.fromarray(cls.apply_mask(spt_img, spt_mask, spt_color)) for spt_img, spt_mask in zip(spt_imgs, spt_masks)] 63 | 64 | qry_img = cls.to_numpy(qry_img, 'img') 65 | qry_pil = cls.to_pil(qry_img) 66 | qry_mask = cls.to_numpy(qry_mask, 'mask') 67 | pred_mask = cls.to_numpy(pred_mask, 'mask') 68 | pred_masked_pil = Image.fromarray(cls.apply_mask(qry_img.astype(np.uint8), pred_mask.astype(np.uint8), pred_color)) 69 | qry_masked_pil = Image.fromarray(cls.apply_mask(qry_img.astype(np.uint8), qry_mask.astype(np.uint8), qry_color)) 70 | 71 | merged_pil = cls.merge_image_pair(spt_masked_pils + [pred_masked_pil, qry_pil, 
qry_masked_pil]) 72 | 73 | iou = iou.item() if iou else 0.0 74 | merged_pil.save(cls.vis_path + '%d_%d_class-%d_iou-%.2f' % (batch_idx, sample_idx, cls_id, iou) + '.jpg') 75 | 76 | @classmethod 77 | def merge_image_pair(cls, pil_imgs): 78 | r""" Horizontally aligns a pair of pytorch tensor images (3, H, W) and returns PIL object """ 79 | 80 | canvas_width = sum([pil.size[0] for pil in pil_imgs]) 81 | canvas_height = max([pil.size[1] for pil in pil_imgs]) 82 | canvas = Image.new('RGB', (canvas_width, canvas_height)) 83 | 84 | xpos = 0 85 | for pil in pil_imgs: 86 | canvas.paste(pil, (xpos, 0)) 87 | xpos += pil.size[0] 88 | 89 | return canvas 90 | 91 | @classmethod 92 | def apply_mask(cls, image, mask, color, alpha=0.5): 93 | r""" Apply mask to the given image. """ 94 | for c in range(3): 95 | image[:, :, c] = np.where(mask == 1, 96 | image[:, :, c] * 97 | (1 - alpha) + alpha * color[c] * 255, 98 | image[:, :, c]) 99 | return image 100 | 101 | @classmethod 102 | def unnormalize(cls, img): 103 | img = img.clone() 104 | for im_channel, mean, std in zip(img, cls.mean_img, cls.std_img): 105 | im_channel.mul_(std).add_(mean) 106 | return img 107 | -------------------------------------------------------------------------------- /data/fss.py: -------------------------------------------------------------------------------- 1 | r""" FSS-1000 few-shot semantic segmentation dataset """ 2 | import os 3 | import glob 4 | 5 | from torch.utils.data import Dataset 6 | import torch.nn.functional as F 7 | import torch 8 | import PIL.Image as Image 9 | import numpy as np 10 | 11 | 12 | class DatasetFSS(Dataset): 13 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize): 14 | self.split = split 15 | self.benchmark = 'fss' 16 | self.shot = shot 17 | 18 | self.base_path = os.path.join(datapath, 'FSS-1000') 19 | 20 | # Given predefined test split, load randomly generated training/val splits: 21 | # (reference regarding trn/val/test splits: https://github.com/HKUSTCV/FSS-1000/issues/7)) 22 | with open('./data/splits/fss/%s.txt' % split, 'r') as f: 23 | self.categories = f.read().split('\n')[:-1] 24 | self.categories = sorted(self.categories) 25 | 26 | self.class_ids = self.build_class_ids() 27 | self.img_metadata = self.build_img_metadata() 28 | 29 | self.transform = transform 30 | 31 | def __len__(self): 32 | return len(self.img_metadata) 33 | 34 | def __getitem__(self, idx): 35 | query_name, support_names, class_sample = self.sample_episode(idx) 36 | query_img, query_mask, support_imgs, support_masks = self.load_frame(query_name, support_names) 37 | 38 | query_img = self.transform(query_img) 39 | query_mask = F.interpolate(query_mask.unsqueeze(0).unsqueeze(0).float(), query_img.size()[-2:], mode='nearest').squeeze() 40 | 41 | support_imgs = torch.stack([self.transform(support_img) for support_img in support_imgs]) 42 | 43 | support_masks_tmp = [] 44 | for smask in support_masks: 45 | smask = F.interpolate(smask.unsqueeze(0).unsqueeze(0).float(), support_imgs.size()[-2:], mode='nearest').squeeze() 46 | support_masks_tmp.append(smask) 47 | support_masks = torch.stack(support_masks_tmp) 48 | 49 | batch = {'query_img': query_img, 50 | 'query_mask': query_mask, 51 | 'query_name': query_name, 52 | 53 | 'support_imgs': support_imgs, 54 | 'support_masks': support_masks, 55 | 'support_names': support_names, 56 | 57 | 'class_id': torch.tensor(class_sample)} 58 | 59 | return batch 60 | 61 | def load_frame(self, query_name, support_names): 62 | query_img = Image.open(query_name).convert('RGB') 63 
| support_imgs = [Image.open(name).convert('RGB') for name in support_names] 64 | 65 | query_id = query_name.split('/')[-1].split('.')[0] 66 | query_name = os.path.join(os.path.dirname(query_name), query_id) + '.png' 67 | support_ids = [name.split('/')[-1].split('.')[0] for name in support_names] 68 | support_names = [os.path.join(os.path.dirname(name), sid) + '.png' for name, sid in zip(support_names, support_ids)] 69 | 70 | query_mask = self.read_mask(query_name) 71 | support_masks = [self.read_mask(name) for name in support_names] 72 | 73 | return query_img, query_mask, support_imgs, support_masks 74 | 75 | def read_mask(self, img_name): 76 | mask = torch.tensor(np.array(Image.open(img_name).convert('L'))) 77 | mask[mask < 128] = 0 78 | mask[mask >= 128] = 1 79 | return mask 80 | 81 | def sample_episode(self, idx): 82 | query_name = self.img_metadata[idx] 83 | class_sample = self.categories.index(query_name.split('/')[-2]) 84 | if self.split == 'val': 85 | class_sample += 520 86 | elif self.split == 'test': 87 | class_sample += 760 88 | 89 | support_names = [] 90 | while True: # keep sampling support set if query == support 91 | support_name = np.random.choice(range(1, 11), 1, replace=False)[0] 92 | support_name = os.path.join(os.path.dirname(query_name), str(support_name)) + '.jpg' 93 | if query_name != support_name: support_names.append(support_name) 94 | if len(support_names) == self.shot: break 95 | 96 | return query_name, support_names, class_sample 97 | 98 | def build_class_ids(self): 99 | if self.split == 'trn': 100 | class_ids = range(0, 520) 101 | elif self.split == 'val': 102 | class_ids = range(520, 760) 103 | elif self.split == 'test': 104 | class_ids = range(760, 1000) 105 | return class_ids 106 | 107 | def build_img_metadata(self): 108 | img_metadata = [] 109 | for cat in self.categories: 110 | img_paths = sorted([path for path in glob.glob('%s/*' % os.path.join(self.base_path, cat))]) 111 | for img_path in img_paths: 112 | if os.path.basename(img_path).split('.')[1] == 'jpg': 113 | img_metadata.append(img_path) 114 | return img_metadata 115 | -------------------------------------------------------------------------------- /data/coco.py: -------------------------------------------------------------------------------- 1 | r""" COCO-20i few-shot semantic segmentation dataset """ 2 | import os 3 | import pickle 4 | 5 | from torch.utils.data import Dataset 6 | import torch.nn.functional as F 7 | import torch 8 | import PIL.Image as Image 9 | import numpy as np 10 | 11 | 12 | class DatasetCOCO(Dataset): 13 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize): 14 | self.split = 'val' if split in ['val', 'test'] else 'trn' 15 | self.fold = fold 16 | self.nfolds = 4 17 | self.nclass = 80 18 | self.benchmark = 'coco' 19 | self.shot = shot 20 | self.split_coco = split if split == 'val2014' else 'train2014' 21 | self.base_path = os.path.join(datapath, 'COCO2014') 22 | self.transform = transform 23 | self.use_original_imgsize = use_original_imgsize 24 | 25 | self.class_ids = self.build_class_ids() 26 | self.img_metadata_classwise = self.build_img_metadata_classwise() 27 | self.img_metadata = self.build_img_metadata() 28 | 29 | def __len__(self): 30 | return len(self.img_metadata) if self.split == 'trn' else 1000 31 | 32 | def __getitem__(self, idx): 33 | # ignores idx during training & testing and perform uniform sampling over object classes to form an episode 34 | # (due to the large size of the COCO dataset) 35 | query_img, query_mask, support_imgs, 
support_masks, query_name, support_names, class_sample, org_qry_imsize = self.load_frame() 36 | 37 | query_img = self.transform(query_img) 38 | query_mask = query_mask.float() 39 | if not self.use_original_imgsize: 40 | query_mask = F.interpolate(query_mask.unsqueeze(0).unsqueeze(0).float(), query_img.size()[-2:], mode='nearest').squeeze() 41 | 42 | support_imgs = torch.stack([self.transform(support_img) for support_img in support_imgs]) 43 | for midx, smask in enumerate(support_masks): 44 | support_masks[midx] = F.interpolate(smask.unsqueeze(0).unsqueeze(0).float(), support_imgs.size()[-2:], mode='nearest').squeeze() 45 | support_masks = torch.stack(support_masks) 46 | 47 | batch = {'query_img': query_img, 48 | 'query_mask': query_mask, 49 | 'query_name': query_name, 50 | 51 | 'org_query_imsize': org_qry_imsize, 52 | 53 | 'support_imgs': support_imgs, 54 | 'support_masks': support_masks, 55 | 'support_names': support_names, 56 | 'class_id': torch.tensor(class_sample)} 57 | 58 | return batch 59 | 60 | def build_class_ids(self): 61 | nclass_trn = self.nclass // self.nfolds 62 | class_ids_val = [self.fold + self.nfolds * v for v in range(nclass_trn)] 63 | class_ids_trn = [x for x in range(self.nclass) if x not in class_ids_val] 64 | class_ids = class_ids_trn if self.split == 'trn' else class_ids_val 65 | 66 | return class_ids 67 | 68 | def build_img_metadata_classwise(self): 69 | with open('./data/splits/coco/%s/fold%d.pkl' % (self.split, self.fold), 'rb') as f: 70 | img_metadata_classwise = pickle.load(f) 71 | return img_metadata_classwise 72 | 73 | def build_img_metadata(self): 74 | img_metadata = [] 75 | for k in self.img_metadata_classwise.keys(): 76 | img_metadata += self.img_metadata_classwise[k] 77 | return sorted(list(set(img_metadata))) 78 | 79 | def read_mask(self, name): 80 | mask_path = os.path.join(self.base_path, 'annotations', name) 81 | mask = torch.tensor(np.array(Image.open(mask_path[:mask_path.index('.jpg')] + '.png'))) 82 | return mask 83 | 84 | def load_frame(self): 85 | class_sample = np.random.choice(self.class_ids, 1, replace=False)[0] 86 | query_name = np.random.choice(self.img_metadata_classwise[class_sample], 1, replace=False)[0] 87 | query_img = Image.open(os.path.join(self.base_path, query_name)).convert('RGB') 88 | query_mask = self.read_mask(query_name) 89 | 90 | org_qry_imsize = query_img.size 91 | 92 | query_mask[query_mask != class_sample + 1] = 0 93 | query_mask[query_mask == class_sample + 1] = 1 94 | 95 | support_names = [] 96 | while True: # keep sampling support set if query == support 97 | support_name = np.random.choice(self.img_metadata_classwise[class_sample], 1, replace=False)[0] 98 | if query_name != support_name: support_names.append(support_name) 99 | if len(support_names) == self.shot: break 100 | 101 | support_imgs = [] 102 | support_masks = [] 103 | for support_name in support_names: 104 | support_imgs.append(Image.open(os.path.join(self.base_path, support_name)).convert('RGB')) 105 | support_mask = self.read_mask(support_name) 106 | support_mask[support_mask != class_sample + 1] = 0 107 | support_mask[support_mask == class_sample + 1] = 1 108 | support_masks.append(support_mask) 109 | 110 | return query_img, query_mask, support_imgs, support_masks, query_name, support_names, class_sample, org_qry_imsize 111 | 112 | -------------------------------------------------------------------------------- /common/logger.py: -------------------------------------------------------------------------------- 1 | r""" Logging during training/testing """ 2 | 
import datetime 3 | import logging 4 | import os 5 | 6 | from tensorboardX import SummaryWriter 7 | import torch 8 | 9 | 10 | class AverageMeter: 11 | r""" Stores loss, evaluation results """ 12 | def __init__(self, dataset): 13 | self.benchmark = dataset.benchmark 14 | self.class_ids_interest = dataset.class_ids 15 | self.class_ids_interest = torch.tensor(self.class_ids_interest).cuda() 16 | 17 | if self.benchmark == 'isaid': 18 | self.nclass = 15 19 | elif self.benchmark == 'dlrsd': 20 | self.nclass = 15 21 | elif self.benchmark == 'pascal': 22 | self.nclass = 20 23 | elif self.benchmark == 'coco': 24 | self.nclass = 80 25 | elif self.benchmark == 'fss': 26 | self.nclass = 1000 27 | 28 | self.intersection_buf = torch.zeros([2, self.nclass]).float().cuda() 29 | self.union_buf = torch.zeros([2, self.nclass]).float().cuda() 30 | self.ones = torch.ones_like(self.union_buf) 31 | self.loss_buf = [] 32 | 33 | def update(self, inter_b, union_b, class_id, loss): 34 | self.intersection_buf.index_add_(1, class_id, inter_b.float()) 35 | self.union_buf.index_add_(1, class_id, union_b.float()) 36 | if loss is None: 37 | loss = torch.tensor(0.0) 38 | self.loss_buf.append(loss) 39 | 40 | def compute_iou(self): 41 | iou = self.intersection_buf.float() / \ 42 | torch.max(torch.stack([self.union_buf, self.ones]), dim=0)[0] 43 | iou = iou.index_select(1, self.class_ids_interest) 44 | miou = iou[1].mean() * 100 45 | 46 | fb_iou = (self.intersection_buf.index_select(1, self.class_ids_interest).sum(dim=1) / 47 | self.union_buf.index_select(1, self.class_ids_interest).sum(dim=1)).mean() * 100 48 | 49 | return miou, fb_iou 50 | 51 | def write_result(self, split, epoch): 52 | iou, fb_iou = self.compute_iou() 53 | 54 | loss_buf = torch.stack(self.loss_buf) 55 | msg = '\n*** %s ' % split 56 | msg += '[@Epoch %02d] ' % epoch 57 | msg += 'Avg L: %6.5f ' % loss_buf.mean() 58 | msg += 'mIoU: %5.2f ' % iou 59 | msg += 'FB-IoU: %5.2f ' % fb_iou 60 | 61 | msg += '***\n' 62 | Logger.info(msg) 63 | 64 | def write_process(self, batch_idx, datalen, epoch, write_batch_idx=20): 65 | if batch_idx % write_batch_idx == 0: 66 | msg = '[Epoch: %02d] ' % epoch if epoch != -1 else '' 67 | msg += '[Batch: %04d/%04d] ' % (batch_idx+1, datalen) 68 | iou, fb_iou = self.compute_iou() 69 | if epoch != -1: 70 | loss_buf = torch.stack(self.loss_buf) 71 | msg += 'L: %6.5f ' % loss_buf[-1] 72 | msg += 'Avg L: %6.5f ' % loss_buf.mean() 73 | msg += 'mIoU: %5.2f | ' % iou 74 | msg += 'FB-IoU: %5.2f' % fb_iou 75 | Logger.info(msg) 76 | 77 | 78 | class Logger: 79 | r""" Writes evaluation results of training/testing """ 80 | @classmethod 81 | def initialize(cls, args, training): 82 | logtime = datetime.datetime.now().__format__('_%m%d_%H%M%S') 83 | logpath = args.logpath if training else '_TEST_' + args.load.split('/')[-2].split('.')[0] + logtime 84 | if logpath == '': logpath = logtime 85 | 86 | cls.logpath = os.path.join('logs', logpath + '.log') 87 | cls.benchmark = args.benchmark 88 | os.makedirs(cls.logpath) 89 | 90 | logging.basicConfig(filemode='w', 91 | filename=os.path.join(cls.logpath, 'log.txt'), 92 | level=logging.INFO, 93 | format='%(message)s', 94 | datefmt='%m-%d %H:%M:%S') 95 | 96 | # Console log config 97 | console = logging.StreamHandler() 98 | console.setLevel(logging.INFO) 99 | formatter = logging.Formatter('%(message)s') 100 | console.setFormatter(formatter) 101 | logging.getLogger('').addHandler(console) 102 | 103 | # Tensorboard writer 104 | cls.tbd_writer = SummaryWriter(os.path.join(cls.logpath, 'tbd/runs')) 105 | 106 | # Log 
arguments 107 | logging.info('\n:=========== Few-shot Seg. with HSNet ===========') 108 | for arg_key in args.__dict__: 109 | logging.info('| %20s: %-24s' % (arg_key, str(args.__dict__[arg_key]))) 110 | logging.info(':================================================\n') 111 | 112 | @classmethod 113 | def info(cls, msg): 114 | r""" Writes log message to log.txt """ 115 | logging.info(msg) 116 | 117 | @classmethod 118 | def save_model_miou(cls, model, epoch, val_miou): 119 | torch.save(model.state_dict(), os.path.join(cls.logpath, 'best_model.pt')) 120 | cls.info('Model saved @%d w/ val. mIoU: %5.2f.\n' % (epoch, val_miou)) 121 | 122 | @classmethod 123 | def log_params(cls, model): 124 | backbone_param = 0 125 | learner_param = 0 126 | for k in model.state_dict().keys(): 127 | n_param = model.state_dict()[k].view(-1).size(0) 128 | if k.split('.')[0] in 'backbone': 129 | if k.split('.')[1] in ['classifier', 'fc']: # as fc layers are not used in HSNet 130 | continue 131 | backbone_param += n_param 132 | else: 133 | learner_param += n_param 134 | Logger.info('Backbone # param.: %d' % backbone_param) 135 | Logger.info('Learnable # param.: %d' % learner_param) 136 | Logger.info('Total # param.: %d' % (backbone_param + learner_param)) 137 | 138 | -------------------------------------------------------------------------------- /model/sccnet.py: -------------------------------------------------------------------------------- 1 | r""" SCCNetwork Implementation""" 2 | from functools import reduce 3 | from operator import add 4 | 5 | import torch 6 | import torch.nn as nn 7 | import torch.nn.functional as F 8 | from torchvision.models import resnet 9 | from torchvision.models import vgg 10 | 11 | from .base.feature import extract_feat_vgg, extract_feat_res 12 | from .base.correlation import Correlation 13 | from .learner import HPNLearner 14 | 15 | 16 | class SCCNetwork(nn.Module): 17 | def __init__(self, backbone, use_original_imgsize, freeze=True): 18 | super(SCCNetwork, self).__init__() 19 | 20 | # 1. 
Backbone network initialization 21 | self.backbone_type = backbone 22 | self.use_original_imgsize = use_original_imgsize 23 | if backbone == 'vgg16': 24 | self.backbone = vgg.vgg16(pretrained=True) 25 | self.feat_ids = [17, 19, 21, 24, 26, 28, 30] 26 | self.extract_feats = extract_feat_vgg 27 | nbottlenecks = [2, 2, 3, 3, 3, 1] 28 | elif backbone == 'resnet50': 29 | self.backbone = resnet.resnet50(pretrained=True) 30 | self.feat_ids = list(range(4, 17)) 31 | self.extract_feats = extract_feat_res 32 | nbottlenecks = [3, 4, 6, 3] 33 | elif backbone == 'resnet101': 34 | self.backbone = resnet.resnet101(pretrained=True) 35 | self.feat_ids = list(range(4, 34)) 36 | self.extract_feats = extract_feat_res 37 | nbottlenecks = [3, 4, 23, 3] 38 | else: 39 | raise Exception('Unavailable backbone: %s' % backbone) 40 | 41 | self.bottleneck_ids = reduce(add, list(map(lambda x: list(range(x)), nbottlenecks))) 42 | self.lids = reduce(add, [[i + 1] * x for i, x in enumerate(nbottlenecks)]) 43 | self.stack_ids = torch.tensor(self.lids).bincount().__reversed__().cumsum(dim=0)[:3] 44 | if freeze: 45 | self.backbone.eval() 46 | self.hpn_learner = HPNLearner(list(reversed(nbottlenecks[-3:]))) 47 | self.hpn_learner2 = HPNLearner(list(reversed(nbottlenecks[-3:]))) 48 | self.cross_entropy_loss = nn.CrossEntropyLoss() 49 | self.merger = nn.Sequential(nn.Conv2d(4, 2, (1, 1), bias=False), nn.ReLU()) 50 | 51 | def forward(self, query_img, support_img, support_mask, query_mask=None): 52 | with torch.no_grad(): 53 | query_feats = self.extract_feats(query_img, self.backbone, self.feat_ids, self.bottleneck_ids, self.lids) 54 | support_feats = self.extract_feats(support_img, self.backbone, self.feat_ids, self.bottleneck_ids, self.lids) 55 | support_feats = self.mask_feature(support_feats, support_mask.clone()) 56 | corr = Correlation.multilayer_correlation(query_feats, support_feats, self.stack_ids) 57 | 58 | logit_mask_ori = self.hpn_learner(corr) 59 | if not self.use_original_imgsize: 60 | logit_mask = F.interpolate(logit_mask_ori, support_img.size()[2:], mode='bilinear', align_corners=True) 61 | 62 | pred_mask = logit_mask.argmax(dim=1) 63 | with torch.no_grad(): 64 | masked_qfeats = self.mask_feature(query_feats, pred_mask) 65 | corr2 = Correlation.multilayer_correlation(query_feats, masked_qfeats, self.stack_ids) 66 | 67 | logit_mask2 = self.hpn_learner2(corr2) 68 | logit = torch.cat([logit_mask_ori, logit_mask2], dim=1) 69 | logit = self.merger(logit) 70 | if not self.use_original_imgsize: 71 | logit = F.interpolate(logit, support_img.size()[2:], mode='bilinear', align_corners=True) 72 | 73 | logit_mask3 = None 74 | if query_mask is not None: 75 | with torch.no_grad(): 76 | masked_qfeats3 = self.mask_feature(query_feats, query_mask) 77 | corr3 = Correlation.multilayer_correlation(query_feats, masked_qfeats3, self.stack_ids) 78 | logit_mask3 = self.hpn_learner2(corr3) 79 | 80 | if not self.use_original_imgsize: 81 | logit_mask3 = F.interpolate(logit_mask3, support_img.size()[2:], mode='bilinear', align_corners=True) 82 | 83 | return logit, logit_mask3 84 | 85 | def mask_feature(self, features, support_mask): 86 | for idx, feature in enumerate(features): 87 | mask = F.interpolate(support_mask.unsqueeze(1).float(), feature.size()[2:], mode='bilinear', align_corners=True) 88 | features[idx] = features[idx] * mask 89 | return features 90 | 91 | def predict_mask_nshot(self, batch, nshot): 92 | 93 | # Perform multiple prediction given (nshot) number of different support sets 94 | logit_mask_agg = 0 95 | for s_idx in 
range(nshot): 96 | logit_mask, _ = self(batch['query_img'], batch['support_imgs'][:, s_idx], batch['support_masks'][:, s_idx]) 97 | 98 | if self.use_original_imgsize: 99 | org_qry_imsize = tuple([batch['org_query_imsize'][1].item(), batch['org_query_imsize'][0].item()]) 100 | logit_mask = F.interpolate(logit_mask, org_qry_imsize, mode='bilinear', align_corners=True) 101 | 102 | logit_mask_agg += logit_mask.argmax(dim=1).clone() 103 | if nshot == 1: return logit_mask_agg 104 | 105 | # Average & quantize predictions given threshold (=0.5) 106 | bsz = logit_mask_agg.size(0) 107 | max_vote = logit_mask_agg.view(bsz, -1).max(dim=1)[0] 108 | max_vote = torch.stack([max_vote, torch.ones_like(max_vote).long()]) 109 | max_vote = max_vote.max(dim=0)[0].view(bsz, 1, 1) 110 | pred_mask = logit_mask_agg.float() / max_vote 111 | threshold = 0.4 112 | pred_mask[pred_mask < threshold] = 0 113 | pred_mask[pred_mask >= threshold] = 1 114 | 115 | return pred_mask 116 | 117 | def compute_objective(self, logit_mask, gt_mask): 118 | bsz = logit_mask.size(0) 119 | logit_mask = logit_mask.view(bsz, 2, -1) 120 | gt_mask = gt_mask.view(bsz, -1).long() 121 | 122 | return self.cross_entropy_loss(logit_mask, gt_mask) 123 | 124 | def focal_loss(self, x, p=1, c=0.1): 125 | return -torch.pow(1 - x, p) * torch.log(c + x) 126 | 127 | def compute_area_loss(self, pred_mask, gt_mask): 128 | pred_area = pred_mask.flatten().float().mean() 129 | gt_area = gt_mask.flatten().float().mean() 130 | ratio = torch.minimum(pred_area, gt_area) / (0.01 + torch.maximum(pred_area, gt_area)) 131 | return self.focal_loss(ratio) 132 | 133 | def compute_focal_loss(self, pred_mask): 134 | pred_mask = pred_mask.flatten().float() 135 | return self.focal_loss(pred_mask.mean()) 136 | 137 | def train_mode(self): 138 | self.train() 139 | self.backbone.eval() # to prevent BN from learning data statistics with exponential averaging 140 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | r""" training (validation) code """ 2 | import argparse 3 | 4 | import torch.optim as optim 5 | import torch.nn as nn 6 | import torch 7 | 8 | from model.sccnet import SCCNetwork 9 | from common.logger import Logger, AverageMeter 10 | from common.evaluation import Evaluator 11 | from common import utils, my_optim 12 | from data.dataset import FSSDataset 13 | 14 | 15 | def train(epoch, model, dataloader, optimizer, training, loss_type='no', ld=1.0): 16 | r""" Train HSNet """ 17 | train.count = getattr(train, 'count', 0) 18 | 19 | # Force randomness during training / freeze randomness during testing 20 | utils.fix_randseed(None) if training else utils.fix_randseed(0) 21 | model.module.train_mode() if training else model.module.eval() 22 | average_meter = AverageMeter(dataloader.dataset) 23 | 24 | for idx, batch in enumerate(dataloader): 25 | train.count += 1 26 | if train.count > args.max_steps: 27 | break 28 | 29 | my_optim.adjust_learning_rate_poly(args, optimizer, train.count) 30 | 31 | # 1. forward pass 32 | batch = utils.to_cuda(batch) 33 | logit_mask, logit_mask2 = model(batch['query_img'], batch['support_imgs'].squeeze(1), 34 | batch['support_masks'].squeeze(1), batch['query_mask']) 35 | pred_mask = logit_mask.argmax(dim=1) 36 | 37 | # 2. 
Compute loss & update model parameters 38 | loss = model.module.compute_objective(logit_mask, batch['query_mask']) 39 | loss2 = model.module.compute_objective(logit_mask2, batch['query_mask']) 40 | loss += ld * loss2 41 | if loss_type == 'focal': 42 | loss += 0.5*model.module.compute_focal_loss(pred_mask) 43 | elif loss_type == 'area': 44 | loss += 0.5 * model.module.compute_area_loss(pred_mask, batch['query_mask']) 45 | if training: 46 | optimizer.zero_grad() 47 | loss.backward() 48 | optimizer.step() 49 | 50 | # 3. Evaluate prediction 51 | area_inter, area_union = Evaluator.classify_prediction(pred_mask, batch) 52 | average_meter.update(area_inter, area_union, batch['class_id'], loss.detach().clone()) 53 | average_meter.write_process(idx, len(dataloader), epoch, write_batch_idx=50) 54 | 55 | # Write evaluation results 56 | average_meter.write_result('Training' if training else 'Validation', epoch) 57 | avg_loss = utils.mean(average_meter.loss_buf) 58 | miou, fb_iou = average_meter.compute_iou() 59 | 60 | return avg_loss, miou, fb_iou 61 | 62 | 63 | if __name__ == '__main__': 64 | 65 | # Arguments parsing 66 | parser = argparse.ArgumentParser(description='SCCNet Pytorch Implementation') 67 | parser.add_argument('--datapath', type=str, default='../Datasets') 68 | parser.add_argument('--benchmark', type=str, default='pascal', choices=['pascal', 'isaid', 'dlrsd']) 69 | parser.add_argument('--logpath', type=str, default='') 70 | parser.add_argument('--loss', type=str, default='no', choices=['no', 'focal', 'area']) 71 | parser.add_argument('--bsz', type=int, default=20) 72 | parser.add_argument('--lr', type=float, default=1e-3) 73 | parser.add_argument('--power', type=float, default=0.9) 74 | parser.add_argument('--ld', type=float, default=1.0) 75 | parser.add_argument('--niter', type=int, default=2000) 76 | parser.add_argument('--max_steps', type=int, default=50001) 77 | parser.add_argument('--nworker', type=int, default=8) 78 | parser.add_argument('--fold', type=int, default=0, choices=[0, 1, 2, 3]) 79 | parser.add_argument('--backbone', type=str, default='resnet101', choices=['vgg16', 'resnet50', 'resnet101']) 80 | parser.add_argument('--img_size', type=int, default=400) 81 | parser.add_argument('--use_original_imgsize', type=bool, default=False) 82 | parser.add_argument('--aug', type=bool, default=False) 83 | parser.add_argument('--freeze', type=bool, default=True) 84 | args = parser.parse_args() 85 | Logger.initialize(args, training=True) 86 | 87 | # Model initialization 88 | model = SCCNetwork(args.backbone, False, args.freeze) 89 | Logger.log_params(model) 90 | 91 | # Device setup 92 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") 93 | Logger.info('# available GPUs: %d' % torch.cuda.device_count()) 94 | model = nn.DataParallel(model) 95 | model.to(device) 96 | 97 | # Helper classes (for training) initialization 98 | optimizer = my_optim.get_finetune_optimizer(args, model) 99 | Evaluator.initialize() 100 | 101 | # Dataset initialization 102 | FSSDataset.initialize(img_size=args.img_size, 103 | datapath=args.datapath, 104 | use_original_imgsize=args.use_original_imgsize) 105 | dataloader_trn = FSSDataset.build_dataloader(args.benchmark, args.bsz, 106 | args.nworker, args.fold, 107 | 'trn', aug=args.aug) 108 | dataloader_val = FSSDataset.build_dataloader(args.benchmark, args.bsz, 109 | args.nworker, args.fold, 110 | 'val') 111 | 112 | # Train HSNet 113 | best_val_miou = float('-inf') 114 | best_val_loss = float('inf') 115 | for epoch in range(args.niter): 116 | 
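        # Each epoch runs one training pass and then a validation pass under torch.no_grad();
        # the checkpoint with the best validation mIoU is saved below, and the loop exits once max_steps is exceeded.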
117 | trn_loss, trn_miou, trn_fb_iou = train(epoch, 118 | model, 119 | dataloader_trn, 120 | optimizer, 121 | training=True, 122 | loss_type=args.loss, 123 | ld=args.ld) 124 | with torch.no_grad(): 125 | val_loss, val_miou, val_fb_iou = train(epoch, 126 | model, 127 | dataloader_val, 128 | optimizer, 129 | training=False) 130 | 131 | # Save the best model 132 | if val_miou > best_val_miou: 133 | best_val_miou = val_miou 134 | Logger.save_model_miou(model, epoch, val_miou) 135 | 136 | Logger.tbd_writer.add_scalars('data/loss', {'trn_loss': trn_loss, 'val_loss': val_loss}, epoch) 137 | Logger.tbd_writer.add_scalars('data/miou', {'trn_miou': trn_miou, 'val_miou': val_miou}, epoch) 138 | Logger.tbd_writer.add_scalars('data/fb_iou', {'trn_fb_iou': trn_fb_iou, 'val_fb_iou': val_fb_iou}, epoch) 139 | Logger.tbd_writer.flush() 140 | 141 | if train.count > args.max_steps: 142 | break 143 | Logger.tbd_writer.close() 144 | Logger.info('==================== Finished Training ====================') 145 | -------------------------------------------------------------------------------- /data/pascal.py: -------------------------------------------------------------------------------- 1 | r""" PASCAL-5i few-shot semantic segmentation dataset """ 2 | import os 3 | 4 | from torch.utils.data import Dataset 5 | import torch.nn.functional as F 6 | import torch 7 | import PIL.Image as Image 8 | import numpy as np 9 | 10 | 11 | class DatasetPASCAL(Dataset): 12 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize): 13 | self.split = 'val' if split in ['val', 'test'] else 'trn' 14 | self.fold = fold 15 | self.nfolds = 4 16 | self.nclass = 20 17 | self.benchmark = 'pascal' 18 | self.shot = shot 19 | self.use_original_imgsize = use_original_imgsize 20 | 21 | self.img_path = os.path.join(datapath, 'VOC2012/JPEGImages/') 22 | self.ann_path = os.path.join(datapath, 'VOC2012/SegmentationClassAug/') 23 | self.transform = transform 24 | 25 | self.class_ids = self.build_class_ids() 26 | self.img_metadata = self.build_img_metadata() 27 | self.img_metadata_classwise = self.build_img_metadata_classwise() 28 | 29 | def __len__(self): 30 | return len(self.img_metadata) if self.split == 'trn' else 1000 31 | 32 | def __getitem__(self, idx): 33 | idx %= len(self.img_metadata) # for testing, as n_images < 1000 34 | query_name, support_names, class_sample = self.sample_episode(idx) 35 | query_img, query_cmask, support_imgs, support_cmasks, org_qry_imsize = self.load_frame(query_name, support_names) 36 | 37 | if self.aug: 38 | query_img, query_cmask = self.tv2(query_img, query_cmask) 39 | for i in range(self.shot): 40 | support_imgs[i], support_cmasks[i] = self.tv2(support_imgs[i], support_cmasks[i]) 41 | 42 | query_cmask = torch.tensor(np.array(query_cmask)) 43 | for i in range(self.shot): 44 | support_cmasks[i] = torch.tensor(np.array(support_cmasks[i])) 45 | 46 | query_img = self.transform(query_img) 47 | if not self.use_original_imgsize: 48 | query_cmask = F.interpolate(query_cmask.unsqueeze(0).unsqueeze(0).float(), query_img.size()[-2:], mode='nearest').squeeze() 49 | 50 | query_mask, query_ignore_idx = self.extract_ignore_idx(query_cmask.float(), class_sample) 51 | 52 | support_imgs = torch.stack([self.transform(support_img) for support_img in support_imgs]) 53 | 54 | support_masks = [] 55 | support_ignore_idxs = [] 56 | for scmask in support_cmasks: 57 | scmask = F.interpolate(scmask.unsqueeze(0).unsqueeze(0).float(), support_imgs.size()[-2:], mode='nearest').squeeze() 58 | support_mask, 
support_ignore_idx = self.extract_ignore_idx(scmask, class_sample) 59 | support_masks.append(support_mask) 60 | support_ignore_idxs.append(support_ignore_idx) 61 | 62 | support_masks = torch.stack(support_masks) 63 | support_ignore_idxs = torch.stack(support_ignore_idxs) 64 | 65 | batch = {'query_img': query_img, 66 | 'query_mask': query_mask, 67 | 'query_name': query_name, 68 | 'query_ignore_idx': query_ignore_idx, 69 | 70 | 'org_query_imsize': org_qry_imsize, 71 | 72 | 'support_imgs': support_imgs, 73 | 'support_masks': support_masks, 74 | 'support_names': support_names, 75 | 'support_ignore_idxs': support_ignore_idxs, 76 | 77 | 'class_id': torch.tensor(class_sample)} 78 | 79 | return batch 80 | 81 | def extract_ignore_idx(self, mask, class_id): 82 | boundary = (mask / 255).floor() 83 | mask[mask != class_id + 1] = 0 84 | mask[mask == class_id + 1] = 1 85 | 86 | return mask, boundary 87 | 88 | def load_frame(self, query_name, support_names): 89 | query_img = self.read_img(query_name) 90 | query_mask = self.read_mask(query_name) 91 | support_imgs = [self.read_img(name) for name in support_names] 92 | support_masks = [self.read_mask(name) for name in support_names] 93 | 94 | org_qry_imsize = query_img.size 95 | 96 | return query_img, query_mask, support_imgs, support_masks, org_qry_imsize 97 | 98 | def read_mask(self, img_name): 99 | r"""Return segmentation mask in PIL Image""" 100 | mask = Image.open(os.path.join(self.ann_path, img_name) + '.png') 101 | # mask = torch.tensor(np.array(Image.open(os.path.join(self.ann_path, img_name) + '.png'))) 102 | return mask 103 | 104 | def read_img(self, img_name): 105 | r"""Return RGB image in PIL Image""" 106 | return Image.open(os.path.join(self.img_path, img_name) + '.jpg') 107 | 108 | def sample_episode(self, idx): 109 | query_name, class_sample = self.img_metadata[idx] 110 | 111 | support_names = [] 112 | while True: # keep sampling support set if query == support 113 | support_name = np.random.choice(self.img_metadata_classwise[class_sample], 1, replace=False)[0] 114 | if query_name != support_name: support_names.append(support_name) 115 | if len(support_names) == self.shot: break 116 | 117 | return query_name, support_names, class_sample 118 | 119 | def build_class_ids(self): 120 | nclass_trn = self.nclass // self.nfolds 121 | class_ids_val = [self.fold * nclass_trn + i for i in range(nclass_trn)] 122 | class_ids_trn = [x for x in range(self.nclass) if x not in class_ids_val] 123 | 124 | if self.split == 'trn': 125 | return class_ids_trn 126 | else: 127 | return class_ids_val 128 | 129 | def build_img_metadata(self): 130 | 131 | def read_metadata(split, fold_id): 132 | fold_n_metadata = os.path.join('data/splits/pascal/%s/fold%d.txt' % (split, fold_id)) 133 | with open(fold_n_metadata, 'r') as f: 134 | fold_n_metadata = f.read().split('\n')[:-1] 135 | fold_n_metadata = [[data.split('__')[0], int(data.split('__')[1]) - 1] for data in fold_n_metadata] 136 | return fold_n_metadata 137 | 138 | img_metadata = [] 139 | if self.split == 'trn': # For training, read image-metadata of "the other" folds 140 | for fold_id in range(self.nfolds): 141 | if fold_id == self.fold: # Skip validation fold 142 | continue 143 | img_metadata += read_metadata(self.split, fold_id) 144 | elif self.split == 'val': # For validation, read image-metadata of "current" fold 145 | img_metadata = read_metadata(self.split, self.fold) 146 | else: 147 | raise Exception('Undefined split %s: ' % self.split) 148 | 149 | print('Total (%s) images are : %d' % (self.split, 
len(img_metadata))) 150 | 151 | return img_metadata 152 | 153 | def build_img_metadata_classwise(self): 154 | img_metadata_classwise = {} 155 | for class_id in range(self.nclass): 156 | img_metadata_classwise[class_id] = [] 157 | 158 | for img_name, img_class in self.img_metadata: 159 | img_metadata_classwise[img_class] += [img_name] 160 | return img_metadata_classwise 161 | -------------------------------------------------------------------------------- /data/splits/pascal/val/fold0.txt: -------------------------------------------------------------------------------- 1 | 2007_000033__01 2 | 2007_000061__04 3 | 2007_000129__02 4 | 2007_000346__05 5 | 2007_000529__04 6 | 2007_000559__05 7 | 2007_000572__02 8 | 2007_000762__05 9 | 2007_001288__01 10 | 2007_001289__03 11 | 2007_001311__02 12 | 2007_001408__05 13 | 2007_001568__01 14 | 2007_001630__02 15 | 2007_001761__01 16 | 2007_001884__01 17 | 2007_002094__03 18 | 2007_002266__01 19 | 2007_002376__01 20 | 2007_002400__03 21 | 2007_002619__01 22 | 2007_002719__04 23 | 2007_003088__05 24 | 2007_003131__04 25 | 2007_003188__02 26 | 2007_003349__03 27 | 2007_003571__04 28 | 2007_003621__02 29 | 2007_003682__03 30 | 2007_003861__04 31 | 2007_004052__01 32 | 2007_004143__03 33 | 2007_004241__04 34 | 2007_004468__05 35 | 2007_005074__04 36 | 2007_005107__02 37 | 2007_005294__05 38 | 2007_005304__05 39 | 2007_005428__05 40 | 2007_005509__01 41 | 2007_005600__01 42 | 2007_005705__04 43 | 2007_005828__01 44 | 2007_006076__03 45 | 2007_006086__05 46 | 2007_006449__02 47 | 2007_006946__01 48 | 2007_007084__03 49 | 2007_007235__02 50 | 2007_007341__01 51 | 2007_007470__01 52 | 2007_007477__04 53 | 2007_007836__02 54 | 2007_008051__03 55 | 2007_008084__03 56 | 2007_008204__05 57 | 2007_008670__03 58 | 2007_009088__03 59 | 2007_009258__02 60 | 2007_009323__03 61 | 2007_009458__05 62 | 2007_009687__05 63 | 2007_009817__03 64 | 2007_009911__01 65 | 2008_000120__04 66 | 2008_000123__03 67 | 2008_000533__03 68 | 2008_000725__02 69 | 2008_000911__05 70 | 2008_001013__04 71 | 2008_001040__04 72 | 2008_001135__04 73 | 2008_001260__04 74 | 2008_001404__02 75 | 2008_001514__03 76 | 2008_001531__02 77 | 2008_001546__01 78 | 2008_001580__04 79 | 2008_001966__03 80 | 2008_001971__01 81 | 2008_002043__03 82 | 2008_002269__02 83 | 2008_002358__01 84 | 2008_002429__03 85 | 2008_002467__05 86 | 2008_002504__04 87 | 2008_002775__05 88 | 2008_002864__05 89 | 2008_003034__04 90 | 2008_003076__05 91 | 2008_003108__02 92 | 2008_003110__03 93 | 2008_003155__01 94 | 2008_003270__02 95 | 2008_003369__01 96 | 2008_003858__04 97 | 2008_003876__01 98 | 2008_003886__04 99 | 2008_003926__01 100 | 2008_003976__01 101 | 2008_004363__02 102 | 2008_004654__02 103 | 2008_004659__05 104 | 2008_004704__01 105 | 2008_004758__02 106 | 2008_004995__02 107 | 2008_005262__05 108 | 2008_005338__01 109 | 2008_005628__04 110 | 2008_005727__02 111 | 2008_005812__05 112 | 2008_005904__05 113 | 2008_006216__01 114 | 2008_006229__04 115 | 2008_006254__02 116 | 2008_006703__01 117 | 2008_007120__03 118 | 2008_007143__04 119 | 2008_007219__05 120 | 2008_007350__01 121 | 2008_007498__03 122 | 2008_007811__05 123 | 2008_007994__03 124 | 2008_008268__03 125 | 2008_008629__02 126 | 2008_008711__02 127 | 2008_008746__03 128 | 2009_000032__01 129 | 2009_000037__03 130 | 2009_000121__05 131 | 2009_000149__02 132 | 2009_000201__05 133 | 2009_000205__01 134 | 2009_000318__03 135 | 2009_000354__02 136 | 2009_000387__01 137 | 2009_000421__04 138 | 2009_000440__01 139 | 2009_000446__04 140 | 2009_000457__02 141 | 
2009_000469__04 142 | 2009_000573__02 143 | 2009_000619__03 144 | 2009_000664__03 145 | 2009_000723__04 146 | 2009_000828__04 147 | 2009_000840__05 148 | 2009_000879__03 149 | 2009_000991__03 150 | 2009_000998__03 151 | 2009_001108__03 152 | 2009_001160__03 153 | 2009_001255__02 154 | 2009_001278__05 155 | 2009_001314__03 156 | 2009_001332__01 157 | 2009_001565__03 158 | 2009_001607__03 159 | 2009_001683__03 160 | 2009_001718__02 161 | 2009_001765__03 162 | 2009_001818__05 163 | 2009_001850__01 164 | 2009_001851__01 165 | 2009_001941__04 166 | 2009_002185__05 167 | 2009_002295__02 168 | 2009_002320__01 169 | 2009_002372__05 170 | 2009_002521__05 171 | 2009_002594__05 172 | 2009_002604__03 173 | 2009_002649__05 174 | 2009_002727__04 175 | 2009_002732__05 176 | 2009_002749__05 177 | 2009_002808__01 178 | 2009_002856__05 179 | 2009_002888__01 180 | 2009_002928__02 181 | 2009_003003__05 182 | 2009_003005__01 183 | 2009_003043__04 184 | 2009_003080__04 185 | 2009_003193__02 186 | 2009_003224__02 187 | 2009_003269__05 188 | 2009_003273__03 189 | 2009_003343__02 190 | 2009_003378__03 191 | 2009_003450__03 192 | 2009_003498__03 193 | 2009_003504__04 194 | 2009_003517__05 195 | 2009_003640__03 196 | 2009_003696__01 197 | 2009_003707__04 198 | 2009_003806__01 199 | 2009_003858__03 200 | 2009_003971__02 201 | 2009_004021__03 202 | 2009_004084__03 203 | 2009_004125__04 204 | 2009_004247__05 205 | 2009_004324__05 206 | 2009_004509__03 207 | 2009_004540__03 208 | 2009_004568__03 209 | 2009_004579__05 210 | 2009_004635__04 211 | 2009_004653__01 212 | 2009_004848__02 213 | 2009_004882__02 214 | 2009_004886__03 215 | 2009_004895__03 216 | 2009_004969__01 217 | 2009_005038__05 218 | 2009_005137__03 219 | 2009_005156__02 220 | 2009_005189__01 221 | 2009_005190__05 222 | 2009_005260__03 223 | 2009_005262__03 224 | 2009_005302__05 225 | 2010_000065__02 226 | 2010_000083__02 227 | 2010_000084__04 228 | 2010_000238__01 229 | 2010_000241__03 230 | 2010_000272__04 231 | 2010_000342__02 232 | 2010_000426__05 233 | 2010_000572__01 234 | 2010_000622__01 235 | 2010_000814__03 236 | 2010_000906__04 237 | 2010_000961__03 238 | 2010_001016__03 239 | 2010_001017__01 240 | 2010_001024__01 241 | 2010_001036__04 242 | 2010_001061__03 243 | 2010_001069__03 244 | 2010_001174__01 245 | 2010_001367__02 246 | 2010_001367__05 247 | 2010_001448__01 248 | 2010_001830__05 249 | 2010_001995__03 250 | 2010_002017__05 251 | 2010_002030__02 252 | 2010_002142__03 253 | 2010_002147__01 254 | 2010_002150__04 255 | 2010_002200__01 256 | 2010_002310__01 257 | 2010_002536__02 258 | 2010_002546__04 259 | 2010_002693__02 260 | 2010_002939__01 261 | 2010_003127__01 262 | 2010_003132__01 263 | 2010_003168__03 264 | 2010_003362__03 265 | 2010_003365__01 266 | 2010_003418__03 267 | 2010_003468__05 268 | 2010_003473__03 269 | 2010_003495__01 270 | 2010_003547__04 271 | 2010_003716__01 272 | 2010_003771__03 273 | 2010_003781__05 274 | 2010_003820__03 275 | 2010_003912__02 276 | 2010_003915__01 277 | 2010_004041__04 278 | 2010_004056__05 279 | 2010_004208__04 280 | 2010_004314__01 281 | 2010_004419__01 282 | 2010_004520__05 283 | 2010_004529__05 284 | 2010_004551__05 285 | 2010_004556__03 286 | 2010_004559__03 287 | 2010_004662__04 288 | 2010_004772__04 289 | 2010_004828__05 290 | 2010_004994__03 291 | 2010_005252__04 292 | 2010_005401__04 293 | 2010_005428__03 294 | 2010_005496__05 295 | 2010_005531__03 296 | 2010_005534__01 297 | 2010_005582__05 298 | 2010_005664__02 299 | 2010_005705__04 300 | 2010_005718__01 301 | 2010_005762__05 302 | 
2010_005877__01 303 | 2010_005888__01 304 | 2010_006034__01 305 | 2010_006070__02 306 | 2011_000066__05 307 | 2011_000112__03 308 | 2011_000185__03 309 | 2011_000234__04 310 | 2011_000238__04 311 | 2011_000412__02 312 | 2011_000435__04 313 | 2011_000456__03 314 | 2011_000482__03 315 | 2011_000585__02 316 | 2011_000669__03 317 | 2011_000747__05 318 | 2011_000874__01 319 | 2011_001114__01 320 | 2011_001161__04 321 | 2011_001263__01 322 | 2011_001287__03 323 | 2011_001407__01 324 | 2011_001421__03 325 | 2011_001434__01 326 | 2011_001589__04 327 | 2011_001624__01 328 | 2011_001793__04 329 | 2011_001880__01 330 | 2011_001988__02 331 | 2011_002064__02 332 | 2011_002098__05 333 | 2011_002223__02 334 | 2011_002295__03 335 | 2011_002327__01 336 | 2011_002515__01 337 | 2011_002675__01 338 | 2011_002713__02 339 | 2011_002754__04 340 | 2011_002863__05 341 | 2011_002929__01 342 | 2011_002975__04 343 | 2011_003003__02 344 | 2011_003030__03 345 | 2011_003145__03 346 | 2011_003271__05 347 | -------------------------------------------------------------------------------- /data/splits/pascal/val/fold3.txt: -------------------------------------------------------------------------------- 1 | 2007_000042__19 2 | 2007_000123__19 3 | 2007_000175__17 4 | 2007_000187__20 5 | 2007_000452__18 6 | 2007_000559__20 7 | 2007_000629__19 8 | 2007_000636__19 9 | 2007_000661__18 10 | 2007_000676__17 11 | 2007_000804__18 12 | 2007_000925__17 13 | 2007_001154__18 14 | 2007_001175__20 15 | 2007_001408__16 16 | 2007_001430__16 17 | 2007_001430__20 18 | 2007_001457__18 19 | 2007_001458__18 20 | 2007_001585__18 21 | 2007_001594__17 22 | 2007_001678__20 23 | 2007_001717__20 24 | 2007_001733__17 25 | 2007_001763__18 26 | 2007_001763__20 27 | 2007_002119__20 28 | 2007_002132__20 29 | 2007_002268__18 30 | 2007_002284__16 31 | 2007_002378__16 32 | 2007_002426__18 33 | 2007_002427__18 34 | 2007_002565__19 35 | 2007_002618__17 36 | 2007_002648__17 37 | 2007_002728__19 38 | 2007_003011__18 39 | 2007_003011__20 40 | 2007_003169__18 41 | 2007_003367__16 42 | 2007_003499__19 43 | 2007_003506__16 44 | 2007_003530__18 45 | 2007_003587__19 46 | 2007_003714__17 47 | 2007_003848__19 48 | 2007_003957__19 49 | 2007_004190__20 50 | 2007_004193__20 51 | 2007_004275__16 52 | 2007_004281__19 53 | 2007_004483__19 54 | 2007_004510__20 55 | 2007_004558__16 56 | 2007_004649__19 57 | 2007_004712__16 58 | 2007_004969__17 59 | 2007_005469__17 60 | 2007_005626__19 61 | 2007_005689__19 62 | 2007_005813__16 63 | 2007_005857__16 64 | 2007_005915__17 65 | 2007_006171__18 66 | 2007_006348__20 67 | 2007_006373__18 68 | 2007_006678__17 69 | 2007_006680__19 70 | 2007_006802__19 71 | 2007_007130__20 72 | 2007_007165__17 73 | 2007_007168__19 74 | 2007_007195__19 75 | 2007_007196__20 76 | 2007_007203__20 77 | 2007_007417__18 78 | 2007_007534__17 79 | 2007_007624__16 80 | 2007_007795__16 81 | 2007_007881__19 82 | 2007_007996__18 83 | 2007_008204__20 84 | 2007_008260__18 85 | 2007_008339__19 86 | 2007_008374__20 87 | 2007_008543__18 88 | 2007_008547__16 89 | 2007_009068__18 90 | 2007_009252__18 91 | 2007_009320__17 92 | 2007_009419__16 93 | 2007_009446__20 94 | 2007_009521__18 95 | 2007_009521__20 96 | 2007_009592__18 97 | 2007_009655__18 98 | 2007_009684__18 99 | 2007_009750__16 100 | 2008_000016__20 101 | 2008_000149__18 102 | 2008_000270__18 103 | 2008_000391__16 104 | 2008_000589__18 105 | 2008_000657__19 106 | 2008_001078__16 107 | 2008_001283__16 108 | 2008_001688__16 109 | 2008_001688__20 110 | 2008_001966__16 111 | 2008_002273__16 112 | 2008_002379__16 113 | 
2008_002464__20 114 | 2008_002536__17 115 | 2008_002680__20 116 | 2008_002900__19 117 | 2008_002929__18 118 | 2008_003003__20 119 | 2008_003026__20 120 | 2008_003105__19 121 | 2008_003135__16 122 | 2008_003676__16 123 | 2008_003709__18 124 | 2008_003733__18 125 | 2008_003885__20 126 | 2008_004172__18 127 | 2008_004212__19 128 | 2008_004279__20 129 | 2008_004367__19 130 | 2008_004453__17 131 | 2008_004477__16 132 | 2008_004562__18 133 | 2008_004610__19 134 | 2008_004621__17 135 | 2008_004754__20 136 | 2008_004854__17 137 | 2008_004910__20 138 | 2008_005089__20 139 | 2008_005217__16 140 | 2008_005242__16 141 | 2008_005254__20 142 | 2008_005439__20 143 | 2008_005445__20 144 | 2008_005544__19 145 | 2008_005633__17 146 | 2008_005680__16 147 | 2008_006055__19 148 | 2008_006159__20 149 | 2008_006327__17 150 | 2008_006523__19 151 | 2008_006553__19 152 | 2008_006752__19 153 | 2008_006784__18 154 | 2008_006835__17 155 | 2008_007497__17 156 | 2008_007527__20 157 | 2008_007677__17 158 | 2008_007814__17 159 | 2008_007828__20 160 | 2008_008103__18 161 | 2008_008221__19 162 | 2008_008434__16 163 | 2009_000022__19 164 | 2009_000039__17 165 | 2009_000087__18 166 | 2009_000096__18 167 | 2009_000136__20 168 | 2009_000242__18 169 | 2009_000391__20 170 | 2009_000418__16 171 | 2009_000418__18 172 | 2009_000487__18 173 | 2009_000488__16 174 | 2009_000488__20 175 | 2009_000628__19 176 | 2009_000675__17 177 | 2009_000704__20 178 | 2009_000712__19 179 | 2009_000732__18 180 | 2009_000845__19 181 | 2009_000924__17 182 | 2009_001300__19 183 | 2009_001333__19 184 | 2009_001363__20 185 | 2009_001505__17 186 | 2009_001644__16 187 | 2009_001644__18 188 | 2009_001644__20 189 | 2009_001684__16 190 | 2009_001731__18 191 | 2009_001768__17 192 | 2009_001775__16 193 | 2009_001775__18 194 | 2009_001991__17 195 | 2009_002082__17 196 | 2009_002094__20 197 | 2009_002202__19 198 | 2009_002265__19 199 | 2009_002291__19 200 | 2009_002346__18 201 | 2009_002366__20 202 | 2009_002390__18 203 | 2009_002487__16 204 | 2009_002562__20 205 | 2009_002568__19 206 | 2009_002571__16 207 | 2009_002571__18 208 | 2009_002573__20 209 | 2009_002584__16 210 | 2009_002638__19 211 | 2009_002732__18 212 | 2009_002887__19 213 | 2009_002982__19 214 | 2009_003105__19 215 | 2009_003123__18 216 | 2009_003299__19 217 | 2009_003311__19 218 | 2009_003433__19 219 | 2009_003523__20 220 | 2009_003551__20 221 | 2009_003564__16 222 | 2009_003564__18 223 | 2009_003607__18 224 | 2009_003666__17 225 | 2009_003857__20 226 | 2009_003895__18 227 | 2009_003895__20 228 | 2009_003938__19 229 | 2009_004099__18 230 | 2009_004140__18 231 | 2009_004255__19 232 | 2009_004298__18 233 | 2009_004687__18 234 | 2009_004730__19 235 | 2009_004799__19 236 | 2009_004993__18 237 | 2009_004993__20 238 | 2009_005148__19 239 | 2009_005220__19 240 | 2010_000256__18 241 | 2010_000284__18 242 | 2010_000309__17 243 | 2010_000318__20 244 | 2010_000330__16 245 | 2010_000639__16 246 | 2010_000738__20 247 | 2010_000764__19 248 | 2010_001011__17 249 | 2010_001079__17 250 | 2010_001104__19 251 | 2010_001149__18 252 | 2010_001151__19 253 | 2010_001246__16 254 | 2010_001256__17 255 | 2010_001327__18 256 | 2010_001367__20 257 | 2010_001522__17 258 | 2010_001557__17 259 | 2010_001577__17 260 | 2010_001699__16 261 | 2010_001734__19 262 | 2010_001752__20 263 | 2010_001767__18 264 | 2010_001773__16 265 | 2010_001851__16 266 | 2010_001951__19 267 | 2010_001962__18 268 | 2010_002106__17 269 | 2010_002137__16 270 | 2010_002137__18 271 | 2010_002232__17 272 | 2010_002531__18 273 | 2010_002682__19 274 | 
2010_002921__20 275 | 2010_003014__18 276 | 2010_003123__16 277 | 2010_003302__16 278 | 2010_003514__19 279 | 2010_003541__17 280 | 2010_003597__18 281 | 2010_003781__16 282 | 2010_003956__19 283 | 2010_004149__19 284 | 2010_004226__17 285 | 2010_004382__16 286 | 2010_004479__20 287 | 2010_004757__16 288 | 2010_004757__18 289 | 2010_004783__18 290 | 2010_004825__16 291 | 2010_004857__20 292 | 2010_004951__19 293 | 2010_004980__19 294 | 2010_005180__18 295 | 2010_005187__16 296 | 2010_005305__20 297 | 2010_005606__18 298 | 2010_005706__19 299 | 2010_005719__17 300 | 2010_005727__19 301 | 2010_005788__17 302 | 2010_005860__16 303 | 2010_005871__19 304 | 2010_005991__18 305 | 2010_006054__19 306 | 2011_000070__18 307 | 2011_000173__18 308 | 2011_000283__19 309 | 2011_000291__19 310 | 2011_000310__18 311 | 2011_000436__17 312 | 2011_000521__19 313 | 2011_000747__16 314 | 2011_001005__18 315 | 2011_001060__19 316 | 2011_001281__19 317 | 2011_001350__17 318 | 2011_001567__18 319 | 2011_001601__18 320 | 2011_001614__19 321 | 2011_001674__18 322 | 2011_001713__16 323 | 2011_001713__18 324 | 2011_001726__20 325 | 2011_001794__18 326 | 2011_001862__18 327 | 2011_001863__16 328 | 2011_001910__20 329 | 2011_002124__18 330 | 2011_002156__20 331 | 2011_002178__17 332 | 2011_002247__19 333 | 2011_002379__19 334 | 2011_002391__18 335 | 2011_002532__20 336 | 2011_002535__19 337 | 2011_002644__18 338 | 2011_002644__20 339 | 2011_002879__18 340 | 2011_002879__20 341 | 2011_003103__16 342 | 2011_003103__18 343 | 2011_003146__19 344 | 2011_003182__18 345 | 2011_003197__19 346 | 2011_003256__18 347 | -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | r""" testing code """ 2 | import argparse 3 | import os 4 | import torch.nn as nn 5 | import numpy as np 6 | import torch 7 | 8 | import cv2 9 | 10 | from skimage import filters 11 | from PIL import Image 12 | from model.sccnet import SCCNetwork 13 | from common.logger import Logger, AverageMeter 14 | from common.vis import Visualizer 15 | from common.evaluation import Evaluator 16 | from common import utils 17 | from data.dataset import FSSDataset 18 | import torchvision.transforms.functional as FF 19 | import torch.nn.functional as F 20 | from torchmetrics.classification import BinaryJaccardIndex 21 | 22 | 23 | def eigen2img_multi(eigen): 24 | img = eigen.numpy() 25 | threshs = filters.threshold_multiotsu(img) 26 | return img > threshs[1] 27 | 28 | 29 | def eigen2img_adp(eigen): 30 | minv, maxv = eigen.min(), eigen.max() 31 | eigen = (eigen - minv) / (1e-6 + maxv - minv) 32 | uint_img = (eigen.numpy() * 255).astype('uint8') 33 | return cv2.adaptiveThreshold(uint_img, 255, cv2.ADAPTIVE_THRESH_MEAN_C, 34 | cv2.THRESH_BINARY, 9, 2) 35 | 36 | 37 | def eigen2img_merge(eigen): 38 | mask = eigen2img_multi(eigen) 39 | detail = np.asarray(eigen2img_adp(eigen)) 40 | return np.logical_and(mask, detail) 41 | 42 | 43 | def test(model, dataloader, nshot, args): 44 | r""" Test HSNet """ 45 | 46 | # Freeze randomness during testing for reproducibility 47 | utils.fix_randseed(0) 48 | average_meter = AverageMeter(dataloader.dataset) 49 | 50 | iou = BinaryJaccardIndex().cuda() 51 | 52 | for idx, batch in enumerate(dataloader): 53 | 54 | # 1. 
forward pass 55 | batch = utils.to_cuda(batch) 56 | pred_mask = model.module.predict_mask_nshot(batch, nshot=nshot) 57 | 58 | assert pred_mask.size() == batch['query_mask'].size() 59 | 60 | if args.fuse: 61 | b = pred_mask.size()[0] 62 | for i in range(b): 63 | img_name = batch['query_name'][i] 64 | file_name = os.path.join(args.eigen_path, f'{img_name}.pth') 65 | eigen = torch.load(file_name) 66 | 67 | if args.perfect: 68 | best_iou = 0 69 | best_eigen_mask = None 70 | for j in range(4): 71 | eigen_img = eigen2img_merge(eigen['eigenvectors'][j + 1].resize(64, 64)) 72 | eigen_mask = torch.tensor(eigen_img).int() 73 | eigen_mask = F.interpolate( 74 | eigen_mask.unsqueeze(0).unsqueeze(0).float(), (256, 256), 75 | mode='bilinear', 76 | align_corners=True).cuda().int().squeeze() 77 | tiou = iou(batch['query_mask'][i], eigen_mask).cpu().item() 78 | if tiou > best_iou: 79 | best_iou = tiou 80 | best_eigen_mask = eigen_mask 81 | 82 | if best_iou > 0.1 and best_eigen_mask is not None: 83 | pred_mask[i, :, :] = torch.logical_or(pred_mask[i, :, :], best_eigen_mask) 84 | else: 85 | eigen_img = eigen2img_merge(eigen['eigenvectors'][1].resize(64, 64)) 86 | eigen_mask = torch.tensor(eigen_img).int() 87 | eigen_mask = F.interpolate( 88 | eigen_mask.unsqueeze(0).unsqueeze(0).float(), (256, 256), 89 | mode='bilinear', 90 | align_corners=True).cuda().int().squeeze() 91 | tiou = iou(pred_mask[i, :, :], eigen_mask).cpu().item() 92 | if tiou > 0.1: 93 | pred_mask[i, :, :] = torch.logical_or(pred_mask[i, :, :], eigen_mask) 94 | 95 | if len(args.seg_path) > 0: 96 | b = pred_mask.size()[0] 97 | for i in range(b): 98 | img_name = batch['query_name'][i] 99 | cls_id = batch['class_id'][i] 100 | file_name = os.path.join(args.seg_path, f'{img_name}__{cls_id}.png') 101 | img = FF.to_pil_image(pred_mask[i, :, :].int()) 102 | img.save(file_name) 103 | 104 | # 2. 
Evaluate prediction 105 | area_inter, area_union = Evaluator.classify_prediction(pred_mask.clone(), batch) 106 | average_meter.update(area_inter, area_union, batch['class_id'], loss=None) 107 | average_meter.write_process(idx, len(dataloader), epoch=-1, write_batch_idx=1) 108 | 109 | # Visualize predictions 110 | if Visualizer.visualize: 111 | Visualizer.visualize_prediction_batch(batch['support_imgs'], batch['support_masks'], 112 | batch['query_img'], batch['query_mask'], 113 | pred_mask, batch['class_id'], idx, 114 | area_inter[1].float() / area_union[1].float()) 115 | 116 | # Write evaluation results 117 | average_meter.write_result('Test', 0) 118 | miou, fb_iou = average_meter.compute_iou() 119 | 120 | return miou, fb_iou 121 | 122 | 123 | if __name__ == '__main__': 124 | 125 | # Arguments parsing 126 | parser = argparse.ArgumentParser(description='SCCNet Pytorch Implementation') 127 | parser.add_argument('--datapath', type=str, default='../Datasets_HSN') 128 | parser.add_argument('--benchmark', type=str, default='pascal', choices=['pascal', 'isaid', 'dlrsd']) 129 | parser.add_argument('--logpath', type=str, default='') 130 | parser.add_argument('--bsz', type=int, default=1) 131 | parser.add_argument('--img_size', type=int, default=400) 132 | parser.add_argument('--nworker', type=int, default=0) 133 | parser.add_argument('--load', type=str, default='') 134 | parser.add_argument('--fold', type=int, default=0, choices=[0, 1, 2, 3]) 135 | parser.add_argument('--nshot', type=int, default=1) 136 | parser.add_argument('--backbone', type=str, default='resnet101', choices=['vgg16', 'resnet50', 'resnet101']) 137 | parser.add_argument('--visualize', action='store_true') 138 | parser.add_argument('--use_original_imgsize', action='store_true') 139 | parser.add_argument('--seg_path', type=str, default='') 140 | parser.add_argument('--eigen_path', type=str, default='') 141 | parser.add_argument('--fuse', type=bool, default=False) 142 | parser.add_argument('--perfect', type=bool, default=False) 143 | args = parser.parse_args() 144 | Logger.initialize(args, training=False) 145 | 146 | # Model initialization 147 | model = SCCNetwork(args.backbone, args.use_original_imgsize) 148 | model.eval() 149 | Logger.log_params(model) 150 | 151 | # Device setup 152 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") 153 | Logger.info('# available GPUs: %d' % torch.cuda.device_count()) 154 | model = nn.DataParallel(model) 155 | model.to(device) 156 | 157 | # Load trained model 158 | if args.load == '': raise Exception('Pretrained model not specified.') 159 | model.load_state_dict(torch.load(args.load)) 160 | 161 | # Helper classes (for testing) initialization 162 | Evaluator.initialize() 163 | Visualizer.initialize(args.visualize) 164 | 165 | # Dataset initialization 166 | FSSDataset.initialize(img_size=args.img_size, datapath=args.datapath, use_original_imgsize=args.use_original_imgsize) 167 | dataloader_test = FSSDataset.build_dataloader(args.benchmark, args.bsz, args.nworker, args.fold, 'test', aug=False, shot=args.nshot) 168 | 169 | # Test HSNet 170 | with torch.no_grad(): 171 | test_miou, test_fb_iou = test(model, dataloader_test, args.nshot, args) 172 | Logger.info('Fold %d mIoU: %5.2f \t FB-IoU: %5.2f' % (args.fold, test_miou.item(), test_fb_iou.item())) 173 | Logger.info('==================== Finished Testing ====================') 174 | -------------------------------------------------------------------------------- /data/splits/pascal/val/fold1.txt: 
-------------------------------------------------------------------------------- 1 | 2007_000452__09 2 | 2007_000464__10 3 | 2007_000491__10 4 | 2007_000663__06 5 | 2007_000663__07 6 | 2007_000727__06 7 | 2007_000727__07 8 | 2007_000804__09 9 | 2007_000830__09 10 | 2007_001299__10 11 | 2007_001321__07 12 | 2007_001457__09 13 | 2007_001677__09 14 | 2007_001717__09 15 | 2007_001763__08 16 | 2007_001774__08 17 | 2007_001884__06 18 | 2007_002268__08 19 | 2007_002387__10 20 | 2007_002445__08 21 | 2007_002470__08 22 | 2007_002539__06 23 | 2007_002597__08 24 | 2007_002643__07 25 | 2007_002903__10 26 | 2007_003011__09 27 | 2007_003051__07 28 | 2007_003101__06 29 | 2007_003106__08 30 | 2007_003137__06 31 | 2007_003143__07 32 | 2007_003169__08 33 | 2007_003195__06 34 | 2007_003201__10 35 | 2007_003503__06 36 | 2007_003503__07 37 | 2007_003621__06 38 | 2007_003711__06 39 | 2007_003786__06 40 | 2007_003841__10 41 | 2007_003917__07 42 | 2007_003991__08 43 | 2007_004193__09 44 | 2007_004392__09 45 | 2007_004405__09 46 | 2007_004510__09 47 | 2007_004712__09 48 | 2007_004856__08 49 | 2007_004866__08 50 | 2007_005074__07 51 | 2007_005114__10 52 | 2007_005296__07 53 | 2007_005331__07 54 | 2007_005460__08 55 | 2007_005547__07 56 | 2007_005547__10 57 | 2007_005844__09 58 | 2007_005845__08 59 | 2007_005911__06 60 | 2007_005978__06 61 | 2007_006035__07 62 | 2007_006086__09 63 | 2007_006241__09 64 | 2007_006260__08 65 | 2007_006277__07 66 | 2007_006348__09 67 | 2007_006553__09 68 | 2007_006761__10 69 | 2007_006841__10 70 | 2007_007414__07 71 | 2007_007417__08 72 | 2007_007524__08 73 | 2007_007815__07 74 | 2007_007818__07 75 | 2007_007996__09 76 | 2007_008106__09 77 | 2007_008110__09 78 | 2007_008543__09 79 | 2007_008722__10 80 | 2007_008747__06 81 | 2007_008815__08 82 | 2007_008897__09 83 | 2007_008973__10 84 | 2007_009015__06 85 | 2007_009015__07 86 | 2007_009068__09 87 | 2007_009084__09 88 | 2007_009096__07 89 | 2007_009221__08 90 | 2007_009245__10 91 | 2007_009346__08 92 | 2007_009392__06 93 | 2007_009392__07 94 | 2007_009413__09 95 | 2007_009521__09 96 | 2007_009764__06 97 | 2007_009794__08 98 | 2007_009897__10 99 | 2007_009923__08 100 | 2007_009938__07 101 | 2008_000009__10 102 | 2008_000073__10 103 | 2008_000075__06 104 | 2008_000107__09 105 | 2008_000149__09 106 | 2008_000182__08 107 | 2008_000345__08 108 | 2008_000401__08 109 | 2008_000464__08 110 | 2008_000501__07 111 | 2008_000673__09 112 | 2008_000853__08 113 | 2008_000919__10 114 | 2008_001078__08 115 | 2008_001433__08 116 | 2008_001439__09 117 | 2008_001513__08 118 | 2008_001640__08 119 | 2008_001715__09 120 | 2008_001885__08 121 | 2008_002152__08 122 | 2008_002205__06 123 | 2008_002212__07 124 | 2008_002379__09 125 | 2008_002521__09 126 | 2008_002623__08 127 | 2008_002681__08 128 | 2008_002778__10 129 | 2008_002958__07 130 | 2008_003141__06 131 | 2008_003141__07 132 | 2008_003333__07 133 | 2008_003477__09 134 | 2008_003499__08 135 | 2008_003577__07 136 | 2008_003777__06 137 | 2008_003821__09 138 | 2008_003846__07 139 | 2008_004069__07 140 | 2008_004339__07 141 | 2008_004552__07 142 | 2008_004612__09 143 | 2008_004701__10 144 | 2008_005097__10 145 | 2008_005105__10 146 | 2008_005245__07 147 | 2008_005676__06 148 | 2008_006008__09 149 | 2008_006063__10 150 | 2008_006254__07 151 | 2008_006325__08 152 | 2008_006341__08 153 | 2008_006480__08 154 | 2008_006528__10 155 | 2008_006554__06 156 | 2008_006986__07 157 | 2008_007025__10 158 | 2008_007031__10 159 | 2008_007048__09 160 | 2008_007123__10 161 | 2008_007194__09 162 | 2008_007273__10 163 | 
2008_007378__09 164 | 2008_007402__09 165 | 2008_007527__09 166 | 2008_007548__08 167 | 2008_007596__10 168 | 2008_007737__09 169 | 2008_007797__06 170 | 2008_007804__07 171 | 2008_007828__09 172 | 2008_008252__06 173 | 2008_008301__06 174 | 2008_008469__06 175 | 2008_008682__06 176 | 2009_000013__08 177 | 2009_000080__08 178 | 2009_000219__10 179 | 2009_000309__10 180 | 2009_000335__06 181 | 2009_000335__07 182 | 2009_000426__06 183 | 2009_000455__06 184 | 2009_000457__07 185 | 2009_000523__07 186 | 2009_000641__10 187 | 2009_000716__08 188 | 2009_000731__10 189 | 2009_000771__10 190 | 2009_000825__07 191 | 2009_000964__08 192 | 2009_001008__08 193 | 2009_001082__06 194 | 2009_001240__07 195 | 2009_001255__07 196 | 2009_001299__09 197 | 2009_001391__08 198 | 2009_001411__08 199 | 2009_001536__07 200 | 2009_001775__09 201 | 2009_001804__06 202 | 2009_001816__06 203 | 2009_001854__06 204 | 2009_002035__10 205 | 2009_002122__10 206 | 2009_002150__10 207 | 2009_002164__07 208 | 2009_002171__10 209 | 2009_002221__10 210 | 2009_002238__06 211 | 2009_002238__07 212 | 2009_002239__07 213 | 2009_002268__08 214 | 2009_002346__09 215 | 2009_002415__09 216 | 2009_002487__09 217 | 2009_002527__08 218 | 2009_002535__06 219 | 2009_002549__10 220 | 2009_002571__09 221 | 2009_002618__07 222 | 2009_002635__10 223 | 2009_002753__08 224 | 2009_002936__08 225 | 2009_002990__07 226 | 2009_003003__07 227 | 2009_003059__10 228 | 2009_003071__09 229 | 2009_003269__07 230 | 2009_003304__06 231 | 2009_003387__07 232 | 2009_003406__07 233 | 2009_003494__09 234 | 2009_003507__09 235 | 2009_003542__10 236 | 2009_003549__07 237 | 2009_003569__10 238 | 2009_003589__07 239 | 2009_003703__06 240 | 2009_003771__08 241 | 2009_003773__10 242 | 2009_003849__09 243 | 2009_003895__09 244 | 2009_003904__08 245 | 2009_004072__06 246 | 2009_004140__09 247 | 2009_004217__09 248 | 2009_004248__08 249 | 2009_004455__07 250 | 2009_004504__08 251 | 2009_004590__06 252 | 2009_004594__07 253 | 2009_004687__09 254 | 2009_004721__08 255 | 2009_004732__06 256 | 2009_004748__07 257 | 2009_004789__06 258 | 2009_004859__09 259 | 2009_004867__06 260 | 2009_005158__08 261 | 2009_005219__08 262 | 2009_005231__06 263 | 2010_000003__09 264 | 2010_000160__07 265 | 2010_000163__08 266 | 2010_000372__07 267 | 2010_000427__10 268 | 2010_000530__07 269 | 2010_000552__08 270 | 2010_000573__06 271 | 2010_000628__07 272 | 2010_000639__09 273 | 2010_000682__06 274 | 2010_000683__08 275 | 2010_000724__08 276 | 2010_000907__10 277 | 2010_000941__08 278 | 2010_000952__07 279 | 2010_001000__10 280 | 2010_001010__10 281 | 2010_001070__08 282 | 2010_001206__06 283 | 2010_001292__08 284 | 2010_001331__08 285 | 2010_001351__08 286 | 2010_001403__06 287 | 2010_001403__07 288 | 2010_001534__08 289 | 2010_001553__07 290 | 2010_001579__09 291 | 2010_001646__06 292 | 2010_001656__08 293 | 2010_001692__10 294 | 2010_001699__09 295 | 2010_001767__07 296 | 2010_001851__09 297 | 2010_001913__08 298 | 2010_002017__07 299 | 2010_002017__09 300 | 2010_002025__08 301 | 2010_002137__08 302 | 2010_002146__08 303 | 2010_002305__08 304 | 2010_002336__09 305 | 2010_002348__08 306 | 2010_002361__07 307 | 2010_002390__10 308 | 2010_002422__08 309 | 2010_002512__08 310 | 2010_002531__08 311 | 2010_002546__06 312 | 2010_002623__09 313 | 2010_002693__08 314 | 2010_002693__09 315 | 2010_002763__08 316 | 2010_002763__10 317 | 2010_002868__06 318 | 2010_002900__08 319 | 2010_002902__07 320 | 2010_002921__09 321 | 2010_002929__07 322 | 2010_002988__07 323 | 2010_003123__07 324 | 
2010_003183__10 325 | 2010_003231__07 326 | 2010_003239__10 327 | 2010_003275__08 328 | 2010_003276__07 329 | 2010_003293__06 330 | 2010_003302__09 331 | 2010_003325__09 332 | 2010_003381__07 333 | 2010_003402__08 334 | 2010_003409__09 335 | 2010_003446__07 336 | 2010_003453__07 337 | 2010_003468__08 338 | 2010_003531__09 339 | 2010_003675__08 340 | 2010_003746__07 341 | 2010_003758__08 342 | 2010_003764__08 343 | 2010_003768__07 344 | 2010_003772__06 345 | 2010_003781__08 346 | 2010_003813__07 347 | 2010_003854__07 348 | 2010_003971__08 349 | 2010_003971__09 350 | 2010_004104__08 351 | 2010_004120__08 352 | 2010_004320__08 353 | 2010_004322__10 354 | 2010_004348__06 355 | 2010_004369__08 356 | 2010_004472__07 357 | 2010_004479__08 358 | 2010_004635__10 359 | 2010_004763__09 360 | 2010_004783__09 361 | 2010_004789__10 362 | 2010_004815__08 363 | 2010_004825__09 364 | 2010_004861__08 365 | 2010_004946__07 366 | 2010_005013__07 367 | 2010_005021__08 368 | 2010_005021__09 369 | 2010_005063__06 370 | 2010_005108__08 371 | 2010_005118__06 372 | 2010_005160__06 373 | 2010_005166__10 374 | 2010_005284__06 375 | 2010_005344__08 376 | 2010_005421__08 377 | 2010_005432__07 378 | 2010_005501__07 379 | 2010_005508__08 380 | 2010_005606__08 381 | 2010_005709__08 382 | 2010_005718__07 383 | 2010_005860__07 384 | 2010_005899__08 385 | 2010_006070__07 386 | 2011_000178__06 387 | 2011_000226__09 388 | 2011_000239__06 389 | 2011_000248__06 390 | 2011_000312__06 391 | 2011_000338__09 392 | 2011_000419__08 393 | 2011_000503__07 394 | 2011_000548__10 395 | 2011_000566__10 396 | 2011_000607__09 397 | 2011_000661__08 398 | 2011_000661__09 399 | 2011_000780__08 400 | 2011_000789__08 401 | 2011_000809__09 402 | 2011_000813__08 403 | 2011_000813__09 404 | 2011_000830__06 405 | 2011_000843__09 406 | 2011_000888__06 407 | 2011_000900__07 408 | 2011_000969__06 409 | 2011_001047__10 410 | 2011_001064__06 411 | 2011_001071__09 412 | 2011_001110__07 413 | 2011_001159__10 414 | 2011_001232__10 415 | 2011_001292__08 416 | 2011_001341__06 417 | 2011_001346__09 418 | 2011_001447__09 419 | 2011_001530__10 420 | 2011_001534__08 421 | 2011_001546__10 422 | 2011_001567__09 423 | 2011_001597__08 424 | 2011_001601__08 425 | 2011_001607__08 426 | 2011_001665__09 427 | 2011_001708__10 428 | 2011_001775__08 429 | 2011_001782__10 430 | 2011_001812__09 431 | 2011_002041__09 432 | 2011_002064__07 433 | 2011_002124__09 434 | 2011_002200__09 435 | 2011_002298__09 436 | 2011_002322__07 437 | 2011_002343__09 438 | 2011_002358__09 439 | 2011_002391__09 440 | 2011_002509__09 441 | 2011_002592__07 442 | 2011_002644__09 443 | 2011_002685__08 444 | 2011_002812__07 445 | 2011_002885__10 446 | 2011_003011__09 447 | 2011_003019__07 448 | 2011_003019__10 449 | 2011_003055__07 450 | 2011_003103__09 451 | 2011_003114__06 452 | -------------------------------------------------------------------------------- /spectral/extract_utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import time 3 | from multiprocessing import Pool 4 | from pathlib import Path 5 | from typing import Any, Callable, Iterable, Optional, Tuple, Union 6 | 7 | import cv2 8 | import numpy as np 9 | import scipy.sparse 10 | import torch 11 | from skimage.morphology import binary_dilation, binary_erosion 12 | from torch.utils.data import Dataset 13 | from torchvision import transforms 14 | from torchvision.models import resnet, vgg 15 | from tqdm import tqdm 16 | 17 | 18 | class ImagesDataset(Dataset): 19 | """A very simple 
dataset for loading images.""" 20 | 21 | def __init__(self, filenames: str, images_root: Optional[str] = None, transform: Optional[Callable] = None, 22 | prepare_filenames: bool = True) -> None: 23 | self.root = None if images_root is None else Path(images_root) 24 | self.filenames = sorted(list(set(filenames))) if prepare_filenames else filenames 25 | self.transform = transform 26 | 27 | def __getitem__(self, index: int) -> Tuple[Any, Any]: 28 | path = self.filenames[index] 29 | full_path = Path(path) if self.root is None else self.root / path 30 | assert full_path.is_file(), f'Not a file: {full_path}' 31 | image = cv2.imread(str(full_path)) 32 | image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) 33 | if self.transform is not None: 34 | image = self.transform(image) 35 | return image, path, index 36 | 37 | def __len__(self) -> int: 38 | return len(self.filenames) 39 | 40 | 41 | def extract_feat_vgg(img, backbone, feat_ids, bottleneck_ids=None, lids=None): 42 | r""" Extract intermediate features from VGG """ 43 | feats = [] 44 | feat = img 45 | for lid, module in enumerate(backbone.features): 46 | feat = module(feat) 47 | if lid in feat_ids: 48 | feats.append(feat.clone()) 49 | return feats 50 | 51 | 52 | def extract_feat_res(img, backbone, feat_ids, bottleneck_ids, lids): 53 | r""" Extract intermediate features from ResNet""" 54 | feats = [] 55 | 56 | # Layer 0 57 | feat = backbone.conv1.forward(img) 58 | feat = backbone.bn1.forward(feat) 59 | feat = backbone.relu.forward(feat) 60 | feat = backbone.maxpool.forward(feat) 61 | 62 | # Layer 1-4 63 | for hid, (bid, lid) in enumerate(zip(bottleneck_ids, lids)): 64 | res = feat 65 | feat = backbone.__getattr__('layer%d' % lid)[bid].conv1.forward(feat) 66 | feat = backbone.__getattr__('layer%d' % lid)[bid].bn1.forward(feat) 67 | feat = backbone.__getattr__('layer%d' % lid)[bid].relu.forward(feat) 68 | feat = backbone.__getattr__('layer%d' % lid)[bid].conv2.forward(feat) 69 | feat = backbone.__getattr__('layer%d' % lid)[bid].bn2.forward(feat) 70 | feat = backbone.__getattr__('layer%d' % lid)[bid].relu.forward(feat) 71 | feat = backbone.__getattr__('layer%d' % lid)[bid].conv3.forward(feat) 72 | feat = backbone.__getattr__('layer%d' % lid)[bid].bn3.forward(feat) 73 | 74 | if bid == 0: 75 | res = backbone.__getattr__('layer%d' % lid)[bid].downsample.forward(res) 76 | 77 | feat += res 78 | 79 | if hid + 1 in feat_ids: 80 | feats.append(feat.clone()) 81 | 82 | feat = backbone.__getattr__('layer%d' % lid)[bid].relu.forward(feat) 83 | 84 | return feats 85 | 86 | def get_model(name: str): 87 | if 'dino' in name: 88 | model = torch.hub.load('facebookresearch/dino:main', name) 89 | model.fc = torch.nn.Identity() 90 | val_transform = get_transform(name) 91 | patch_size = model.patch_embed.patch_size 92 | num_heads = model.blocks[0].attn.num_heads 93 | elif 'resnet101' in name: 94 | model = resnet.resnet101(pretrained=True) 95 | val_transform = get_transform(name) 96 | patch_size = 16 97 | num_heads = 0 98 | elif 'resnet50' in name: 99 | model = resnet.resnet50(pretrained=True) 100 | val_transform = get_transform(name) 101 | patch_size = 16 102 | num_heads = 0 103 | elif 'vgg16' in name: 104 | model = vgg.vgg16(pretrained=True) 105 | val_transform = get_transform(name) 106 | patch_size = 16 107 | num_heads = 0 108 | else: 109 | raise ValueError(f'Cannot get model: {name}') 110 | model = model.eval() 111 | return model, val_transform, patch_size, num_heads 112 | 113 | 114 | def get_transform(name: str): 115 | if any(x in name for x in ('dino', 'mocov3', 
'convnext', 'resnet50', 'resnet101','vgg16', )): 116 | normalize = transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)) 117 | transform = transforms.Compose([transforms.ToTensor(), normalize]) 118 | else: 119 | raise NotImplementedError() 120 | return transform 121 | 122 | 123 | def get_inverse_transform(name: str): 124 | if 'dino' in name: 125 | inv_normalize = transforms.Normalize( 126 | [-0.485 / 0.229, -0.456 / 0.224, -0.406 / 0.225], 127 | [1 / 0.229, 1 / 0.224, 1 / 0.225]) 128 | transform = transforms.Compose([transforms.ToTensor(), inv_normalize]) 129 | else: 130 | raise NotImplementedError() 131 | return transform 132 | 133 | 134 | def get_image_sizes(data_dict: dict, downsample_factor: Optional[int] = None): 135 | P = data_dict['patch_size'] if downsample_factor is None else downsample_factor 136 | B, C, H, W = data_dict['shape'] 137 | assert B == 1, 'assumption violated :(' 138 | H_patch, W_patch = H // P, W // P 139 | H_pad, W_pad = H_patch * P, W_patch * P 140 | return (B, C, H, W, P, H_patch, W_patch, H_pad, W_pad) 141 | 142 | 143 | def _get_files(p: str): 144 | if Path(p).is_dir(): 145 | return sorted(Path(p).iterdir()) 146 | elif Path(p).is_file(): 147 | return Path(p).read_text().splitlines() 148 | else: 149 | raise ValueError(p) 150 | 151 | 152 | def get_paired_input_files(path1: str, path2: str): 153 | files1 = _get_files(path1) 154 | files2 = _get_files(path2) 155 | assert len(files1) == len(files2) 156 | return list(enumerate(zip(files1, files2))) 157 | 158 | 159 | def make_output_dir(output_dir, check_if_empty=True): 160 | output_dir = Path(output_dir) 161 | output_dir.mkdir(exist_ok=True, parents=True) 162 | if check_if_empty and (len(list(output_dir.iterdir())) > 0): 163 | print(f'Output dir: {str(output_dir)}') 164 | if input(f'Output dir already contains files. Continue? 
(y/n) >> ') != 'y': 165 | sys.exit() # skip because already generated 166 | 167 | 168 | def get_largest_cc(mask: np.array): 169 | from skimage.measure import label as measure_label 170 | labels = measure_label(mask) # get connected components 171 | largest_cc_index = np.argmax(np.bincount(labels.flat)[1:]) + 1 172 | largest_cc_mask = (labels == largest_cc_index) 173 | return largest_cc_mask 174 | 175 | 176 | def erode_or_dilate_mask(x: Union[torch.Tensor, np.ndarray], r: int = 0, erode=True): 177 | fn = binary_erosion if erode else binary_dilation 178 | for _ in range(r): 179 | x_new = fn(x) 180 | if x_new.sum() > 0: # do not erode the entire mask away 181 | x = x_new 182 | return x 183 | 184 | 185 | def get_border_fraction(segmap: np.array): 186 | num_border_pixels = 2 * (segmap.shape[0] + segmap.shape[1]) 187 | counts_map = {idx: 0 for idx in np.unique(segmap)} 188 | np.zeros(len(np.unique(segmap))) 189 | for border in [segmap[:, 0], segmap[:, -1], segmap[0, :], segmap[-1, :]]: 190 | unique, counts = np.unique(border, return_counts=True) 191 | for idx, count in zip(unique.tolist(), counts.tolist()): 192 | counts_map[idx] += count 193 | # normlized_counts_map = {idx: count / num_border_pixels for idx, count in counts_map.items()} 194 | indices = np.array(list(counts_map.keys())) 195 | normlized_counts = np.array(list(counts_map.values())) / num_border_pixels 196 | return indices, normlized_counts 197 | 198 | 199 | def parallel_process(inputs: Iterable, fn: Callable, multiprocessing: int = 0): 200 | start = time.time() 201 | if multiprocessing: 202 | print('Starting multiprocessing') 203 | with Pool(multiprocessing) as pool: 204 | for _ in tqdm(pool.imap(fn, inputs), total=len(inputs)): 205 | pass 206 | else: 207 | for inp in tqdm(inputs): 208 | fn(inp) 209 | print(f'Finished in {time.time() - start:.1f}s') 210 | 211 | 212 | def knn_affinity(image, n_neighbors=[20, 10], distance_weights=[2.0, 0.1]): 213 | """Computes a KNN-based affinity matrix. Note that this function requires pymatting""" 214 | try: 215 | from pymatting.util.kdtree import knn 216 | except: 217 | raise ImportError( 218 | 'Please install pymatting to compute KNN affinity matrices:\n' 219 | 'pip3 install pymatting' 220 | ) 221 | 222 | h, w = image.shape[:2] 223 | r, g, b = image.reshape(-1, 3).T 224 | n = w * h 225 | 226 | x = np.tile(np.linspace(0, 1, w), h) 227 | y = np.repeat(np.linspace(0, 1, h), w) 228 | 229 | i, j = [], [] 230 | 231 | for k, distance_weight in zip(n_neighbors, distance_weights): 232 | f = np.stack( 233 | [r, g, b, distance_weight * x, distance_weight * y], 234 | axis=1, 235 | out=np.zeros((n, 5), dtype=np.float32), 236 | ) 237 | 238 | distances, neighbors = knn(f, f, k=k) 239 | 240 | i.append(np.repeat(np.arange(n), k)) 241 | j.append(neighbors.flatten()) 242 | 243 | ij = np.concatenate(i + j) 244 | ji = np.concatenate(j + i) 245 | coo_data = np.ones(2 * sum(n_neighbors) * n) 246 | 247 | # This is our affinity matrix 248 | W = scipy.sparse.csr_matrix((coo_data, (ij, ji)), (n, n)) 249 | return W 250 | 251 | 252 | def rw_affinity(image, sigma=0.033, radius=1): 253 | """Computes a random walk-based affinity matrix. 
Note that this function requires pymatting""" 254 | try: 255 | from pymatting.laplacian.rw_laplacian import _rw_laplacian 256 | except: 257 | raise ImportError( 258 | 'Please install pymatting to compute RW affinity matrices:\n' 259 | 'pip3 install pymatting' 260 | ) 261 | h, w = image.shape[:2] 262 | n = h * w 263 | values, i_inds, j_inds = _rw_laplacian(image, sigma, radius) 264 | W = scipy.sparse.csr_matrix((values, (i_inds, j_inds)), shape=(n, n)) 265 | return W 266 | 267 | 268 | def get_diagonal(W: scipy.sparse.csr_matrix, threshold: float = 1e-12): 269 | """Gets the diagonal sum of a sparse matrix""" 270 | try: 271 | from pymatting.util.util import row_sum 272 | except: 273 | raise ImportError( 274 | 'Please install pymatting to compute the diagonal sums:\n' 275 | 'pip3 install pymatting' 276 | ) 277 | 278 | D = row_sum(W) 279 | D[D < threshold] = 1.0 # Prevent division by zero. 280 | D = scipy.sparse.diags(D) 281 | return D 282 | -------------------------------------------------------------------------------- /data/splits/pascal/val/fold2.txt: -------------------------------------------------------------------------------- 1 | 2007_000129__15 2 | 2007_000323__15 3 | 2007_000332__13 4 | 2007_000346__15 5 | 2007_000762__11 6 | 2007_000762__15 7 | 2007_000783__13 8 | 2007_000783__15 9 | 2007_000799__13 10 | 2007_000799__15 11 | 2007_000830__11 12 | 2007_000847__11 13 | 2007_000847__15 14 | 2007_000999__15 15 | 2007_001175__15 16 | 2007_001239__12 17 | 2007_001284__15 18 | 2007_001311__15 19 | 2007_001408__15 20 | 2007_001423__15 21 | 2007_001430__11 22 | 2007_001430__15 23 | 2007_001526__15 24 | 2007_001585__15 25 | 2007_001586__13 26 | 2007_001586__15 27 | 2007_001594__15 28 | 2007_001630__15 29 | 2007_001677__11 30 | 2007_001678__15 31 | 2007_001717__15 32 | 2007_001763__12 33 | 2007_001955__13 34 | 2007_002046__13 35 | 2007_002119__15 36 | 2007_002260__14 37 | 2007_002268__12 38 | 2007_002378__15 39 | 2007_002426__15 40 | 2007_002539__15 41 | 2007_002565__15 42 | 2007_002597__12 43 | 2007_002624__11 44 | 2007_002624__15 45 | 2007_002643__15 46 | 2007_002728__15 47 | 2007_002823__14 48 | 2007_002823__15 49 | 2007_002824__15 50 | 2007_002852__12 51 | 2007_003011__11 52 | 2007_003020__15 53 | 2007_003022__13 54 | 2007_003022__15 55 | 2007_003088__15 56 | 2007_003106__15 57 | 2007_003110__12 58 | 2007_003134__15 59 | 2007_003188__15 60 | 2007_003194__12 61 | 2007_003367__14 62 | 2007_003367__15 63 | 2007_003373__12 64 | 2007_003373__15 65 | 2007_003530__15 66 | 2007_003621__15 67 | 2007_003742__11 68 | 2007_003742__15 69 | 2007_003872__12 70 | 2007_004033__14 71 | 2007_004033__15 72 | 2007_004112__12 73 | 2007_004112__15 74 | 2007_004121__15 75 | 2007_004189__12 76 | 2007_004275__14 77 | 2007_004275__15 78 | 2007_004281__15 79 | 2007_004380__14 80 | 2007_004380__15 81 | 2007_004392__15 82 | 2007_004405__11 83 | 2007_004538__13 84 | 2007_004538__15 85 | 2007_004644__12 86 | 2007_004712__11 87 | 2007_004712__15 88 | 2007_004722__13 89 | 2007_004722__15 90 | 2007_004902__13 91 | 2007_004902__15 92 | 2007_005114__13 93 | 2007_005114__15 94 | 2007_005149__12 95 | 2007_005173__14 96 | 2007_005173__15 97 | 2007_005281__15 98 | 2007_005304__15 99 | 2007_005331__13 100 | 2007_005331__15 101 | 2007_005354__14 102 | 2007_005354__15 103 | 2007_005509__15 104 | 2007_005547__15 105 | 2007_005608__14 106 | 2007_005608__15 107 | 2007_005696__12 108 | 2007_005759__14 109 | 2007_005803__11 110 | 2007_005844__11 111 | 2007_005845__15 112 | 2007_006028__15 113 | 2007_006076__15 114 | 2007_006086__11 115 | 
2007_006117__15 116 | 2007_006171__12 117 | 2007_006171__15 118 | 2007_006241__11 119 | 2007_006364__13 120 | 2007_006364__15 121 | 2007_006373__15 122 | 2007_006444__12 123 | 2007_006444__15 124 | 2007_006560__15 125 | 2007_006647__14 126 | 2007_006647__15 127 | 2007_006698__15 128 | 2007_006802__15 129 | 2007_006841__15 130 | 2007_006864__15 131 | 2007_006866__13 132 | 2007_006866__15 133 | 2007_007007__11 134 | 2007_007007__15 135 | 2007_007109__13 136 | 2007_007109__15 137 | 2007_007195__15 138 | 2007_007203__15 139 | 2007_007211__14 140 | 2007_007235__15 141 | 2007_007417__12 142 | 2007_007493__15 143 | 2007_007498__11 144 | 2007_007498__15 145 | 2007_007651__11 146 | 2007_007651__15 147 | 2007_007688__14 148 | 2007_007748__13 149 | 2007_007748__15 150 | 2007_007795__15 151 | 2007_007810__11 152 | 2007_007810__15 153 | 2007_007815__15 154 | 2007_007836__15 155 | 2007_007849__15 156 | 2007_007996__15 157 | 2007_008110__15 158 | 2007_008204__15 159 | 2007_008222__12 160 | 2007_008256__13 161 | 2007_008256__15 162 | 2007_008260__12 163 | 2007_008374__15 164 | 2007_008415__12 165 | 2007_008430__15 166 | 2007_008596__13 167 | 2007_008596__15 168 | 2007_008708__15 169 | 2007_008802__13 170 | 2007_008897__15 171 | 2007_008944__15 172 | 2007_008964__12 173 | 2007_008964__15 174 | 2007_008980__12 175 | 2007_009068__15 176 | 2007_009084__12 177 | 2007_009084__14 178 | 2007_009251__13 179 | 2007_009251__15 180 | 2007_009258__15 181 | 2007_009320__15 182 | 2007_009331__12 183 | 2007_009331__13 184 | 2007_009331__15 185 | 2007_009413__11 186 | 2007_009413__15 187 | 2007_009521__11 188 | 2007_009562__12 189 | 2007_009592__12 190 | 2007_009654__15 191 | 2007_009655__15 192 | 2007_009684__15 193 | 2007_009687__15 194 | 2007_009691__14 195 | 2007_009691__15 196 | 2007_009706__11 197 | 2007_009750__15 198 | 2007_009756__14 199 | 2007_009756__15 200 | 2007_009841__13 201 | 2007_009938__14 202 | 2008_000080__12 203 | 2008_000213__15 204 | 2008_000215__15 205 | 2008_000223__15 206 | 2008_000233__15 207 | 2008_000234__15 208 | 2008_000239__12 209 | 2008_000270__12 210 | 2008_000270__15 211 | 2008_000271__15 212 | 2008_000359__15 213 | 2008_000474__15 214 | 2008_000510__15 215 | 2008_000573__11 216 | 2008_000573__15 217 | 2008_000602__13 218 | 2008_000630__15 219 | 2008_000661__12 220 | 2008_000661__15 221 | 2008_000662__15 222 | 2008_000666__15 223 | 2008_000673__15 224 | 2008_000700__15 225 | 2008_000725__15 226 | 2008_000731__15 227 | 2008_000763__11 228 | 2008_000763__15 229 | 2008_000765__13 230 | 2008_000782__14 231 | 2008_000795__15 232 | 2008_000811__14 233 | 2008_000811__15 234 | 2008_000863__12 235 | 2008_000943__12 236 | 2008_000992__15 237 | 2008_001013__15 238 | 2008_001028__15 239 | 2008_001070__12 240 | 2008_001074__15 241 | 2008_001076__15 242 | 2008_001150__14 243 | 2008_001170__15 244 | 2008_001231__15 245 | 2008_001249__15 246 | 2008_001283__15 247 | 2008_001308__15 248 | 2008_001379__12 249 | 2008_001404__15 250 | 2008_001478__12 251 | 2008_001491__15 252 | 2008_001504__15 253 | 2008_001531__15 254 | 2008_001547__15 255 | 2008_001629__15 256 | 2008_001682__13 257 | 2008_001821__15 258 | 2008_001874__15 259 | 2008_001895__12 260 | 2008_001895__15 261 | 2008_001992__13 262 | 2008_001992__15 263 | 2008_002212__15 264 | 2008_002239__12 265 | 2008_002240__14 266 | 2008_002241__15 267 | 2008_002379__11 268 | 2008_002383__14 269 | 2008_002495__15 270 | 2008_002536__12 271 | 2008_002588__15 272 | 2008_002775__11 273 | 2008_002775__15 274 | 2008_002835__13 275 | 2008_002835__15 276 | 
2008_002859__12 277 | 2008_002864__11 278 | 2008_002864__15 279 | 2008_002904__12 280 | 2008_002929__15 281 | 2008_002936__12 282 | 2008_002942__15 283 | 2008_002958__12 284 | 2008_003034__15 285 | 2008_003076__15 286 | 2008_003108__15 287 | 2008_003141__15 288 | 2008_003210__15 289 | 2008_003238__12 290 | 2008_003238__15 291 | 2008_003330__15 292 | 2008_003333__14 293 | 2008_003333__15 294 | 2008_003379__13 295 | 2008_003451__14 296 | 2008_003451__15 297 | 2008_003461__13 298 | 2008_003461__15 299 | 2008_003477__11 300 | 2008_003492__15 301 | 2008_003511__12 302 | 2008_003511__15 303 | 2008_003546__15 304 | 2008_003576__12 305 | 2008_003676__15 306 | 2008_003733__15 307 | 2008_003782__13 308 | 2008_003856__15 309 | 2008_003874__15 310 | 2008_004101__15 311 | 2008_004140__11 312 | 2008_004140__15 313 | 2008_004175__13 314 | 2008_004345__14 315 | 2008_004396__13 316 | 2008_004399__14 317 | 2008_004399__15 318 | 2008_004575__11 319 | 2008_004575__15 320 | 2008_004624__13 321 | 2008_004654__15 322 | 2008_004687__13 323 | 2008_004705__13 324 | 2008_005049__14 325 | 2008_005089__15 326 | 2008_005145__11 327 | 2008_005197__12 328 | 2008_005197__15 329 | 2008_005245__14 330 | 2008_005245__15 331 | 2008_005399__15 332 | 2008_005422__14 333 | 2008_005445__15 334 | 2008_005525__13 335 | 2008_005637__14 336 | 2008_005642__13 337 | 2008_005691__13 338 | 2008_005738__15 339 | 2008_005812__15 340 | 2008_005915__14 341 | 2008_006008__11 342 | 2008_006036__13 343 | 2008_006108__11 344 | 2008_006108__15 345 | 2008_006130__12 346 | 2008_006216__15 347 | 2008_006219__13 348 | 2008_006254__15 349 | 2008_006275__15 350 | 2008_006341__15 351 | 2008_006408__11 352 | 2008_006408__15 353 | 2008_006526__14 354 | 2008_006526__15 355 | 2008_006554__15 356 | 2008_006722__12 357 | 2008_006722__15 358 | 2008_006874__14 359 | 2008_006874__15 360 | 2008_006981__12 361 | 2008_007048__11 362 | 2008_007219__15 363 | 2008_007378__11 364 | 2008_007378__12 365 | 2008_007392__13 366 | 2008_007392__15 367 | 2008_007402__11 368 | 2008_007402__15 369 | 2008_007513__12 370 | 2008_007737__15 371 | 2008_007828__15 372 | 2008_007945__13 373 | 2008_007994__15 374 | 2008_008051__11 375 | 2008_008127__14 376 | 2008_008127__15 377 | 2008_008221__15 378 | 2008_008335__11 379 | 2008_008335__15 380 | 2008_008362__11 381 | 2008_008362__15 382 | 2008_008392__13 383 | 2008_008393__13 384 | 2008_008421__13 385 | 2008_008469__15 386 | 2009_000012__13 387 | 2009_000074__14 388 | 2009_000074__15 389 | 2009_000156__12 390 | 2009_000219__15 391 | 2009_000309__15 392 | 2009_000412__13 393 | 2009_000418__15 394 | 2009_000421__15 395 | 2009_000457__15 396 | 2009_000704__15 397 | 2009_000705__13 398 | 2009_000727__13 399 | 2009_000730__14 400 | 2009_000730__15 401 | 2009_000825__14 402 | 2009_000825__15 403 | 2009_000839__12 404 | 2009_000892__12 405 | 2009_000931__13 406 | 2009_000935__12 407 | 2009_001215__11 408 | 2009_001215__15 409 | 2009_001299__15 410 | 2009_001433__13 411 | 2009_001433__15 412 | 2009_001535__12 413 | 2009_001663__15 414 | 2009_001687__12 415 | 2009_001687__15 416 | 2009_001718__15 417 | 2009_001768__15 418 | 2009_001854__15 419 | 2009_002012__12 420 | 2009_002042__15 421 | 2009_002097__13 422 | 2009_002155__12 423 | 2009_002165__13 424 | 2009_002185__15 425 | 2009_002239__14 426 | 2009_002239__15 427 | 2009_002317__14 428 | 2009_002317__15 429 | 2009_002346__12 430 | 2009_002346__15 431 | 2009_002372__15 432 | 2009_002382__14 433 | 2009_002382__15 434 | 2009_002415__11 435 | 2009_002445__12 436 | 2009_002487__11 437 | 
2009_002539__12 438 | 2009_002571__11 439 | 2009_002584__15 440 | 2009_002649__15 441 | 2009_002651__14 442 | 2009_002651__15 443 | 2009_002732__15 444 | 2009_002975__13 445 | 2009_003003__11 446 | 2009_003003__15 447 | 2009_003063__12 448 | 2009_003065__15 449 | 2009_003071__11 450 | 2009_003071__15 451 | 2009_003123__11 452 | 2009_003196__14 453 | 2009_003217__12 454 | 2009_003241__12 455 | 2009_003269__15 456 | 2009_003323__13 457 | 2009_003323__15 458 | 2009_003466__12 459 | 2009_003481__13 460 | 2009_003494__15 461 | 2009_003507__11 462 | 2009_003576__14 463 | 2009_003576__15 464 | 2009_003756__12 465 | 2009_003804__13 466 | 2009_003810__12 467 | 2009_003849__11 468 | 2009_003849__15 469 | 2009_003903__13 470 | 2009_003928__12 471 | 2009_003991__11 472 | 2009_003991__15 473 | 2009_004033__12 474 | 2009_004043__14 475 | 2009_004043__15 476 | 2009_004140__11 477 | 2009_004221__15 478 | 2009_004455__14 479 | 2009_004497__13 480 | 2009_004507__12 481 | 2009_004507__15 482 | 2009_004581__12 483 | 2009_004592__12 484 | 2009_004738__14 485 | 2009_004738__15 486 | 2009_004848__15 487 | 2009_004859__11 488 | 2009_004859__15 489 | 2009_004942__13 490 | 2009_004987__14 491 | 2009_004987__15 492 | 2009_004994__12 493 | 2009_004994__15 494 | 2009_005038__11 495 | 2009_005038__15 496 | 2009_005078__14 497 | 2009_005087__15 498 | 2009_005217__13 499 | 2009_005217__15 500 | 2010_000003__12 501 | 2010_000038__13 502 | 2010_000038__15 503 | 2010_000087__14 504 | 2010_000087__15 505 | 2010_000110__12 506 | 2010_000110__15 507 | 2010_000159__12 508 | 2010_000174__11 509 | 2010_000174__15 510 | 2010_000216__12 511 | 2010_000238__15 512 | 2010_000256__15 513 | 2010_000422__12 514 | 2010_000530__15 515 | 2010_000559__15 516 | 2010_000639__12 517 | 2010_000666__13 518 | 2010_000666__15 519 | 2010_000738__15 520 | 2010_000788__12 521 | 2010_000874__13 522 | 2010_000904__12 523 | 2010_001024__15 524 | 2010_001124__12 525 | 2010_001251__14 526 | 2010_001264__12 527 | 2010_001313__14 528 | 2010_001313__15 529 | 2010_001367__15 530 | 2010_001376__12 531 | 2010_001451__13 532 | 2010_001553__14 533 | 2010_001563__12 534 | 2010_001563__15 535 | 2010_001579__11 536 | 2010_001579__15 537 | 2010_001692__15 538 | 2010_001699__15 539 | 2010_001734__15 540 | 2010_001767__15 541 | 2010_001851__11 542 | 2010_001908__12 543 | 2010_001956__12 544 | 2010_002017__15 545 | 2010_002137__15 546 | 2010_002161__13 547 | 2010_002161__15 548 | 2010_002228__12 549 | 2010_002251__14 550 | 2010_002251__15 551 | 2010_002271__14 552 | 2010_002336__11 553 | 2010_002396__14 554 | 2010_002396__15 555 | 2010_002480__12 556 | 2010_002623__15 557 | 2010_002691__13 558 | 2010_002763__15 559 | 2010_002792__15 560 | 2010_002902__15 561 | 2010_002929__15 562 | 2010_003014__15 563 | 2010_003060__12 564 | 2010_003187__12 565 | 2010_003207__14 566 | 2010_003239__15 567 | 2010_003325__11 568 | 2010_003325__15 569 | 2010_003381__15 570 | 2010_003409__15 571 | 2010_003446__15 572 | 2010_003506__12 573 | 2010_003531__11 574 | 2010_003532__13 575 | 2010_003597__11 576 | 2010_003597__15 577 | 2010_003746__12 578 | 2010_003746__15 579 | 2010_003947__14 580 | 2010_003971__11 581 | 2010_004042__14 582 | 2010_004165__12 583 | 2010_004165__15 584 | 2010_004219__14 585 | 2010_004219__15 586 | 2010_004337__15 587 | 2010_004355__14 588 | 2010_004432__15 589 | 2010_004472__15 590 | 2010_004479__15 591 | 2010_004519__13 592 | 2010_004550__12 593 | 2010_004559__15 594 | 2010_004628__12 595 | 2010_004697__14 596 | 2010_004697__15 597 | 2010_004795__12 598 | 
2010_004815__15 599 | 2010_004825__11 600 | 2010_004828__15 601 | 2010_004856__13 602 | 2010_004941__14 603 | 2010_004951__15 604 | 2010_005046__11 605 | 2010_005046__15 606 | 2010_005118__15 607 | 2010_005159__12 608 | 2010_005160__14 609 | 2010_005166__15 610 | 2010_005174__13 611 | 2010_005206__12 612 | 2010_005245__12 613 | 2010_005245__15 614 | 2010_005252__14 615 | 2010_005252__15 616 | 2010_005284__15 617 | 2010_005366__14 618 | 2010_005433__14 619 | 2010_005501__14 620 | 2010_005575__12 621 | 2010_005582__15 622 | 2010_005606__15 623 | 2010_005626__11 624 | 2010_005626__15 625 | 2010_005644__12 626 | 2010_005709__15 627 | 2010_005871__15 628 | 2010_005991__12 629 | 2010_005991__15 630 | 2010_005992__12 631 | 2011_000045__12 632 | 2011_000051__15 633 | 2011_000054__15 634 | 2011_000178__15 635 | 2011_000226__11 636 | 2011_000248__15 637 | 2011_000338__11 638 | 2011_000396__13 639 | 2011_000435__15 640 | 2011_000438__15 641 | 2011_000455__14 642 | 2011_000455__15 643 | 2011_000479__15 644 | 2011_000512__14 645 | 2011_000526__13 646 | 2011_000536__12 647 | 2011_000566__15 648 | 2011_000585__15 649 | 2011_000598__11 650 | 2011_000618__14 651 | 2011_000618__15 652 | 2011_000638__15 653 | 2011_000780__15 654 | 2011_000809__11 655 | 2011_000809__15 656 | 2011_000843__15 657 | 2011_000953__11 658 | 2011_000953__15 659 | 2011_001014__12 660 | 2011_001060__15 661 | 2011_001069__15 662 | 2011_001071__15 663 | 2011_001159__15 664 | 2011_001276__11 665 | 2011_001276__12 666 | 2011_001276__15 667 | 2011_001346__15 668 | 2011_001416__15 669 | 2011_001447__15 670 | 2011_001530__15 671 | 2011_001567__15 672 | 2011_001619__15 673 | 2011_001642__12 674 | 2011_001665__11 675 | 2011_001674__15 676 | 2011_001714__12 677 | 2011_001714__15 678 | 2011_001722__13 679 | 2011_001745__12 680 | 2011_001794__15 681 | 2011_001862__11 682 | 2011_001862__12 683 | 2011_001868__12 684 | 2011_001984__12 685 | 2011_001988__15 686 | 2011_002002__15 687 | 2011_002040__12 688 | 2011_002075__11 689 | 2011_002075__15 690 | 2011_002098__12 691 | 2011_002110__12 692 | 2011_002110__15 693 | 2011_002121__12 694 | 2011_002124__15 695 | 2011_002156__12 696 | 2011_002200__11 697 | 2011_002200__15 698 | 2011_002247__15 699 | 2011_002279__12 700 | 2011_002298__12 701 | 2011_002308__15 702 | 2011_002317__15 703 | 2011_002322__14 704 | 2011_002322__15 705 | 2011_002343__15 706 | 2011_002358__11 707 | 2011_002358__15 708 | 2011_002371__12 709 | 2011_002498__15 710 | 2011_002509__15 711 | 2011_002532__15 712 | 2011_002575__15 713 | 2011_002578__15 714 | 2011_002589__12 715 | 2011_002623__15 716 | 2011_002641__15 717 | 2011_002675__15 718 | 2011_002951__13 719 | 2011_002997__15 720 | 2011_003019__14 721 | 2011_003019__15 722 | 2011_003085__13 723 | 2011_003114__15 724 | 2011_003240__15 725 | 2011_003256__12 726 | -------------------------------------------------------------------------------- /data/splits/DLRSD/trn/fold1.txt: -------------------------------------------------------------------------------- 1 | baseballdiamond96__6 2 | intersection07__6 3 | sparseresidential11__6 4 | sparseresidential60__6 5 | sparseresidential72__6 6 | tenniscourt00__6 7 | tenniscourt01__6 8 | tenniscourt02__6 9 | tenniscourt03__6 10 | tenniscourt04__6 11 | tenniscourt05__6 12 | tenniscourt06__6 13 | tenniscourt07__6 14 | tenniscourt08__6 15 | tenniscourt09__6 16 | tenniscourt10__6 17 | tenniscourt11__6 18 | tenniscourt12__6 19 | tenniscourt13__6 20 | tenniscourt14__6 21 | tenniscourt15__6 22 | tenniscourt16__6 23 | tenniscourt17__6 24 | tenniscourt18__6 
25 | tenniscourt19__6 26 | tenniscourt20__6 27 | tenniscourt21__6 28 | tenniscourt22__6 29 | tenniscourt23__6 30 | tenniscourt24__6 31 | tenniscourt25__6 32 | tenniscourt26__6 33 | tenniscourt27__6 34 | tenniscourt28__6 35 | tenniscourt29__6 36 | tenniscourt30__6 37 | tenniscourt31__6 38 | tenniscourt32__6 39 | tenniscourt33__6 40 | tenniscourt34__6 41 | tenniscourt35__6 42 | tenniscourt36__6 43 | tenniscourt37__6 44 | tenniscourt38__6 45 | tenniscourt39__6 46 | tenniscourt40__6 47 | tenniscourt41__6 48 | tenniscourt42__6 49 | tenniscourt43__6 50 | tenniscourt44__6 51 | tenniscourt45__6 52 | tenniscourt46__6 53 | tenniscourt47__6 54 | tenniscourt48__6 55 | tenniscourt49__6 56 | tenniscourt50__6 57 | tenniscourt51__6 58 | tenniscourt52__6 59 | tenniscourt53__6 60 | tenniscourt54__6 61 | tenniscourt55__6 62 | tenniscourt56__6 63 | tenniscourt57__6 64 | tenniscourt58__6 65 | tenniscourt59__6 66 | tenniscourt60__6 67 | tenniscourt61__6 68 | tenniscourt62__6 69 | tenniscourt63__6 70 | tenniscourt64__6 71 | tenniscourt65__6 72 | tenniscourt66__6 73 | tenniscourt67__6 74 | tenniscourt68__6 75 | tenniscourt69__6 76 | tenniscourt70__6 77 | tenniscourt71__6 78 | tenniscourt72__6 79 | tenniscourt73__6 80 | tenniscourt74__6 81 | tenniscourt75__6 82 | tenniscourt76__6 83 | tenniscourt77__6 84 | tenniscourt78__6 85 | tenniscourt79__6 86 | tenniscourt80__6 87 | tenniscourt81__6 88 | tenniscourt82__6 89 | tenniscourt83__6 90 | tenniscourt84__6 91 | tenniscourt85__6 92 | tenniscourt86__6 93 | tenniscourt87__6 94 | tenniscourt88__6 95 | tenniscourt89__6 96 | tenniscourt90__6 97 | tenniscourt91__6 98 | tenniscourt92__6 99 | tenniscourt93__6 100 | tenniscourt94__6 101 | tenniscourt95__6 102 | tenniscourt96__6 103 | tenniscourt97__6 104 | tenniscourt98__6 105 | tenniscourt99__6 106 | harbor00__7 107 | harbor01__7 108 | harbor02__7 109 | harbor03__7 110 | harbor04__7 111 | harbor05__7 112 | harbor06__7 113 | harbor07__7 114 | harbor08__7 115 | harbor09__7 116 | harbor10__7 117 | harbor11__7 118 | harbor12__7 119 | harbor13__7 120 | harbor14__7 121 | harbor15__7 122 | harbor16__7 123 | harbor17__7 124 | harbor18__7 125 | harbor19__7 126 | harbor20__7 127 | harbor21__7 128 | harbor22__7 129 | harbor23__7 130 | harbor24__7 131 | harbor25__7 132 | harbor26__7 133 | harbor27__7 134 | harbor28__7 135 | harbor29__7 136 | harbor30__7 137 | harbor31__7 138 | harbor32__7 139 | harbor33__7 140 | harbor34__7 141 | harbor35__7 142 | harbor36__7 143 | harbor37__7 144 | harbor38__7 145 | harbor39__7 146 | harbor40__7 147 | harbor41__7 148 | harbor42__7 149 | harbor43__7 150 | harbor44__7 151 | harbor45__7 152 | harbor46__7 153 | harbor47__7 154 | harbor48__7 155 | harbor49__7 156 | harbor50__7 157 | harbor51__7 158 | harbor52__7 159 | harbor53__7 160 | harbor54__7 161 | harbor55__7 162 | harbor56__7 163 | harbor57__7 164 | harbor58__7 165 | harbor59__7 166 | harbor60__7 167 | harbor61__7 168 | harbor62__7 169 | harbor63__7 170 | harbor64__7 171 | harbor65__7 172 | harbor66__7 173 | harbor67__7 174 | harbor68__7 175 | harbor69__7 176 | harbor70__7 177 | harbor71__7 178 | harbor72__7 179 | harbor73__7 180 | harbor74__7 181 | harbor75__7 182 | harbor76__7 183 | harbor77__7 184 | harbor78__7 185 | harbor79__7 186 | harbor80__7 187 | harbor81__7 188 | harbor82__7 189 | harbor83__7 190 | harbor84__7 191 | harbor85__7 192 | harbor86__7 193 | harbor87__7 194 | harbor88__7 195 | harbor89__7 196 | harbor90__7 197 | harbor91__7 198 | harbor92__7 199 | harbor93__7 200 | harbor94__7 201 | harbor95__7 202 | harbor96__7 203 | harbor97__7 
204 | harbor98__7 205 | harbor99__7 206 | agricultural00__8 207 | agricultural01__8 208 | agricultural02__8 209 | agricultural03__8 210 | agricultural05__8 211 | agricultural06__8 212 | agricultural07__8 213 | agricultural08__8 214 | agricultural09__8 215 | agricultural10__8 216 | agricultural11__8 217 | agricultural12__8 218 | agricultural13__8 219 | agricultural14__8 220 | agricultural15__8 221 | agricultural16__8 222 | agricultural17__8 223 | agricultural18__8 224 | agricultural19__8 225 | agricultural20__8 226 | agricultural24__8 227 | agricultural25__8 228 | agricultural26__8 229 | agricultural27__8 230 | agricultural28__8 231 | agricultural29__8 232 | agricultural30__8 233 | agricultural31__8 234 | agricultural32__8 235 | agricultural33__8 236 | agricultural34__8 237 | agricultural35__8 238 | agricultural36__8 239 | agricultural37__8 240 | agricultural38__8 241 | agricultural39__8 242 | agricultural40__8 243 | agricultural41__8 244 | agricultural42__8 245 | agricultural43__8 246 | agricultural44__8 247 | agricultural45__8 248 | agricultural46__8 249 | agricultural47__8 250 | agricultural48__8 251 | agricultural49__8 252 | agricultural50__8 253 | agricultural51__8 254 | agricultural52__8 255 | agricultural53__8 256 | agricultural54__8 257 | agricultural55__8 258 | agricultural56__8 259 | agricultural57__8 260 | agricultural58__8 261 | agricultural59__8 262 | agricultural60__8 263 | agricultural61__8 264 | agricultural62__8 265 | agricultural63__8 266 | agricultural64__8 267 | agricultural65__8 268 | agricultural66__8 269 | agricultural67__8 270 | agricultural68__8 271 | agricultural69__8 272 | agricultural70__8 273 | agricultural71__8 274 | agricultural72__8 275 | agricultural73__8 276 | agricultural74__8 277 | agricultural75__8 278 | agricultural76__8 279 | agricultural77__8 280 | agricultural78__8 281 | agricultural79__8 282 | agricultural80__8 283 | agricultural81__8 284 | agricultural82__8 285 | agricultural83__8 286 | agricultural84__8 287 | agricultural85__8 288 | agricultural86__8 289 | agricultural87__8 290 | agricultural88__8 291 | agricultural89__8 292 | agricultural90__8 293 | agricultural91__8 294 | agricultural92__8 295 | agricultural93__8 296 | agricultural94__8 297 | agricultural95__8 298 | agricultural96__8 299 | agricultural97__8 300 | agricultural98__8 301 | agricultural99__8 302 | intersection35__8 303 | river53__8 304 | sparseresidential79__8 305 | sparseresidential81__8 306 | sparseresidential82__8 307 | sparseresidential83__8 308 | sparseresidential84__8 309 | airplane00__9 310 | airplane01__9 311 | airplane02__9 312 | airplane20__9 313 | airplane24__9 314 | airplane25__9 315 | airplane31__9 316 | airplane35__9 317 | airplane37__9 318 | airplane38__9 319 | airplane78__9 320 | baseballdiamond00__9 321 | baseballdiamond01__9 322 | baseballdiamond02__9 323 | baseballdiamond03__9 324 | baseballdiamond04__9 325 | baseballdiamond05__9 326 | baseballdiamond06__9 327 | baseballdiamond07__9 328 | baseballdiamond08__9 329 | baseballdiamond09__9 330 | baseballdiamond10__9 331 | baseballdiamond11__9 332 | baseballdiamond12__9 333 | baseballdiamond13__9 334 | baseballdiamond14__9 335 | baseballdiamond15__9 336 | baseballdiamond16__9 337 | baseballdiamond17__9 338 | baseballdiamond18__9 339 | baseballdiamond19__9 340 | baseballdiamond20__9 341 | baseballdiamond21__9 342 | baseballdiamond22__9 343 | baseballdiamond23__9 344 | baseballdiamond24__9 345 | baseballdiamond25__9 346 | baseballdiamond26__9 347 | baseballdiamond27__9 348 | baseballdiamond28__9 349 | 
baseballdiamond29__9 350 | baseballdiamond30__9 351 | baseballdiamond31__9 352 | baseballdiamond32__9 353 | baseballdiamond33__9 354 | baseballdiamond34__9 355 | baseballdiamond35__9 356 | baseballdiamond36__9 357 | baseballdiamond37__9 358 | baseballdiamond38__9 359 | baseballdiamond39__9 360 | baseballdiamond40__9 361 | baseballdiamond41__9 362 | baseballdiamond42__9 363 | baseballdiamond43__9 364 | baseballdiamond44__9 365 | baseballdiamond45__9 366 | baseballdiamond46__9 367 | baseballdiamond47__9 368 | baseballdiamond48__9 369 | baseballdiamond49__9 370 | baseballdiamond50__9 371 | baseballdiamond51__9 372 | baseballdiamond52__9 373 | baseballdiamond53__9 374 | baseballdiamond54__9 375 | baseballdiamond55__9 376 | baseballdiamond56__9 377 | baseballdiamond57__9 378 | baseballdiamond58__9 379 | baseballdiamond62__9 380 | baseballdiamond63__9 381 | baseballdiamond64__9 382 | baseballdiamond65__9 383 | baseballdiamond66__9 384 | baseballdiamond67__9 385 | baseballdiamond68__9 386 | baseballdiamond69__9 387 | baseballdiamond70__9 388 | baseballdiamond71__9 389 | baseballdiamond72__9 390 | baseballdiamond73__9 391 | baseballdiamond74__9 392 | baseballdiamond75__9 393 | baseballdiamond76__9 394 | baseballdiamond77__9 395 | baseballdiamond78__9 396 | baseballdiamond79__9 397 | baseballdiamond80__9 398 | baseballdiamond81__9 399 | baseballdiamond82__9 400 | baseballdiamond83__9 401 | baseballdiamond84__9 402 | baseballdiamond85__9 403 | baseballdiamond86__9 404 | baseballdiamond87__9 405 | baseballdiamond88__9 406 | baseballdiamond89__9 407 | baseballdiamond90__9 408 | baseballdiamond91__9 409 | baseballdiamond92__9 410 | baseballdiamond93__9 411 | baseballdiamond94__9 412 | baseballdiamond95__9 413 | baseballdiamond96__9 414 | baseballdiamond97__9 415 | baseballdiamond98__9 416 | baseballdiamond99__9 417 | beach42__9 418 | buildings30__9 419 | buildings39__9 420 | buildings51__9 421 | buildings52__9 422 | buildings53__9 423 | buildings56__9 424 | buildings57__9 425 | buildings58__9 426 | buildings60__9 427 | buildings61__9 428 | buildings63__9 429 | buildings64__9 430 | buildings67__9 431 | buildings71__9 432 | buildings72__9 433 | buildings73__9 434 | buildings74__9 435 | buildings76__9 436 | buildings77__9 437 | buildings79__9 438 | buildings80__9 439 | buildings84__9 440 | buildings85__9 441 | buildings93__9 442 | buildings94__9 443 | denseresidential01__9 444 | denseresidential02__9 445 | denseresidential03__9 446 | denseresidential06__9 447 | denseresidential09__9 448 | denseresidential10__9 449 | denseresidential20__9 450 | denseresidential21__9 451 | denseresidential22__9 452 | denseresidential23__9 453 | denseresidential24__9 454 | denseresidential25__9 455 | denseresidential30__9 456 | denseresidential31__9 457 | denseresidential32__9 458 | denseresidential41__9 459 | denseresidential43__9 460 | denseresidential44__9 461 | denseresidential50__9 462 | denseresidential51__9 463 | denseresidential52__9 464 | denseresidential53__9 465 | denseresidential55__9 466 | denseresidential56__9 467 | denseresidential57__9 468 | denseresidential58__9 469 | denseresidential59__9 470 | denseresidential61__9 471 | denseresidential62__9 472 | denseresidential63__9 473 | denseresidential64__9 474 | denseresidential66__9 475 | denseresidential68__9 476 | denseresidential69__9 477 | denseresidential70__9 478 | denseresidential71__9 479 | denseresidential72__9 480 | denseresidential74__9 481 | denseresidential75__9 482 | denseresidential76__9 483 | denseresidential77__9 484 | denseresidential78__9 485 | 
denseresidential79__9 486 | denseresidential80__9 487 | denseresidential81__9 488 | denseresidential82__9 489 | denseresidential83__9 490 | denseresidential84__9 491 | denseresidential86__9 492 | denseresidential87__9 493 | denseresidential98__9 494 | denseresidential99__9 495 | forest08__9 496 | freeway00__9 497 | freeway04__9 498 | freeway06__9 499 | freeway07__9 500 | freeway08__9 501 | freeway10__9 502 | freeway11__9 503 | freeway13__9 504 | freeway14__9 505 | freeway15__9 506 | freeway16__9 507 | freeway17__9 508 | freeway18__9 509 | freeway19__9 510 | freeway20__9 511 | freeway21__9 512 | freeway22__9 513 | freeway23__9 514 | freeway25__9 515 | freeway26__9 516 | freeway27__9 517 | freeway28__9 518 | freeway29__9 519 | freeway30__9 520 | freeway31__9 521 | freeway32__9 522 | freeway33__9 523 | freeway34__9 524 | freeway35__9 525 | freeway36__9 526 | freeway37__9 527 | freeway38__9 528 | freeway39__9 529 | freeway40__9 530 | freeway41__9 531 | freeway64__9 532 | freeway65__9 533 | freeway66__9 534 | freeway71__9 535 | freeway72__9 536 | freeway73__9 537 | freeway74__9 538 | freeway75__9 539 | freeway77__9 540 | freeway78__9 541 | freeway79__9 542 | freeway80__9 543 | freeway81__9 544 | freeway82__9 545 | freeway85__9 546 | freeway89__9 547 | freeway90__9 548 | freeway91__9 549 | freeway92__9 550 | freeway96__9 551 | freeway97__9 552 | freeway99__9 553 | golfcourse00__9 554 | golfcourse01__9 555 | golfcourse02__9 556 | golfcourse03__9 557 | golfcourse04__9 558 | golfcourse05__9 559 | golfcourse06__9 560 | golfcourse07__9 561 | golfcourse08__9 562 | golfcourse09__9 563 | golfcourse10__9 564 | golfcourse11__9 565 | golfcourse12__9 566 | golfcourse13__9 567 | golfcourse14__9 568 | golfcourse15__9 569 | golfcourse16__9 570 | golfcourse17__9 571 | golfcourse18__9 572 | golfcourse19__9 573 | golfcourse20__9 574 | golfcourse21__9 575 | golfcourse22__9 576 | golfcourse23__9 577 | golfcourse24__9 578 | golfcourse25__9 579 | golfcourse26__9 580 | golfcourse27__9 581 | golfcourse28__9 582 | golfcourse29__9 583 | golfcourse30__9 584 | golfcourse31__9 585 | golfcourse32__9 586 | golfcourse33__9 587 | golfcourse34__9 588 | golfcourse35__9 589 | golfcourse36__9 590 | golfcourse37__9 591 | golfcourse38__9 592 | golfcourse39__9 593 | golfcourse40__9 594 | golfcourse41__9 595 | golfcourse42__9 596 | golfcourse43__9 597 | golfcourse44__9 598 | golfcourse45__9 599 | golfcourse46__9 600 | golfcourse47__9 601 | golfcourse48__9 602 | golfcourse49__9 603 | golfcourse50__9 604 | golfcourse51__9 605 | golfcourse52__9 606 | golfcourse53__9 607 | golfcourse54__9 608 | golfcourse55__9 609 | golfcourse56__9 610 | golfcourse57__9 611 | golfcourse58__9 612 | golfcourse59__9 613 | golfcourse60__9 614 | golfcourse61__9 615 | golfcourse62__9 616 | golfcourse63__9 617 | golfcourse64__9 618 | golfcourse65__9 619 | golfcourse66__9 620 | golfcourse67__9 621 | golfcourse68__9 622 | golfcourse69__9 623 | golfcourse70__9 624 | golfcourse71__9 625 | golfcourse72__9 626 | golfcourse73__9 627 | golfcourse74__9 628 | golfcourse75__9 629 | golfcourse76__9 630 | golfcourse77__9 631 | golfcourse78__9 632 | golfcourse79__9 633 | golfcourse80__9 634 | golfcourse81__9 635 | golfcourse82__9 636 | golfcourse83__9 637 | golfcourse84__9 638 | golfcourse85__9 639 | golfcourse86__9 640 | golfcourse87__9 641 | golfcourse88__9 642 | golfcourse89__9 643 | golfcourse90__9 644 | golfcourse91__9 645 | golfcourse92__9 646 | golfcourse93__9 647 | golfcourse94__9 648 | golfcourse95__9 649 | golfcourse96__9 650 | golfcourse97__9 651 | golfcourse98__9 
652 | golfcourse99__9 653 | harbor70__9 654 | intersection00__9 655 | intersection03__9 656 | intersection04__9 657 | intersection05__9 658 | intersection06__9 659 | intersection09__9 660 | intersection10__9 661 | intersection12__9 662 | intersection14__9 663 | intersection16__9 664 | intersection17__9 665 | intersection18__9 666 | intersection19__9 667 | intersection20__9 668 | intersection21__9 669 | intersection22__9 670 | intersection23__9 671 | intersection24__9 672 | intersection25__9 673 | intersection26__9 674 | intersection27__9 675 | intersection28__9 676 | intersection29__9 677 | intersection30__9 678 | intersection31__9 679 | intersection32__9 680 | intersection33__9 681 | intersection34__9 682 | intersection35__9 683 | intersection36__9 684 | intersection37__9 685 | intersection39__9 686 | intersection43__9 687 | intersection44__9 688 | intersection45__9 689 | intersection46__9 690 | intersection49__9 691 | intersection51__9 692 | intersection52__9 693 | intersection56__9 694 | intersection57__9 695 | intersection58__9 696 | intersection60__9 697 | intersection61__9 698 | intersection62__9 699 | intersection63__9 700 | intersection64__9 701 | intersection65__9 702 | intersection66__9 703 | intersection67__9 704 | intersection68__9 705 | intersection69__9 706 | intersection76__9 707 | intersection77__9 708 | intersection78__9 709 | intersection79__9 710 | intersection80__9 711 | intersection81__9 712 | intersection82__9 713 | intersection83__9 714 | intersection84__9 715 | intersection85__9 716 | intersection87__9 717 | intersection93__9 718 | intersection94__9 719 | intersection95__9 720 | intersection97__9 721 | mediumresidential00__9 722 | mediumresidential01__9 723 | mediumresidential02__9 724 | mediumresidential03__9 725 | mediumresidential04__9 726 | mediumresidential05__9 727 | mediumresidential06__9 728 | mediumresidential07__9 729 | mediumresidential08__9 730 | mediumresidential09__9 731 | mediumresidential10__9 732 | mediumresidential11__9 733 | mediumresidential12__9 734 | mediumresidential13__9 735 | mediumresidential14__9 736 | mediumresidential15__9 737 | mediumresidential16__9 738 | mediumresidential17__9 739 | mediumresidential18__9 740 | mediumresidential19__9 741 | mediumresidential20__9 742 | mediumresidential21__9 743 | mediumresidential22__9 744 | mediumresidential23__9 745 | mediumresidential24__9 746 | mediumresidential25__9 747 | mediumresidential26__9 748 | mediumresidential27__9 749 | mediumresidential28__9 750 | mediumresidential29__9 751 | mediumresidential30__9 752 | mediumresidential31__9 753 | mediumresidential32__9 754 | mediumresidential34__9 755 | mediumresidential37__9 756 | mediumresidential40__9 757 | mediumresidential41__9 758 | mediumresidential44__9 759 | mediumresidential46__9 760 | mediumresidential47__9 761 | mediumresidential48__9 762 | mediumresidential50__9 763 | mediumresidential51__9 764 | mediumresidential52__9 765 | mediumresidential54__9 766 | mediumresidential56__9 767 | mediumresidential57__9 768 | mediumresidential58__9 769 | mediumresidential59__9 770 | mediumresidential60__9 771 | mediumresidential61__9 772 | mediumresidential62__9 773 | mediumresidential63__9 774 | mediumresidential64__9 775 | mediumresidential65__9 776 | mediumresidential66__9 777 | mediumresidential67__9 778 | mediumresidential68__9 779 | mediumresidential69__9 780 | mediumresidential71__9 781 | mediumresidential72__9 782 | mediumresidential73__9 783 | mediumresidential74__9 784 | mediumresidential76__9 785 | mediumresidential77__9 786 | 
mediumresidential78__9 787 | mediumresidential79__9 788 | mediumresidential80__9 789 | mediumresidential81__9 790 | mediumresidential82__9 791 | mediumresidential83__9 792 | mediumresidential84__9 793 | mediumresidential85__9 794 | mediumresidential86__9 795 | mediumresidential87__9 796 | mediumresidential88__9 797 | mediumresidential89__9 798 | mediumresidential90__9 799 | mediumresidential91__9 800 | mediumresidential92__9 801 | mediumresidential93__9 802 | mediumresidential94__9 803 | mediumresidential95__9 804 | mediumresidential96__9 805 | mediumresidential97__9 806 | mediumresidential98__9 807 | mediumresidential99__9 808 | mobilehomepark01__9 809 | mobilehomepark08__9 810 | mobilehomepark09__9 811 | mobilehomepark18__9 812 | mobilehomepark23__9 813 | mobilehomepark24__9 814 | mobilehomepark25__9 815 | mobilehomepark26__9 816 | mobilehomepark28__9 817 | mobilehomepark29__9 818 | mobilehomepark30__9 819 | mobilehomepark31__9 820 | mobilehomepark32__9 821 | mobilehomepark33__9 822 | mobilehomepark34__9 823 | mobilehomepark35__9 824 | mobilehomepark36__9 825 | mobilehomepark37__9 826 | mobilehomepark38__9 827 | mobilehomepark39__9 828 | mobilehomepark46__9 829 | mobilehomepark47__9 830 | mobilehomepark48__9 831 | mobilehomepark49__9 832 | mobilehomepark50__9 833 | mobilehomepark52__9 834 | mobilehomepark53__9 835 | mobilehomepark54__9 836 | mobilehomepark55__9 837 | mobilehomepark56__9 838 | mobilehomepark57__9 839 | mobilehomepark58__9 840 | mobilehomepark59__9 841 | mobilehomepark60__9 842 | mobilehomepark61__9 843 | mobilehomepark62__9 844 | mobilehomepark63__9 845 | mobilehomepark64__9 846 | mobilehomepark65__9 847 | mobilehomepark66__9 848 | mobilehomepark67__9 849 | mobilehomepark68__9 850 | mobilehomepark69__9 851 | mobilehomepark70__9 852 | mobilehomepark72__9 853 | mobilehomepark73__9 854 | mobilehomepark78__9 855 | mobilehomepark79__9 856 | mobilehomepark80__9 857 | mobilehomepark81__9 858 | mobilehomepark82__9 859 | mobilehomepark88__9 860 | mobilehomepark92__9 861 | mobilehomepark96__9 862 | mobilehomepark97__9 863 | mobilehomepark98__9 864 | mobilehomepark99__9 865 | overpass00__9 866 | overpass01__9 867 | overpass10__9 868 | overpass12__9 869 | overpass13__9 870 | overpass14__9 871 | overpass15__9 872 | overpass16__9 873 | overpass17__9 874 | overpass18__9 875 | overpass19__9 876 | overpass20__9 877 | overpass21__9 878 | overpass22__9 879 | overpass23__9 880 | overpass24__9 881 | overpass25__9 882 | overpass27__9 883 | overpass28__9 884 | overpass29__9 885 | overpass31__9 886 | overpass32__9 887 | overpass33__9 888 | overpass34__9 889 | overpass35__9 890 | overpass36__9 891 | overpass37__9 892 | overpass38__9 893 | overpass39__9 894 | overpass40__9 895 | overpass41__9 896 | overpass42__9 897 | overpass43__9 898 | overpass44__9 899 | overpass45__9 900 | overpass46__9 901 | overpass47__9 902 | overpass48__9 903 | overpass50__9 904 | overpass51__9 905 | overpass52__9 906 | overpass53__9 907 | overpass54__9 908 | overpass55__9 909 | overpass56__9 910 | overpass57__9 911 | overpass58__9 912 | overpass59__9 913 | overpass60__9 914 | overpass61__9 915 | overpass62__9 916 | overpass63__9 917 | overpass64__9 918 | overpass65__9 919 | overpass66__9 920 | overpass67__9 921 | overpass68__9 922 | overpass69__9 923 | overpass70__9 924 | overpass71__9 925 | overpass72__9 926 | overpass73__9 927 | overpass74__9 928 | overpass75__9 929 | overpass76__9 930 | overpass77__9 931 | overpass78__9 932 | overpass79__9 933 | overpass80__9 934 | overpass81__9 935 | overpass82__9 936 | overpass83__9 
937 | overpass84__9 938 | overpass85__9 939 | overpass86__9 940 | overpass87__9 941 | overpass88__9 942 | overpass89__9 943 | overpass90__9 944 | overpass91__9 945 | overpass92__9 946 | overpass93__9 947 | overpass94__9 948 | overpass96__9 949 | overpass97__9 950 | parkinglot20__9 951 | parkinglot24__9 952 | parkinglot26__9 953 | parkinglot28__9 954 | parkinglot29__9 955 | parkinglot30__9 956 | parkinglot31__9 957 | parkinglot32__9 958 | parkinglot34__9 959 | parkinglot35__9 960 | parkinglot36__9 961 | parkinglot37__9 962 | parkinglot39__9 963 | parkinglot45__9 964 | parkinglot46__9 965 | parkinglot47__9 966 | parkinglot52__9 967 | parkinglot54__9 968 | parkinglot56__9 969 | parkinglot59__9 970 | parkinglot61__9 971 | parkinglot63__9 972 | river00__9 973 | river01__9 974 | river02__9 975 | river03__9 976 | river04__9 977 | river05__9 978 | river06__9 979 | river07__9 980 | river12__9 981 | river15__9 982 | river16__9 983 | river17__9 984 | river18__9 985 | river19__9 986 | river20__9 987 | river21__9 988 | river22__9 989 | river23__9 990 | river24__9 991 | river25__9 992 | river26__9 993 | river27__9 994 | river28__9 995 | river34__9 996 | river38__9 997 | river39__9 998 | river40__9 999 | river41__9 1000 | river42__9 1001 | river43__9 1002 | river44__9 1003 | river45__9 1004 | river46__9 1005 | river47__9 1006 | river48__9 1007 | river49__9 1008 | river50__9 1009 | river51__9 1010 | river52__9 1011 | river54__9 1012 | river61__9 1013 | river75__9 1014 | river76__9 1015 | river83__9 1016 | river86__9 1017 | river87__9 1018 | river88__9 1019 | river89__9 1020 | river90__9 1021 | river91__9 1022 | river92__9 1023 | river93__9 1024 | river94__9 1025 | river95__9 1026 | river96__9 1027 | river97__9 1028 | river99__9 1029 | runway00__9 1030 | runway01__9 1031 | runway03__9 1032 | runway04__9 1033 | runway06__9 1034 | runway08__9 1035 | runway09__9 1036 | runway10__9 1037 | runway11__9 1038 | runway12__9 1039 | runway13__9 1040 | runway14__9 1041 | runway15__9 1042 | runway16__9 1043 | runway17__9 1044 | runway18__9 1045 | runway19__9 1046 | runway20__9 1047 | runway21__9 1048 | runway22__9 1049 | runway23__9 1050 | runway24__9 1051 | runway25__9 1052 | runway26__9 1053 | runway27__9 1054 | runway28__9 1055 | runway29__9 1056 | runway30__9 1057 | runway31__9 1058 | runway32__9 1059 | runway33__9 1060 | runway34__9 1061 | runway35__9 1062 | runway36__9 1063 | runway37__9 1064 | runway38__9 1065 | runway39__9 1066 | runway40__9 1067 | runway41__9 1068 | runway42__9 1069 | runway43__9 1070 | runway44__9 1071 | runway45__9 1072 | runway46__9 1073 | runway47__9 1074 | runway48__9 1075 | runway49__9 1076 | runway50__9 1077 | runway92__9 1078 | runway93__9 1079 | runway94__9 1080 | runway95__9 1081 | runway96__9 1082 | sparseresidential00__9 1083 | sparseresidential01__9 1084 | sparseresidential02__9 1085 | sparseresidential03__9 1086 | sparseresidential04__9 1087 | sparseresidential05__9 1088 | sparseresidential07__9 1089 | sparseresidential09__9 1090 | sparseresidential10__9 1091 | sparseresidential11__9 1092 | sparseresidential16__9 1093 | sparseresidential17__9 1094 | sparseresidential18__9 1095 | sparseresidential19__9 1096 | sparseresidential20__9 1097 | sparseresidential21__9 1098 | sparseresidential22__9 1099 | sparseresidential23__9 1100 | sparseresidential24__9 1101 | sparseresidential25__9 1102 | sparseresidential26__9 1103 | sparseresidential27__9 1104 | sparseresidential28__9 1105 | sparseresidential29__9 1106 | sparseresidential30__9 1107 | sparseresidential32__9 1108 | 
sparseresidential33__9 1109 | sparseresidential34__9 1110 | sparseresidential35__9 1111 | sparseresidential36__9 1112 | sparseresidential37__9 1113 | sparseresidential38__9 1114 | sparseresidential39__9 1115 | sparseresidential40__9 1116 | sparseresidential41__9 1117 | sparseresidential42__9 1118 | sparseresidential43__9 1119 | sparseresidential44__9 1120 | sparseresidential45__9 1121 | sparseresidential46__9 1122 | sparseresidential47__9 1123 | sparseresidential48__9 1124 | sparseresidential49__9 1125 | sparseresidential50__9 1126 | sparseresidential51__9 1127 | sparseresidential52__9 1128 | sparseresidential53__9 1129 | sparseresidential54__9 1130 | sparseresidential55__9 1131 | sparseresidential56__9 1132 | sparseresidential57__9 1133 | sparseresidential58__9 1134 | sparseresidential59__9 1135 | sparseresidential60__9 1136 | sparseresidential61__9 1137 | sparseresidential62__9 1138 | sparseresidential63__9 1139 | sparseresidential64__9 1140 | sparseresidential65__9 1141 | sparseresidential66__9 1142 | sparseresidential67__9 1143 | sparseresidential68__9 1144 | sparseresidential69__9 1145 | sparseresidential70__9 1146 | sparseresidential71__9 1147 | sparseresidential72__9 1148 | sparseresidential73__9 1149 | sparseresidential74__9 1150 | sparseresidential75__9 1151 | sparseresidential76__9 1152 | sparseresidential77__9 1153 | sparseresidential78__9 1154 | sparseresidential79__9 1155 | sparseresidential80__9 1156 | sparseresidential81__9 1157 | sparseresidential83__9 1158 | sparseresidential95__9 1159 | sparseresidential96__9 1160 | sparseresidential97__9 1161 | sparseresidential98__9 1162 | sparseresidential99__9 1163 | storagetanks00__9 1164 | storagetanks03__9 1165 | storagetanks04__9 1166 | storagetanks05__9 1167 | storagetanks06__9 1168 | storagetanks07__9 1169 | storagetanks09__9 1170 | storagetanks11__9 1171 | storagetanks13__9 1172 | storagetanks14__9 1173 | storagetanks15__9 1174 | storagetanks16__9 1175 | storagetanks17__9 1176 | storagetanks18__9 1177 | storagetanks19__9 1178 | storagetanks25__9 1179 | storagetanks26__9 1180 | storagetanks46__9 1181 | storagetanks48__9 1182 | storagetanks49__9 1183 | storagetanks50__9 1184 | storagetanks51__9 1185 | storagetanks52__9 1186 | storagetanks55__9 1187 | storagetanks56__9 1188 | storagetanks57__9 1189 | storagetanks59__9 1190 | storagetanks60__9 1191 | storagetanks65__9 1192 | storagetanks66__9 1193 | storagetanks67__9 1194 | storagetanks68__9 1195 | storagetanks69__9 1196 | storagetanks70__9 1197 | storagetanks73__9 1198 | tenniscourt00__9 1199 | tenniscourt02__9 1200 | tenniscourt03__9 1201 | tenniscourt04__9 1202 | tenniscourt05__9 1203 | tenniscourt06__9 1204 | tenniscourt07__9 1205 | tenniscourt08__9 1206 | tenniscourt09__9 1207 | tenniscourt11__9 1208 | tenniscourt12__9 1209 | tenniscourt13__9 1210 | tenniscourt14__9 1211 | tenniscourt15__9 1212 | tenniscourt16__9 1213 | tenniscourt17__9 1214 | tenniscourt18__9 1215 | tenniscourt19__9 1216 | tenniscourt20__9 1217 | tenniscourt21__9 1218 | tenniscourt22__9 1219 | tenniscourt23__9 1220 | tenniscourt24__9 1221 | tenniscourt25__9 1222 | tenniscourt26__9 1223 | tenniscourt27__9 1224 | tenniscourt29__9 1225 | tenniscourt30__9 1226 | tenniscourt31__9 1227 | tenniscourt32__9 1228 | tenniscourt33__9 1229 | tenniscourt34__9 1230 | tenniscourt35__9 1231 | tenniscourt36__9 1232 | tenniscourt38__9 1233 | tenniscourt39__9 1234 | tenniscourt40__9 1235 | tenniscourt41__9 1236 | tenniscourt43__9 1237 | tenniscourt46__9 1238 | tenniscourt47__9 1239 | tenniscourt48__9 1240 | tenniscourt49__9 1241 
| tenniscourt50__9 1242 | tenniscourt51__9 1243 | tenniscourt52__9 1244 | tenniscourt53__9 1245 | tenniscourt54__9 1246 | tenniscourt55__9 1247 | tenniscourt56__9 1248 | tenniscourt57__9 1249 | tenniscourt59__9 1250 | tenniscourt60__9 1251 | tenniscourt61__9 1252 | tenniscourt62__9 1253 | tenniscourt63__9 1254 | tenniscourt64__9 1255 | tenniscourt66__9 1256 | tenniscourt67__9 1257 | tenniscourt68__9 1258 | tenniscourt69__9 1259 | tenniscourt70__9 1260 | tenniscourt71__9 1261 | tenniscourt72__9 1262 | tenniscourt73__9 1263 | tenniscourt75__9 1264 | tenniscourt76__9 1265 | tenniscourt77__9 1266 | tenniscourt78__9 1267 | tenniscourt79__9 1268 | tenniscourt80__9 1269 | tenniscourt81__9 1270 | tenniscourt82__9 1271 | tenniscourt84__9 1272 | tenniscourt85__9 1273 | tenniscourt86__9 1274 | tenniscourt87__9 1275 | tenniscourt88__9 1276 | tenniscourt89__9 1277 | tenniscourt90__9 1278 | tenniscourt91__9 1279 | tenniscourt92__9 1280 | tenniscourt93__9 1281 | tenniscourt94__9 1282 | tenniscourt96__9 1283 | tenniscourt97__9 1284 | tenniscourt98__9 1285 | tenniscourt99__9 1286 | intersection50__10 1287 | intersection51__10 1288 | mobilehomepark00__10 1289 | mobilehomepark01__10 1290 | mobilehomepark02__10 1291 | mobilehomepark03__10 1292 | mobilehomepark04__10 1293 | mobilehomepark05__10 1294 | mobilehomepark06__10 1295 | mobilehomepark07__10 1296 | mobilehomepark08__10 1297 | mobilehomepark09__10 1298 | mobilehomepark10__10 1299 | mobilehomepark11__10 1300 | mobilehomepark12__10 1301 | mobilehomepark13__10 1302 | mobilehomepark14__10 1303 | mobilehomepark15__10 1304 | mobilehomepark16__10 1305 | mobilehomepark17__10 1306 | mobilehomepark18__10 1307 | mobilehomepark19__10 1308 | mobilehomepark20__10 1309 | mobilehomepark21__10 1310 | mobilehomepark22__10 1311 | mobilehomepark23__10 1312 | mobilehomepark24__10 1313 | mobilehomepark25__10 1314 | mobilehomepark26__10 1315 | mobilehomepark27__10 1316 | mobilehomepark28__10 1317 | mobilehomepark29__10 1318 | mobilehomepark30__10 1319 | mobilehomepark31__10 1320 | mobilehomepark32__10 1321 | mobilehomepark33__10 1322 | mobilehomepark34__10 1323 | mobilehomepark35__10 1324 | mobilehomepark36__10 1325 | mobilehomepark37__10 1326 | mobilehomepark38__10 1327 | mobilehomepark39__10 1328 | mobilehomepark40__10 1329 | mobilehomepark41__10 1330 | mobilehomepark42__10 1331 | mobilehomepark43__10 1332 | mobilehomepark44__10 1333 | mobilehomepark45__10 1334 | mobilehomepark46__10 1335 | mobilehomepark47__10 1336 | mobilehomepark48__10 1337 | mobilehomepark49__10 1338 | mobilehomepark50__10 1339 | mobilehomepark51__10 1340 | mobilehomepark52__10 1341 | mobilehomepark53__10 1342 | mobilehomepark54__10 1343 | mobilehomepark55__10 1344 | mobilehomepark56__10 1345 | mobilehomepark57__10 1346 | mobilehomepark58__10 1347 | mobilehomepark59__10 1348 | mobilehomepark60__10 1349 | mobilehomepark61__10 1350 | mobilehomepark62__10 1351 | mobilehomepark63__10 1352 | mobilehomepark64__10 1353 | mobilehomepark65__10 1354 | mobilehomepark66__10 1355 | mobilehomepark67__10 1356 | mobilehomepark68__10 1357 | mobilehomepark69__10 1358 | mobilehomepark70__10 1359 | mobilehomepark71__10 1360 | mobilehomepark72__10 1361 | mobilehomepark73__10 1362 | mobilehomepark74__10 1363 | mobilehomepark75__10 1364 | mobilehomepark76__10 1365 | mobilehomepark77__10 1366 | mobilehomepark78__10 1367 | mobilehomepark79__10 1368 | mobilehomepark80__10 1369 | mobilehomepark81__10 1370 | mobilehomepark82__10 1371 | mobilehomepark83__10 1372 | mobilehomepark84__10 1373 | mobilehomepark85__10 1374 | 
mobilehomepark86__10 1375 | mobilehomepark87__10 1376 | mobilehomepark88__10 1377 | mobilehomepark89__10 1378 | mobilehomepark90__10 1379 | mobilehomepark91__10 1380 | mobilehomepark92__10 1381 | mobilehomepark93__10 1382 | mobilehomepark94__10 1383 | mobilehomepark95__10 1384 | mobilehomepark96__10 1385 | mobilehomepark97__10 1386 | mobilehomepark98__10 1387 | mobilehomepark99__10 -------------------------------------------------------------------------------- /data/splits/DLRSD/val/fold1.txt: -------------------------------------------------------------------------------- 1 | baseballdiamond96__6 2 | intersection07__6 3 | sparseresidential11__6 4 | sparseresidential60__6 5 | sparseresidential72__6 6 | tenniscourt00__6 7 | tenniscourt01__6 8 | tenniscourt02__6 9 | tenniscourt03__6 10 | tenniscourt04__6 11 | tenniscourt05__6 12 | tenniscourt06__6 13 | tenniscourt07__6 14 | tenniscourt08__6 15 | tenniscourt09__6 16 | tenniscourt10__6 17 | tenniscourt11__6 18 | tenniscourt12__6 19 | tenniscourt13__6 20 | tenniscourt14__6 21 | tenniscourt15__6 22 | tenniscourt16__6 23 | tenniscourt17__6 24 | tenniscourt18__6 25 | tenniscourt19__6 26 | tenniscourt20__6 27 | tenniscourt21__6 28 | tenniscourt22__6 29 | tenniscourt23__6 30 | tenniscourt24__6 31 | tenniscourt25__6 32 | tenniscourt26__6 33 | tenniscourt27__6 34 | tenniscourt28__6 35 | tenniscourt29__6 36 | tenniscourt30__6 37 | tenniscourt31__6 38 | tenniscourt32__6 39 | tenniscourt33__6 40 | tenniscourt34__6 41 | tenniscourt35__6 42 | tenniscourt36__6 43 | tenniscourt37__6 44 | tenniscourt38__6 45 | tenniscourt39__6 46 | tenniscourt40__6 47 | tenniscourt41__6 48 | tenniscourt42__6 49 | tenniscourt43__6 50 | tenniscourt44__6 51 | tenniscourt45__6 52 | tenniscourt46__6 53 | tenniscourt47__6 54 | tenniscourt48__6 55 | tenniscourt49__6 56 | tenniscourt50__6 57 | tenniscourt51__6 58 | tenniscourt52__6 59 | tenniscourt53__6 60 | tenniscourt54__6 61 | tenniscourt55__6 62 | tenniscourt56__6 63 | tenniscourt57__6 64 | tenniscourt58__6 65 | tenniscourt59__6 66 | tenniscourt60__6 67 | tenniscourt61__6 68 | tenniscourt62__6 69 | tenniscourt63__6 70 | tenniscourt64__6 71 | tenniscourt65__6 72 | tenniscourt66__6 73 | tenniscourt67__6 74 | tenniscourt68__6 75 | tenniscourt69__6 76 | tenniscourt70__6 77 | tenniscourt71__6 78 | tenniscourt72__6 79 | tenniscourt73__6 80 | tenniscourt74__6 81 | tenniscourt75__6 82 | tenniscourt76__6 83 | tenniscourt77__6 84 | tenniscourt78__6 85 | tenniscourt79__6 86 | tenniscourt80__6 87 | tenniscourt81__6 88 | tenniscourt82__6 89 | tenniscourt83__6 90 | tenniscourt84__6 91 | tenniscourt85__6 92 | tenniscourt86__6 93 | tenniscourt87__6 94 | tenniscourt88__6 95 | tenniscourt89__6 96 | tenniscourt90__6 97 | tenniscourt91__6 98 | tenniscourt92__6 99 | tenniscourt93__6 100 | tenniscourt94__6 101 | tenniscourt95__6 102 | tenniscourt96__6 103 | tenniscourt97__6 104 | tenniscourt98__6 105 | tenniscourt99__6 106 | harbor00__7 107 | harbor01__7 108 | harbor02__7 109 | harbor03__7 110 | harbor04__7 111 | harbor05__7 112 | harbor06__7 113 | harbor07__7 114 | harbor08__7 115 | harbor09__7 116 | harbor10__7 117 | harbor11__7 118 | harbor12__7 119 | harbor13__7 120 | harbor14__7 121 | harbor15__7 122 | harbor16__7 123 | harbor17__7 124 | harbor18__7 125 | harbor19__7 126 | harbor20__7 127 | harbor21__7 128 | harbor22__7 129 | harbor23__7 130 | harbor24__7 131 | harbor25__7 132 | harbor26__7 133 | harbor27__7 134 | harbor28__7 135 | harbor29__7 136 | harbor30__7 137 | harbor31__7 138 | harbor32__7 139 | harbor33__7 140 | harbor34__7 141 | 
harbor35__7 142 | harbor36__7 143 | harbor37__7 144 | harbor38__7 145 | harbor39__7 146 | harbor40__7 147 | harbor41__7 148 | harbor42__7 149 | harbor43__7 150 | harbor44__7 151 | harbor45__7 152 | harbor46__7 153 | harbor47__7 154 | harbor48__7 155 | harbor49__7 156 | harbor50__7 157 | harbor51__7 158 | harbor52__7 159 | harbor53__7 160 | harbor54__7 161 | harbor55__7 162 | harbor56__7 163 | harbor57__7 164 | harbor58__7 165 | harbor59__7 166 | harbor60__7 167 | harbor61__7 168 | harbor62__7 169 | harbor63__7 170 | harbor64__7 171 | harbor65__7 172 | harbor66__7 173 | harbor67__7 174 | harbor68__7 175 | harbor69__7 176 | harbor70__7 177 | harbor71__7 178 | harbor72__7 179 | harbor73__7 180 | harbor74__7 181 | harbor75__7 182 | harbor76__7 183 | harbor77__7 184 | harbor78__7 185 | harbor79__7 186 | harbor80__7 187 | harbor81__7 188 | harbor82__7 189 | harbor83__7 190 | harbor84__7 191 | harbor85__7 192 | harbor86__7 193 | harbor87__7 194 | harbor88__7 195 | harbor89__7 196 | harbor90__7 197 | harbor91__7 198 | harbor92__7 199 | harbor93__7 200 | harbor94__7 201 | harbor95__7 202 | harbor96__7 203 | harbor97__7 204 | harbor98__7 205 | harbor99__7 206 | agricultural00__8 207 | agricultural01__8 208 | agricultural02__8 209 | agricultural03__8 210 | agricultural05__8 211 | agricultural06__8 212 | agricultural07__8 213 | agricultural08__8 214 | agricultural09__8 215 | agricultural10__8 216 | agricultural11__8 217 | agricultural12__8 218 | agricultural13__8 219 | agricultural14__8 220 | agricultural15__8 221 | agricultural16__8 222 | agricultural17__8 223 | agricultural18__8 224 | agricultural19__8 225 | agricultural20__8 226 | agricultural24__8 227 | agricultural25__8 228 | agricultural26__8 229 | agricultural27__8 230 | agricultural28__8 231 | agricultural29__8 232 | agricultural30__8 233 | agricultural31__8 234 | agricultural32__8 235 | agricultural33__8 236 | agricultural34__8 237 | agricultural35__8 238 | agricultural36__8 239 | agricultural37__8 240 | agricultural38__8 241 | agricultural39__8 242 | agricultural40__8 243 | agricultural41__8 244 | agricultural42__8 245 | agricultural43__8 246 | agricultural44__8 247 | agricultural45__8 248 | agricultural46__8 249 | agricultural47__8 250 | agricultural48__8 251 | agricultural49__8 252 | agricultural50__8 253 | agricultural51__8 254 | agricultural52__8 255 | agricultural53__8 256 | agricultural54__8 257 | agricultural55__8 258 | agricultural56__8 259 | agricultural57__8 260 | agricultural58__8 261 | agricultural59__8 262 | agricultural60__8 263 | agricultural61__8 264 | agricultural62__8 265 | agricultural63__8 266 | agricultural64__8 267 | agricultural65__8 268 | agricultural66__8 269 | agricultural67__8 270 | agricultural68__8 271 | agricultural69__8 272 | agricultural70__8 273 | agricultural71__8 274 | agricultural72__8 275 | agricultural73__8 276 | agricultural74__8 277 | agricultural75__8 278 | agricultural76__8 279 | agricultural77__8 280 | agricultural78__8 281 | agricultural79__8 282 | agricultural80__8 283 | agricultural81__8 284 | agricultural82__8 285 | agricultural83__8 286 | agricultural84__8 287 | agricultural85__8 288 | agricultural86__8 289 | agricultural87__8 290 | agricultural88__8 291 | agricultural89__8 292 | agricultural90__8 293 | agricultural91__8 294 | agricultural92__8 295 | agricultural93__8 296 | agricultural94__8 297 | agricultural95__8 298 | agricultural96__8 299 | agricultural97__8 300 | agricultural98__8 301 | agricultural99__8 302 | intersection35__8 303 | river53__8 304 | sparseresidential79__8 305 | 
sparseresidential81__8 306 | sparseresidential82__8 307 | sparseresidential83__8 308 | sparseresidential84__8 309 | airplane00__9 310 | airplane01__9 311 | airplane02__9 312 | airplane20__9 313 | airplane24__9 314 | airplane25__9 315 | airplane31__9 316 | airplane35__9 317 | airplane37__9 318 | airplane38__9 319 | airplane78__9 320 | baseballdiamond00__9 321 | baseballdiamond01__9 322 | baseballdiamond02__9 323 | baseballdiamond03__9 324 | baseballdiamond04__9 325 | baseballdiamond05__9 326 | baseballdiamond06__9 327 | baseballdiamond07__9 328 | baseballdiamond08__9 329 | baseballdiamond09__9 330 | baseballdiamond10__9 331 | baseballdiamond11__9 332 | baseballdiamond12__9 333 | baseballdiamond13__9 334 | baseballdiamond14__9 335 | baseballdiamond15__9 336 | baseballdiamond16__9 337 | baseballdiamond17__9 338 | baseballdiamond18__9 339 | baseballdiamond19__9 340 | baseballdiamond20__9 341 | baseballdiamond21__9 342 | baseballdiamond22__9 343 | baseballdiamond23__9 344 | baseballdiamond24__9 345 | baseballdiamond25__9 346 | baseballdiamond26__9 347 | baseballdiamond27__9 348 | baseballdiamond28__9 349 | baseballdiamond29__9 350 | baseballdiamond30__9 351 | baseballdiamond31__9 352 | baseballdiamond32__9 353 | baseballdiamond33__9 354 | baseballdiamond34__9 355 | baseballdiamond35__9 356 | baseballdiamond36__9 357 | baseballdiamond37__9 358 | baseballdiamond38__9 359 | baseballdiamond39__9 360 | baseballdiamond40__9 361 | baseballdiamond41__9 362 | baseballdiamond42__9 363 | baseballdiamond43__9 364 | baseballdiamond44__9 365 | baseballdiamond45__9 366 | baseballdiamond46__9 367 | baseballdiamond47__9 368 | baseballdiamond48__9 369 | baseballdiamond49__9 370 | baseballdiamond50__9 371 | baseballdiamond51__9 372 | baseballdiamond52__9 373 | baseballdiamond53__9 374 | baseballdiamond54__9 375 | baseballdiamond55__9 376 | baseballdiamond56__9 377 | baseballdiamond57__9 378 | baseballdiamond58__9 379 | baseballdiamond62__9 380 | baseballdiamond63__9 381 | baseballdiamond64__9 382 | baseballdiamond65__9 383 | baseballdiamond66__9 384 | baseballdiamond67__9 385 | baseballdiamond68__9 386 | baseballdiamond69__9 387 | baseballdiamond70__9 388 | baseballdiamond71__9 389 | baseballdiamond72__9 390 | baseballdiamond73__9 391 | baseballdiamond74__9 392 | baseballdiamond75__9 393 | baseballdiamond76__9 394 | baseballdiamond77__9 395 | baseballdiamond78__9 396 | baseballdiamond79__9 397 | baseballdiamond80__9 398 | baseballdiamond81__9 399 | baseballdiamond82__9 400 | baseballdiamond83__9 401 | baseballdiamond84__9 402 | baseballdiamond85__9 403 | baseballdiamond86__9 404 | baseballdiamond87__9 405 | baseballdiamond88__9 406 | baseballdiamond89__9 407 | baseballdiamond90__9 408 | baseballdiamond91__9 409 | baseballdiamond92__9 410 | baseballdiamond93__9 411 | baseballdiamond94__9 412 | baseballdiamond95__9 413 | baseballdiamond96__9 414 | baseballdiamond97__9 415 | baseballdiamond98__9 416 | baseballdiamond99__9 417 | beach42__9 418 | buildings30__9 419 | buildings39__9 420 | buildings51__9 421 | buildings52__9 422 | buildings53__9 423 | buildings56__9 424 | buildings57__9 425 | buildings58__9 426 | buildings60__9 427 | buildings61__9 428 | buildings63__9 429 | buildings64__9 430 | buildings67__9 431 | buildings71__9 432 | buildings72__9 433 | buildings73__9 434 | buildings74__9 435 | buildings76__9 436 | buildings77__9 437 | buildings79__9 438 | buildings80__9 439 | buildings84__9 440 | buildings85__9 441 | buildings93__9 442 | buildings94__9 443 | denseresidential01__9 444 | denseresidential02__9 445 | 
denseresidential03__9 446 | denseresidential06__9 447 | denseresidential09__9 448 | denseresidential10__9 449 | denseresidential20__9 450 | denseresidential21__9 451 | denseresidential22__9 452 | denseresidential23__9 453 | denseresidential24__9 454 | denseresidential25__9 455 | denseresidential30__9 456 | denseresidential31__9 457 | denseresidential32__9 458 | denseresidential41__9 459 | denseresidential43__9 460 | denseresidential44__9 461 | denseresidential50__9 462 | denseresidential51__9 463 | denseresidential52__9 464 | denseresidential53__9 465 | denseresidential55__9 466 | denseresidential56__9 467 | denseresidential57__9 468 | denseresidential58__9 469 | denseresidential59__9 470 | denseresidential61__9 471 | denseresidential62__9 472 | denseresidential63__9 473 | denseresidential64__9 474 | denseresidential66__9 475 | denseresidential68__9 476 | denseresidential69__9 477 | denseresidential70__9 478 | denseresidential71__9 479 | denseresidential72__9 480 | denseresidential74__9 481 | denseresidential75__9 482 | denseresidential76__9 483 | denseresidential77__9 484 | denseresidential78__9 485 | denseresidential79__9 486 | denseresidential80__9 487 | denseresidential81__9 488 | denseresidential82__9 489 | denseresidential83__9 490 | denseresidential84__9 491 | denseresidential86__9 492 | denseresidential87__9 493 | denseresidential98__9 494 | denseresidential99__9 495 | forest08__9 496 | freeway00__9 497 | freeway04__9 498 | freeway06__9 499 | freeway07__9 500 | freeway08__9 501 | freeway10__9 502 | freeway11__9 503 | freeway13__9 504 | freeway14__9 505 | freeway15__9 506 | freeway16__9 507 | freeway17__9 508 | freeway18__9 509 | freeway19__9 510 | freeway20__9 511 | freeway21__9 512 | freeway22__9 513 | freeway23__9 514 | freeway25__9 515 | freeway26__9 516 | freeway27__9 517 | freeway28__9 518 | freeway29__9 519 | freeway30__9 520 | freeway31__9 521 | freeway32__9 522 | freeway33__9 523 | freeway34__9 524 | freeway35__9 525 | freeway36__9 526 | freeway37__9 527 | freeway38__9 528 | freeway39__9 529 | freeway40__9 530 | freeway41__9 531 | freeway64__9 532 | freeway65__9 533 | freeway66__9 534 | freeway71__9 535 | freeway72__9 536 | freeway73__9 537 | freeway74__9 538 | freeway75__9 539 | freeway77__9 540 | freeway78__9 541 | freeway79__9 542 | freeway80__9 543 | freeway81__9 544 | freeway82__9 545 | freeway85__9 546 | freeway89__9 547 | freeway90__9 548 | freeway91__9 549 | freeway92__9 550 | freeway96__9 551 | freeway97__9 552 | freeway99__9 553 | golfcourse00__9 554 | golfcourse01__9 555 | golfcourse02__9 556 | golfcourse03__9 557 | golfcourse04__9 558 | golfcourse05__9 559 | golfcourse06__9 560 | golfcourse07__9 561 | golfcourse08__9 562 | golfcourse09__9 563 | golfcourse10__9 564 | golfcourse11__9 565 | golfcourse12__9 566 | golfcourse13__9 567 | golfcourse14__9 568 | golfcourse15__9 569 | golfcourse16__9 570 | golfcourse17__9 571 | golfcourse18__9 572 | golfcourse19__9 573 | golfcourse20__9 574 | golfcourse21__9 575 | golfcourse22__9 576 | golfcourse23__9 577 | golfcourse24__9 578 | golfcourse25__9 579 | golfcourse26__9 580 | golfcourse27__9 581 | golfcourse28__9 582 | golfcourse29__9 583 | golfcourse30__9 584 | golfcourse31__9 585 | golfcourse32__9 586 | golfcourse33__9 587 | golfcourse34__9 588 | golfcourse35__9 589 | golfcourse36__9 590 | golfcourse37__9 591 | golfcourse38__9 592 | golfcourse39__9 593 | golfcourse40__9 594 | golfcourse41__9 595 | golfcourse42__9 596 | golfcourse43__9 597 | golfcourse44__9 598 | golfcourse45__9 599 | golfcourse46__9 600 | golfcourse47__9 601 
| golfcourse48__9 602 | golfcourse49__9 603 | golfcourse50__9 604 | golfcourse51__9 605 | golfcourse52__9 606 | golfcourse53__9 607 | golfcourse54__9 608 | golfcourse55__9 609 | golfcourse56__9 610 | golfcourse57__9 611 | golfcourse58__9 612 | golfcourse59__9 613 | golfcourse60__9 614 | golfcourse61__9 615 | golfcourse62__9 616 | golfcourse63__9 617 | golfcourse64__9 618 | golfcourse65__9 619 | golfcourse66__9 620 | golfcourse67__9 621 | golfcourse68__9 622 | golfcourse69__9 623 | golfcourse70__9 624 | golfcourse71__9 625 | golfcourse72__9 626 | golfcourse73__9 627 | golfcourse74__9 628 | golfcourse75__9 629 | golfcourse76__9 630 | golfcourse77__9 631 | golfcourse78__9 632 | golfcourse79__9 633 | golfcourse80__9 634 | golfcourse81__9 635 | golfcourse82__9 636 | golfcourse83__9 637 | golfcourse84__9 638 | golfcourse85__9 639 | golfcourse86__9 640 | golfcourse87__9 641 | golfcourse88__9 642 | golfcourse89__9 643 | golfcourse90__9 644 | golfcourse91__9 645 | golfcourse92__9 646 | golfcourse93__9 647 | golfcourse94__9 648 | golfcourse95__9 649 | golfcourse96__9 650 | golfcourse97__9 651 | golfcourse98__9 652 | golfcourse99__9 653 | harbor70__9 654 | intersection00__9 655 | intersection03__9 656 | intersection04__9 657 | intersection05__9 658 | intersection06__9 659 | intersection09__9 660 | intersection10__9 661 | intersection12__9 662 | intersection14__9 663 | intersection16__9 664 | intersection17__9 665 | intersection18__9 666 | intersection19__9 667 | intersection20__9 668 | intersection21__9 669 | intersection22__9 670 | intersection23__9 671 | intersection24__9 672 | intersection25__9 673 | intersection26__9 674 | intersection27__9 675 | intersection28__9 676 | intersection29__9 677 | intersection30__9 678 | intersection31__9 679 | intersection32__9 680 | intersection33__9 681 | intersection34__9 682 | intersection35__9 683 | intersection36__9 684 | intersection37__9 685 | intersection39__9 686 | intersection43__9 687 | intersection44__9 688 | intersection45__9 689 | intersection46__9 690 | intersection49__9 691 | intersection51__9 692 | intersection52__9 693 | intersection56__9 694 | intersection57__9 695 | intersection58__9 696 | intersection60__9 697 | intersection61__9 698 | intersection62__9 699 | intersection63__9 700 | intersection64__9 701 | intersection65__9 702 | intersection66__9 703 | intersection67__9 704 | intersection68__9 705 | intersection69__9 706 | intersection76__9 707 | intersection77__9 708 | intersection78__9 709 | intersection79__9 710 | intersection80__9 711 | intersection81__9 712 | intersection82__9 713 | intersection83__9 714 | intersection84__9 715 | intersection85__9 716 | intersection87__9 717 | intersection93__9 718 | intersection94__9 719 | intersection95__9 720 | intersection97__9 721 | mediumresidential00__9 722 | mediumresidential01__9 723 | mediumresidential02__9 724 | mediumresidential03__9 725 | mediumresidential04__9 726 | mediumresidential05__9 727 | mediumresidential06__9 728 | mediumresidential07__9 729 | mediumresidential08__9 730 | mediumresidential09__9 731 | mediumresidential10__9 732 | mediumresidential11__9 733 | mediumresidential12__9 734 | mediumresidential13__9 735 | mediumresidential14__9 736 | mediumresidential15__9 737 | mediumresidential16__9 738 | mediumresidential17__9 739 | mediumresidential18__9 740 | mediumresidential19__9 741 | mediumresidential20__9 742 | mediumresidential21__9 743 | mediumresidential22__9 744 | mediumresidential23__9 745 | mediumresidential24__9 746 | mediumresidential25__9 747 | mediumresidential26__9 748 | 
mediumresidential27__9 749 | mediumresidential28__9 750 | mediumresidential29__9 751 | mediumresidential30__9 752 | mediumresidential31__9 753 | mediumresidential32__9 754 | mediumresidential34__9 755 | mediumresidential37__9 756 | mediumresidential40__9 757 | mediumresidential41__9 758 | mediumresidential44__9 759 | mediumresidential46__9 760 | mediumresidential47__9 761 | mediumresidential48__9 762 | mediumresidential50__9 763 | mediumresidential51__9 764 | mediumresidential52__9 765 | mediumresidential54__9 766 | mediumresidential56__9 767 | mediumresidential57__9 768 | mediumresidential58__9 769 | mediumresidential59__9 770 | mediumresidential60__9 771 | mediumresidential61__9 772 | mediumresidential62__9 773 | mediumresidential63__9 774 | mediumresidential64__9 775 | mediumresidential65__9 776 | mediumresidential66__9 777 | mediumresidential67__9 778 | mediumresidential68__9 779 | mediumresidential69__9 780 | mediumresidential71__9 781 | mediumresidential72__9 782 | mediumresidential73__9 783 | mediumresidential74__9 784 | mediumresidential76__9 785 | mediumresidential77__9 786 | mediumresidential78__9 787 | mediumresidential79__9 788 | mediumresidential80__9 789 | mediumresidential81__9 790 | mediumresidential82__9 791 | mediumresidential83__9 792 | mediumresidential84__9 793 | mediumresidential85__9 794 | mediumresidential86__9 795 | mediumresidential87__9 796 | mediumresidential88__9 797 | mediumresidential89__9 798 | mediumresidential90__9 799 | mediumresidential91__9 800 | mediumresidential92__9 801 | mediumresidential93__9 802 | mediumresidential94__9 803 | mediumresidential95__9 804 | mediumresidential96__9 805 | mediumresidential97__9 806 | mediumresidential98__9 807 | mediumresidential99__9 808 | mobilehomepark01__9 809 | mobilehomepark08__9 810 | mobilehomepark09__9 811 | mobilehomepark18__9 812 | mobilehomepark23__9 813 | mobilehomepark24__9 814 | mobilehomepark25__9 815 | mobilehomepark26__9 816 | mobilehomepark28__9 817 | mobilehomepark29__9 818 | mobilehomepark30__9 819 | mobilehomepark31__9 820 | mobilehomepark32__9 821 | mobilehomepark33__9 822 | mobilehomepark34__9 823 | mobilehomepark35__9 824 | mobilehomepark36__9 825 | mobilehomepark37__9 826 | mobilehomepark38__9 827 | mobilehomepark39__9 828 | mobilehomepark46__9 829 | mobilehomepark47__9 830 | mobilehomepark48__9 831 | mobilehomepark49__9 832 | mobilehomepark50__9 833 | mobilehomepark52__9 834 | mobilehomepark53__9 835 | mobilehomepark54__9 836 | mobilehomepark55__9 837 | mobilehomepark56__9 838 | mobilehomepark57__9 839 | mobilehomepark58__9 840 | mobilehomepark59__9 841 | mobilehomepark60__9 842 | mobilehomepark61__9 843 | mobilehomepark62__9 844 | mobilehomepark63__9 845 | mobilehomepark64__9 846 | mobilehomepark65__9 847 | mobilehomepark66__9 848 | mobilehomepark67__9 849 | mobilehomepark68__9 850 | mobilehomepark69__9 851 | mobilehomepark70__9 852 | mobilehomepark72__9 853 | mobilehomepark73__9 854 | mobilehomepark78__9 855 | mobilehomepark79__9 856 | mobilehomepark80__9 857 | mobilehomepark81__9 858 | mobilehomepark82__9 859 | mobilehomepark88__9 860 | mobilehomepark92__9 861 | mobilehomepark96__9 862 | mobilehomepark97__9 863 | mobilehomepark98__9 864 | mobilehomepark99__9 865 | overpass00__9 866 | overpass01__9 867 | overpass10__9 868 | overpass12__9 869 | overpass13__9 870 | overpass14__9 871 | overpass15__9 872 | overpass16__9 873 | overpass17__9 874 | overpass18__9 875 | overpass19__9 876 | overpass20__9 877 | overpass21__9 878 | overpass22__9 879 | overpass23__9 880 | overpass24__9 881 | 
overpass25__9 882 | overpass27__9 883 | overpass28__9 884 | overpass29__9 885 | overpass31__9 886 | overpass32__9 887 | overpass33__9 888 | overpass34__9 889 | overpass35__9 890 | overpass36__9 891 | overpass37__9 892 | overpass38__9 893 | overpass39__9 894 | overpass40__9 895 | overpass41__9 896 | overpass42__9 897 | overpass43__9 898 | overpass44__9 899 | overpass45__9 900 | overpass46__9 901 | overpass47__9 902 | overpass48__9 903 | overpass50__9 904 | overpass51__9 905 | overpass52__9 906 | overpass53__9 907 | overpass54__9 908 | overpass55__9 909 | overpass56__9 910 | overpass57__9 911 | overpass58__9 912 | overpass59__9 913 | overpass60__9 914 | overpass61__9 915 | overpass62__9 916 | overpass63__9 917 | overpass64__9 918 | overpass65__9 919 | overpass66__9 920 | overpass67__9 921 | overpass68__9 922 | overpass69__9 923 | overpass70__9 924 | overpass71__9 925 | overpass72__9 926 | overpass73__9 927 | overpass74__9 928 | overpass75__9 929 | overpass76__9 930 | overpass77__9 931 | overpass78__9 932 | overpass79__9 933 | overpass80__9 934 | overpass81__9 935 | overpass82__9 936 | overpass83__9 937 | overpass84__9 938 | overpass85__9 939 | overpass86__9 940 | overpass87__9 941 | overpass88__9 942 | overpass89__9 943 | overpass90__9 944 | overpass91__9 945 | overpass92__9 946 | overpass93__9 947 | overpass94__9 948 | overpass96__9 949 | overpass97__9 950 | parkinglot20__9 951 | parkinglot24__9 952 | parkinglot26__9 953 | parkinglot28__9 954 | parkinglot29__9 955 | parkinglot30__9 956 | parkinglot31__9 957 | parkinglot32__9 958 | parkinglot34__9 959 | parkinglot35__9 960 | parkinglot36__9 961 | parkinglot37__9 962 | parkinglot39__9 963 | parkinglot45__9 964 | parkinglot46__9 965 | parkinglot47__9 966 | parkinglot52__9 967 | parkinglot54__9 968 | parkinglot56__9 969 | parkinglot59__9 970 | parkinglot61__9 971 | parkinglot63__9 972 | river00__9 973 | river01__9 974 | river02__9 975 | river03__9 976 | river04__9 977 | river05__9 978 | river06__9 979 | river07__9 980 | river12__9 981 | river15__9 982 | river16__9 983 | river17__9 984 | river18__9 985 | river19__9 986 | river20__9 987 | river21__9 988 | river22__9 989 | river23__9 990 | river24__9 991 | river25__9 992 | river26__9 993 | river27__9 994 | river28__9 995 | river34__9 996 | river38__9 997 | river39__9 998 | river40__9 999 | river41__9 1000 | river42__9 1001 | river43__9 1002 | river44__9 1003 | river45__9 1004 | river46__9 1005 | river47__9 1006 | river48__9 1007 | river49__9 1008 | river50__9 1009 | river51__9 1010 | river52__9 1011 | river54__9 1012 | river61__9 1013 | river75__9 1014 | river76__9 1015 | river83__9 1016 | river86__9 1017 | river87__9 1018 | river88__9 1019 | river89__9 1020 | river90__9 1021 | river91__9 1022 | river92__9 1023 | river93__9 1024 | river94__9 1025 | river95__9 1026 | river96__9 1027 | river97__9 1028 | river99__9 1029 | runway00__9 1030 | runway01__9 1031 | runway03__9 1032 | runway04__9 1033 | runway06__9 1034 | runway08__9 1035 | runway09__9 1036 | runway10__9 1037 | runway11__9 1038 | runway12__9 1039 | runway13__9 1040 | runway14__9 1041 | runway15__9 1042 | runway16__9 1043 | runway17__9 1044 | runway18__9 1045 | runway19__9 1046 | runway20__9 1047 | runway21__9 1048 | runway22__9 1049 | runway23__9 1050 | runway24__9 1051 | runway25__9 1052 | runway26__9 1053 | runway27__9 1054 | runway28__9 1055 | runway29__9 1056 | runway30__9 1057 | runway31__9 1058 | runway32__9 1059 | runway33__9 1060 | runway34__9 1061 | runway35__9 1062 | runway36__9 1063 | runway37__9 1064 | runway38__9 1065 | 
runway39__9 1066 | runway40__9 1067 | runway41__9 1068 | runway42__9 1069 | runway43__9 1070 | runway44__9 1071 | runway45__9 1072 | runway46__9 1073 | runway47__9 1074 | runway48__9 1075 | runway49__9 1076 | runway50__9 1077 | runway92__9 1078 | runway93__9 1079 | runway94__9 1080 | runway95__9 1081 | runway96__9 1082 | sparseresidential00__9 1083 | sparseresidential01__9 1084 | sparseresidential02__9 1085 | sparseresidential03__9 1086 | sparseresidential04__9 1087 | sparseresidential05__9 1088 | sparseresidential07__9 1089 | sparseresidential09__9 1090 | sparseresidential10__9 1091 | sparseresidential11__9 1092 | sparseresidential16__9 1093 | sparseresidential17__9 1094 | sparseresidential18__9 1095 | sparseresidential19__9 1096 | sparseresidential20__9 1097 | sparseresidential21__9 1098 | sparseresidential22__9 1099 | sparseresidential23__9 1100 | sparseresidential24__9 1101 | sparseresidential25__9 1102 | sparseresidential26__9 1103 | sparseresidential27__9 1104 | sparseresidential28__9 1105 | sparseresidential29__9 1106 | sparseresidential30__9 1107 | sparseresidential32__9 1108 | sparseresidential33__9 1109 | sparseresidential34__9 1110 | sparseresidential35__9 1111 | sparseresidential36__9 1112 | sparseresidential37__9 1113 | sparseresidential38__9 1114 | sparseresidential39__9 1115 | sparseresidential40__9 1116 | sparseresidential41__9 1117 | sparseresidential42__9 1118 | sparseresidential43__9 1119 | sparseresidential44__9 1120 | sparseresidential45__9 1121 | sparseresidential46__9 1122 | sparseresidential47__9 1123 | sparseresidential48__9 1124 | sparseresidential49__9 1125 | sparseresidential50__9 1126 | sparseresidential51__9 1127 | sparseresidential52__9 1128 | sparseresidential53__9 1129 | sparseresidential54__9 1130 | sparseresidential55__9 1131 | sparseresidential56__9 1132 | sparseresidential57__9 1133 | sparseresidential58__9 1134 | sparseresidential59__9 1135 | sparseresidential60__9 1136 | sparseresidential61__9 1137 | sparseresidential62__9 1138 | sparseresidential63__9 1139 | sparseresidential64__9 1140 | sparseresidential65__9 1141 | sparseresidential66__9 1142 | sparseresidential67__9 1143 | sparseresidential68__9 1144 | sparseresidential69__9 1145 | sparseresidential70__9 1146 | sparseresidential71__9 1147 | sparseresidential72__9 1148 | sparseresidential73__9 1149 | sparseresidential74__9 1150 | sparseresidential75__9 1151 | sparseresidential76__9 1152 | sparseresidential77__9 1153 | sparseresidential78__9 1154 | sparseresidential79__9 1155 | sparseresidential80__9 1156 | sparseresidential81__9 1157 | sparseresidential83__9 1158 | sparseresidential95__9 1159 | sparseresidential96__9 1160 | sparseresidential97__9 1161 | sparseresidential98__9 1162 | sparseresidential99__9 1163 | storagetanks00__9 1164 | storagetanks03__9 1165 | storagetanks04__9 1166 | storagetanks05__9 1167 | storagetanks06__9 1168 | storagetanks07__9 1169 | storagetanks09__9 1170 | storagetanks11__9 1171 | storagetanks13__9 1172 | storagetanks14__9 1173 | storagetanks15__9 1174 | storagetanks16__9 1175 | storagetanks17__9 1176 | storagetanks18__9 1177 | storagetanks19__9 1178 | storagetanks25__9 1179 | storagetanks26__9 1180 | storagetanks46__9 1181 | storagetanks48__9 1182 | storagetanks49__9 1183 | storagetanks50__9 1184 | storagetanks51__9 1185 | storagetanks52__9 1186 | storagetanks55__9 1187 | storagetanks56__9 1188 | storagetanks57__9 1189 | storagetanks59__9 1190 | storagetanks60__9 1191 | storagetanks65__9 1192 | storagetanks66__9 1193 | storagetanks67__9 1194 | storagetanks68__9 1195 | 
storagetanks69__9 1196 | storagetanks70__9 1197 | storagetanks73__9 1198 | tenniscourt00__9 1199 | tenniscourt02__9 1200 | tenniscourt03__9 1201 | tenniscourt04__9 1202 | tenniscourt05__9 1203 | tenniscourt06__9 1204 | tenniscourt07__9 1205 | tenniscourt08__9 1206 | tenniscourt09__9 1207 | tenniscourt11__9 1208 | tenniscourt12__9 1209 | tenniscourt13__9 1210 | tenniscourt14__9 1211 | tenniscourt15__9 1212 | tenniscourt16__9 1213 | tenniscourt17__9 1214 | tenniscourt18__9 1215 | tenniscourt19__9 1216 | tenniscourt20__9 1217 | tenniscourt21__9 1218 | tenniscourt22__9 1219 | tenniscourt23__9 1220 | tenniscourt24__9 1221 | tenniscourt25__9 1222 | tenniscourt26__9 1223 | tenniscourt27__9 1224 | tenniscourt29__9 1225 | tenniscourt30__9 1226 | tenniscourt31__9 1227 | tenniscourt32__9 1228 | tenniscourt33__9 1229 | tenniscourt34__9 1230 | tenniscourt35__9 1231 | tenniscourt36__9 1232 | tenniscourt38__9 1233 | tenniscourt39__9 1234 | tenniscourt40__9 1235 | tenniscourt41__9 1236 | tenniscourt43__9 1237 | tenniscourt46__9 1238 | tenniscourt47__9 1239 | tenniscourt48__9 1240 | tenniscourt49__9 1241 | tenniscourt50__9 1242 | tenniscourt51__9 1243 | tenniscourt52__9 1244 | tenniscourt53__9 1245 | tenniscourt54__9 1246 | tenniscourt55__9 1247 | tenniscourt56__9 1248 | tenniscourt57__9 1249 | tenniscourt59__9 1250 | tenniscourt60__9 1251 | tenniscourt61__9 1252 | tenniscourt62__9 1253 | tenniscourt63__9 1254 | tenniscourt64__9 1255 | tenniscourt66__9 1256 | tenniscourt67__9 1257 | tenniscourt68__9 1258 | tenniscourt69__9 1259 | tenniscourt70__9 1260 | tenniscourt71__9 1261 | tenniscourt72__9 1262 | tenniscourt73__9 1263 | tenniscourt75__9 1264 | tenniscourt76__9 1265 | tenniscourt77__9 1266 | tenniscourt78__9 1267 | tenniscourt79__9 1268 | tenniscourt80__9 1269 | tenniscourt81__9 1270 | tenniscourt82__9 1271 | tenniscourt84__9 1272 | tenniscourt85__9 1273 | tenniscourt86__9 1274 | tenniscourt87__9 1275 | tenniscourt88__9 1276 | tenniscourt89__9 1277 | tenniscourt90__9 1278 | tenniscourt91__9 1279 | tenniscourt92__9 1280 | tenniscourt93__9 1281 | tenniscourt94__9 1282 | tenniscourt96__9 1283 | tenniscourt97__9 1284 | tenniscourt98__9 1285 | tenniscourt99__9 1286 | intersection50__10 1287 | intersection51__10 1288 | mobilehomepark00__10 1289 | mobilehomepark01__10 1290 | mobilehomepark02__10 1291 | mobilehomepark03__10 1292 | mobilehomepark04__10 1293 | mobilehomepark05__10 1294 | mobilehomepark06__10 1295 | mobilehomepark07__10 1296 | mobilehomepark08__10 1297 | mobilehomepark09__10 1298 | mobilehomepark10__10 1299 | mobilehomepark11__10 1300 | mobilehomepark12__10 1301 | mobilehomepark13__10 1302 | mobilehomepark14__10 1303 | mobilehomepark15__10 1304 | mobilehomepark16__10 1305 | mobilehomepark17__10 1306 | mobilehomepark18__10 1307 | mobilehomepark19__10 1308 | mobilehomepark20__10 1309 | mobilehomepark21__10 1310 | mobilehomepark22__10 1311 | mobilehomepark23__10 1312 | mobilehomepark24__10 1313 | mobilehomepark25__10 1314 | mobilehomepark26__10 1315 | mobilehomepark27__10 1316 | mobilehomepark28__10 1317 | mobilehomepark29__10 1318 | mobilehomepark30__10 1319 | mobilehomepark31__10 1320 | mobilehomepark32__10 1321 | mobilehomepark33__10 1322 | mobilehomepark34__10 1323 | mobilehomepark35__10 1324 | mobilehomepark36__10 1325 | mobilehomepark37__10 1326 | mobilehomepark38__10 1327 | mobilehomepark39__10 1328 | mobilehomepark40__10 1329 | mobilehomepark41__10 1330 | mobilehomepark42__10 1331 | mobilehomepark43__10 1332 | mobilehomepark44__10 1333 | mobilehomepark45__10 1334 | mobilehomepark46__10 1335 | 
mobilehomepark47__10 1336 | mobilehomepark48__10 1337 | mobilehomepark49__10 1338 | mobilehomepark50__10 1339 | mobilehomepark51__10 1340 | mobilehomepark52__10 1341 | mobilehomepark53__10 1342 | mobilehomepark54__10 1343 | mobilehomepark55__10 1344 | mobilehomepark56__10 1345 | mobilehomepark57__10 1346 | mobilehomepark58__10 1347 | mobilehomepark59__10 1348 | mobilehomepark60__10 1349 | mobilehomepark61__10 1350 | mobilehomepark62__10 1351 | mobilehomepark63__10 1352 | mobilehomepark64__10 1353 | mobilehomepark65__10 1354 | mobilehomepark66__10 1355 | mobilehomepark67__10 1356 | mobilehomepark68__10 1357 | mobilehomepark69__10 1358 | mobilehomepark70__10 1359 | mobilehomepark71__10 1360 | mobilehomepark72__10 1361 | mobilehomepark73__10 1362 | mobilehomepark74__10 1363 | mobilehomepark75__10 1364 | mobilehomepark76__10 1365 | mobilehomepark77__10 1366 | mobilehomepark78__10 1367 | mobilehomepark79__10 1368 | mobilehomepark80__10 1369 | mobilehomepark81__10 1370 | mobilehomepark82__10 1371 | mobilehomepark83__10 1372 | mobilehomepark84__10 1373 | mobilehomepark85__10 1374 | mobilehomepark86__10 1375 | mobilehomepark87__10 1376 | mobilehomepark88__10 1377 | mobilehomepark89__10 1378 | mobilehomepark90__10 1379 | mobilehomepark91__10 1380 | mobilehomepark92__10 1381 | mobilehomepark93__10 1382 | mobilehomepark94__10 1383 | mobilehomepark95__10 1384 | mobilehomepark96__10 1385 | mobilehomepark97__10 1386 | mobilehomepark98__10 1387 | mobilehomepark99__10 -------------------------------------------------------------------------------- /spectral/extract.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from functools import reduce 3 | from operator import add 4 | from pathlib import Path 5 | from typing import Optional, Tuple 6 | 7 | import cv2 8 | import fire 9 | import numpy as np 10 | import torch 11 | import torch.nn.functional as F 12 | from accelerate import Accelerator 13 | from PIL import Image 14 | from scipy.sparse.linalg import eigsh 15 | from sklearn.cluster import KMeans, MiniBatchKMeans 16 | from sklearn.decomposition import PCA 17 | from torchvision.utils import draw_bounding_boxes 18 | from tqdm import tqdm 19 | 20 | import extract_utils as utils 21 | 22 | 23 | def extract_features( 24 | images_list: str, 25 | images_root: Optional[str], 26 | model_name: str, 27 | batch_size: int, 28 | output_dir: str, 29 | which_block: int = -1, 30 | ): 31 | """ 32 | Extract features from a list of images. 
33 | 34 | Example: 35 | python extract.py extract_features \ 36 | --images_list "./data/VOC2012/lists/images.txt" \ 37 | --images_root "./data/VOC2012/images" \ 38 | --output_dir "./data/VOC2012/features/dino_vits16" \ 39 | --model_name dino_vits16 \ 40 | --batch_size 1 41 | """ 42 | 43 | # Output 44 | utils.make_output_dir(output_dir) 45 | 46 | # Models 47 | model_name = model_name.lower() 48 | model, val_transform, patch_size, num_heads = utils.get_model(model_name) 49 | 50 | # Add hook 51 | if 'dino' in model_name or 'mocov3' in model_name: 52 | feat_out = {} 53 | def hook_fn_forward_qkv(module, input, output): 54 | feat_out["qkv"] = output 55 | model._modules["blocks"][which_block]._modules["attn"]._modules["qkv"].register_forward_hook(hook_fn_forward_qkv) 56 | elif 'resnet50' in model_name: 57 | pass 58 | elif 'resnet101' in model_name: 59 | pass 60 | elif 'vgg16' in model_name: 61 | pass 62 | else: 63 | raise ValueError(model_name) 64 | 65 | # Dataset 66 | filenames = Path(images_list).read_text().splitlines() 67 | dataset = utils.ImagesDataset(filenames=filenames, images_root=images_root, transform=val_transform) 68 | dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, num_workers=8) 69 | print(f'Dataset size: {len(dataset)=}') 70 | print(f'Dataloader size: {len(dataloader)=}') 71 | 72 | # Prepare 73 | accelerator = Accelerator(mixed_precision='fp16', cpu=False) 74 | # model, dataloader = accelerator.prepare(model, dataloader) 75 | model = model.to(accelerator.device) 76 | 77 | # Process 78 | pbar = tqdm(dataloader, desc='Processing') 79 | for i, (images, files, indices) in enumerate(pbar): 80 | output_dict = {} 81 | 82 | # Check if file already exists 83 | id = Path(files[0]).stem 84 | output_file = Path(output_dir) / f'{id}.pth' 85 | if output_file.is_file(): 86 | pbar.write(f'Skipping existing file {str(output_file)}') 87 | continue 88 | 89 | P = patch_size 90 | B, C, H, W = images.shape 91 | 92 | # Forward and collect features into output dict 93 | if 'dino' in model_name or 'mocov3' in model_name: 94 | # Reshape image 95 | H_patch, W_patch = H // P, W // P 96 | H_pad, W_pad = H_patch * P, W_patch * P 97 | T = H_patch * W_patch + 1 # number of tokens, add 1 for [CLS] 98 | # images = F.interpolate(images, size=(H_pad, W_pad), mode='bilinear') # resize image 99 | images = images[:, :, :H_pad, :W_pad] 100 | images = images.to(accelerator.device) 101 | 102 | # accelerator.unwrap_model(model).get_intermediate_layers(images)[0].squeeze(0) 103 | model.get_intermediate_layers(images)[0].squeeze(0) 104 | # output_dict['out'] = out 105 | output_qkv = feat_out["qkv"].reshape(B, T, 3, num_heads, -1 // num_heads).permute(2, 0, 3, 1, 4) 106 | # output_dict['q'] = output_qkv[0].transpose(1, 2).reshape(B, T, -1)[:, 1:, :] 107 | output_dict['k'] = output_qkv[1].transpose(1, 2).reshape(B, T, -1)[:, 1:, :] 108 | # output_dict['v'] = output_qkv[2].transpose(1, 2).reshape(B, T, -1)[:, 1:, :] 109 | elif 'resnet101' in model_name: 110 | feat_ids = list(range(4, 34)) 111 | nbottlenecks = [3, 4, 23, 3] 112 | bottleneck_ids = reduce(add, list(map(lambda x: list(range(x)), nbottlenecks))) 113 | lids = reduce(add, [[i + 1] * x for i, x in enumerate(nbottlenecks)]) 114 | images = images.to(accelerator.device) 115 | features = utils.extract_feat_res(images, model, feat_ids, bottleneck_ids, lids) 116 | feat = features[26] 117 | b, c, h, w = feat.size() 118 | output_dict['k'] = feat.view(b, c, -1).permute(0, 2, 1) 119 | elif 'resnet50' in model_name: 120 | feat_ids = list(range(4, 17)) 121 
| nbottlenecks = [3, 4, 6, 3] 122 | bottleneck_ids = reduce(add, list(map(lambda x: list(range(x)), nbottlenecks))) 123 | lids = reduce(add, [[i + 1] * x for i, x in enumerate(nbottlenecks)]) 124 | images = images.to(accelerator.device) 125 | features = utils.extract_feat_res(images, model, feat_ids, bottleneck_ids, lids) 126 | feat = features[9] 127 | b, c, h, w = feat.size() 128 | output_dict['k'] = feat.view(b, c, -1).permute(0, 2, 1) 129 | elif 'vgg16' in model_name: 130 | feat_ids = [17, 19, 21, 24, 26, 28, 30] 131 | nbottlenecks = [2, 2, 3, 3, 3, 1] 132 | bottleneck_ids = reduce(add, list(map(lambda x: list(range(x)), nbottlenecks))) 133 | lids = reduce(add, [[i + 1] * x for i, x in enumerate(nbottlenecks)]) 134 | images = images.to(accelerator.device) 135 | features = utils.extract_feat_vgg(images, model, feat_ids, bottleneck_ids, lids) 136 | feat = features[5] 137 | b, c, h, w = feat.size() 138 | output_dict['k'] = feat.view(b, c, -1).permute(0, 2, 1) 139 | else: 140 | raise ValueError(model_name) 141 | 142 | # Metadata 143 | output_dict['indices'] = indices[0] 144 | output_dict['file'] = files[0] 145 | output_dict['id'] = id 146 | output_dict['model_name'] = model_name 147 | output_dict['patch_size'] = patch_size 148 | output_dict['shape'] = (B, C, H, W) 149 | output_dict = {k: (v.detach().cpu() if torch.is_tensor(v) else v) for k, v in output_dict.items()} 150 | 151 | # Save 152 | accelerator.save(output_dict, str(output_file)) 153 | accelerator.wait_for_everyone() 154 | 155 | print(f'Saved features to {output_dir}') 156 | 157 | 158 | def _extract_eig( 159 | inp: Tuple[int, str], 160 | K: int, 161 | images_root: str, 162 | output_dir: str, 163 | which_matrix: str = 'laplacian', 164 | which_features: str = 'k', 165 | normalize: bool = True, 166 | lapnorm: bool = True, 167 | which_color_matrix: str = 'knn', 168 | threshold_at_zero: bool = True, 169 | image_downsample_factor: Optional[int] = 8, 170 | image_color_lambda: float = 10, 171 | ): 172 | index, features_file = inp 173 | 174 | # Load 175 | data_dict = torch.load(features_file, map_location='cpu') 176 | image_id = data_dict['file'][:-4] 177 | 178 | # Load 179 | output_file = str(Path(output_dir) / f'{image_id}.pth') 180 | if Path(output_file).is_file(): 181 | print(f'Skipping existing file {str(output_file)}') 182 | return # skip because already generated 183 | 184 | # Load affinity matrix 185 | feats = data_dict[which_features].squeeze().cuda() 186 | print(feats.size()) 187 | if normalize: 188 | feats = F.normalize(feats, p=2, dim=-1) 189 | 190 | # Eigenvectors of affinity matrix 191 | if which_matrix == 'affinity_torch': 192 | W = feats @ feats.T 193 | if threshold_at_zero: 194 | W = (W * (W > 0)) 195 | eigenvalues, eigenvectors = torch.eig(W, eigenvectors=True) 196 | eigenvalues = eigenvalues.cpu() 197 | eigenvectors = eigenvectors.cpu() 198 | 199 | # Eigenvectors of affinity matrix with scipy 200 | elif which_matrix == 'affinity_svd': 201 | USV = torch.linalg.svd(feats, full_matrices=False) 202 | eigenvectors = USV[0][:, :K].T.to('cpu', non_blocking=True) 203 | eigenvalues = USV[1][:K].to('cpu', non_blocking=True) 204 | 205 | # Eigenvectors of affinity matrix with scipy 206 | elif which_matrix == 'affinity': 207 | W = (feats @ feats.T) 208 | if threshold_at_zero: 209 | W = (W * (W > 0)) 210 | W = W.cpu().numpy() 211 | eigenvalues, eigenvectors = eigsh(W, which='LM', k=K) 212 | eigenvectors = torch.flip(torch.from_numpy(eigenvectors), dims=(-1,)).T 213 | 214 | # Eigenvectors of matting laplacian matrix 215 | elif 
which_matrix in ['matting_laplacian', 'laplacian']: 216 | 217 | # Get sizes 218 | B, C, H, W, P, H_patch, W_patch, H_pad, W_pad = utils.get_image_sizes(data_dict) 219 | if image_downsample_factor is None: 220 | image_downsample_factor = P 221 | H_pad_lr, W_pad_lr = H_pad // image_downsample_factor, W_pad // image_downsample_factor 222 | 223 | # Upscale features to match the resolution 224 | if (H_patch, W_patch) != (H_pad_lr, W_pad_lr): 225 | feats = F.interpolate( 226 | feats.T.reshape(1, -1, H_patch, W_patch), 227 | size=(H_pad_lr, W_pad_lr), mode='bilinear', align_corners=False 228 | ).reshape(-1, H_pad_lr * W_pad_lr).T 229 | 230 | ### Feature affinities 231 | W_feat = (feats @ feats.T) 232 | if threshold_at_zero: 233 | W_feat = (W_feat * (W_feat > 0)) 234 | W_feat = W_feat / W_feat.max() # NOTE: If features are normalized, this naturally does nothing 235 | W_feat = W_feat.cpu().numpy() 236 | 237 | ### Color affinities 238 | # If we are fusing with color affinites, then load the image and compute 239 | if image_color_lambda > 0: 240 | 241 | # Load image 242 | # image_file = str(Path(images_root) / f'{image_id}.jpg') 243 | image_file = str(Path(images_root) / data_dict['file']) 244 | image_lr = Image.open(image_file).resize((W_pad_lr, H_pad_lr), Image.BILINEAR) 245 | image_lr = np.array(image_lr) / 255. 246 | 247 | # Color affinities (of type scipy.sparse.csr_matrix) 248 | if which_color_matrix == 'knn': 249 | W_lr = utils.knn_affinity(image_lr) 250 | elif which_color_matrix == 'rw': 251 | W_lr = utils.rw_affinity(image_lr) 252 | 253 | # Convert to dense numpy array 254 | W_color = np.array(W_lr.todense().astype(np.float32)) 255 | 256 | else: 257 | 258 | # No color affinity 259 | W_color = 0 260 | 261 | # Combine 262 | W_comb = W_feat + W_color * image_color_lambda # combination 263 | D_comb = np.array(utils.get_diagonal(W_comb).todense()) # is dense or sparse faster? not sure, should check 264 | 265 | # Extract eigenvectors 266 | if lapnorm: 267 | try: 268 | eigenvalues, eigenvectors = eigsh(D_comb - W_comb, k=K, sigma=0, which='LM', M=D_comb) 269 | except: 270 | eigenvalues, eigenvectors = eigsh(D_comb - W_comb, k=K, which='SM', M=D_comb) 271 | else: 272 | try: 273 | eigenvalues, eigenvectors = eigsh(D_comb - W_comb, k=K, sigma=0, which='LM') 274 | except: 275 | eigenvalues, eigenvectors = eigsh(D_comb - W_comb, k=K, which='SM') 276 | eigenvalues, eigenvectors = torch.from_numpy(eigenvalues), torch.from_numpy(eigenvectors.T).float() 277 | 278 | # Sign ambiguity 279 | for k in range(eigenvectors.shape[0]): 280 | if 0.5 < torch.mean((eigenvectors[k] > 0).float()).item() < 1.0: # reverse segment 281 | eigenvectors[k] = 0 - eigenvectors[k] 282 | 283 | # Save dict 284 | output_dict = {'eigenvalues': eigenvalues, 'eigenvectors': eigenvectors} 285 | torch.save(output_dict, output_file) 286 | 287 | 288 | def extract_eigs( 289 | images_root: str, 290 | features_dir: str, 291 | output_dir: str, 292 | which_matrix: str = 'laplacian', 293 | which_color_matrix: str = 'knn', 294 | which_features: str = 'k', 295 | normalize: bool = True, 296 | threshold_at_zero: bool = True, 297 | lapnorm: bool = True, 298 | K: int = 20, 299 | image_downsample_factor: Optional[int] = None, 300 | image_color_lambda: float = 0.0, 301 | multiprocessing: int = 0 302 | ): 303 | """ 304 | Extracts eigenvalues from features. 
305 | 306 | Example: 307 | python extract.py extract_eigs \ 308 | --images_root "./data/VOC2012/images" \ 309 | --features_dir "./data/VOC2012/features/dino_vits16" \ 310 | --which_matrix "laplacian" \ 311 | --output_dir "./data/VOC2012/eigs/laplacian" \ 312 | --K 5 313 | """ 314 | utils.make_output_dir(output_dir) 315 | kwargs = dict(K=K, which_matrix=which_matrix, which_features=which_features, which_color_matrix=which_color_matrix, 316 | normalize=normalize, threshold_at_zero=threshold_at_zero, images_root=images_root, output_dir=output_dir, 317 | image_downsample_factor=image_downsample_factor, image_color_lambda=image_color_lambda, lapnorm=lapnorm) 318 | print(kwargs) 319 | fn = partial(_extract_eig, **kwargs) 320 | inputs = list(enumerate(sorted(Path(features_dir).iterdir()))) 321 | utils.parallel_process(inputs, fn, multiprocessing) 322 | 323 | 324 | def _extract_multi_region_segmentations( 325 | inp: Tuple[int, Tuple[str, str]], 326 | adaptive: bool, 327 | non_adaptive_num_segments: int, 328 | infer_bg_index: bool, 329 | kmeans_baseline: bool, 330 | output_dir: str, 331 | num_eigenvectors: int, 332 | ): 333 | index, (feature_path, eigs_path) = inp 334 | 335 | # Load 336 | data_dict = torch.load(feature_path, map_location='cpu') 337 | data_dict.update(torch.load(eigs_path, map_location='cpu')) 338 | 339 | # Output file 340 | id = Path(data_dict['id']) 341 | output_file = str(Path(output_dir) / f'{id}.png') 342 | if Path(output_file).is_file(): 343 | print(f'Skipping existing file {str(output_file)}') 344 | return # skip because already generated 345 | 346 | # Sizes 347 | B, C, H, W, P, H_patch, W_patch, H_pad, W_pad = utils.get_image_sizes(data_dict, 4) 348 | 349 | # If adaptive, we use the gaps between eigenvalues to determine the number of 350 | # segments per image. If not, we use non_adaptive_num_segments to get a fixed 351 | # number of segments per image. 352 | if adaptive: 353 | indices_by_gap = np.argsort(np.diff(data_dict['eigenvalues'].numpy()))[::-1] 354 | index_largest_gap = indices_by_gap[indices_by_gap != 0][0] # remove zero and take the biggest 355 | n_clusters = index_largest_gap + 1 356 | # print(f'Number of clusters: {n_clusters}') 357 | else: 358 | n_clusters = non_adaptive_num_segments 359 | 360 | # K-Means 361 | kmeans = KMeans(n_clusters=n_clusters) 362 | 363 | # Compute segments using eigenvector or baseline K-means 364 | if kmeans_baseline: 365 | feats = data_dict['k'].squeeze().numpy() 366 | clusters = kmeans.fit_predict(feats) 367 | else: 368 | eigenvectors = data_dict['eigenvectors'][1:1+num_eigenvectors].numpy() # take non-constant eigenvectors 369 | # import pdb; pdb.set_trace() 370 | clusters = kmeans.fit_predict(eigenvectors.T) 371 | 372 | 373 | # Reshape 374 | if clusters.size == H_patch * W_patch: # TODO: better solution might be to pass in patch index 375 | segmap = clusters.reshape(H_patch, W_patch) 376 | elif clusters.size == H_patch * W_patch * 4: 377 | segmap = clusters.reshape(H_patch * 2, W_patch * 2) 378 | else: 379 | raise ValueError() 380 | 381 | # TODO: Improve this step in the pipeline. 382 | # Background detection: we assume that the segment with the most border pixels is the 383 | # background region. We will always make this region equal 0. 
384 | if infer_bg_index: 385 | indices, normlized_counts = utils.get_border_fraction(segmap) 386 | bg_index = indices[np.argmax(normlized_counts)].item() 387 | bg_region = (segmap == bg_index) 388 | zero_region = (segmap == 0) 389 | segmap[bg_region] = 0 390 | segmap[zero_region] = bg_index 391 | 392 | # Save dict 393 | Image.fromarray(segmap).convert('L').save(output_file) 394 | 395 | 396 | def extract_multi_region_segmentations( 397 | features_dir: str, 398 | eigs_dir: str, 399 | output_dir: str, 400 | adaptive: bool = False, 401 | non_adaptive_num_segments: int = 4, 402 | infer_bg_index: bool = True, 403 | kmeans_baseline: bool = False, 404 | num_eigenvectors: int = 1_000_000, 405 | multiprocessing: int = 0 406 | ): 407 | """ 408 | Example: 409 | python extract.py extract_multi_region_segmentations \ 410 | --features_dir "./data/VOC2012/features/dino_vits16" \ 411 | --eigs_dir "./data/VOC2012/eigs/laplacian" \ 412 | --output_dir "./data/VOC2012/multi_region_segmentation/fixed" \ 413 | """ 414 | utils.make_output_dir(output_dir) 415 | fn = partial(_extract_multi_region_segmentations, adaptive=adaptive, infer_bg_index=infer_bg_index, 416 | non_adaptive_num_segments=non_adaptive_num_segments, num_eigenvectors=num_eigenvectors, 417 | kmeans_baseline=kmeans_baseline, output_dir=output_dir) 418 | inputs = utils.get_paired_input_files(features_dir, eigs_dir) 419 | utils.parallel_process(inputs, fn, multiprocessing) 420 | 421 | 422 | def _extract_single_region_segmentations( 423 | inp: Tuple[int, Tuple[str, str]], 424 | threshold: float, 425 | output_dir: str, 426 | ): 427 | index, (feature_path, eigs_path) = inp 428 | 429 | # Load 430 | data_dict = torch.load(feature_path, map_location='cpu') 431 | data_dict.update(torch.load(eigs_path, map_location='cpu')) 432 | 433 | # Output file 434 | id = Path(data_dict['id']) 435 | output_file = str(Path(output_dir) / f'{id}.png') 436 | if Path(output_file).is_file(): 437 | print(f'Skipping existing file {str(output_file)}') 438 | return # skip because already generated 439 | 440 | # Sizes 441 | B, C, H, W, P, H_patch, W_patch, H_pad, W_pad = utils.get_image_sizes(data_dict) 442 | 443 | # Eigenvector 444 | eigenvector = data_dict['eigenvectors'][1].numpy() # take smallest non-zero eigenvector 445 | segmap = (eigenvector > threshold).reshape(H_patch, W_patch) 446 | 447 | # Save dict 448 | Image.fromarray(segmap).convert('L').save(output_file) 449 | 450 | 451 | def extract_single_region_segmentations( 452 | features_dir: str, 453 | eigs_dir: str, 454 | output_dir: str, 455 | threshold: float = 0.0, 456 | multiprocessing: int = 0 457 | ): 458 | """ 459 | Example: 460 | python extract.py extract_single_region_segmentations \ 461 | --features_dir "./data/VOC2012/features/dino_vits16" \ 462 | --eigs_dir "./data/VOC2012/eigs/laplacian" \ 463 | --output_dir "./data/VOC2012/single_region_segmentation/patches" \ 464 | """ 465 | utils.make_output_dir(output_dir) 466 | fn = partial(_extract_single_region_segmentations, threshold=threshold, output_dir=output_dir) 467 | inputs = utils.get_paired_input_files(features_dir, eigs_dir) 468 | utils.parallel_process(inputs, fn, multiprocessing) 469 | 470 | 471 | def _extract_bbox( 472 | inp: Tuple[str, str], 473 | num_erode: int, 474 | num_dilate: int, 475 | skip_bg_index: bool, 476 | downsample_factor: Optional[int] = None 477 | ): 478 | index, (feature_path, segmentation_path) = inp 479 | 480 | # Load 481 | data_dict = torch.load(feature_path, map_location='cpu') 482 | segmap = np.array(Image.open(str(segmentation_path))) 
483 | image_id = data_dict['id'] 484 | 485 | # Sizes 486 | B, C, H, W, P, H_patch, W_patch, H_pad, W_pad = utils.get_image_sizes(data_dict, downsample_factor) 487 | 488 | # Get bounding boxes 489 | outputs = {'bboxes': [], 'bboxes_original_resolution': [], 'segment_indices': [], 'id': image_id, 490 | 'format': "(xmin, ymin, xmax, ymax)"} 491 | for segment_index in sorted(np.unique(segmap).tolist()): 492 | if (not skip_bg_index) or (segment_index > 0): # skip 0, because 0 is the background 493 | 494 | # Erode and dilate mask 495 | binary_mask = (segmap == segment_index) 496 | binary_mask = utils.erode_or_dilate_mask(binary_mask, r=num_erode, erode=True) 497 | binary_mask = utils.erode_or_dilate_mask(binary_mask, r=num_dilate, erode=False) 498 | 499 | # Find box 500 | mask = np.where(binary_mask == 1) 501 | ymin, ymax = min(mask[0]), max(mask[0]) + 1 # add +1 because excluded max 502 | xmin, xmax = min(mask[1]), max(mask[1]) + 1 # add +1 because excluded max 503 | bbox = [xmin, ymin, xmax, ymax] 504 | bbox_resized = [x * P for x in bbox] # rescale to image size 505 | bbox_features = [ymin, xmin, ymax, xmax] # feature space coordinates are different 506 | 507 | # Append 508 | outputs['segment_indices'].append(segment_index) 509 | outputs['bboxes'].append(bbox) 510 | outputs['bboxes_original_resolution'].append(bbox_resized) 511 | 512 | return outputs 513 | 514 | 515 | def extract_bboxes( 516 | features_dir: str, 517 | segmentations_dir: str, 518 | output_file: str, 519 | num_erode: int = 2, 520 | num_dilate: int = 3, 521 | skip_bg_index: bool = True, 522 | downsample_factor: Optional[int] = None, 523 | ): 524 | """ 525 | Note: There is no need for multiprocessing here, as it is more convenient to save 526 | the entire output as a single JSON file. Example: 527 | python extract.py extract_bboxes \ 528 | --features_dir "./data/VOC2012/features/dino_vits16" \ 529 | --segmentations_dir "./data/VOC2012/multi_region_segmentation/fixed" \ 530 | --num_erode 2 --num_dilate 5 \ 531 | --output_file "./data/VOC2012/multi_region_bboxes/fixed/bboxes_e2_d5.pth" \ 532 | """ 533 | utils.make_output_dir(str(Path(output_file).parent), check_if_empty=False) 534 | fn = partial(_extract_bbox, num_erode=num_erode, num_dilate=num_dilate, skip_bg_index=skip_bg_index, 535 | downsample_factor=downsample_factor) 536 | inputs = utils.get_paired_input_files(features_dir, segmentations_dir) 537 | all_outputs = [fn(inp) for inp in tqdm(inputs, desc='Extracting bounding boxes')] 538 | torch.save(all_outputs, output_file) 539 | print('Done') 540 | 541 | 542 | def extract_bbox_features( 543 | images_root: str, 544 | bbox_file: str, 545 | model_name: str, 546 | output_file: str, 547 | ): 548 | """ 549 | Example: 550 | python extract.py extract_bbox_features \ 551 | --model_name dino_vits16 \ 552 | --images_root "./data/VOC2012/images" \ 553 | --bbox_file "./data/VOC2012/multi_region_bboxes/fixed/bboxes_e2_d5.pth" \ 554 | --output_file "./data/VOC2012/features/dino_vits16" \ 555 | --output_file "./data/VOC2012/multi_region_bboxes/fixed/bbox_features_e2_d5.pth" \ 556 | """ 557 | 558 | # Load bounding boxes 559 | bbox_list = torch.load(bbox_file) 560 | total_num_boxes = sum(len(d['bboxes']) for d in bbox_list) 561 | print(f'Loaded bounding box list. 
There are {total_num_boxes} total bounding boxes.') 562 | 563 | # Models 564 | model_name_lower = model_name.lower() 565 | model, val_transform, patch_size, num_heads = utils.get_model(model_name_lower) 566 | model.eval().to('cuda') 567 | 568 | # Loop over boxes 569 | for bbox_dict in tqdm(bbox_list): 570 | # Get image info 571 | image_id = bbox_dict['id'] 572 | bboxes = bbox_dict['bboxes_original_resolution'] 573 | # Load image as tensor 574 | image_filename = str(Path(images_root) / f'{image_id}.png') 575 | image = val_transform(Image.open(image_filename).convert('RGB')) # (3, H, W) 576 | image = image.unsqueeze(0).to('cuda') # (1, 3, H, W) 577 | features_crops = [] 578 | for (xmin, ymin, xmax, ymax) in bboxes: 579 | image_crop = image[:, :, ymin:ymax, xmin:xmax] 580 | features_crop = model(image_crop).squeeze().cpu() 581 | features_crops.append(features_crop) 582 | bbox_dict['features'] = torch.stack(features_crops, dim=0) 583 | 584 | # Save 585 | torch.save(bbox_list, output_file) 586 | print(f'Saved features to {output_file}') 587 | 588 | 589 | def extract_bbox_clusters( 590 | bbox_features_file: str, 591 | output_file: str, 592 | num_clusters: int = 20, 593 | seed: int = 0, 594 | pca_dim: Optional[int] = 0, 595 | ): 596 | """ 597 | Example: 598 | python extract.py extract_bbox_clusters \ 599 | --bbox_features_file "./data/VOC2012/multi_region_bboxes/fixed/bbox_features_e2_d5.pth" \ 600 | --pca_dim 32 --num_clusters 21 --seed 0 \ 601 | --output_file "./data/VOC2012/multi_region_bboxes/fixed/bbox_clusters_e2_d5_pca_32.pth" \ 602 | """ 603 | 604 | # Load bounding boxes 605 | bbox_list = torch.load(bbox_features_file) 606 | total_num_boxes = sum(len(d['bboxes']) for d in bbox_list) 607 | print(f'Loaded bounding box list. There are {total_num_boxes} total bounding boxes with features.') 608 | 609 | # Loop over boxes and stack features with PyTorch, because Numpy is too slow 610 | print(f'Stacking and normalizing features') 611 | all_features = torch.cat([bbox_dict['features'] for bbox_dict in bbox_list], dim=0) # (numBbox, D) 612 | all_features = all_features / torch.norm(all_features, dim=-1, keepdim=True) # (numBbox, D)f 613 | all_features = all_features.numpy() 614 | 615 | # Cluster: PCA 616 | if pca_dim: 617 | pca = PCA(pca_dim) 618 | print(f'Computing PCA with dimension {pca_dim}') 619 | all_features = pca.fit_transform(all_features) 620 | 621 | # Cluster: K-Means 622 | print(f'Computing K-Means clustering with {num_clusters} clusters') 623 | kmeans = MiniBatchKMeans(n_clusters=num_clusters, batch_size=4096, max_iter=5000, random_state=seed) 624 | clusters = kmeans.fit_predict(all_features) 625 | 626 | # Print 627 | _indices, _counts = np.unique(clusters, return_counts=True) 628 | print(f'Cluster indices: {_indices.tolist()}') 629 | print(f'Cluster counts: {_counts.tolist()}') 630 | 631 | # Loop over boxes and add clusters 632 | idx = 0 633 | for bbox_dict in bbox_list: 634 | num_bboxes = len(bbox_dict['bboxes']) 635 | del bbox_dict['features'] # bbox_dict['features'] = bbox_dict['features'].squeeze() 636 | bbox_dict['clusters'] = clusters[idx: idx + num_bboxes] 637 | idx = idx + num_bboxes 638 | 639 | # Save 640 | torch.save(bbox_list, output_file) 641 | print(f'Saved features to {output_file}') 642 | 643 | 644 | def extract_semantic_segmentations( 645 | segmentations_dir: str, 646 | bbox_clusters_file: str, 647 | output_dir: str, 648 | ): 649 | """ 650 | Example: 651 | python extract.py extract_semantic_segmentations \ 652 | --segmentations_dir 
"./data/VOC2012/multi_region_segmentation/fixed" \ 653 | --bbox_clusters_file "./data/VOC2012/multi_region_bboxes/fixed/bbox_clusters_e2_d5_pca_32.pth" \ 654 | --output_dir "./data/VOC2012/semantic_segmentations/patches/fixed/segmaps_e2_d5_pca_32" \ 655 | """ 656 | 657 | # Load bounding boxes 658 | bbox_list = torch.load(bbox_clusters_file) 659 | total_num_boxes = sum(len(d['bboxes']) for d in bbox_list) 660 | print(f'Loaded bounding box list. There are {total_num_boxes} total bounding boxes with features and clusters.') 661 | 662 | # Output 663 | utils.make_output_dir(output_dir) 664 | 665 | # Loop over boxes 666 | for bbox_dict in tqdm(bbox_list): 667 | # Get image info 668 | image_id = bbox_dict['id'] 669 | # Load segmentation as tensor 670 | segmap_path = str(Path(segmentations_dir) / f'{image_id}.png') 671 | segmap = np.array(Image.open(segmap_path)) 672 | # Check if the segmap is a binary file with foreground pixels saved as 255 instead of 1 673 | # this will be the case for some of our baselines 674 | if set(np.unique(segmap).tolist()).issubset({0, 255}): 675 | segmap[segmap == 255] = 1 676 | # Semantic map 677 | if not len(bbox_dict['segment_indices']) == len(bbox_dict['clusters'].tolist()): 678 | import pdb 679 | pdb.set_trace() 680 | semantic_map = dict(zip(bbox_dict['segment_indices'], bbox_dict['clusters'].tolist())) 681 | assert 0 not in semantic_map, semantic_map 682 | semantic_map[0] = 0 # background region remains zero 683 | # Perform mapping 684 | semantic_segmap = np.vectorize(semantic_map.__getitem__)(segmap) 685 | # Save 686 | output_file = str(Path(output_dir) / f'{image_id}.png') 687 | Image.fromarray(semantic_segmap.astype(np.uint8)).convert('L').save(output_file) 688 | 689 | print(f'Saved features to {output_dir}') 690 | 691 | 692 | def _extract_crf_segmentations( 693 | inp: Tuple[int, Tuple[str, str]], 694 | images_root: str, 695 | num_classes: int, 696 | output_dir: str, 697 | crf_params: Tuple, 698 | downsample_factor: int = 16, 699 | ): 700 | index, (image_file, segmap_path) = inp 701 | 702 | # Output file 703 | id = Path(image_file).stem 704 | output_file = str(Path(output_dir) / f'{id}.png') 705 | if Path(output_file).is_file(): 706 | print(f'Skipping existing file {str(output_file)}') 707 | return # skip because already generated 708 | 709 | # Load image and segmap 710 | image_file = str(Path(images_root) / f'{id}.jpg') 711 | image = np.array(Image.open(image_file).convert('RGB')) # (H_patch, W_patch, 3) 712 | segmap = np.array(Image.open(segmap_path)) # (H_patch, W_patch) 713 | 714 | # Sizes 715 | P = downsample_factor 716 | H, W = image.shape[:2] 717 | H_patch, W_patch = H // P, W // P 718 | H_pad, W_pad = H_patch * P, W_patch * P 719 | 720 | # Resize and expand 721 | segmap_upscaled = cv2.resize(segmap, dsize=(W_pad, H_pad), interpolation=cv2.INTER_NEAREST) # (H_pad, W_pad) 722 | segmap_orig_res = cv2.resize(segmap, dsize=(W, H), interpolation=cv2.INTER_NEAREST) # (H, W) 723 | segmap_orig_res[:H_pad, :W_pad] = segmap_upscaled # replace with the correctly upscaled version, just in case they are different 724 | 725 | # Convert binary 726 | if set(np.unique(segmap_orig_res).tolist()) == {0, 255}: 727 | segmap_orig_res[segmap_orig_res == 255] = 1 728 | 729 | # CRF 730 | import denseCRF # make sure you've installed SimpleCRF 731 | unary_potentials = F.one_hot(torch.from_numpy(segmap_orig_res).long(), num_classes=num_classes) 732 | segmap_crf = denseCRF.densecrf(image, unary_potentials, crf_params) # (H_pad, W_pad) 733 | 734 | # Save 735 | 
Image.fromarray(segmap_crf).convert('L').save(output_file) 736 | 737 | 738 | def extract_crf_segmentations( 739 | images_list: str, 740 | images_root: str, 741 | segmentations_dir: str, 742 | output_dir: str, 743 | num_classes: int = 21, 744 | downsample_factor: int = 16, 745 | multiprocessing: int = 0, 746 | # CRF parameters 747 | w1 = 10, # weight of bilateral term # default: 10.0, 748 | alpha = 80, # spatial std # default: 80, 749 | beta = 13, # rgb std # default: 13, 750 | w2 = 3, # weight of spatial term # default: 3.0, 751 | gamma = 3, # spatial std # default: 3, 752 | it = 5.0, # iteration # default: 5.0, 753 | ): 754 | """ 755 | Applies a CRF to segmentations in order to sharpen them. 756 | 757 | Example: 758 | python extract.py extract_crf_segmentations \ 759 | --images_list "./data/VOC2012/lists/images.txt" \ 760 | --images_root "./data/VOC2012/images" \ 761 | --segmentations_dir "./data/VOC2012/semantic_segmentations/patches/fixed/segmaps_e2_d5_pca_32" \ 762 | --output_dir "./data/VOC2012/semantic_segmentations/crf/fixed/segmaps_e2_d5_pca_32" \ 763 | """ 764 | try: 765 | import denseCRF 766 | except: 767 | raise ImportError( 768 | 'Please install SimpleCRF to compute CRF segmentations:\n' 769 | 'pip3 install SimpleCRF' 770 | ) 771 | 772 | utils.make_output_dir(output_dir) 773 | fn = partial(_extract_crf_segmentations, images_root=images_root, num_classes=num_classes, output_dir=output_dir, 774 | crf_params=(w1, alpha, beta, w2, gamma, it), downsample_factor=downsample_factor) 775 | inputs = utils.get_paired_input_files(images_list, segmentations_dir) 776 | print(f'Found {len(inputs)} images and segmaps') 777 | utils.parallel_process(inputs, fn, multiprocessing) 778 | 779 | 780 | def vis_segmentations( 781 | images_list: str, 782 | images_root: str, 783 | segmentations_dir: str, 784 | bbox_file: Optional[str] = None, 785 | ): 786 | """ 787 | Example: 788 | streamlit run extract.py vis_segmentations -- \ 789 | --images_list "./data/VOC2012/lists/images.txt" \ 790 | --images_root "./data/VOC2012/images" \ 791 | --segmentations_dir "./data/VOC2012/multi_region_segmentation/fixed" 792 | or alternatively: 793 | --segmentations_dir "./data/VOC2012/semantic_segmentations/crf/fixed/segmaps_e2_d5_pca_32/" 794 | """ 795 | # Streamlit setup 796 | import streamlit as st 797 | from matplotlib.cm import get_cmap 798 | from skimage.color import label2rgb 799 | st.set_page_config(layout='wide') 800 | 801 | # Inputs 802 | image_paths = [] 803 | segmap_paths = [] 804 | images_root = Path(images_root) 805 | segmentations_dir = Path(segmentations_dir) 806 | for image_file in Path(images_list).read_text().splitlines(): 807 | segmap_file = f'{Path(image_file).stem}.png' 808 | image_paths.append(images_root / image_file) 809 | segmap_paths.append(segmentations_dir / segmap_file) 810 | print(f'Found {len(image_paths)} image and segmap paths') 811 | 812 | # Load optional bounding boxes 813 | if bbox_file is not None: 814 | bboxes_list = torch.load(bbox_file) 815 | 816 | # Colors 817 | colors = get_cmap('tab20', 21).colors[:, :3] 818 | 819 | # Which index 820 | which_index = st.number_input(label='Which index to view (0 for all)', value=0) 821 | 822 | # Load 823 | total = 0 824 | for i, (image_path, segmap_path) in enumerate(zip(image_paths, segmap_paths)): 825 | if total > 40: break 826 | image_id = image_path.stem 827 | 828 | # Streamlit 829 | cols = [] 830 | 831 | # Load 832 | image = np.array(Image.open(image_path).convert('RGB')) 833 | segmap = np.array(Image.open(segmap_path)) 834 | 835 | # Convert 
binary 836 | if set(np.unique(segmap).tolist()) == {0, 255}: 837 | segmap[segmap == 255] = 1 838 | 839 | # Resize 840 | segmap_fullres = cv2.resize(segmap, dsize=image.shape[:2][::-1], interpolation=cv2.INTER_NEAREST) 841 | 842 | # Only view images with a specific class 843 | if which_index not in np.unique(segmap): 844 | continue 845 | total += 1 846 | 847 | # Streamlit 848 | cols.append({'image': image, 'caption': image_id}) 849 | 850 | # Load optional bounding boxes 851 | bboxes = None 852 | if bbox_file is not None: 853 | bboxes = torch.tensor(bboxes_list[i]['bboxes_original_resolution']) 854 | assert bboxes_list[i]['id'] == image_id, f"{bboxes_list[i]['id']=} but {image_id=}" 855 | image_torch = torch.from_numpy(image).permute(2, 0, 1) 856 | image_with_boxes_torch = draw_bounding_boxes(image_torch, bboxes) 857 | image_with_boxes = image_with_boxes_torch.permute(1, 2, 0).numpy() 858 | 859 | # Streamlit 860 | cols.append({'image': image_with_boxes}) 861 | 862 | # Color 863 | segmap_label_indices, segmap_label_counts = np.unique(segmap, return_counts=True) 864 | blank_segmap_overlay = label2rgb(label=segmap_fullres, image=np.full_like(image, 128), 865 | colors=colors[segmap_label_indices[segmap_label_indices != 0]], bg_label=0, alpha=1.0) 866 | image_segmap_overlay = label2rgb(label=segmap_fullres, image=image, 867 | colors=colors[segmap_label_indices[segmap_label_indices != 0]], bg_label=0, alpha=0.45) 868 | segmap_caption = dict(zip(segmap_label_indices.tolist(), (segmap_label_counts).tolist())) 869 | 870 | # Streamlit 871 | cols.append({'image': blank_segmap_overlay, 'caption': segmap_caption}) 872 | cols.append({'image': image_segmap_overlay, 'caption': segmap_caption}) 873 | 874 | # Display 875 | for d, col in zip(cols, st.columns(len(cols))): 876 | col.image(**d) 877 | 878 | 879 | if __name__ == '__main__': 880 | torch.set_grad_enabled(False) 881 | fire.Fire(dict( 882 | extract_features=extract_features, 883 | extract_eigs=extract_eigs, 884 | extract_multi_region_segmentations=extract_multi_region_segmentations, 885 | extract_bboxes=extract_bboxes, 886 | extract_bbox_features=extract_bbox_features, 887 | extract_bbox_clusters=extract_bbox_clusters, 888 | extract_semantic_segmentations=extract_semantic_segmentations, 889 | extract_crf_segmentations=extract_crf_segmentations, 890 | extract_single_region_segmentations=extract_single_region_segmentations, 891 | vis_segmentations=vis_segmentations, 892 | )) 893 | --------------------------------------------------------------------------------
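Note on usage: the `fire.Fire` registry at the bottom of `extract.py` exposes each stage as a CLI subcommand, and chained in order they form the full pipeline: per-image features → Laplacian eigenvectors → multi-region segmentations → bounding boxes → box features → box clusters → semantic segmentations → optional CRF refinement. The sketch below is a minimal, non-authoritative example that calls the module-level functions directly instead of via the CLI; the VOC2012-style paths are taken from the docstring examples and are assumptions about the directory layout, not requirements.

```python
# Minimal sketch of the pipeline implemented by extract.py, assuming the
# VOC2012-style layout used in the docstring examples. Run from spectral/
# so that `extract` (and its extract_utils dependency) is importable.
import torch

import extract

torch.set_grad_enabled(False)  # mirrors the __main__ block of extract.py

root = './data/VOC2012'   # hypothetical data root
model = 'dino_vits16'     # any model name supported by extract_utils.get_model

# 1. Per-image deep features, saved as one .pth file per image
extract.extract_features(
    images_list=f'{root}/lists/images.txt',
    images_root=f'{root}/images',
    model_name=model,
    batch_size=1,
    output_dir=f'{root}/features/{model}',
)

# 2. Eigenvectors of the feature Laplacian (set image_color_lambda > 0
#    to additionally fuse color affinities, as handled in _extract_eig)
extract.extract_eigs(
    images_root=f'{root}/images',
    features_dir=f'{root}/features/{model}',
    which_matrix='laplacian',
    output_dir=f'{root}/eigs/laplacian',
    K=5,
)

# 3. Multi-region segmentations via K-means over the non-constant eigenvectors
extract.extract_multi_region_segmentations(
    features_dir=f'{root}/features/{model}',
    eigs_dir=f'{root}/eigs/laplacian',
    output_dir=f'{root}/multi_region_segmentation/fixed',
)

# 4. Bounding boxes, per-box features, and box clusters
extract.extract_bboxes(
    features_dir=f'{root}/features/{model}',
    segmentations_dir=f'{root}/multi_region_segmentation/fixed',
    num_erode=2, num_dilate=5,
    output_file=f'{root}/multi_region_bboxes/fixed/bboxes_e2_d5.pth',
)
extract.extract_bbox_features(
    images_root=f'{root}/images',
    bbox_file=f'{root}/multi_region_bboxes/fixed/bboxes_e2_d5.pth',
    model_name=model,
    output_file=f'{root}/multi_region_bboxes/fixed/bbox_features_e2_d5.pth',
)
extract.extract_bbox_clusters(
    bbox_features_file=f'{root}/multi_region_bboxes/fixed/bbox_features_e2_d5.pth',
    pca_dim=32, num_clusters=21, seed=0,
    output_file=f'{root}/multi_region_bboxes/fixed/bbox_clusters_e2_d5_pca_32.pth',
)

# 5. Semantic segmentations from the cluster assignments, then CRF sharpening
extract.extract_semantic_segmentations(
    segmentations_dir=f'{root}/multi_region_segmentation/fixed',
    bbox_clusters_file=f'{root}/multi_region_bboxes/fixed/bbox_clusters_e2_d5_pca_32.pth',
    output_dir=f'{root}/semantic_segmentations/patches/fixed/segmaps_e2_d5_pca_32',
)
extract.extract_crf_segmentations(
    images_list=f'{root}/lists/images.txt',
    images_root=f'{root}/images',
    segmentations_dir=f'{root}/semantic_segmentations/patches/fixed/segmaps_e2_d5_pca_32',
    output_dir=f'{root}/semantic_segmentations/crf/fixed/segmaps_e2_d5_pca_32',
)
```

Each stage skips any output file that already exists, so the chain can be re-run incrementally after an interruption.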