├── .gitignore
├── assets
│   └── architecture.png
├── data
│   ├── splits
│   │   ├── coco
│   │   │   ├── trn
│   │   │   │   ├── fold0.pkl
│   │   │   │   ├── fold1.pkl
│   │   │   │   ├── fold2.pkl
│   │   │   │   └── fold3.pkl
│   │   │   └── val
│   │   │       ├── fold0.pkl
│   │   │       ├── fold1.pkl
│   │   │       ├── fold2.pkl
│   │   │       └── fold3.pkl
│   │   ├── fss
│   │   │   ├── trn.txt
│   │   │   ├── val.txt
│   │   │   └── test.txt
│   │   └── pascal
│   │       ├── trn
│   │       │   ├── fold0.txt
│   │       │   ├── fold1.txt
│   │       │   ├── fold2.txt
│   │       │   └── fold3.txt
│   │       └── val
│   │           ├── fold0.txt
│   │           ├── fold1.txt
│   │           ├── fold2.txt
│   │           └── fold3.txt
│   ├── dataset.py
│   ├── fss.py
│   ├── coco.py
│   └── pascal.py
├── scripts
│   ├── train.sh
│   └── test.sh
├── common
│   ├── utils.py
│   ├── config.py
│   ├── evaluation.py
│   ├── vis.py
│   └── logger.py
├── model
│   ├── base
│   │   ├── transformer.py
│   │   └── swin_transformer.py
│   └── DCAMA.py
├── test.py
├── train.py
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | start.sh
3 | config.json
4 | __pycache__
5 | logs
--------------------------------------------------------------------------------
/assets/architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/assets/architecture.png
--------------------------------------------------------------------------------
/data/splits/coco/trn/fold0.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/trn/fold0.pkl
--------------------------------------------------------------------------------
/data/splits/coco/trn/fold1.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/trn/fold1.pkl
--------------------------------------------------------------------------------
/data/splits/coco/trn/fold2.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/trn/fold2.pkl
--------------------------------------------------------------------------------
/data/splits/coco/trn/fold3.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/trn/fold3.pkl
--------------------------------------------------------------------------------
/data/splits/coco/val/fold0.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/val/fold0.pkl
--------------------------------------------------------------------------------
/data/splits/coco/val/fold1.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/val/fold1.pkl
--------------------------------------------------------------------------------
/data/splits/coco/val/fold2.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/val/fold2.pkl
--------------------------------------------------------------------------------
/data/splits/coco/val/fold3.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pawn-sxy/DCAMA/HEAD/data/splits/coco/val/fold3.pkl
--------------------------------------------------------------------------------
/scripts/train.sh:
--------------------------------------------------------------------------------
1 | python -u -m torch.distributed.launch --nnodes=1 --nproc_per_node=4 --node_rank=0 --master_port=16005 \
2 | ./train.py --datapath "../datasets" \
3 | --benchmark coco \
4 | --fold 0 \
5 | --bsz 12 \
6 | --nworker 8 \
7 | --backbone swin \
8 | --feature_extractor_path "../backbones/swin_base_patch4_window12_384.pth" \
9 | --logpath "./logs" \
10 | --lr 1e-3 \
11 | --nepoch 500
--------------------------------------------------------------------------------
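
Note: `torch.distributed.launch` spawns one process per GPU (`--nproc_per_node`) and passes `--local_rank` to each process; that is why `common/config.py` declares the flag and `train.py` uses it to pin the device. A minimal sketch of the per-process wiring (simplified, and only meaningful when run under the launcher):

```python
# sketch of what each launched process effectively does (simplified assumption)
import argparse

import torch
import torch.distributed as dist

parser = argparse.ArgumentParser()
parser.add_argument('--local_rank', type=int, default=0)  # injected by torch.distributed.launch
args = parser.parse_args()

# init_process_group reads MASTER_ADDR/MASTER_PORT/RANK/WORLD_SIZE from
# environment variables set by the launcher
dist.init_process_group(backend='nccl')
torch.cuda.set_device(args.local_rank)
print('rank %d / world size %d' % (dist.get_rank(), dist.get_world_size()))
```
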
/scripts/test.sh:
--------------------------------------------------------------------------------
1 | python ./test.py --datapath "../datasets" \
2 | --benchmark coco \
3 | --fold 0 \
4 | --bsz 1 \
5 | --nworker 8 \
6 | --backbone swin \
7 | --feature_extractor_path "../backbones/swin_base_patch4_window12_384.pth" \
8 | --logpath "./logs" \
9 | --load "./best_model.pt" \
10 | --nshot 5 \
11 | --vispath "./vis_5" \
12 | --visualize
13 |
--------------------------------------------------------------------------------
/common/utils.py:
--------------------------------------------------------------------------------
1 | r""" Helper functions """
2 | import random
3 |
4 | import torch
5 | import numpy as np
6 |
7 |
8 | def fix_randseed(seed):
9 | r""" Set random seeds for reproducibility """
10 | if seed is None:
11 | seed = int(random.random() * 1e5)
12 | np.random.seed(seed)
13 | torch.manual_seed(seed)
14 | torch.cuda.manual_seed(seed)
15 | torch.cuda.manual_seed_all(seed)
16 | torch.backends.cudnn.benchmark = False
17 | torch.backends.cudnn.deterministic = True
18 |
19 |
20 | def mean(x):
21 | return sum(x) / len(x) if len(x) > 0 else 0.0
22 |
23 |
24 | def to_cuda(batch):
25 | for key, value in batch.items():
26 | if isinstance(value, torch.Tensor):
27 | batch[key] = value.cuda()
28 | return batch
29 |
30 |
31 | def to_cpu(tensor):
32 | return tensor.detach().clone().cpu()
33 |
--------------------------------------------------------------------------------
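
A quick illustration of how these helpers compose (a sketch; the values are arbitrary):

```python
import torch
from common.utils import fix_randseed, to_cuda, to_cpu, mean

fix_randseed(0)                       # freeze all RNGs
a = torch.rand(3)
fix_randseed(0)
b = torch.rand(3)
assert torch.equal(a, b)              # same seed -> same draw

print(mean([1.0, 2.0, 3.0]))          # 2.0
if torch.cuda.is_available():         # to_cuda requires a CUDA device
    batch = to_cuda({'x': torch.zeros(2, 3), 'name': 'episode-0'})  # only tensors move
    print(to_cpu(batch['x']).device)  # cpu
```
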
/common/config.py:
--------------------------------------------------------------------------------
1 | r"""config"""
2 | import argparse
3 |
4 | def parse_opts():
5 | r"""arguments"""
6 | parser = argparse.ArgumentParser(description='Dense Cross-Query-and-Support Attention Weighted Mask Aggregation for Few-Shot Segmentation')
7 |
8 | # common
9 | parser.add_argument('--datapath', type=str, default='./datasets')
10 | parser.add_argument('--benchmark', type=str, default='pascal', choices=['pascal', 'coco', 'fss'])
11 | parser.add_argument('--fold', type=int, default=0, choices=[0, 1, 2, 3])
12 | parser.add_argument('--bsz', type=int, default=20)
13 | parser.add_argument('--nworker', type=int, default=8)
14 | parser.add_argument('--backbone', type=str, default='swin', choices=['resnet50', 'resnet101', 'swin'])
15 | parser.add_argument('--feature_extractor_path', type=str, default='')
16 | parser.add_argument('--logpath', type=str, default='./logs')
17 |
18 | # for train
19 | parser.add_argument('--lr', type=float, default=1e-3)
20 | parser.add_argument('--nepoch', type=int, default=1000)
21 | parser.add_argument('--local_rank', default=0, type=int, help='node rank for distributed training')
22 |
23 | # for test
24 | parser.add_argument('--load', type=str, default='')
25 | parser.add_argument('--nshot', type=int, default=1)
26 | parser.add_argument('--visualize', action='store_true')
27 | parser.add_argument('--vispath', type=str, default='./vis')
28 | parser.add_argument('--use_original_imgsize', action='store_true')
29 |
30 | args = parser.parse_args()
31 | return args
--------------------------------------------------------------------------------
/common/evaluation.py:
--------------------------------------------------------------------------------
1 | r""" Evaluate mask prediction """
2 | import torch
3 |
4 |
5 | class Evaluator:
6 | r""" Computes intersection and union between prediction and ground-truth """
7 | @classmethod
8 | def initialize(cls):
9 | cls.ignore_index = 255
10 |
11 | @classmethod
12 | def classify_prediction(cls, pred_mask, batch):
13 | gt_mask = batch.get('query_mask')
14 |
15 |         # Apply ignore_index to PASCAL-5i masks (following the evaluation scheme of PFENet (TPAMI 2020))
16 | query_ignore_idx = batch.get('query_ignore_idx')
17 | if query_ignore_idx is not None:
18 | assert torch.logical_and(query_ignore_idx, gt_mask).sum() == 0
19 | query_ignore_idx *= cls.ignore_index
20 | gt_mask = gt_mask + query_ignore_idx
21 | pred_mask[gt_mask == cls.ignore_index] = cls.ignore_index
22 |
23 | # compute intersection and union of each episode in a batch
24 | area_inter, area_pred, area_gt = [], [], []
25 | for _pred_mask, _gt_mask in zip(pred_mask, gt_mask):
26 | _inter = _pred_mask[_pred_mask == _gt_mask]
27 | if _inter.size(0) == 0: # as torch.histc returns error if it gets empty tensor (pytorch 1.5.1)
28 | _area_inter = torch.tensor([0, 0], device=_pred_mask.device)
29 | else:
30 | _area_inter = torch.histc(_inter, bins=2, min=0, max=1)
31 | area_inter.append(_area_inter)
32 | area_pred.append(torch.histc(_pred_mask, bins=2, min=0, max=1))
33 | area_gt.append(torch.histc(_gt_mask, bins=2, min=0, max=1))
34 | area_inter = torch.stack(area_inter).t()
35 | area_pred = torch.stack(area_pred).t()
36 | area_gt = torch.stack(area_gt).t()
37 | area_union = area_pred + area_gt - area_inter
38 |
39 | return area_inter, area_union
40 |
--------------------------------------------------------------------------------
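
A toy check of `classify_prediction` on a single 2x2 episode (float masks, since `torch.histc` expects floating-point input; the values are made up):

```python
import torch
from common.evaluation import Evaluator

Evaluator.initialize()
pred = torch.tensor([[[0., 1.], [1., 1.]]])   # 1 episode, 2x2 binary prediction
gt   = torch.tensor([[[0., 1.], [0., 1.]]])   # ground truth
inter, union = Evaluator.classify_prediction(pred.clone(), {'query_mask': gt})
# rows are [background, foreground] counts per episode:
# inter = [[1.], [2.]], union = [[2.], [3.]] -> foreground IoU = 2/3
print(inter, union)
```
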
/data/dataset.py:
--------------------------------------------------------------------------------
1 | r""" Dataloader builder for few-shot semantic segmentation dataset """
2 | from torch.utils.data.distributed import DistributedSampler as Sampler
3 | from torch.utils.data import DataLoader
4 | from torchvision import transforms
5 |
6 | from data.pascal import DatasetPASCAL
7 | from data.coco import DatasetCOCO
8 | from data.fss import DatasetFSS
9 |
10 |
11 | class FSSDataset:
12 |
13 | @classmethod
14 | def initialize(cls, img_size, datapath, use_original_imgsize):
15 |
16 | cls.datasets = {
17 | 'pascal': DatasetPASCAL,
18 | 'coco': DatasetCOCO,
19 | 'fss': DatasetFSS,
20 | }
21 |
22 | cls.img_mean = [0.485, 0.456, 0.406]
23 | cls.img_std = [0.229, 0.224, 0.225]
24 | cls.datapath = datapath
25 | cls.use_original_imgsize = use_original_imgsize
26 |
27 | cls.transform = transforms.Compose([transforms.Resize(size=(img_size, img_size)),
28 | transforms.ToTensor(),
29 | transforms.Normalize(cls.img_mean, cls.img_std)])
30 |
31 | @classmethod
32 | def build_dataloader(cls, benchmark, bsz, nworker, fold, split, shot=1):
33 | nworker = nworker if split == 'trn' else 0
34 |
35 | dataset = cls.datasets[benchmark](cls.datapath, fold=fold,
36 | transform=cls.transform,
37 | split=split, shot=shot, use_original_imgsize=cls.use_original_imgsize)
38 | # Force randomness during training for diverse episode combinations
39 | # Freeze randomness during testing for reproducibility
40 | train_sampler = Sampler(dataset) if split == 'trn' else None
41 | dataloader = DataLoader(dataset, batch_size=bsz, shuffle=False, sampler=train_sampler, num_workers=nworker,
42 | pin_memory=True)
43 |
44 | return dataloader
45 |
--------------------------------------------------------------------------------
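
Typical usage (a sketch; assumes the dataset files exist under `datapath` and that the code is run from the project root, since the split files are read via relative paths; the `'trn'` split additionally requires an initialized process group for `DistributedSampler`):

```python
from data.dataset import FSSDataset

FSSDataset.initialize(img_size=384, datapath='../datasets', use_original_imgsize=False)
loader = FSSDataset.build_dataloader('pascal', bsz=4, nworker=0, fold=0, split='val', shot=1)
batch = next(iter(loader))
print(batch['query_img'].shape)      # torch.Size([4, 3, 384, 384])
print(batch['support_masks'].shape)  # torch.Size([4, 1, 384, 384])
```
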
/model/base/transformer.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import numpy as np
4 | import torch.nn.functional as F
5 | import math, copy
6 | from torch.autograd import Variable
7 |
8 |
9 | class MultiHeadedAttention(nn.Module):
10 | def __init__(self, h, d_model, dropout=0.1):
11 | "Take in model size and number of heads."
12 | super(MultiHeadedAttention, self).__init__()
13 | assert d_model % h == 0
14 |         # Unlike the standard transformer, only query and key are projected; the value is the raw mask (d_v = 1)
15 | self.d_k = d_model // h
16 | self.h = h
17 | self.linears = clones(nn.Linear(d_model, d_model), 2)
18 | self.attn = None
19 | self.dropout = nn.Dropout(p=dropout)
20 |
21 | def forward(self, query, key, value, mask=None):
22 | if mask is not None:
23 | # Same mask applied to all h heads.
24 | mask = mask.unsqueeze(1)
25 | nbatches = query.size(0)
26 |
27 | # 1) Do all the linear projections in batch from d_model => h x d_k
28 | query, key = \
29 | [l(x).view(nbatches, -1, self.h, self.d_k).transpose(1, 2)
30 | for l, x in zip(self.linears, (query, key))]
31 | value = value.repeat(self.h, 1, 1).transpose(0, 1).contiguous().unsqueeze(-1)
32 |
33 | # 2) Apply attention on all the projected vectors in batch.
34 | x, self.attn = attention(query, key, value, mask=mask,
35 | dropout=self.dropout)
36 |
37 |         # 3) Average the aggregated mask values over the h heads.
38 | return torch.mean(x, -3)
39 |
40 |
41 | class PositionalEncoding(nn.Module):
42 | "Implement the PE function."
43 |
44 | def __init__(self, d_model, dropout, max_len=10000):
45 | super(PositionalEncoding, self).__init__()
46 | self.dropout = nn.Dropout(p=dropout)
47 |
48 | # Compute the positional encodings once in log space.
49 | pe = torch.zeros(max_len, d_model)
50 | position = torch.arange(0, max_len).unsqueeze(1)
51 | div_term = torch.exp(torch.arange(0, d_model, 2) *
52 | -(math.log(10000.0) / d_model))
53 | pe[:, 0::2] = torch.sin(position * div_term)
54 | pe[:, 1::2] = torch.cos(position * div_term)
55 | pe = pe.unsqueeze(0)
56 | self.register_buffer('pe', pe)
57 |
58 | def forward(self, x):
59 | x = x + Variable(self.pe[:, :x.size(1)],
60 | requires_grad=False)
61 | return self.dropout(x)
62 |
63 |
64 | def attention(query, key, value, mask=None, dropout=None):
65 | "Compute 'Scaled Dot Product Attention'"
66 | d_k = query.size(-1)
67 | scores = torch.matmul(query, key.transpose(-2, -1)) \
68 | / math.sqrt(d_k)
69 | if mask is not None:
70 | scores = scores.masked_fill(mask == 0, -1e9)
71 | p_attn = F.softmax(scores, dim=-1)
72 | if dropout is not None:
73 | p_attn = dropout(p_attn)
74 | return torch.matmul(p_attn, value), p_attn
75 |
76 |
77 | def clones(module, N):
78 | "Produce N identical layers."
79 | return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
--------------------------------------------------------------------------------
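
The forward pass above departs from the standard transformer: only the query and key are linearly projected, while the value is the flattened support mask, broadcast to every head and aggregated by the attention weights; the head outputs are then averaged rather than concatenated and re-projected. A shape-level sketch (the dimensions are illustrative):

```python
import torch
from model.base.transformer import MultiHeadedAttention

attn = MultiHeadedAttention(h=8, d_model=128)
q = torch.randn(2, 100, 128)   # query-image features:   B x HW_q x C
k = torch.randn(2, 120, 128)   # support-image features: B x HW_s x C
v = torch.rand(2, 120)         # flattened support mask values in [0, 1]
out = attn(q, k, v)            # attention-weighted mask value per query location
print(out.shape)               # torch.Size([2, 100, 1])
```
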
/test.py:
--------------------------------------------------------------------------------
1 | r""" Dense Cross-Query-and-Support Attention Weighted Mask Aggregation for Few-Shot Segmentation """
2 | import torch.nn as nn
3 | import torch
4 |
5 | from model.DCAMA import DCAMA
6 | from common.logger import Logger, AverageMeter
7 | from common.vis import Visualizer
8 | from common.evaluation import Evaluator
9 | from common.config import parse_opts
10 | from common import utils
11 | from data.dataset import FSSDataset
12 |
13 |
14 | def test(model, dataloader, nshot):
15 | r""" Test """
16 |
17 | # Freeze randomness during testing for reproducibility
18 | utils.fix_randseed(0)
19 | average_meter = AverageMeter(dataloader.dataset)
20 |
21 | for idx, batch in enumerate(dataloader):
22 |
23 | # 1. forward pass
24 | batch = utils.to_cuda(batch)
25 | pred_mask = model.module.predict_mask_nshot(batch, nshot=nshot)
26 |
27 | assert pred_mask.size() == batch['query_mask'].size()
28 |
29 | # 2. Evaluate prediction
30 | area_inter, area_union = Evaluator.classify_prediction(pred_mask.clone(), batch)
31 | average_meter.update(area_inter, area_union, batch['class_id'], loss=None)
32 | average_meter.write_process(idx, len(dataloader), epoch=-1, write_batch_idx=1)
33 |
34 | # Visualize predictions
35 | if Visualizer.visualize:
36 | Visualizer.visualize_prediction_batch(batch['support_imgs'], batch['support_masks'],
37 | batch['query_img'], batch['query_mask'],
38 | pred_mask, batch['class_id'], idx,
39 | iou_b=area_inter[1].float() / area_union[1].float())
40 |
41 | # Write evaluation results
42 | average_meter.write_result('Test', 0)
43 | miou, fb_iou = average_meter.compute_iou()
44 |
45 | return miou, fb_iou
46 |
47 |
48 | if __name__ == '__main__':
49 |
50 | # Arguments parsing
51 | args = parse_opts()
52 |
53 | Logger.initialize(args, training=False)
54 |
55 | # Model initialization
56 | model = DCAMA(args.backbone, args.feature_extractor_path, args.use_original_imgsize)
57 | model.eval()
58 |
59 | # Device setup
60 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
61 | Logger.info('# available GPUs: %d' % torch.cuda.device_count())
62 | model = nn.DataParallel(model)
63 | model.to(device)
64 |
65 | # Load trained model
66 | if args.load == '': raise Exception('Pretrained model not specified.')
67 | params = model.state_dict()
68 | state_dict = torch.load(args.load)
69 |
70 | for k1, k2 in zip(list(state_dict.keys()), params.keys()):
71 | state_dict[k2] = state_dict.pop(k1)
72 |
73 | model.load_state_dict(state_dict)
74 |
75 | # Helper classes (for testing) initialization
76 | Evaluator.initialize()
77 | Visualizer.initialize(args.visualize, args.vispath)
78 |
79 | # Dataset initialization
80 | FSSDataset.initialize(img_size=384, datapath=args.datapath, use_original_imgsize=args.use_original_imgsize)
81 | dataloader_test = FSSDataset.build_dataloader(args.benchmark, args.bsz, args.nworker, args.fold, 'test', args.nshot)
82 |
83 | # Test
84 | with torch.no_grad():
85 | test_miou, test_fb_iou = test(model, dataloader_test, args.nshot)
86 | Logger.info('Fold %d mIoU: %5.2f \t FB-IoU: %5.2f' % (args.fold, test_miou.item(), test_fb_iou.item()))
87 | Logger.info('==================== Finished Testing ====================')
88 |
--------------------------------------------------------------------------------
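
The checkpoint-loading loop above renames keys positionally, relying on the saved and freshly built `state_dict`s enumerating parameters in the same order. Under the (assumed) common case where the two key sets differ only by the `module.` prefix that `nn.DataParallel` adds, an equivalent and more explicit remapping would be:

```python
# Hedged alternative to the positional zip-renaming (an assumption, not the repo's code)
import torch
import torch.nn as nn

def load_with_module_prefix(model: nn.DataParallel, ckpt_path: str):
    """Load a checkpoint saved with or without the DataParallel 'module.' prefix."""
    state_dict = torch.load(ckpt_path, map_location='cpu')
    state_dict = {(k if k.startswith('module.') else 'module.' + k): v
                  for k, v in state_dict.items()}
    model.load_state_dict(state_dict)
```
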
/data/splits/fss/val.txt:
--------------------------------------------------------------------------------
1 | handcuff
2 | mortar
3 | matchstick
4 | wine_bottle
5 | dowitcher
6 | triumphal_arch
7 | gyromitra
8 | hatchet
9 | airliner
10 | broccoli
11 | olive
12 | pubg_lvl3backpack
13 | calculator
14 | toucan
15 | shovel
16 | sewing_machine
17 | icecream
18 | woodpecker
19 | pig
20 | relay_stick
21 | mcdonald_sign
22 | cpu
23 | peanut
24 | pumpkin
25 | sturgeon
26 | hammer
27 | hami_melon
28 | squirrel_monkey
29 | shuriken
30 | power_drill
31 | pingpong_ball
32 | crocodile
33 | carambola
34 | monarch_butterfly
35 | drum
36 | water_tower
37 | panda
38 | toilet_brush
39 | pay_phone
40 | yonex_icon
41 | cricketball
42 | revolver
43 | chimpanzee
44 | crab
45 | corn
46 | baseball
47 | rabbit
48 | croquet_ball
49 | artichoke
50 | abacus
51 | harp
52 | bell
53 | gas_tank
54 | scissors
55 | vase
56 | upright_piano
57 | typewriter
58 | bittern
59 | impala
60 | tray
61 | fire_hydrant
62 | beer_bottle
63 | sock
64 | soup_bowl
65 | spider
66 | cherry
67 | macaw
68 | toilet_seat
69 | fire_balloon
70 | french_ball
71 | fox_squirrel
72 | volleyball
73 | cornmeal
74 | folding_chair
75 | pubg_airdrop
76 | beagle
77 | skateboard
78 | narcissus
79 | whiptail
80 | cup
81 | arabian_camel
82 | badger
83 | stopwatch
84 | ab_wheel
85 | ox
86 | lettuce
87 | monocycle
88 | redshank
89 | vulture
90 | whistle
91 | smoothing_iron
92 | mashed_potato
93 | conveyor
94 | yoga_pad
95 | tow_truck
96 | siamese_cat
97 | cigar
98 | white_stork
99 | sniper_rifle
100 | stretcher
101 | tulip
102 | handkerchief
103 | basset
104 | iceberg
105 | gibbon
106 | lacewing
107 | thrush
108 | cheetah
109 | bighorn_sheep
110 | espresso_maker
111 | pretzel
112 | english_setter
113 | sandbar
114 | cheese
115 | daisy
116 | arctic_fox
117 | briard
118 | colubus
119 | balance_beam
120 | coffeepot
121 | soap_dispenser
122 | yawl
123 | consomme
124 | parking_meter
125 | cactus
126 | turnstile
127 | taro
128 | fire_screen
129 | digital_clock
130 | rose
131 | pomegranate
132 | bee_eater
133 | schooner
134 | ski_mask
135 | jay_bird
136 | plaice
137 | red_fox
138 | syringe
139 | camomile
140 | pickelhaube
141 | blenheim_spaniel
142 | pear
143 | parachute
144 | common_newt
145 | bowtie
146 | cigarette
147 | oscilloscope
148 | laptop
149 | african_crocodile
150 | apron
151 | coconut
152 | sandal
153 | kwanyin
154 | lion
155 | eel
156 | balloon
157 | crepe
158 | armadillo
159 | kazoo
160 | lemon
161 | spider_monkey
162 | tape_player
163 | ipod
164 | bee
165 | sea_cucumber
166 | suitcase
167 | television
168 | pillow
169 | banjo
170 | rock_snake
171 | partridge
172 | platypus
173 | lycaenid_butterfly
174 | pinecone
175 | conversion_plug
176 | wolf
177 | frying_pan
178 | timber_wolf
179 | bluetick
180 | crayon
181 | giant_schnauzer
182 | orang
183 | scarerow
184 | kobe_logo
185 | loguat
186 | saxophone
187 | ceiling_fan
188 | cardoon
189 | equestrian_helmet
190 | louvre_pyramid
191 | hotdog
192 | ironing_board
193 | razor
194 | nagoya_castle
195 | loggerhead_turtle
196 | lipstick
197 | cradle
198 | strongbox
199 | raven
200 | kit_fox
201 | albatross
202 | flat-coated_retriever
203 | beer_glass
204 | ice_lolly
205 | sungnyemun
206 | totem_pole
207 | vacuum
208 | bolete
209 | mango
210 | ginger
211 | weasel
212 | cabbage
213 | refrigerator
214 | school_bus
215 | hippo
216 | tiger_cat
217 | saltshaker
218 | piano_keyboard
219 | windsor_tie
220 | sea_urchin
221 | microsd
222 | barbell
223 | swim_ring
224 | bulbul_bird
225 | water_ouzel
226 | ac_ground
227 | sweatshirt
228 | umbrella
229 | hair_drier
230 | hammerhead_shark
231 | tomato
232 | projector
233 | cushion
234 | dishwasher
235 | three-toed_sloth
236 | tiger_shark
237 | har_gow
238 | baby
239 | thor's_hammer
240 | nike_logo
241 |
--------------------------------------------------------------------------------
/data/splits/fss/test.txt:
--------------------------------------------------------------------------------
1 | bus
2 | hotel_slipper
3 | burj_al
4 | reflex_camera
5 | abe's_flyingfish
6 | oiltank_car
7 | doormat
8 | fish_eagle
9 | barber_shaver
10 | motorbike
11 | feather_clothes
12 | wandering_albatross
13 | rice_cooker
14 | delta_wing
15 | fish
16 | nintendo_switch
17 | bustard
18 | diver
19 | minicooper
20 | cathedrale_paris
21 | big_ben
22 | combination_lock
23 | villa_savoye
24 | american_alligator
25 | gym_ball
26 | andean_condor
27 | leggings
28 | pyramid_cube
29 | jet_aircraft
30 | meatloaf
31 | reel
32 | swan
33 | osprey
34 | crt_screen
35 | microscope
36 | rubber_eraser
37 | arrow
38 | monkey
39 | mitten
40 | spiderman
41 | parthenon
42 | bat
43 | chess_king
44 | sulphur_butterfly
45 | quail_egg
46 | oriole
47 | iron_man
48 | wooden_boat
49 | anise
50 | steering_wheel
51 | groenendael
52 | dwarf_beans
53 | pteropus
54 | chalk_brush
55 | bloodhound
56 | moon
57 | english_foxhound
58 | boxing_gloves
59 | peregine_falcon
60 | pyraminx
61 | cicada
62 | screw
63 | shower_curtain
64 | tredmill
65 | bulb
66 | bell_pepper
67 | lemur_catta
68 | doughnut
69 | twin_tower
70 | astronaut
71 | nintendo_3ds
72 | fennel_bulb
73 | indri
74 | captain_america_shield
75 | kunai
76 | broom
77 | iphone
78 | earphone1
79 | flying_squirrel
80 | onion
81 | vinyl
82 | sydney_opera_house
83 | oyster
84 | harmonica
85 | egg
86 | breast_pump
87 | guitar
88 | potato_chips
89 | tunnel
90 | cuckoo
91 | rubick_cube
92 | plastic_bag
93 | phonograph
94 | net_surface_shoes
95 | goldfinch
96 | ipad
97 | mite_predator
98 | coffee_mug
99 | golden_plover
100 | f1_racing
101 | lapwing
102 | nintendo_gba
103 | pizza
104 | rally_car
105 | drilling_platform
106 | cd
107 | fly
108 | magpie_bird
109 | leaf_fan
110 | little_blue_heron
111 | carriage
112 | moist_proof_pad
113 | flying_snakes
114 | dart_target
115 | warehouse_tray
116 | nintendo_wiiu
117 | chiffon_cake
118 | bath_ball
119 | manatee
120 | cloud
121 | marimba
122 | eagle
123 | ruler
124 | soymilk_machine
125 | sled
126 | seagull
127 | glider_flyingfish
128 | doublebus
129 | transport_helicopter
130 | window_screen
131 | truss_bridge
132 | wasp
133 | snowman
134 | poached_egg
135 | strawberry
136 | spinach
137 | earphone2
138 | downy_pitch
139 | taj_mahal
140 | rocking_chair
141 | cablestayed_bridge
142 | sealion
143 | banana_boat
144 | pheasant
145 | stone_lion
146 | electronic_stove
147 | fox
148 | iguana
149 | rugby_ball
150 | hang_glider
151 | water_buffalo
152 | lotus
153 | paper_plane
154 | missile
155 | flamingo
156 | american_chamelon
157 | kart
158 | chinese_knot
159 | cabbage_butterfly
160 | key
161 | church
162 | tiltrotor
163 | helicopter
164 | french_fries
165 | water_heater
166 | snow_leopard
167 | goblet
168 | fan
169 | snowplow
170 | leafhopper
171 | pspgo
172 | black_bear
173 | quail
174 | condor
175 | chandelier
176 | hair_razor
177 | white_wolf
178 | toaster
179 | pidan
180 | pyramid
181 | chicken_leg
182 | letter_opener
183 | apple_icon
184 | porcupine
185 | chicken
186 | stingray
187 | warplane
188 | windmill
189 | bamboo_slip
190 | wig
191 | flying_geckos
192 | stonechat
193 | haddock
194 | australian_terrier
195 | hover_board
196 | siamang
197 | canton_tower
198 | santa_sledge
199 | arch_bridge
200 | curlew
201 | sushi
202 | beet_root
203 | accordion
204 | leaf_egg
205 | stealth_aircraft
206 | stork
207 | bucket
208 | hawk
209 | chess_queen
210 | ocarina
211 | knife
212 | whippet
213 | cantilever_bridge
214 | may_bug
215 | wagtail
216 | leather_shoes
217 | wheelchair
218 | shumai
219 | speedboat
220 | vacuum_cup
221 | chess_knight
222 | pumpkin_pie
223 | wooden_spoon
224 | bamboo_dragonfly
225 | ganeva_chair
226 | soap
227 | clearwing_flyingfish
228 | pencil_sharpener1
229 | cricket
230 | photocopier
231 | nintendo_sp
232 | samarra_mosque
233 | clam
234 | charge_battery
235 | flying_frog
236 | ferrari911
237 | polo_shirt
238 | echidna
239 | coin
240 | tower_pisa
241 |
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | r""" training (validation) code """
2 | import torch.optim as optim
3 | import torch.nn as nn
4 | import torch
5 |
6 | from model.DCAMA import DCAMA
7 | from common.logger import Logger, AverageMeter
8 | from common.evaluation import Evaluator
9 | from common.config import parse_opts
10 | from common import utils
11 | from data.dataset import FSSDataset
12 |
13 |
14 | def train(epoch, model, dataloader, optimizer, training):
15 | r""" Train """
16 |
17 | # Force randomness during training / freeze randomness during testing
18 | utils.fix_randseed(None) if training else utils.fix_randseed(0)
19 | model.module.train_mode() if training else model.module.eval()
20 | average_meter = AverageMeter(dataloader.dataset)
21 |
22 | for idx, batch in enumerate(dataloader):
23 |
24 | # 1. forward pass
25 | batch = utils.to_cuda(batch)
26 | logit_mask = model(batch['query_img'], batch['support_imgs'].squeeze(1), batch['support_masks'].squeeze(1))
27 | pred_mask = logit_mask.argmax(dim=1)
28 |
29 | # 2. Compute loss & update model parameters
30 | loss = model.module.compute_objective(logit_mask, batch['query_mask'])
31 | if training:
32 | optimizer.zero_grad()
33 | loss.backward()
34 | optimizer.step()
35 |
36 | # 3. Evaluate prediction
37 | area_inter, area_union = Evaluator.classify_prediction(pred_mask, batch)
38 | average_meter.update(area_inter, area_union, batch['class_id'], loss.detach().clone())
39 | average_meter.write_process(idx, len(dataloader), epoch, write_batch_idx=50)
40 |
41 | # Write evaluation results
42 | average_meter.write_result('Training' if training else 'Validation', epoch)
43 | avg_loss = utils.mean(average_meter.loss_buf)
44 | miou, fb_iou = average_meter.compute_iou()
45 |
46 | return avg_loss, miou, fb_iou
47 |
48 |
49 | if __name__ == '__main__':
50 |
51 | # Arguments parsing
52 | args = parse_opts()
53 |
54 | # ddp backend initialization
55 | torch.distributed.init_process_group(backend='nccl')
56 | torch.cuda.set_device(args.local_rank)
57 |
58 | # Model initialization
59 | model = DCAMA(args.backbone, args.feature_extractor_path, False)
60 | device = torch.device("cuda", args.local_rank)
61 | model.to(device)
62 | model = nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank], output_device=args.local_rank,
63 | find_unused_parameters=True)
64 |
65 | # Helper classes (for training) initialization
66 | optimizer = optim.SGD([{"params": model.parameters(), "lr": args.lr,
67 | "momentum": 0.9, "weight_decay": args.lr/10, "nesterov": True}])
68 | Evaluator.initialize()
69 | if args.local_rank == 0:
70 | Logger.initialize(args, training=True)
71 | Logger.info('# available GPUs: %d' % torch.cuda.device_count())
72 |
73 | # Dataset initialization
74 | FSSDataset.initialize(img_size=384, datapath=args.datapath, use_original_imgsize=False)
75 | dataloader_trn = FSSDataset.build_dataloader(args.benchmark, args.bsz, args.nworker, args.fold, 'trn')
76 | if args.local_rank == 0:
77 | dataloader_val = FSSDataset.build_dataloader(args.benchmark, args.bsz, args.nworker, args.fold, 'val')
78 |
79 | # Train
80 | best_val_miou = float('-inf')
81 | best_val_loss = float('inf')
82 | for epoch in range(args.nepoch):
83 | dataloader_trn.sampler.set_epoch(epoch)
84 | trn_loss, trn_miou, trn_fb_iou = train(epoch, model, dataloader_trn, optimizer, training=True)
85 |
86 | # evaluation
87 | if args.local_rank == 0:
88 | with torch.no_grad():
89 | val_loss, val_miou, val_fb_iou = train(epoch, model, dataloader_val, optimizer, training=False)
90 |
91 | # Save the best model
92 | if val_miou > best_val_miou:
93 | best_val_miou = val_miou
94 | Logger.save_model_miou(model, epoch, val_miou)
95 |
96 | Logger.tbd_writer.add_scalars('data/loss', {'trn_loss': trn_loss, 'val_loss': val_loss}, epoch)
97 | Logger.tbd_writer.add_scalars('data/miou', {'trn_miou': trn_miou, 'val_miou': val_miou}, epoch)
98 | Logger.tbd_writer.add_scalars('data/fb_iou', {'trn_fb_iou': trn_fb_iou, 'val_fb_iou': val_fb_iou}, epoch)
99 | Logger.tbd_writer.flush()
100 |
101 | if args.local_rank == 0:
102 | Logger.tbd_writer.close()
103 | Logger.info('==================== Finished Training ====================')
104 |
--------------------------------------------------------------------------------
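
`model/DCAMA.py` appears in the directory tree but its source is not reproduced here. From its use above, `compute_objective` consumes a B x 2 x H x W logit map and a B x H x W binary ground-truth mask; a plausible pixel-wise cross-entropy is sketched below purely for orientation (an assumption, not the repository's code):

```python
import torch
import torch.nn as nn

def compute_objective_sketch(logit_mask: torch.Tensor, gt_mask: torch.Tensor) -> torch.Tensor:
    # logit_mask: B x 2 x H x W two-class logits; gt_mask: B x H x W in {0, 1}
    bsz = logit_mask.size(0)
    logit = logit_mask.view(bsz, 2, -1)    # B x 2 x HW
    gt = gt_mask.view(bsz, -1).long()      # B x HW
    return nn.CrossEntropyLoss()(logit, gt)
```
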
/common/vis.py:
--------------------------------------------------------------------------------
1 | r""" Visualize model predictions """
2 | import os
3 |
4 | from PIL import Image
5 | import numpy as np
6 | import torchvision.transforms as transforms
7 |
8 | from . import utils
9 |
10 |
11 | class Visualizer:
12 |
13 | @classmethod
14 | def initialize(cls, visualize, vispath='./vis/'):
15 | cls.visualize = visualize
16 | if not visualize:
17 | return
18 |
19 | cls.colors = {'red': (255, 50, 50), 'blue': (102, 140, 255)}
20 | for key, value in cls.colors.items():
21 | cls.colors[key] = tuple([c / 255 for c in cls.colors[key]])
22 |
23 | cls.mean_img = [0.485, 0.456, 0.406]
24 | cls.std_img = [0.229, 0.224, 0.225]
25 | cls.to_pil = transforms.ToPILImage()
26 | cls.vis_path = vispath
27 | if not os.path.exists(cls.vis_path): os.makedirs(cls.vis_path)
28 |
29 | @classmethod
30 | def visualize_prediction_batch(cls, spt_img_b, spt_mask_b, qry_img_b, qry_mask_b, pred_mask_b, cls_id_b, batch_idx, iou_b=None):
31 | spt_img_b = utils.to_cpu(spt_img_b)
32 | spt_mask_b = utils.to_cpu(spt_mask_b)
33 | qry_img_b = utils.to_cpu(qry_img_b)
34 | qry_mask_b = utils.to_cpu(qry_mask_b)
35 | pred_mask_b = utils.to_cpu(pred_mask_b)
36 | cls_id_b = utils.to_cpu(cls_id_b)
37 |
38 | for sample_idx, (spt_img, spt_mask, qry_img, qry_mask, pred_mask, cls_id) in \
39 | enumerate(zip(spt_img_b, spt_mask_b, qry_img_b, qry_mask_b, pred_mask_b, cls_id_b)):
40 | iou = iou_b[sample_idx] if iou_b is not None else None
41 | cls.visualize_prediction(spt_img, spt_mask, qry_img, qry_mask, pred_mask, cls_id, batch_idx, sample_idx, True, iou)
42 |
43 | @classmethod
44 | def to_numpy(cls, tensor, type):
45 | if type == 'img':
46 | return np.array(cls.to_pil(cls.unnormalize(tensor))).astype(np.uint8)
47 | elif type == 'mask':
48 | return np.array(tensor).astype(np.uint8)
49 | else:
50 | raise Exception('Undefined tensor type: %s' % type)
51 |
52 | @classmethod
53 | def visualize_prediction(cls, spt_imgs, spt_masks, qry_img, qry_mask, pred_mask, cls_id, batch_idx, sample_idx, label, iou=None):
54 |
55 | spt_color = cls.colors['blue']
56 | qry_color = cls.colors['red']
57 | pred_color = cls.colors['red']
58 |
59 | spt_imgs = [cls.to_numpy(spt_img, 'img') for spt_img in spt_imgs]
60 | spt_pils = [cls.to_pil(spt_img) for spt_img in spt_imgs]
61 | spt_masks = [cls.to_numpy(spt_mask, 'mask') for spt_mask in spt_masks]
62 | spt_masked_pils = [Image.fromarray(cls.apply_mask(spt_img, spt_mask, spt_color)) for spt_img, spt_mask in zip(spt_imgs, spt_masks)]
63 |
64 | qry_img = cls.to_numpy(qry_img, 'img')
65 | qry_pil = cls.to_pil(qry_img)
66 | qry_mask = cls.to_numpy(qry_mask, 'mask')
67 | pred_mask = cls.to_numpy(pred_mask, 'mask')
68 | pred_masked_pil = Image.fromarray(cls.apply_mask(qry_img.astype(np.uint8), pred_mask.astype(np.uint8), pred_color))
69 | qry_masked_pil = Image.fromarray(cls.apply_mask(qry_img.astype(np.uint8), qry_mask.astype(np.uint8), qry_color))
70 |
71 | merged_pil = cls.merge_image_pair(spt_masked_pils + [pred_masked_pil, qry_masked_pil])
72 |
73 | iou = iou.item() if iou else 0.0
74 | merged_pil.save(cls.vis_path + '%d_%d_class-%d_iou-%.2f' % (batch_idx, sample_idx, cls_id, iou) + '.jpg')
75 |
76 | @classmethod
77 | def merge_image_pair(cls, pil_imgs):
78 |         r""" Horizontally concatenates a list of PIL images and returns the merged PIL image """
79 |
80 | canvas_width = sum([pil.size[0] for pil in pil_imgs])
81 | canvas_height = max([pil.size[1] for pil in pil_imgs])
82 | canvas = Image.new('RGB', (canvas_width, canvas_height))
83 |
84 | xpos = 0
85 | for pil in pil_imgs:
86 | canvas.paste(pil, (xpos, 0))
87 | xpos += pil.size[0]
88 |
89 | return canvas
90 |
91 | @classmethod
92 | def apply_mask(cls, image, mask, color, alpha=0.5):
93 | r""" Apply mask to the given image. """
94 | for c in range(3):
95 | image[:, :, c] = np.where(mask == 1,
96 | image[:, :, c] *
97 | (1 - alpha) + alpha * color[c] * 255,
98 | image[:, :, c])
99 | return image
100 |
101 | @classmethod
102 | def unnormalize(cls, img):
103 | img = img.clone()
104 | for im_channel, mean, std in zip(img, cls.mean_img, cls.std_img):
105 | im_channel.mul_(std).add_(mean)
106 | return img
107 |
--------------------------------------------------------------------------------
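
A toy call of `apply_mask` in isolation (made-up values; note that `initialize` also creates the `vispath` directory as a side effect):

```python
import numpy as np
from common.vis import Visualizer

Visualizer.initialize(visualize=True, vispath='./vis/')
img = np.full((4, 4, 3), 200, dtype=np.float32)      # flat gray image
mask = np.zeros((4, 4), dtype=np.uint8)
mask[1:3, 1:3] = 1                                   # small foreground square
out = Visualizer.apply_mask(img, mask, Visualizer.colors['red'])
# masked pixels blend toward the color: 0.5*200 + 0.5*255*color_c; others stay 200
print(out[1, 1], out[0, 0])
```
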
/README.md:
--------------------------------------------------------------------------------
1 | ## Dense Cross-Query-and-Support Attention Weighted Mask Aggregation for Few-Shot Segmentation
2 | This is the official implementation of the ECCV'2022 paper "Dense Cross-Query-and-Support Attention Weighted Mask Aggregation for Few-Shot Segmentation".
3 |
4 | ![architecture](assets/architecture.png)
7 |
8 | ## Requirements
9 |
10 | - Python 3.7
11 | - PyTorch 1.5.1
12 | - cuda 10.1
13 | - tensorboard 1.14
14 |
15 | Conda environment settings:
16 |
17 | ```bash
18 | conda create -n DCAMA python=3.7
19 | conda activate DCAMA
20 |
21 | conda install pytorch=1.5.1 torchvision cudatoolkit=10.1 -c pytorch
22 | conda install -c conda-forge tensorflow
23 | pip install tensorboardX
24 | ```
25 |
26 | ## Prepare Datasets
27 |
28 | Download COCO2014 train/val images and annotations:
29 |
30 | ```bash
31 | wget http://images.cocodataset.org/zips/train2014.zip
32 | wget http://images.cocodataset.org/zips/val2014.zip
33 | wget http://images.cocodataset.org/annotations/annotations_trainval2014.zip
34 | ```
35 |
36 | Download COCO2014 train/val mask annotations from Google Drive: [[train2014.zip](https://drive.google.com/file/d/1fcwqp0eQ_Ngf-8ZE73EsHKP8ZLfORdWR/view?usp=sharing)] and [[val2014.zip](https://drive.google.com/file/d/16IJeYqt9oHbqnSI9m2nTXcxQWNXCfiGb/view?usp=sharing)], and place both train2014/ and val2014/ under the annotations/ directory.
37 |
38 | Create a directory 'datasets' and place COCO2014 in it so that you have the following directory structure:
39 |
40 | datasets/
41 | └── COCO2014/
42 | ├── annotations/
43 | │ ├── train2014/ # (dir.) training masks (from Google Drive)
44 | │ ├── val2014/ # (dir.) validation masks (from Google Drive)
45 | │ └── ..some json files..
46 | ├── train2014/
47 | └── val2014/
48 |
49 | ## Prepare backbones
50 |
51 | Download the following pre-trained backbones:
52 |
53 | > 1. [ResNet-50](https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1h-35c100f8.pth) pretrained on ImageNet-1K by [TIMM](https://github.com/rwightman/pytorch-image-models)
54 | > 2. [ResNet-101](https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet101_a1h-36d3f2aa.pth) pretrained on ImageNet-1K by [TIMM](https://github.com/rwightman/pytorch-image-models)
55 | > 3. [Swin-B](https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window12_384_22kto1k.pth) pretrained on ImageNet by [Swin-Transformer](https://github.com/microsoft/Swin-Transformer)
56 |
57 | Create a directory 'backbones' and place the above backbones in it. The overall directory structure should look like this:
58 |
59 | ../ # parent directory
60 | ├── DCAMA/ # current (project) directory
61 | │ ├── common/ # (dir.) helper functions
62 | │ ├── data/ # (dir.) dataloaders and splits for each FSS dataset
63 | │ ├── model/ # (dir.) implementation of DCAMA
64 | │ ├── scripts/ # (dir.) Scripts for training and testing
65 | │   ├── README.md            # instructions for reproduction
66 | │ ├── train.py # code for training
67 | │ └── test.py # code for testing
68 | ├── datasets/ # (dir.) Few-Shot Segmentation Datasets
69 | └── backbones/ # (dir.) Pre-trained backbones
70 |
71 | ## Train and Test
72 | You can use our scripts as-is or build your own from them. Training takes approximately 1.5 days to converge on four V100 GPUs. For the full list of options, please refer to ./common/config.py
73 |
74 | > ```bash
75 | > sh ./scripts/train.sh
76 | > ```
77 | >
78 | > - For each experiment, a directory that logs training progress is automatically created under the logs/ directory.
79 | > - From the terminal, run 'tensorboard --logdir logs/' to monitor training progress.
80 | > - Choose the best model when the validation (mIoU) curve starts to saturate.
81 |
82 | For testing, you have to prepare a pretrained model. You can train one by yourself or just download our [pretrained models](https://drive.google.com/drive/folders/1vEw35yKoWkuDgkrclJPNSXeDtsTOVy_c?usp=sharing).
83 | > ```bash
84 | > sh ./scripts/test.sh
85 | > ```
86 | >
87 |
88 | ## BibTeX
89 | If you are interested in our paper, please cite:
90 | ```
91 | @inproceedings{shi2022dense,
92 | title={Dense Cross-Query-and-Support Attention Weighted Mask Aggregation for Few-Shot Segmentation},
93 | author={Shi, Xinyu and Wei, Dong and Zhang, Yu and Lu, Donghuan and Ning, Munan and Chen, Jiashun and Ma, Kai and Zheng, Yefeng},
94 | booktitle={European Conference on Computer Vision},
95 | pages={151--168},
96 | year={2022},
97 | organization={Springer}
98 | }
99 | ```
100 |
--------------------------------------------------------------------------------
/common/logger.py:
--------------------------------------------------------------------------------
1 | r""" Logging during training/testing """
2 | import datetime
3 | import logging
4 | import os
5 |
6 | from tensorboardX import SummaryWriter
7 | import torch
8 |
9 |
10 | class AverageMeter:
11 | r""" Stores loss, evaluation results """
12 | def __init__(self, dataset):
13 | self.benchmark = dataset.benchmark
14 | self.class_ids_interest = dataset.class_ids
15 | self.class_ids_interest = torch.tensor(self.class_ids_interest).cuda()
16 |
17 | if self.benchmark == 'pascal':
18 | self.nclass = 20
19 | elif self.benchmark == 'coco':
20 | self.nclass = 80
21 | elif self.benchmark == 'fss':
22 | self.nclass = 1000
23 |
24 | self.intersection_buf = torch.zeros([2, self.nclass]).float().cuda()
25 | self.union_buf = torch.zeros([2, self.nclass]).float().cuda()
26 | self.ones = torch.ones_like(self.union_buf)
27 | self.loss_buf = []
28 |
29 | def update(self, inter_b, union_b, class_id, loss):
30 | self.intersection_buf.index_add_(1, class_id, inter_b.float())
31 | self.union_buf.index_add_(1, class_id, union_b.float())
32 | if loss is None:
33 | loss = torch.tensor(0.0)
34 | self.loss_buf.append(loss)
35 |
36 | def compute_iou(self):
37 | iou = self.intersection_buf.float() / \
38 | torch.max(torch.stack([self.union_buf, self.ones]), dim=0)[0]
39 | iou = iou.index_select(1, self.class_ids_interest)
40 | miou = iou[1].mean() * 100
41 |
42 | fb_iou = (self.intersection_buf.index_select(1, self.class_ids_interest).sum(dim=1) /
43 | self.union_buf.index_select(1, self.class_ids_interest).sum(dim=1)).mean() * 100
44 |
45 | return miou, fb_iou
46 |
47 | def write_result(self, split, epoch):
48 | iou, fb_iou = self.compute_iou()
49 |
50 | loss_buf = torch.stack(self.loss_buf)
51 | msg = '\n*** %s ' % split
52 | msg += '[@Epoch %02d] ' % epoch
53 | msg += 'Avg L: %6.5f ' % loss_buf.mean()
54 | msg += 'mIoU: %5.2f ' % iou
55 | msg += 'FB-IoU: %5.2f ' % fb_iou
56 |
57 | msg += '***\n'
58 | Logger.info(msg)
59 |
60 | def write_process(self, batch_idx, datalen, epoch, write_batch_idx=20):
61 | if batch_idx % write_batch_idx == 0:
62 | msg = '[Epoch: %02d] ' % epoch if epoch != -1 else ''
63 | msg += '[Batch: %04d/%04d] ' % (batch_idx+1, datalen)
64 | iou, fb_iou = self.compute_iou()
65 | if epoch != -1:
66 | loss_buf = torch.stack(self.loss_buf)
67 | msg += 'L: %6.5f ' % loss_buf[-1]
68 | msg += 'Avg L: %6.5f ' % loss_buf.mean()
69 | msg += 'mIoU: %5.2f | ' % iou
70 | msg += 'FB-IoU: %5.2f' % fb_iou
71 | Logger.info(msg)
72 |
73 |
74 | class Logger:
75 | r""" Writes evaluation results of training/testing """
76 | @classmethod
77 | def initialize(cls, args, training):
78 | logtime = datetime.datetime.now().__format__('_%m%d_%H%M%S')
79 | logpath = os.path.join(args.logpath, 'train/fold_' + str(args.fold) + logtime) if training \
80 | else os.path.join(args.logpath, 'test/fold_' + args.load.split('/')[-2].split('.')[0] + logtime)
81 | if logpath == '': logpath = logtime
82 |
83 | cls.logpath = logpath
84 | cls.benchmark = args.benchmark
85 | if not os.path.exists(cls.logpath): os.makedirs(cls.logpath)
86 |
87 | logging.basicConfig(filemode='w',
88 | filename=os.path.join(cls.logpath, 'log.txt'),
89 | level=logging.INFO,
90 | format='%(message)s',
91 | datefmt='%m-%d %H:%M:%S')
92 |
93 | # Console log config
94 | console = logging.StreamHandler()
95 | console.setLevel(logging.INFO)
96 | formatter = logging.Formatter('%(message)s')
97 | console.setFormatter(formatter)
98 | logging.getLogger('').addHandler(console)
99 |
100 | # Tensorboard writer
101 | cls.tbd_writer = SummaryWriter(os.path.join(cls.logpath, 'tbd/runs'))
102 |
103 | # Log arguments
104 | logging.info('\n:==================== Start =====================')
105 | for arg_key in args.__dict__:
106 | logging.info('| %20s: %-24s' % (arg_key, str(args.__dict__[arg_key])))
107 | logging.info(':================================================\n')
108 |
109 | @classmethod
110 | def info(cls, msg):
111 | r""" Writes log message to log.txt """
112 | logging.info(msg)
113 |
114 | @classmethod
115 | def save_model_miou(cls, model, epoch, val_miou):
116 | torch.save(model.state_dict(), os.path.join(cls.logpath, 'best_model.pt'))
117 | cls.info('Model saved @%d w/ val. mIoU: %5.2f.\n' % (epoch, val_miou))
118 |
--------------------------------------------------------------------------------
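
A toy check of the accumulation logic in `compute_iou` (CPU tensors for illustration; the real buffers live on GPU). The elementwise `max` with a ones tensor guards the division for classes whose union is still zero:

```python
import torch

# two classes; row 0 = background counts, row 1 = foreground counts
inter = torch.tensor([[10., 0.], [30., 0.]])
union = torch.tensor([[20., 0.], [60., 0.]])
ones = torch.ones_like(union)
iou = inter / torch.max(torch.stack([union, ones]), dim=0)[0]
print(iou[1])  # per-class foreground IoU: tensor([0.5000, 0.0000])
```
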
/data/fss.py:
--------------------------------------------------------------------------------
1 | r""" FSS-1000 few-shot semantic segmentation dataset """
2 | import os
3 | import glob
4 |
5 | from torch.utils.data import Dataset
6 | import torch.nn.functional as F
7 | import torch
8 | import PIL.Image as Image
9 | import numpy as np
10 |
11 |
12 | class DatasetFSS(Dataset):
13 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize):
14 | self.split = split
15 | self.benchmark = 'fss'
16 | self.shot = shot
17 |
18 | self.base_path = os.path.join(datapath, 'FSS-1000')
19 |
20 | # Given predefined test split, load randomly generated training/val splits:
21 |         # (reference regarding trn/val/test splits: https://github.com/HKUSTCV/FSS-1000/issues/7)
22 | with open('./data/splits/fss/%s.txt' % split, 'r') as f:
23 | self.categories = f.read().split('\n')[:-1]
24 | self.categories = sorted(self.categories)
25 |
26 | self.class_ids = self.build_class_ids()
27 | self.img_metadata = self.build_img_metadata()
28 |
29 | self.transform = transform
30 |
31 | def __len__(self):
32 | return len(self.img_metadata)
33 |
34 | def __getitem__(self, idx):
35 | query_name, support_names, class_sample = self.sample_episode(idx)
36 | query_img, query_mask, support_imgs, support_masks = self.load_frame(query_name, support_names)
37 |
38 | query_img = self.transform(query_img)
39 | query_mask = F.interpolate(query_mask.unsqueeze(0).unsqueeze(0).float(), query_img.size()[-2:], mode='nearest').squeeze()
40 |
41 | support_imgs = torch.stack([self.transform(support_img) for support_img in support_imgs])
42 |
43 | support_masks_tmp = []
44 | for smask in support_masks:
45 | smask = F.interpolate(smask.unsqueeze(0).unsqueeze(0).float(), support_imgs.size()[-2:], mode='nearest').squeeze()
46 | support_masks_tmp.append(smask)
47 | support_masks = torch.stack(support_masks_tmp)
48 |
49 | batch = {'query_img': query_img,
50 | 'query_mask': query_mask,
51 | 'query_name': query_name,
52 |
53 | 'support_imgs': support_imgs,
54 | 'support_masks': support_masks,
55 | 'support_names': support_names,
56 |
57 | 'class_id': torch.tensor(class_sample)}
58 |
59 | return batch
60 |
61 | def load_frame(self, query_name, support_names):
62 | query_img = Image.open(query_name).convert('RGB')
63 | support_imgs = [Image.open(name).convert('RGB') for name in support_names]
64 |
65 | query_id = query_name.split('/')[-1].split('.')[0]
66 | query_name = os.path.join(os.path.dirname(query_name), query_id) + '.png'
67 | support_ids = [name.split('/')[-1].split('.')[0] for name in support_names]
68 | support_names = [os.path.join(os.path.dirname(name), sid) + '.png' for name, sid in zip(support_names, support_ids)]
69 |
70 | query_mask = self.read_mask(query_name)
71 | support_masks = [self.read_mask(name) for name in support_names]
72 |
73 | return query_img, query_mask, support_imgs, support_masks
74 |
75 | def read_mask(self, img_name):
76 | mask = torch.tensor(np.array(Image.open(img_name).convert('L')))
77 | mask[mask < 128] = 0
78 | mask[mask >= 128] = 1
79 | return mask
80 |
81 | def sample_episode(self, idx):
82 | query_name = self.img_metadata[idx]
83 | class_sample = self.categories.index(query_name.split('/')[-2])
84 | if self.split == 'val':
85 | class_sample += 520
86 | elif self.split == 'test':
87 | class_sample += 760
88 |
89 | support_names = []
90 | while True: # keep sampling support set if query == support
91 | support_name = np.random.choice(range(1, 11), 1, replace=False)[0]
92 | support_name = os.path.join(os.path.dirname(query_name), str(support_name)) + '.jpg'
93 | if query_name != support_name: support_names.append(support_name)
94 | if len(support_names) == self.shot: break
95 |
96 | return query_name, support_names, class_sample
97 |
98 | def build_class_ids(self):
99 | if self.split == 'trn':
100 | class_ids = range(0, 520)
101 | elif self.split == 'val':
102 | class_ids = range(520, 760)
103 | elif self.split == 'test':
104 | class_ids = range(760, 1000)
105 | return class_ids
106 |
107 | def build_img_metadata(self):
108 | img_metadata = []
109 | for cat in self.categories:
110 | img_paths = sorted([path for path in glob.glob('%s/*' % os.path.join(self.base_path, cat))])
111 | for img_path in img_paths:
112 | if os.path.basename(img_path).split('.')[1] == 'jpg':
113 | img_metadata.append(img_path)
114 | return img_metadata
115 |
--------------------------------------------------------------------------------
/data/coco.py:
--------------------------------------------------------------------------------
1 | r""" COCO-20i few-shot semantic segmentation dataset """
2 | import os
3 | import pickle
4 |
5 | from torch.utils.data import Dataset
6 | import torch.nn.functional as F
7 | import torch
8 | import PIL.Image as Image
9 | import numpy as np
10 |
11 |
12 | class DatasetCOCO(Dataset):
13 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize):
14 | self.split = 'val' if split in ['val', 'test'] else 'trn'
15 | self.fold = fold
16 | self.nfolds = 4
17 | self.nclass = 80
18 | self.benchmark = 'coco'
19 | self.shot = shot
20 | self.split_coco = split if split == 'val2014' else 'train2014'
21 | self.base_path = os.path.join(datapath, 'COCO2014')
22 | self.transform = transform
23 | self.use_original_imgsize = use_original_imgsize
24 |
25 | self.class_ids = self.build_class_ids()
26 | self.img_metadata_classwise = self.build_img_metadata_classwise()
27 | self.img_metadata = self.build_img_metadata()
28 |
29 | def __len__(self):
30 | return len(self.img_metadata) if self.split == 'trn' else 1000
31 |
32 | def __getitem__(self, idx):
33 |         # ignores idx during training & testing and performs uniform sampling over object classes to form an episode
34 | # (due to the large size of the COCO dataset)
35 | query_img, query_mask, support_imgs, support_masks, query_name, support_names, class_sample, org_qry_imsize = self.load_frame()
36 |
37 | query_img = self.transform(query_img)
38 | query_mask = query_mask.float()
39 | if not self.use_original_imgsize:
40 | query_mask = F.interpolate(query_mask.unsqueeze(0).unsqueeze(0).float(), query_img.size()[-2:], mode='nearest').squeeze()
41 |
42 | support_imgs = torch.stack([self.transform(support_img) for support_img in support_imgs])
43 | for midx, smask in enumerate(support_masks):
44 | support_masks[midx] = F.interpolate(smask.unsqueeze(0).unsqueeze(0).float(), support_imgs.size()[-2:], mode='nearest').squeeze()
45 | support_masks = torch.stack(support_masks)
46 |
47 | batch = {'query_img': query_img,
48 | 'query_mask': query_mask,
49 | 'query_name': query_name,
50 |
51 | 'org_query_imsize': org_qry_imsize,
52 |
53 | 'support_imgs': support_imgs,
54 | 'support_masks': support_masks,
55 | 'support_names': support_names,
56 | 'class_id': torch.tensor(class_sample)}
57 |
58 | return batch
59 |
60 | def build_class_ids(self):
61 | nclass_trn = self.nclass // self.nfolds
62 | class_ids_val = [self.fold + self.nfolds * v for v in range(nclass_trn)]
63 | class_ids_trn = [x for x in range(self.nclass) if x not in class_ids_val]
64 | class_ids = class_ids_trn if self.split == 'trn' else class_ids_val
65 |
66 | return class_ids
67 |
68 | def build_img_metadata_classwise(self):
69 | with open('./data/splits/coco/%s/fold%d.pkl' % (self.split, self.fold), 'rb') as f:
70 | img_metadata_classwise = pickle.load(f)
71 | return img_metadata_classwise
72 |
73 | def build_img_metadata(self):
74 | img_metadata = []
75 | for k in self.img_metadata_classwise.keys():
76 | img_metadata += self.img_metadata_classwise[k]
77 | return sorted(list(set(img_metadata)))
78 |
79 | def read_mask(self, name):
80 | mask_path = os.path.join(self.base_path, 'annotations', name)
81 | mask = torch.tensor(np.array(Image.open(mask_path[:mask_path.index('.jpg')] + '.png')))
82 | return mask
83 |
84 | def load_frame(self):
85 | class_sample = np.random.choice(self.class_ids, 1, replace=False)[0]
86 | query_name = np.random.choice(self.img_metadata_classwise[class_sample], 1, replace=False)[0]
87 | query_img = Image.open(os.path.join(self.base_path, query_name)).convert('RGB')
88 | query_mask = self.read_mask(query_name)
89 |
90 | org_qry_imsize = query_img.size
91 |
92 | query_mask[query_mask != class_sample + 1] = 0
93 | query_mask[query_mask == class_sample + 1] = 1
94 |
95 | support_names = []
96 | while True: # keep sampling support set if query == support
97 | support_name = np.random.choice(self.img_metadata_classwise[class_sample], 1, replace=False)[0]
98 | if query_name != support_name: support_names.append(support_name)
99 | if len(support_names) == self.shot: break
100 |
101 | support_imgs = []
102 | support_masks = []
103 | for support_name in support_names:
104 | support_imgs.append(Image.open(os.path.join(self.base_path, support_name)).convert('RGB'))
105 | support_mask = self.read_mask(support_name)
106 | support_mask[support_mask != class_sample + 1] = 0
107 | support_mask[support_mask == class_sample + 1] = 1
108 | support_masks.append(support_mask)
109 |
110 | return query_img, query_mask, support_imgs, support_masks, query_name, support_names, class_sample, org_qry_imsize
111 |
112 |
--------------------------------------------------------------------------------
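
Note how `build_class_ids` interleaves the folds: fold i's validation classes are {i, i+4, i+8, ...} rather than a contiguous block. A quick check of the arithmetic:

```python
nclass, nfolds = 80, 4
for fold in range(nfolds):
    val = [fold + nfolds * v for v in range(nclass // nfolds)]
    print(fold, val[:5], '...')   # fold 0 -> [0, 4, 8, 12, 16] ...
```
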
/data/pascal.py:
--------------------------------------------------------------------------------
1 | r""" PASCAL-5i few-shot semantic segmentation dataset """
2 | import os
3 |
4 | from torch.utils.data import Dataset
5 | import torch.nn.functional as F
6 | import torch
7 | import PIL.Image as Image
8 | import numpy as np
9 |
10 |
11 | class DatasetPASCAL(Dataset):
12 | def __init__(self, datapath, fold, transform, split, shot, use_original_imgsize):
13 | self.split = 'val' if split in ['val', 'test'] else 'trn'
14 | self.fold = fold
15 | self.nfolds = 4
16 | self.nclass = 20
17 | self.benchmark = 'pascal'
18 | self.shot = shot
19 | self.use_original_imgsize = use_original_imgsize
20 |
21 | self.img_path = os.path.join(datapath, 'VOC2012/JPEGImages/')
22 | self.ann_path = os.path.join(datapath, 'VOC2012/SegmentationClassAug/')
23 | self.transform = transform
24 |
25 | self.class_ids = self.build_class_ids()
26 | self.img_metadata = self.build_img_metadata()
27 | self.img_metadata_classwise = self.build_img_metadata_classwise()
28 |
29 | def __len__(self):
30 | return len(self.img_metadata) if self.split == 'trn' else 1000
31 |
32 | def __getitem__(self, idx):
33 | idx %= len(self.img_metadata) # for testing, as n_images < 1000
34 | query_name, support_names, class_sample = self.sample_episode(idx)
35 | query_img, query_cmask, support_imgs, support_cmasks, org_qry_imsize = self.load_frame(query_name, support_names)
36 |
37 | query_img = self.transform(query_img)
38 | if not self.use_original_imgsize:
39 | query_cmask = F.interpolate(query_cmask.unsqueeze(0).unsqueeze(0).float(), query_img.size()[-2:], mode='nearest').squeeze()
40 | query_mask, query_ignore_idx = self.extract_ignore_idx(query_cmask.float(), class_sample)
41 |
42 | support_imgs = torch.stack([self.transform(support_img) for support_img in support_imgs])
43 |
44 | support_masks = []
45 | support_ignore_idxs = []
46 | for scmask in support_cmasks:
47 | scmask = F.interpolate(scmask.unsqueeze(0).unsqueeze(0).float(), support_imgs.size()[-2:], mode='nearest').squeeze()
48 | support_mask, support_ignore_idx = self.extract_ignore_idx(scmask, class_sample)
49 | support_masks.append(support_mask)
50 | support_ignore_idxs.append(support_ignore_idx)
51 | support_masks = torch.stack(support_masks)
52 | support_ignore_idxs = torch.stack(support_ignore_idxs)
53 |
54 | batch = {'query_img': query_img,
55 | 'query_mask': query_mask,
56 | 'query_name': query_name,
57 | 'query_ignore_idx': query_ignore_idx,
58 |
59 | 'org_query_imsize': org_qry_imsize,
60 |
61 | 'support_imgs': support_imgs,
62 | 'support_masks': support_masks,
63 | 'support_names': support_names,
64 | 'support_ignore_idxs': support_ignore_idxs,
65 |
66 | 'class_id': torch.tensor(class_sample)}
67 |
68 | return batch
69 |
70 | def extract_ignore_idx(self, mask, class_id):
71 | boundary = (mask / 255).floor()
72 | mask[mask != class_id + 1] = 0
73 | mask[mask == class_id + 1] = 1
74 |
75 | return mask, boundary
76 |
77 | def load_frame(self, query_name, support_names):
78 | query_img = self.read_img(query_name)
79 | query_mask = self.read_mask(query_name)
80 | support_imgs = [self.read_img(name) for name in support_names]
81 | support_masks = [self.read_mask(name) for name in support_names]
82 |
83 | org_qry_imsize = query_img.size
84 |
85 | return query_img, query_mask, support_imgs, support_masks, org_qry_imsize
86 |
87 | def read_mask(self, img_name):
88 | r"""Return segmentation mask in PIL Image"""
89 | mask = torch.tensor(np.array(Image.open(os.path.join(self.ann_path, img_name) + '.png')))
90 | return mask
91 |
92 | def read_img(self, img_name):
93 | r"""Return RGB image in PIL Image"""
94 | return Image.open(os.path.join(self.img_path, img_name) + '.jpg')
95 |
96 | def sample_episode(self, idx):
97 | query_name, class_sample = self.img_metadata[idx]
98 |
99 | support_names = []
100 | while True: # keep sampling support set if query == support
101 | support_name = np.random.choice(self.img_metadata_classwise[class_sample], 1, replace=False)[0]
102 | if query_name != support_name: support_names.append(support_name)
103 | if len(support_names) == self.shot: break
104 |
105 | return query_name, support_names, class_sample
106 |
107 | def build_class_ids(self):
108 | nclass_trn = self.nclass // self.nfolds
109 | class_ids_val = [self.fold * nclass_trn + i for i in range(nclass_trn)]
110 | class_ids_trn = [x for x in range(self.nclass) if x not in class_ids_val]
111 |
112 | if self.split == 'trn':
113 | return class_ids_trn
114 | else:
115 | return class_ids_val
116 |
117 | def build_img_metadata(self):
118 |
119 | def read_metadata(split, fold_id):
120 | fold_n_metadata = os.path.join('data/splits/pascal/%s/fold%d.txt' % (split, fold_id))
121 | with open(fold_n_metadata, 'r') as f:
122 | fold_n_metadata = f.read().split('\n')[:-1]
123 | fold_n_metadata = [[data.split('__')[0], int(data.split('__')[1]) - 1] for data in fold_n_metadata]
124 | return fold_n_metadata
125 |
126 | img_metadata = []
127 | if self.split == 'trn': # For training, read image-metadata of "the other" folds
128 | for fold_id in range(self.nfolds):
129 | if fold_id == self.fold: # Skip validation fold
130 | continue
131 | img_metadata += read_metadata(self.split, fold_id)
132 | elif self.split == 'val': # For validation, read image-metadata of "current" fold
133 | img_metadata = read_metadata(self.split, self.fold)
134 | else:
135 |             raise Exception('Undefined split: %s' % self.split)
136 |
137 |         print('Total (%s) images: %d' % (self.split, len(img_metadata)))
138 |
139 | return img_metadata
140 |
141 | def build_img_metadata_classwise(self):
142 | img_metadata_classwise = {}
143 | for class_id in range(self.nclass):
144 | img_metadata_classwise[class_id] = []
145 |
146 | for img_name, img_class in self.img_metadata:
147 | img_metadata_classwise[img_class] += [img_name]
148 | return img_metadata_classwise
149 |
--------------------------------------------------------------------------------
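Note on the mask encoding handled by extract_ignore_idx in pascal.py above: PASCAL-VOC annotation PNGs store 1-based class indices for objects and 255 for boundary/ignore pixels, so (mask / 255).floor() is 1 exactly on the ignore pixels, and the sampled class is binarised afterwards. A minimal sketch of that behaviour on a toy tensor (the values are illustrative, not taken from the repo):

import torch

# toy 2x3 annotation map: background (0), class-3 pixels, one 255 boundary pixel
mask = torch.tensor([[0., 3., 255.],
                     [3., 0., 3.]])
class_id = 2  # the loader uses 0-based class ids, so class 3 in the PNG is class_id 2

boundary = (mask / 255).floor()  # 1 only where mask == 255
mask[mask != class_id + 1] = 0   # drop every other class (and the boundary pixels)
mask[mask == class_id + 1] = 1   # binarise the sampled class

print(mask)      # tensor([[0., 1., 0.], [1., 0., 1.]])
print(boundary)  # tensor([[0., 0., 1.], [0., 0., 0.]])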
/data/splits/pascal/val/fold0.txt:
--------------------------------------------------------------------------------
1 | 2007_000033__01
2 | 2007_000061__04
3 | 2007_000129__02
4 | 2007_000346__05
5 | 2007_000529__04
6 | 2007_000559__05
7 | 2007_000572__02
8 | 2007_000762__05
9 | 2007_001288__01
10 | 2007_001289__03
11 | 2007_001311__02
12 | 2007_001408__05
13 | 2007_001568__01
14 | 2007_001630__02
15 | 2007_001761__01
16 | 2007_001884__01
17 | 2007_002094__03
18 | 2007_002266__01
19 | 2007_002376__01
20 | 2007_002400__03
21 | 2007_002619__01
22 | 2007_002719__04
23 | 2007_003088__05
24 | 2007_003131__04
25 | 2007_003188__02
26 | 2007_003349__03
27 | 2007_003571__04
28 | 2007_003621__02
29 | 2007_003682__03
30 | 2007_003861__04
31 | 2007_004052__01
32 | 2007_004143__03
33 | 2007_004241__04
34 | 2007_004468__05
35 | 2007_005074__04
36 | 2007_005107__02
37 | 2007_005294__05
38 | 2007_005304__05
39 | 2007_005428__05
40 | 2007_005509__01
41 | 2007_005600__01
42 | 2007_005705__04
43 | 2007_005828__01
44 | 2007_006076__03
45 | 2007_006086__05
46 | 2007_006449__02
47 | 2007_006946__01
48 | 2007_007084__03
49 | 2007_007235__02
50 | 2007_007341__01
51 | 2007_007470__01
52 | 2007_007477__04
53 | 2007_007836__02
54 | 2007_008051__03
55 | 2007_008084__03
56 | 2007_008204__05
57 | 2007_008670__03
58 | 2007_009088__03
59 | 2007_009258__02
60 | 2007_009323__03
61 | 2007_009458__05
62 | 2007_009687__05
63 | 2007_009817__03
64 | 2007_009911__01
65 | 2008_000120__04
66 | 2008_000123__03
67 | 2008_000533__03
68 | 2008_000725__02
69 | 2008_000911__05
70 | 2008_001013__04
71 | 2008_001040__04
72 | 2008_001135__04
73 | 2008_001260__04
74 | 2008_001404__02
75 | 2008_001514__03
76 | 2008_001531__02
77 | 2008_001546__01
78 | 2008_001580__04
79 | 2008_001966__03
80 | 2008_001971__01
81 | 2008_002043__03
82 | 2008_002269__02
83 | 2008_002358__01
84 | 2008_002429__03
85 | 2008_002467__05
86 | 2008_002504__04
87 | 2008_002775__05
88 | 2008_002864__05
89 | 2008_003034__04
90 | 2008_003076__05
91 | 2008_003108__02
92 | 2008_003110__03
93 | 2008_003155__01
94 | 2008_003270__02
95 | 2008_003369__01
96 | 2008_003858__04
97 | 2008_003876__01
98 | 2008_003886__04
99 | 2008_003926__01
100 | 2008_003976__01
101 | 2008_004363__02
102 | 2008_004654__02
103 | 2008_004659__05
104 | 2008_004704__01
105 | 2008_004758__02
106 | 2008_004995__02
107 | 2008_005262__05
108 | 2008_005338__01
109 | 2008_005628__04
110 | 2008_005727__02
111 | 2008_005812__05
112 | 2008_005904__05
113 | 2008_006216__01
114 | 2008_006229__04
115 | 2008_006254__02
116 | 2008_006703__01
117 | 2008_007120__03
118 | 2008_007143__04
119 | 2008_007219__05
120 | 2008_007350__01
121 | 2008_007498__03
122 | 2008_007811__05
123 | 2008_007994__03
124 | 2008_008268__03
125 | 2008_008629__02
126 | 2008_008711__02
127 | 2008_008746__03
128 | 2009_000032__01
129 | 2009_000037__03
130 | 2009_000121__05
131 | 2009_000149__02
132 | 2009_000201__05
133 | 2009_000205__01
134 | 2009_000318__03
135 | 2009_000354__02
136 | 2009_000387__01
137 | 2009_000421__04
138 | 2009_000440__01
139 | 2009_000446__04
140 | 2009_000457__02
141 | 2009_000469__04
142 | 2009_000573__02
143 | 2009_000619__03
144 | 2009_000664__03
145 | 2009_000723__04
146 | 2009_000828__04
147 | 2009_000840__05
148 | 2009_000879__03
149 | 2009_000991__03
150 | 2009_000998__03
151 | 2009_001108__03
152 | 2009_001160__03
153 | 2009_001255__02
154 | 2009_001278__05
155 | 2009_001314__03
156 | 2009_001332__01
157 | 2009_001565__03
158 | 2009_001607__03
159 | 2009_001683__03
160 | 2009_001718__02
161 | 2009_001765__03
162 | 2009_001818__05
163 | 2009_001850__01
164 | 2009_001851__01
165 | 2009_001941__04
166 | 2009_002185__05
167 | 2009_002295__02
168 | 2009_002320__01
169 | 2009_002372__05
170 | 2009_002521__05
171 | 2009_002594__05
172 | 2009_002604__03
173 | 2009_002649__05
174 | 2009_002727__04
175 | 2009_002732__05
176 | 2009_002749__05
177 | 2009_002808__01
178 | 2009_002856__05
179 | 2009_002888__01
180 | 2009_002928__02
181 | 2009_003003__05
182 | 2009_003005__01
183 | 2009_003043__04
184 | 2009_003080__04
185 | 2009_003193__02
186 | 2009_003224__02
187 | 2009_003269__05
188 | 2009_003273__03
189 | 2009_003343__02
190 | 2009_003378__03
191 | 2009_003450__03
192 | 2009_003498__03
193 | 2009_003504__04
194 | 2009_003517__05
195 | 2009_003640__03
196 | 2009_003696__01
197 | 2009_003707__04
198 | 2009_003806__01
199 | 2009_003858__03
200 | 2009_003971__02
201 | 2009_004021__03
202 | 2009_004084__03
203 | 2009_004125__04
204 | 2009_004247__05
205 | 2009_004324__05
206 | 2009_004509__03
207 | 2009_004540__03
208 | 2009_004568__03
209 | 2009_004579__05
210 | 2009_004635__04
211 | 2009_004653__01
212 | 2009_004848__02
213 | 2009_004882__02
214 | 2009_004886__03
215 | 2009_004895__03
216 | 2009_004969__01
217 | 2009_005038__05
218 | 2009_005137__03
219 | 2009_005156__02
220 | 2009_005189__01
221 | 2009_005190__05
222 | 2009_005260__03
223 | 2009_005262__03
224 | 2009_005302__05
225 | 2010_000065__02
226 | 2010_000083__02
227 | 2010_000084__04
228 | 2010_000238__01
229 | 2010_000241__03
230 | 2010_000272__04
231 | 2010_000342__02
232 | 2010_000426__05
233 | 2010_000572__01
234 | 2010_000622__01
235 | 2010_000814__03
236 | 2010_000906__04
237 | 2010_000961__03
238 | 2010_001016__03
239 | 2010_001017__01
240 | 2010_001024__01
241 | 2010_001036__04
242 | 2010_001061__03
243 | 2010_001069__03
244 | 2010_001174__01
245 | 2010_001367__02
246 | 2010_001367__05
247 | 2010_001448__01
248 | 2010_001830__05
249 | 2010_001995__03
250 | 2010_002017__05
251 | 2010_002030__02
252 | 2010_002142__03
253 | 2010_002147__01
254 | 2010_002150__04
255 | 2010_002200__01
256 | 2010_002310__01
257 | 2010_002536__02
258 | 2010_002546__04
259 | 2010_002693__02
260 | 2010_002939__01
261 | 2010_003127__01
262 | 2010_003132__01
263 | 2010_003168__03
264 | 2010_003362__03
265 | 2010_003365__01
266 | 2010_003418__03
267 | 2010_003468__05
268 | 2010_003473__03
269 | 2010_003495__01
270 | 2010_003547__04
271 | 2010_003716__01
272 | 2010_003771__03
273 | 2010_003781__05
274 | 2010_003820__03
275 | 2010_003912__02
276 | 2010_003915__01
277 | 2010_004041__04
278 | 2010_004056__05
279 | 2010_004208__04
280 | 2010_004314__01
281 | 2010_004419__01
282 | 2010_004520__05
283 | 2010_004529__05
284 | 2010_004551__05
285 | 2010_004556__03
286 | 2010_004559__03
287 | 2010_004662__04
288 | 2010_004772__04
289 | 2010_004828__05
290 | 2010_004994__03
291 | 2010_005252__04
292 | 2010_005401__04
293 | 2010_005428__03
294 | 2010_005496__05
295 | 2010_005531__03
296 | 2010_005534__01
297 | 2010_005582__05
298 | 2010_005664__02
299 | 2010_005705__04
300 | 2010_005718__01
301 | 2010_005762__05
302 | 2010_005877__01
303 | 2010_005888__01
304 | 2010_006034__01
305 | 2010_006070__02
306 | 2011_000066__05
307 | 2011_000112__03
308 | 2011_000185__03
309 | 2011_000234__04
310 | 2011_000238__04
311 | 2011_000412__02
312 | 2011_000435__04
313 | 2011_000456__03
314 | 2011_000482__03
315 | 2011_000585__02
316 | 2011_000669__03
317 | 2011_000747__05
318 | 2011_000874__01
319 | 2011_001114__01
320 | 2011_001161__04
321 | 2011_001263__01
322 | 2011_001287__03
323 | 2011_001407__01
324 | 2011_001421__03
325 | 2011_001434__01
326 | 2011_001589__04
327 | 2011_001624__01
328 | 2011_001793__04
329 | 2011_001880__01
330 | 2011_001988__02
331 | 2011_002064__02
332 | 2011_002098__05
333 | 2011_002223__02
334 | 2011_002295__03
335 | 2011_002327__01
336 | 2011_002515__01
337 | 2011_002675__01
338 | 2011_002713__02
339 | 2011_002754__04
340 | 2011_002863__05
341 | 2011_002929__01
342 | 2011_002975__04
343 | 2011_003003__02
344 | 2011_003030__03
345 | 2011_003145__03
346 | 2011_003271__05
347 |
--------------------------------------------------------------------------------
/data/splits/pascal/val/fold3.txt:
--------------------------------------------------------------------------------
1 | 2007_000042__19
2 | 2007_000123__19
3 | 2007_000175__17
4 | 2007_000187__20
5 | 2007_000452__18
6 | 2007_000559__20
7 | 2007_000629__19
8 | 2007_000636__19
9 | 2007_000661__18
10 | 2007_000676__17
11 | 2007_000804__18
12 | 2007_000925__17
13 | 2007_001154__18
14 | 2007_001175__20
15 | 2007_001408__16
16 | 2007_001430__16
17 | 2007_001430__20
18 | 2007_001457__18
19 | 2007_001458__18
20 | 2007_001585__18
21 | 2007_001594__17
22 | 2007_001678__20
23 | 2007_001717__20
24 | 2007_001733__17
25 | 2007_001763__18
26 | 2007_001763__20
27 | 2007_002119__20
28 | 2007_002132__20
29 | 2007_002268__18
30 | 2007_002284__16
31 | 2007_002378__16
32 | 2007_002426__18
33 | 2007_002427__18
34 | 2007_002565__19
35 | 2007_002618__17
36 | 2007_002648__17
37 | 2007_002728__19
38 | 2007_003011__18
39 | 2007_003011__20
40 | 2007_003169__18
41 | 2007_003367__16
42 | 2007_003499__19
43 | 2007_003506__16
44 | 2007_003530__18
45 | 2007_003587__19
46 | 2007_003714__17
47 | 2007_003848__19
48 | 2007_003957__19
49 | 2007_004190__20
50 | 2007_004193__20
51 | 2007_004275__16
52 | 2007_004281__19
53 | 2007_004483__19
54 | 2007_004510__20
55 | 2007_004558__16
56 | 2007_004649__19
57 | 2007_004712__16
58 | 2007_004969__17
59 | 2007_005469__17
60 | 2007_005626__19
61 | 2007_005689__19
62 | 2007_005813__16
63 | 2007_005857__16
64 | 2007_005915__17
65 | 2007_006171__18
66 | 2007_006348__20
67 | 2007_006373__18
68 | 2007_006678__17
69 | 2007_006680__19
70 | 2007_006802__19
71 | 2007_007130__20
72 | 2007_007165__17
73 | 2007_007168__19
74 | 2007_007195__19
75 | 2007_007196__20
76 | 2007_007203__20
77 | 2007_007417__18
78 | 2007_007534__17
79 | 2007_007624__16
80 | 2007_007795__16
81 | 2007_007881__19
82 | 2007_007996__18
83 | 2007_008204__20
84 | 2007_008260__18
85 | 2007_008339__19
86 | 2007_008374__20
87 | 2007_008543__18
88 | 2007_008547__16
89 | 2007_009068__18
90 | 2007_009252__18
91 | 2007_009320__17
92 | 2007_009419__16
93 | 2007_009446__20
94 | 2007_009521__18
95 | 2007_009521__20
96 | 2007_009592__18
97 | 2007_009655__18
98 | 2007_009684__18
99 | 2007_009750__16
100 | 2008_000016__20
101 | 2008_000149__18
102 | 2008_000270__18
103 | 2008_000391__16
104 | 2008_000589__18
105 | 2008_000657__19
106 | 2008_001078__16
107 | 2008_001283__16
108 | 2008_001688__16
109 | 2008_001688__20
110 | 2008_001966__16
111 | 2008_002273__16
112 | 2008_002379__16
113 | 2008_002464__20
114 | 2008_002536__17
115 | 2008_002680__20
116 | 2008_002900__19
117 | 2008_002929__18
118 | 2008_003003__20
119 | 2008_003026__20
120 | 2008_003105__19
121 | 2008_003135__16
122 | 2008_003676__16
123 | 2008_003709__18
124 | 2008_003733__18
125 | 2008_003885__20
126 | 2008_004172__18
127 | 2008_004212__19
128 | 2008_004279__20
129 | 2008_004367__19
130 | 2008_004453__17
131 | 2008_004477__16
132 | 2008_004562__18
133 | 2008_004610__19
134 | 2008_004621__17
135 | 2008_004754__20
136 | 2008_004854__17
137 | 2008_004910__20
138 | 2008_005089__20
139 | 2008_005217__16
140 | 2008_005242__16
141 | 2008_005254__20
142 | 2008_005439__20
143 | 2008_005445__20
144 | 2008_005544__19
145 | 2008_005633__17
146 | 2008_005680__16
147 | 2008_006055__19
148 | 2008_006159__20
149 | 2008_006327__17
150 | 2008_006523__19
151 | 2008_006553__19
152 | 2008_006752__19
153 | 2008_006784__18
154 | 2008_006835__17
155 | 2008_007497__17
156 | 2008_007527__20
157 | 2008_007677__17
158 | 2008_007814__17
159 | 2008_007828__20
160 | 2008_008103__18
161 | 2008_008221__19
162 | 2008_008434__16
163 | 2009_000022__19
164 | 2009_000039__17
165 | 2009_000087__18
166 | 2009_000096__18
167 | 2009_000136__20
168 | 2009_000242__18
169 | 2009_000391__20
170 | 2009_000418__16
171 | 2009_000418__18
172 | 2009_000487__18
173 | 2009_000488__16
174 | 2009_000488__20
175 | 2009_000628__19
176 | 2009_000675__17
177 | 2009_000704__20
178 | 2009_000712__19
179 | 2009_000732__18
180 | 2009_000845__19
181 | 2009_000924__17
182 | 2009_001300__19
183 | 2009_001333__19
184 | 2009_001363__20
185 | 2009_001505__17
186 | 2009_001644__16
187 | 2009_001644__18
188 | 2009_001644__20
189 | 2009_001684__16
190 | 2009_001731__18
191 | 2009_001768__17
192 | 2009_001775__16
193 | 2009_001775__18
194 | 2009_001991__17
195 | 2009_002082__17
196 | 2009_002094__20
197 | 2009_002202__19
198 | 2009_002265__19
199 | 2009_002291__19
200 | 2009_002346__18
201 | 2009_002366__20
202 | 2009_002390__18
203 | 2009_002487__16
204 | 2009_002562__20
205 | 2009_002568__19
206 | 2009_002571__16
207 | 2009_002571__18
208 | 2009_002573__20
209 | 2009_002584__16
210 | 2009_002638__19
211 | 2009_002732__18
212 | 2009_002887__19
213 | 2009_002982__19
214 | 2009_003105__19
215 | 2009_003123__18
216 | 2009_003299__19
217 | 2009_003311__19
218 | 2009_003433__19
219 | 2009_003523__20
220 | 2009_003551__20
221 | 2009_003564__16
222 | 2009_003564__18
223 | 2009_003607__18
224 | 2009_003666__17
225 | 2009_003857__20
226 | 2009_003895__18
227 | 2009_003895__20
228 | 2009_003938__19
229 | 2009_004099__18
230 | 2009_004140__18
231 | 2009_004255__19
232 | 2009_004298__18
233 | 2009_004687__18
234 | 2009_004730__19
235 | 2009_004799__19
236 | 2009_004993__18
237 | 2009_004993__20
238 | 2009_005148__19
239 | 2009_005220__19
240 | 2010_000256__18
241 | 2010_000284__18
242 | 2010_000309__17
243 | 2010_000318__20
244 | 2010_000330__16
245 | 2010_000639__16
246 | 2010_000738__20
247 | 2010_000764__19
248 | 2010_001011__17
249 | 2010_001079__17
250 | 2010_001104__19
251 | 2010_001149__18
252 | 2010_001151__19
253 | 2010_001246__16
254 | 2010_001256__17
255 | 2010_001327__18
256 | 2010_001367__20
257 | 2010_001522__17
258 | 2010_001557__17
259 | 2010_001577__17
260 | 2010_001699__16
261 | 2010_001734__19
262 | 2010_001752__20
263 | 2010_001767__18
264 | 2010_001773__16
265 | 2010_001851__16
266 | 2010_001951__19
267 | 2010_001962__18
268 | 2010_002106__17
269 | 2010_002137__16
270 | 2010_002137__18
271 | 2010_002232__17
272 | 2010_002531__18
273 | 2010_002682__19
274 | 2010_002921__20
275 | 2010_003014__18
276 | 2010_003123__16
277 | 2010_003302__16
278 | 2010_003514__19
279 | 2010_003541__17
280 | 2010_003597__18
281 | 2010_003781__16
282 | 2010_003956__19
283 | 2010_004149__19
284 | 2010_004226__17
285 | 2010_004382__16
286 | 2010_004479__20
287 | 2010_004757__16
288 | 2010_004757__18
289 | 2010_004783__18
290 | 2010_004825__16
291 | 2010_004857__20
292 | 2010_004951__19
293 | 2010_004980__19
294 | 2010_005180__18
295 | 2010_005187__16
296 | 2010_005305__20
297 | 2010_005606__18
298 | 2010_005706__19
299 | 2010_005719__17
300 | 2010_005727__19
301 | 2010_005788__17
302 | 2010_005860__16
303 | 2010_005871__19
304 | 2010_005991__18
305 | 2010_006054__19
306 | 2011_000070__18
307 | 2011_000173__18
308 | 2011_000283__19
309 | 2011_000291__19
310 | 2011_000310__18
311 | 2011_000436__17
312 | 2011_000521__19
313 | 2011_000747__16
314 | 2011_001005__18
315 | 2011_001060__19
316 | 2011_001281__19
317 | 2011_001350__17
318 | 2011_001567__18
319 | 2011_001601__18
320 | 2011_001614__19
321 | 2011_001674__18
322 | 2011_001713__16
323 | 2011_001713__18
324 | 2011_001726__20
325 | 2011_001794__18
326 | 2011_001862__18
327 | 2011_001863__16
328 | 2011_001910__20
329 | 2011_002124__18
330 | 2011_002156__20
331 | 2011_002178__17
332 | 2011_002247__19
333 | 2011_002379__19
334 | 2011_002391__18
335 | 2011_002532__20
336 | 2011_002535__19
337 | 2011_002644__18
338 | 2011_002644__20
339 | 2011_002879__18
340 | 2011_002879__20
341 | 2011_003103__16
342 | 2011_003103__18
343 | 2011_003146__19
344 | 2011_003182__18
345 | 2011_003197__19
346 | 2011_003256__18
347 |
--------------------------------------------------------------------------------
/data/splits/fss/trn.txt:
--------------------------------------------------------------------------------
1 | fountain
2 | taxi
3 | assult_rifle
4 | radio
5 | comb
6 | box_turtle
7 | igloo
8 | head_cabbage
9 | cottontail
10 | coho
11 | ashtray
12 | joystick
13 | sleeping_bag
14 | jackfruit
15 | trailer_truck
16 | shower_cap
17 | ibex
18 | kinguin
19 | squirrel
20 | ac_wall
21 | sidewinder
22 | remote_control
23 | marshmallow
24 | bolotie
25 | polar_bear
26 | rock_beauty
27 | tokyo_tower
28 | wafer
29 | red_bayberry
30 | electronic_toothbrush
31 | hartebeest
32 | cassette
33 | oil_filter
34 | bomb
35 | walnut
36 | toilet_tissue
37 | memory_stick
38 | wild_boar
39 | cableways
40 | chihuahua
41 | envelope
42 | bison
43 | poker
44 | pubg_lvl3helmet
45 | indian_cobra
46 | staffordshire
47 | park_bench
48 | wombat
49 | black_grouse
50 | submarine
51 | washer
52 | agama
53 | coyote
54 | feeder
55 | sarong
56 | buckingham_palace
57 | frog
58 | steam_locomotive
59 | acorn
60 | german_pointer
61 | obelisk
62 | polecat
63 | black_swan
64 | butterfly
65 | mountain_tent
66 | gorilla
67 | sloth_bear
68 | aubergine
69 | stinkhorn
70 | stole
71 | owl
72 | mooli
73 | pool_table
74 | collar
75 | lhasa_apso
76 | ambulance
77 | spade
78 | pufferfish
79 | paint_brush
80 | lark
81 | golf_ball
82 | hock
83 | fork
84 | drake
85 | bee_house
86 | mooncake
87 | wok
88 | cocacola
89 | water_bike
90 | ladder
91 | psp
92 | bassoon
93 | bear
94 | border_terrier
95 | petri_dish
96 | pill_bottle
97 | aircraft_carrier
98 | panther
99 | canoe
100 | baseball_player
101 | turtle
102 | espresso
103 | throne
104 | cornet
105 | coucal
106 | eletrical_switch
107 | bra
108 | snail
109 | backpack
110 | jacamar
111 | scroll_brush
112 | gliding_lizard
113 | raft
114 | pinwheel
115 | grasshopper
116 | green_mamba
117 | eft_newt
118 | computer_mouse
119 | vine_snake
120 | recreational_vehicle
121 | llama
122 | meerkat
123 | chainsaw
124 | ferret
125 | garbage_can
126 | kangaroo
127 | litchi
128 | carbonara
129 | housefinch
130 | modem
131 | tebby_cat
132 | thatch
133 | face_powder
134 | tomb
135 | apple
136 | ladybug
137 | killer_whale
138 | rocket
139 | airship
140 | surfboard
141 | lesser_panda
142 | jordan_logo
143 | banana
144 | nail_scissor
145 | swab
146 | perfume
147 | punching_bag
148 | victor_icon
149 | waffle_iron
150 | trimaran
151 | garlic
152 | flute
153 | langur
154 | starfish
155 | parallel_bars
156 | dandie_dinmont
157 | cosmetic_brush
158 | screwdriver
159 | brick_card
160 | balance_weight
161 | hornet
162 | carton
163 | toothpaste
164 | bracelet
165 | egg_tart
166 | pencil_sharpener2
167 | swimming_glasses
168 | howler_monkey
169 | camel
170 | dragonfly
171 | lionfish
172 | convertible
173 | mule
174 | usb
175 | conch
176 | papaya
177 | garbage_truck
178 | dingo
179 | radiator
180 | solar_dish
181 | streetcar
182 | trilobite
183 | bouzouki
184 | ringlet_butterfly
185 | space_shuttle
186 | waffle
187 | american_staffordshire
188 | violin
189 | flowerpot
190 | forklift
191 | manx
192 | sundial
193 | snowmobile
194 | chickadee_bird
195 | ruffed_grouse
196 | brick_tea
197 | paddle
198 | stove
199 | carousel
200 | spatula
201 | beaker
202 | gas_pump
203 | lawn_mower
204 | speaker
205 | tank
206 | tresher
207 | kappa_logo
208 | hare
209 | tennis_racket
210 | shopping_cart
211 | thimble
212 | tractor
213 | anemone_fish
214 | trolleybus
215 | steak
216 | capuchin
217 | red_breasted_merganser
218 | golden_retriever
219 | light_tube
220 | flatworm
221 | melon_seed
222 | digital_watch
223 | jacko_lantern
224 | brown_bear
225 | cairn
226 | mushroom
227 | chalk
228 | skull
229 | stapler
230 | potato
231 | telescope
232 | proboscis
233 | microphone
234 | torii
235 | baseball_bat
236 | dhole
237 | excavator
238 | fig
239 | snake
240 | bradypod
241 | pepitas
242 | prairie_chicken
243 | scorpion
244 | shotgun
245 | bottle_cap
246 | file_cabinet
247 | grey_whale
248 | one-armed_bandit
249 | banded_gecko
250 | flying_disc
251 | croissant
252 | toothbrush
253 | miniskirt
254 | pokermon_ball
255 | gazelle
256 | grey_fox
257 | esport_chair
258 | necklace
259 | ptarmigan
260 | watermelon
261 | besom
262 | pomelo
263 | radio_telescope
264 | studio_couch
265 | black_stork
266 | vestment
267 | koala
268 | brambling
269 | muscle_car
270 | window_shade
271 | space_heater
272 | sunglasses
273 | motor_scooter
274 | ladyfinger
275 | pencil_box
276 | titi_monkey
277 | chicken_wings
278 | mount_fuji
279 | giant_panda
280 | dart
281 | fire_engine
282 | running_shoe
283 | dumbbell
284 | donkey
285 | loafer
286 | hard_disk
287 | globe
288 | lifeboat
289 | medical_kit
290 | brain_coral
291 | paper_towel
292 | dugong
293 | seatbelt
294 | skunk
295 | military_vest
296 | cocktail_shaker
297 | zucchini
298 | quad_drone
299 | ocicat
300 | shih-tzu
301 | teapot
302 | tile_roof
303 | cheese_burger
304 | handshower
305 | red_wolf
306 | stop_sign
307 | mouse
308 | battery
309 | adidas_logo2
310 | earplug
311 | hummingbird
312 | brush_pen
313 | pistachio
314 | hamster
315 | air_strip
316 | indian_elephant
317 | otter
318 | cucumber
319 | scabbard
320 | hawthorn
321 | bullet_train
322 | leopard
323 | whale
324 | cream
325 | chinese_date
326 | jellyfish
327 | lobster
328 | skua
329 | single_log
330 | chicory
331 | bagel
332 | beacon
333 | pingpong_racket
334 | spoon
335 | yurt
336 | wallaby
337 | egret
338 | christmas_stocking
339 | mcdonald_uncle
340 | wrench
341 | spark_plug
342 | triceratops
343 | wall_clock
344 | jinrikisha
345 | pickup
346 | rhinoceros
347 | swimming_trunk
348 | band-aid
349 | spotted_salamander
350 | leeks
351 | marmot
352 | warthog
353 | cello
354 | stool
355 | chest
356 | toilet_plunger
357 | wardrobe
358 | cannon
359 | adidas_logo1
360 | drumstick
361 | lady_slipper
362 | puma_logo
363 | great_wall
364 | white_shark
365 | witch_hat
366 | vending_machine
367 | wreck
368 | chopsticks
369 | garfish
370 | african_elephant
371 | children_slide
372 | hornbill
373 | zebra
374 | boa_constrictor
375 | armour
376 | pineapple
377 | angora
378 | brick
379 | car_wheel
380 | wallet
381 | boston_bull
382 | hyena
383 | lynx
384 | crash_helmet
385 | terrapin_turtle
386 | persian_cat
387 | shift_gear
388 | cactus_ball
389 | fur_coat
390 | plate
391 | pen
392 | okra
393 | mario
394 | airedale
395 | cowboy_hat
396 | celery
397 | macaque
398 | candle
399 | goose
400 | raccoon
401 | brasscica
402 | almond
403 | maotai_bottle
404 | soccer_ball
405 | sports_car
406 | tobacco_pipe
407 | water_polo
408 | eggnog
409 | hook
410 | ostrich
411 | patas
412 | table_lamp
413 | teddy
414 | mongoose
415 | spoonbill
416 | redheart
417 | crane
418 | dinosaur
419 | kitchen_knife
420 | seal
421 | baboon
422 | golfcart
423 | roller_coaster
424 | avocado
425 | birdhouse
426 | yorkshire_terrier
427 | saluki
428 | basketball
429 | buckler
430 | harvester
431 | afghan_hound
432 | beam_bridge
433 | guinea_pig
434 | lorikeet
435 | shakuhachi
436 | motarboard
437 | statue_liberty
438 | police_car
439 | sulphur_crested
440 | gourd
441 | sombrero
442 | mailbox
443 | adhensive_tape
444 | night_snake
445 | bushtit
446 | mouthpiece
447 | beaver
448 | bathtub
449 | printer
450 | cumquat
451 | orange
452 | cleaver
453 | quill_pen
454 | panpipe
455 | diamond
456 | gypsy_moth
457 | cauliflower
458 | lampshade
459 | cougar
460 | traffic_light
461 | briefcase
462 | ballpoint
463 | african_grey
464 | kremlin
465 | barometer
466 | peacock
467 | paper_crane
468 | sunscreen
469 | tofu
470 | bedlington_terrier
471 | snowball
472 | carrot
473 | tiger
474 | mink
475 | cristo_redentor
476 | ladle
477 | keyboard
478 | maraca
479 | monitor
480 | water_snake
481 | can_opener
482 | mud_turtle
483 | bald_eagle
484 | carp
485 | cn_tower
486 | egyptian_cat
487 | hen_of_the_woods
488 | measuring_cup
489 | roller_skate
490 | kite
491 | sandwich_cookies
492 | sandwich
493 | persimmon
494 | chess_bishop
495 | coffin
496 | ruddy_turnstone
497 | prayer_rug
498 | rain_barrel
499 | neck_brace
500 | nematode
501 | rosehip
502 | dutch_oven
503 | goldfish
504 | blossom_card
505 | dough
506 | trench_coat
507 | sponge
508 | stupa
509 | wash_basin
510 | electric_fan
511 | spring_scroll
512 | potted_plant
513 | sparrow
514 | car_mirror
515 | gecko
516 | diaper
517 | leatherback_turtle
518 | strainer
519 | guacamole
520 | microwave
521 |
--------------------------------------------------------------------------------
/data/splits/pascal/val/fold1.txt:
--------------------------------------------------------------------------------
1 | 2007_000452__09
2 | 2007_000464__10
3 | 2007_000491__10
4 | 2007_000663__06
5 | 2007_000663__07
6 | 2007_000727__06
7 | 2007_000727__07
8 | 2007_000804__09
9 | 2007_000830__09
10 | 2007_001299__10
11 | 2007_001321__07
12 | 2007_001457__09
13 | 2007_001677__09
14 | 2007_001717__09
15 | 2007_001763__08
16 | 2007_001774__08
17 | 2007_001884__06
18 | 2007_002268__08
19 | 2007_002387__10
20 | 2007_002445__08
21 | 2007_002470__08
22 | 2007_002539__06
23 | 2007_002597__08
24 | 2007_002643__07
25 | 2007_002903__10
26 | 2007_003011__09
27 | 2007_003051__07
28 | 2007_003101__06
29 | 2007_003106__08
30 | 2007_003137__06
31 | 2007_003143__07
32 | 2007_003169__08
33 | 2007_003195__06
34 | 2007_003201__10
35 | 2007_003503__06
36 | 2007_003503__07
37 | 2007_003621__06
38 | 2007_003711__06
39 | 2007_003786__06
40 | 2007_003841__10
41 | 2007_003917__07
42 | 2007_003991__08
43 | 2007_004193__09
44 | 2007_004392__09
45 | 2007_004405__09
46 | 2007_004510__09
47 | 2007_004712__09
48 | 2007_004856__08
49 | 2007_004866__08
50 | 2007_005074__07
51 | 2007_005114__10
52 | 2007_005296__07
53 | 2007_005331__07
54 | 2007_005460__08
55 | 2007_005547__07
56 | 2007_005547__10
57 | 2007_005844__09
58 | 2007_005845__08
59 | 2007_005911__06
60 | 2007_005978__06
61 | 2007_006035__07
62 | 2007_006086__09
63 | 2007_006241__09
64 | 2007_006260__08
65 | 2007_006277__07
66 | 2007_006348__09
67 | 2007_006553__09
68 | 2007_006761__10
69 | 2007_006841__10
70 | 2007_007414__07
71 | 2007_007417__08
72 | 2007_007524__08
73 | 2007_007815__07
74 | 2007_007818__07
75 | 2007_007996__09
76 | 2007_008106__09
77 | 2007_008110__09
78 | 2007_008543__09
79 | 2007_008722__10
80 | 2007_008747__06
81 | 2007_008815__08
82 | 2007_008897__09
83 | 2007_008973__10
84 | 2007_009015__06
85 | 2007_009015__07
86 | 2007_009068__09
87 | 2007_009084__09
88 | 2007_009096__07
89 | 2007_009221__08
90 | 2007_009245__10
91 | 2007_009346__08
92 | 2007_009392__06
93 | 2007_009392__07
94 | 2007_009413__09
95 | 2007_009521__09
96 | 2007_009764__06
97 | 2007_009794__08
98 | 2007_009897__10
99 | 2007_009923__08
100 | 2007_009938__07
101 | 2008_000009__10
102 | 2008_000073__10
103 | 2008_000075__06
104 | 2008_000107__09
105 | 2008_000149__09
106 | 2008_000182__08
107 | 2008_000345__08
108 | 2008_000401__08
109 | 2008_000464__08
110 | 2008_000501__07
111 | 2008_000673__09
112 | 2008_000853__08
113 | 2008_000919__10
114 | 2008_001078__08
115 | 2008_001433__08
116 | 2008_001439__09
117 | 2008_001513__08
118 | 2008_001640__08
119 | 2008_001715__09
120 | 2008_001885__08
121 | 2008_002152__08
122 | 2008_002205__06
123 | 2008_002212__07
124 | 2008_002379__09
125 | 2008_002521__09
126 | 2008_002623__08
127 | 2008_002681__08
128 | 2008_002778__10
129 | 2008_002958__07
130 | 2008_003141__06
131 | 2008_003141__07
132 | 2008_003333__07
133 | 2008_003477__09
134 | 2008_003499__08
135 | 2008_003577__07
136 | 2008_003777__06
137 | 2008_003821__09
138 | 2008_003846__07
139 | 2008_004069__07
140 | 2008_004339__07
141 | 2008_004552__07
142 | 2008_004612__09
143 | 2008_004701__10
144 | 2008_005097__10
145 | 2008_005105__10
146 | 2008_005245__07
147 | 2008_005676__06
148 | 2008_006008__09
149 | 2008_006063__10
150 | 2008_006254__07
151 | 2008_006325__08
152 | 2008_006341__08
153 | 2008_006480__08
154 | 2008_006528__10
155 | 2008_006554__06
156 | 2008_006986__07
157 | 2008_007025__10
158 | 2008_007031__10
159 | 2008_007048__09
160 | 2008_007123__10
161 | 2008_007194__09
162 | 2008_007273__10
163 | 2008_007378__09
164 | 2008_007402__09
165 | 2008_007527__09
166 | 2008_007548__08
167 | 2008_007596__10
168 | 2008_007737__09
169 | 2008_007797__06
170 | 2008_007804__07
171 | 2008_007828__09
172 | 2008_008252__06
173 | 2008_008301__06
174 | 2008_008469__06
175 | 2008_008682__06
176 | 2009_000013__08
177 | 2009_000080__08
178 | 2009_000219__10
179 | 2009_000309__10
180 | 2009_000335__06
181 | 2009_000335__07
182 | 2009_000426__06
183 | 2009_000455__06
184 | 2009_000457__07
185 | 2009_000523__07
186 | 2009_000641__10
187 | 2009_000716__08
188 | 2009_000731__10
189 | 2009_000771__10
190 | 2009_000825__07
191 | 2009_000964__08
192 | 2009_001008__08
193 | 2009_001082__06
194 | 2009_001240__07
195 | 2009_001255__07
196 | 2009_001299__09
197 | 2009_001391__08
198 | 2009_001411__08
199 | 2009_001536__07
200 | 2009_001775__09
201 | 2009_001804__06
202 | 2009_001816__06
203 | 2009_001854__06
204 | 2009_002035__10
205 | 2009_002122__10
206 | 2009_002150__10
207 | 2009_002164__07
208 | 2009_002171__10
209 | 2009_002221__10
210 | 2009_002238__06
211 | 2009_002238__07
212 | 2009_002239__07
213 | 2009_002268__08
214 | 2009_002346__09
215 | 2009_002415__09
216 | 2009_002487__09
217 | 2009_002527__08
218 | 2009_002535__06
219 | 2009_002549__10
220 | 2009_002571__09
221 | 2009_002618__07
222 | 2009_002635__10
223 | 2009_002753__08
224 | 2009_002936__08
225 | 2009_002990__07
226 | 2009_003003__07
227 | 2009_003059__10
228 | 2009_003071__09
229 | 2009_003269__07
230 | 2009_003304__06
231 | 2009_003387__07
232 | 2009_003406__07
233 | 2009_003494__09
234 | 2009_003507__09
235 | 2009_003542__10
236 | 2009_003549__07
237 | 2009_003569__10
238 | 2009_003589__07
239 | 2009_003703__06
240 | 2009_003771__08
241 | 2009_003773__10
242 | 2009_003849__09
243 | 2009_003895__09
244 | 2009_003904__08
245 | 2009_004072__06
246 | 2009_004140__09
247 | 2009_004217__09
248 | 2009_004248__08
249 | 2009_004455__07
250 | 2009_004504__08
251 | 2009_004590__06
252 | 2009_004594__07
253 | 2009_004687__09
254 | 2009_004721__08
255 | 2009_004732__06
256 | 2009_004748__07
257 | 2009_004789__06
258 | 2009_004859__09
259 | 2009_004867__06
260 | 2009_005158__08
261 | 2009_005219__08
262 | 2009_005231__06
263 | 2010_000003__09
264 | 2010_000160__07
265 | 2010_000163__08
266 | 2010_000372__07
267 | 2010_000427__10
268 | 2010_000530__07
269 | 2010_000552__08
270 | 2010_000573__06
271 | 2010_000628__07
272 | 2010_000639__09
273 | 2010_000682__06
274 | 2010_000683__08
275 | 2010_000724__08
276 | 2010_000907__10
277 | 2010_000941__08
278 | 2010_000952__07
279 | 2010_001000__10
280 | 2010_001010__10
281 | 2010_001070__08
282 | 2010_001206__06
283 | 2010_001292__08
284 | 2010_001331__08
285 | 2010_001351__08
286 | 2010_001403__06
287 | 2010_001403__07
288 | 2010_001534__08
289 | 2010_001553__07
290 | 2010_001579__09
291 | 2010_001646__06
292 | 2010_001656__08
293 | 2010_001692__10
294 | 2010_001699__09
295 | 2010_001767__07
296 | 2010_001851__09
297 | 2010_001913__08
298 | 2010_002017__07
299 | 2010_002017__09
300 | 2010_002025__08
301 | 2010_002137__08
302 | 2010_002146__08
303 | 2010_002305__08
304 | 2010_002336__09
305 | 2010_002348__08
306 | 2010_002361__07
307 | 2010_002390__10
308 | 2010_002422__08
309 | 2010_002512__08
310 | 2010_002531__08
311 | 2010_002546__06
312 | 2010_002623__09
313 | 2010_002693__08
314 | 2010_002693__09
315 | 2010_002763__08
316 | 2010_002763__10
317 | 2010_002868__06
318 | 2010_002900__08
319 | 2010_002902__07
320 | 2010_002921__09
321 | 2010_002929__07
322 | 2010_002988__07
323 | 2010_003123__07
324 | 2010_003183__10
325 | 2010_003231__07
326 | 2010_003239__10
327 | 2010_003275__08
328 | 2010_003276__07
329 | 2010_003293__06
330 | 2010_003302__09
331 | 2010_003325__09
332 | 2010_003381__07
333 | 2010_003402__08
334 | 2010_003409__09
335 | 2010_003446__07
336 | 2010_003453__07
337 | 2010_003468__08
338 | 2010_003531__09
339 | 2010_003675__08
340 | 2010_003746__07
341 | 2010_003758__08
342 | 2010_003764__08
343 | 2010_003768__07
344 | 2010_003772__06
345 | 2010_003781__08
346 | 2010_003813__07
347 | 2010_003854__07
348 | 2010_003971__08
349 | 2010_003971__09
350 | 2010_004104__08
351 | 2010_004120__08
352 | 2010_004320__08
353 | 2010_004322__10
354 | 2010_004348__06
355 | 2010_004369__08
356 | 2010_004472__07
357 | 2010_004479__08
358 | 2010_004635__10
359 | 2010_004763__09
360 | 2010_004783__09
361 | 2010_004789__10
362 | 2010_004815__08
363 | 2010_004825__09
364 | 2010_004861__08
365 | 2010_004946__07
366 | 2010_005013__07
367 | 2010_005021__08
368 | 2010_005021__09
369 | 2010_005063__06
370 | 2010_005108__08
371 | 2010_005118__06
372 | 2010_005160__06
373 | 2010_005166__10
374 | 2010_005284__06
375 | 2010_005344__08
376 | 2010_005421__08
377 | 2010_005432__07
378 | 2010_005501__07
379 | 2010_005508__08
380 | 2010_005606__08
381 | 2010_005709__08
382 | 2010_005718__07
383 | 2010_005860__07
384 | 2010_005899__08
385 | 2010_006070__07
386 | 2011_000178__06
387 | 2011_000226__09
388 | 2011_000239__06
389 | 2011_000248__06
390 | 2011_000312__06
391 | 2011_000338__09
392 | 2011_000419__08
393 | 2011_000503__07
394 | 2011_000548__10
395 | 2011_000566__10
396 | 2011_000607__09
397 | 2011_000661__08
398 | 2011_000661__09
399 | 2011_000780__08
400 | 2011_000789__08
401 | 2011_000809__09
402 | 2011_000813__08
403 | 2011_000813__09
404 | 2011_000830__06
405 | 2011_000843__09
406 | 2011_000888__06
407 | 2011_000900__07
408 | 2011_000969__06
409 | 2011_001047__10
410 | 2011_001064__06
411 | 2011_001071__09
412 | 2011_001110__07
413 | 2011_001159__10
414 | 2011_001232__10
415 | 2011_001292__08
416 | 2011_001341__06
417 | 2011_001346__09
418 | 2011_001447__09
419 | 2011_001530__10
420 | 2011_001534__08
421 | 2011_001546__10
422 | 2011_001567__09
423 | 2011_001597__08
424 | 2011_001601__08
425 | 2011_001607__08
426 | 2011_001665__09
427 | 2011_001708__10
428 | 2011_001775__08
429 | 2011_001782__10
430 | 2011_001812__09
431 | 2011_002041__09
432 | 2011_002064__07
433 | 2011_002124__09
434 | 2011_002200__09
435 | 2011_002298__09
436 | 2011_002322__07
437 | 2011_002343__09
438 | 2011_002358__09
439 | 2011_002391__09
440 | 2011_002509__09
441 | 2011_002592__07
442 | 2011_002644__09
443 | 2011_002685__08
444 | 2011_002812__07
445 | 2011_002885__10
446 | 2011_003011__09
447 | 2011_003019__07
448 | 2011_003019__10
449 | 2011_003055__07
450 | 2011_003103__09
451 | 2011_003114__06
452 |
--------------------------------------------------------------------------------
/model/DCAMA.py:
--------------------------------------------------------------------------------
1 | r""" Dense Cross-Query-and-Support Attention Weighted Mask Aggregation for Few-Shot Segmentation """
2 | from functools import reduce
3 | from operator import add
4 |
5 | import torch
6 | import torch.nn as nn
7 | import torch.nn.functional as F
8 | from torchvision.models import resnet
9 |
10 | from .base.swin_transformer import SwinTransformer
11 | from model.base.transformer import MultiHeadedAttention, PositionalEncoding
12 |
13 |
14 | class DCAMA(nn.Module):
15 |
16 | def __init__(self, backbone, pretrained_path, use_original_imgsize):
17 | super(DCAMA, self).__init__()
18 |
19 | self.backbone = backbone
20 | self.use_original_imgsize = use_original_imgsize
21 |
22 | # feature extractor initialization
23 | if backbone == 'resnet50':
24 | self.feature_extractor = resnet.resnet50()
25 | self.feature_extractor.load_state_dict(torch.load(pretrained_path))
26 | self.feat_channels = [256, 512, 1024, 2048]
27 | self.nlayers = [3, 4, 6, 3]
28 | self.feat_ids = list(range(0, 17))
29 | elif backbone == 'resnet101':
30 | self.feature_extractor = resnet.resnet101()
31 | self.feature_extractor.load_state_dict(torch.load(pretrained_path))
32 | self.feat_channels = [256, 512, 1024, 2048]
33 | self.nlayers = [3, 4, 23, 3]
34 | self.feat_ids = list(range(0, 34))
35 | elif backbone == 'swin':
36 | self.feature_extractor = SwinTransformer(img_size=384, patch_size=4, window_size=12, embed_dim=128,
37 | depths=[2, 2, 18, 2], num_heads=[4, 8, 16, 32])
38 | self.feature_extractor.load_state_dict(torch.load(pretrained_path)['model'])
39 | self.feat_channels = [128, 256, 512, 1024]
40 | self.nlayers = [2, 2, 18, 2]
41 | else:
42 | raise Exception('Unavailable backbone: %s' % backbone)
43 | self.feature_extractor.eval()
44 |
45 | # define model
46 | self.lids = reduce(add, [[i + 1] * x for i, x in enumerate(self.nlayers)])
47 | self.stack_ids = torch.tensor(self.lids).bincount()[-4:].cumsum(dim=0)
48 | self.model = DCAMA_model(in_channels=self.feat_channels, stack_ids=self.stack_ids)
49 |
50 | self.cross_entropy_loss = nn.CrossEntropyLoss()
51 |
52 | def forward(self, query_img, support_img, support_mask):
53 | with torch.no_grad():
54 | query_feats = self.extract_feats(query_img)
55 | support_feats = self.extract_feats(support_img)
56 |
57 | logit_mask = self.model(query_feats, support_feats, support_mask.clone())
58 |
59 | return logit_mask
60 |
61 | def extract_feats(self, img):
62 | r""" Extract input image features """
63 | feats = []
64 |
65 | if self.backbone == 'swin':
66 | _ = self.feature_extractor.forward_features(img)
67 | for feat in self.feature_extractor.feat_maps:
68 | bsz, hw, c = feat.size()
69 | h = int(hw ** 0.5)
70 | feat = feat.view(bsz, h, h, c).permute(0, 3, 1, 2).contiguous()
71 | feats.append(feat)
72 | elif self.backbone == 'resnet50' or self.backbone == 'resnet101':
73 | bottleneck_ids = reduce(add, list(map(lambda x: list(range(x)), self.nlayers)))
74 | # Layer 0
75 | feat = self.feature_extractor.conv1.forward(img)
76 | feat = self.feature_extractor.bn1.forward(feat)
77 | feat = self.feature_extractor.relu.forward(feat)
78 | feat = self.feature_extractor.maxpool.forward(feat)
79 |
80 | # Layer 1-4
81 | for hid, (bid, lid) in enumerate(zip(bottleneck_ids, self.lids)):
82 | res = feat
83 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].conv1.forward(feat)
84 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].bn1.forward(feat)
85 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].relu.forward(feat)
86 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].conv2.forward(feat)
87 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].bn2.forward(feat)
88 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].relu.forward(feat)
89 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].conv3.forward(feat)
90 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].bn3.forward(feat)
91 |
92 | if bid == 0:
93 | res = self.feature_extractor.__getattr__('layer%d' % lid)[bid].downsample.forward(res)
94 |
95 | feat += res
96 |
97 | if hid + 1 in self.feat_ids:
98 | feats.append(feat.clone())
99 |
100 | feat = self.feature_extractor.__getattr__('layer%d' % lid)[bid].relu.forward(feat)
101 |
102 | return feats
103 |
104 | def predict_mask_nshot(self, batch, nshot):
105 | r""" n-shot inference """
106 | query_img = batch['query_img']
107 | support_imgs = batch['support_imgs']
108 | support_masks = batch['support_masks']
109 |
110 | if nshot == 1:
111 | logit_mask = self(query_img, support_imgs[:, 0], support_masks[:, 0])
112 | else:
113 | with torch.no_grad():
114 | query_feats = self.extract_feats(query_img)
115 | n_support_feats = []
116 | for k in range(nshot):
117 | support_feats = self.extract_feats(support_imgs[:, k])
118 | n_support_feats.append(support_feats)
119 | logit_mask = self.model(query_feats, n_support_feats, support_masks.clone(), nshot)
120 |
121 | if self.use_original_imgsize:
122 | org_qry_imsize = tuple([batch['org_query_imsize'][1].item(), batch['org_query_imsize'][0].item()])
123 | logit_mask = F.interpolate(logit_mask, org_qry_imsize, mode='bilinear', align_corners=True)
124 | else:
125 | logit_mask = F.interpolate(logit_mask, support_imgs[0].size()[2:], mode='bilinear', align_corners=True)
126 |
127 | return logit_mask.argmax(dim=1)
128 |
129 | def compute_objective(self, logit_mask, gt_mask):
130 | bsz = logit_mask.size(0)
131 | logit_mask = logit_mask.view(bsz, 2, -1)
132 | gt_mask = gt_mask.view(bsz, -1).long()
133 |
134 | return self.cross_entropy_loss(logit_mask, gt_mask)
135 |
136 | def train_mode(self):
137 | self.train()
138 | self.feature_extractor.eval()
139 |
140 |
141 | class DCAMA_model(nn.Module):
142 | def __init__(self, in_channels, stack_ids):
143 | super(DCAMA_model, self).__init__()
144 |
145 | self.stack_ids = stack_ids
146 |
147 | # DCAMA blocks
148 | self.DCAMA_blocks = nn.ModuleList()
149 | self.pe = nn.ModuleList()
150 | for inch in in_channels[1:]:
151 | self.DCAMA_blocks.append(MultiHeadedAttention(h=8, d_model=inch, dropout=0.5))
152 | self.pe.append(PositionalEncoding(d_model=inch, dropout=0.5))
153 |
154 | outch1, outch2, outch3 = 16, 64, 128
155 |
156 | # conv blocks
157 | self.conv1 = self.build_conv_block(stack_ids[3]-stack_ids[2], [outch1, outch2, outch3], [3, 3, 3], [1, 1, 1]) # 1/32
158 | self.conv2 = self.build_conv_block(stack_ids[2]-stack_ids[1], [outch1, outch2, outch3], [5, 3, 3], [1, 1, 1]) # 1/16
159 | self.conv3 = self.build_conv_block(stack_ids[1]-stack_ids[0], [outch1, outch2, outch3], [5, 5, 3], [1, 1, 1]) # 1/8
160 |
161 | self.conv4 = self.build_conv_block(outch3, [outch3, outch3, outch3], [3, 3, 3], [1, 1, 1]) # 1/32 + 1/16
162 | self.conv5 = self.build_conv_block(outch3, [outch3, outch3, outch3], [3, 3, 3], [1, 1, 1]) # 1/16 + 1/8
163 |
164 | # mixer blocks
165 | self.mixer1 = nn.Sequential(nn.Conv2d(outch3+2*in_channels[1]+2*in_channels[0], outch3, (3, 3), padding=(1, 1), bias=True),
166 | nn.ReLU(),
167 | nn.Conv2d(outch3, outch2, (3, 3), padding=(1, 1), bias=True),
168 | nn.ReLU())
169 |
170 | self.mixer2 = nn.Sequential(nn.Conv2d(outch2, outch2, (3, 3), padding=(1, 1), bias=True),
171 | nn.ReLU(),
172 | nn.Conv2d(outch2, outch1, (3, 3), padding=(1, 1), bias=True),
173 | nn.ReLU())
174 |
175 | self.mixer3 = nn.Sequential(nn.Conv2d(outch1, outch1, (3, 3), padding=(1, 1), bias=True),
176 | nn.ReLU(),
177 | nn.Conv2d(outch1, 2, (3, 3), padding=(1, 1), bias=True))
178 |
179 | def forward(self, query_feats, support_feats, support_mask, nshot=1):
180 | coarse_masks = []
181 | for idx, query_feat in enumerate(query_feats):
182 |             # 1/4-scale features are only used in the skip connection
183 | if idx < self.stack_ids[0]: continue
184 |
185 | bsz, ch, ha, wa = query_feat.size()
186 |
187 | # reshape the input feature and mask
188 | query = query_feat.view(bsz, ch, -1).permute(0, 2, 1).contiguous()
189 | if nshot == 1:
190 | support_feat = support_feats[idx]
191 | mask = F.interpolate(support_mask.unsqueeze(1).float(), support_feat.size()[2:], mode='bilinear',
192 | align_corners=True).view(support_feat.size()[0], -1)
193 | support_feat = support_feat.view(support_feat.size()[0], support_feat.size()[1], -1).permute(0, 2, 1).contiguous()
194 | else:
195 | support_feat = torch.stack([support_feats[k][idx] for k in range(nshot)])
196 | support_feat = support_feat.view(-1, ch, ha * wa).permute(0, 2, 1).contiguous()
197 | mask = torch.stack([F.interpolate(k.unsqueeze(1).float(), (ha, wa), mode='bilinear', align_corners=True)
198 | for k in support_mask])
199 | mask = mask.view(bsz, -1)
200 |
201 | # DCAMA blocks forward
202 | if idx < self.stack_ids[1]:
203 | coarse_mask = self.DCAMA_blocks[0](self.pe[0](query), self.pe[0](support_feat), mask)
204 | elif idx < self.stack_ids[2]:
205 | coarse_mask = self.DCAMA_blocks[1](self.pe[1](query), self.pe[1](support_feat), mask)
206 | else:
207 | coarse_mask = self.DCAMA_blocks[2](self.pe[2](query), self.pe[2](support_feat), mask)
208 | coarse_masks.append(coarse_mask.permute(0, 2, 1).contiguous().view(bsz, 1, ha, wa))
209 |
210 | # multi-scale conv blocks forward
211 | bsz, ch, ha, wa = coarse_masks[self.stack_ids[3]-1-self.stack_ids[0]].size()
212 | coarse_masks1 = torch.stack(coarse_masks[self.stack_ids[2]-self.stack_ids[0]:self.stack_ids[3]-self.stack_ids[0]]).transpose(0, 1).contiguous().view(bsz, -1, ha, wa)
213 | bsz, ch, ha, wa = coarse_masks[self.stack_ids[2]-1-self.stack_ids[0]].size()
214 | coarse_masks2 = torch.stack(coarse_masks[self.stack_ids[1]-self.stack_ids[0]:self.stack_ids[2]-self.stack_ids[0]]).transpose(0, 1).contiguous().view(bsz, -1, ha, wa)
215 | bsz, ch, ha, wa = coarse_masks[self.stack_ids[1]-1-self.stack_ids[0]].size()
216 | coarse_masks3 = torch.stack(coarse_masks[0:self.stack_ids[1]-self.stack_ids[0]]).transpose(0, 1).contiguous().view(bsz, -1, ha, wa)
217 |
218 | coarse_masks1 = self.conv1(coarse_masks1)
219 | coarse_masks2 = self.conv2(coarse_masks2)
220 | coarse_masks3 = self.conv3(coarse_masks3)
221 |
222 | # multi-scale cascade (pixel-wise addition)
223 | coarse_masks1 = F.interpolate(coarse_masks1, coarse_masks2.size()[-2:], mode='bilinear', align_corners=True)
224 | mix = coarse_masks1 + coarse_masks2
225 | mix = self.conv4(mix)
226 |
227 | mix = F.interpolate(mix, coarse_masks3.size()[-2:], mode='bilinear', align_corners=True)
228 | mix = mix + coarse_masks3
229 | mix = self.conv5(mix)
230 |
231 |         # skip-connect the 1/8- and 1/4-scale features (concatenation)
232 | if nshot == 1:
233 | support_feat = support_feats[self.stack_ids[1] - 1]
234 | else:
235 | support_feat = torch.stack([support_feats[k][self.stack_ids[1] - 1] for k in range(nshot)]).max(dim=0).values
236 | mix = torch.cat((mix, query_feats[self.stack_ids[1] - 1], support_feat), 1)
237 |
238 | upsample_size = (mix.size(-1) * 2,) * 2
239 | mix = F.interpolate(mix, upsample_size, mode='bilinear', align_corners=True)
240 | if nshot == 1:
241 | support_feat = support_feats[self.stack_ids[0] - 1]
242 | else:
243 | support_feat = torch.stack([support_feats[k][self.stack_ids[0] - 1] for k in range(nshot)]).max(dim=0).values
244 | mix = torch.cat((mix, query_feats[self.stack_ids[0] - 1], support_feat), 1)
245 |
246 | # mixer blocks forward
247 | out = self.mixer1(mix)
248 | upsample_size = (out.size(-1) * 2,) * 2
249 | out = F.interpolate(out, upsample_size, mode='bilinear', align_corners=True)
250 | out = self.mixer2(out)
251 | upsample_size = (out.size(-1) * 2,) * 2
252 | out = F.interpolate(out, upsample_size, mode='bilinear', align_corners=True)
253 | logit_mask = self.mixer3(out)
254 |
255 | return logit_mask
256 |
257 | def build_conv_block(self, in_channel, out_channels, kernel_sizes, spt_strides, group=4):
258 | r""" bulid conv blocks """
259 | assert len(out_channels) == len(kernel_sizes) == len(spt_strides)
260 |
261 | building_block_layers = []
262 | for idx, (outch, ksz, stride) in enumerate(zip(out_channels, kernel_sizes, spt_strides)):
263 | inch = in_channel if idx == 0 else out_channels[idx - 1]
264 | pad = ksz // 2
265 |
266 | building_block_layers.append(nn.Conv2d(in_channels=inch, out_channels=outch,
267 | kernel_size=ksz, stride=stride, padding=pad))
268 | building_block_layers.append(nn.GroupNorm(group, outch))
269 | building_block_layers.append(nn.ReLU(inplace=True))
270 |
271 | return nn.Sequential(*building_block_layers)
272 |
--------------------------------------------------------------------------------
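For orientation, a minimal 1-shot forward sketch for the DCAMA model above (the checkpoint path, batch size, and image size are illustrative assumptions; the repo itself drives this through train.py and test.py):

import torch
from model.DCAMA import DCAMA

# hypothetical path to a torchvision-format ResNet-50 state_dict
model = DCAMA(backbone='resnet50', pretrained_path='resnet50.pth', use_original_imgsize=False)
model.eval()

query_img = torch.randn(2, 3, 384, 384)                    # (bsz, 3, H, W)
support_img = torch.randn(2, 3, 384, 384)                  # one shot, same shape as the query
support_mask = torch.randint(0, 2, (2, 384, 384)).float()  # binary support mask

with torch.no_grad():
    logit_mask = model(query_img, support_img, support_mask)  # (bsz, 2, H, W) logits
    pred = logit_mask.argmax(dim=1)                           # (bsz, H, W) binary prediction

For k-shot evaluation, predict_mask_nshot expects the batch dict produced by the dataloaders (support_imgs of shape (bsz, k, 3, H, W)) and takes a per-shot max over the support features used in the skip connections.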
/data/splits/pascal/val/fold2.txt:
--------------------------------------------------------------------------------
1 | 2007_000129__15
2 | 2007_000323__15
3 | 2007_000332__13
4 | 2007_000346__15
5 | 2007_000762__11
6 | 2007_000762__15
7 | 2007_000783__13
8 | 2007_000783__15
9 | 2007_000799__13
10 | 2007_000799__15
11 | 2007_000830__11
12 | 2007_000847__11
13 | 2007_000847__15
14 | 2007_000999__15
15 | 2007_001175__15
16 | 2007_001239__12
17 | 2007_001284__15
18 | 2007_001311__15
19 | 2007_001408__15
20 | 2007_001423__15
21 | 2007_001430__11
22 | 2007_001430__15
23 | 2007_001526__15
24 | 2007_001585__15
25 | 2007_001586__13
26 | 2007_001586__15
27 | 2007_001594__15
28 | 2007_001630__15
29 | 2007_001677__11
30 | 2007_001678__15
31 | 2007_001717__15
32 | 2007_001763__12
33 | 2007_001955__13
34 | 2007_002046__13
35 | 2007_002119__15
36 | 2007_002260__14
37 | 2007_002268__12
38 | 2007_002378__15
39 | 2007_002426__15
40 | 2007_002539__15
41 | 2007_002565__15
42 | 2007_002597__12
43 | 2007_002624__11
44 | 2007_002624__15
45 | 2007_002643__15
46 | 2007_002728__15
47 | 2007_002823__14
48 | 2007_002823__15
49 | 2007_002824__15
50 | 2007_002852__12
51 | 2007_003011__11
52 | 2007_003020__15
53 | 2007_003022__13
54 | 2007_003022__15
55 | 2007_003088__15
56 | 2007_003106__15
57 | 2007_003110__12
58 | 2007_003134__15
59 | 2007_003188__15
60 | 2007_003194__12
61 | 2007_003367__14
62 | 2007_003367__15
63 | 2007_003373__12
64 | 2007_003373__15
65 | 2007_003530__15
66 | 2007_003621__15
67 | 2007_003742__11
68 | 2007_003742__15
69 | 2007_003872__12
70 | 2007_004033__14
71 | 2007_004033__15
72 | 2007_004112__12
73 | 2007_004112__15
74 | 2007_004121__15
75 | 2007_004189__12
76 | 2007_004275__14
77 | 2007_004275__15
78 | 2007_004281__15
79 | 2007_004380__14
80 | 2007_004380__15
81 | 2007_004392__15
82 | 2007_004405__11
83 | 2007_004538__13
84 | 2007_004538__15
85 | 2007_004644__12
86 | 2007_004712__11
87 | 2007_004712__15
88 | 2007_004722__13
89 | 2007_004722__15
90 | 2007_004902__13
91 | 2007_004902__15
92 | 2007_005114__13
93 | 2007_005114__15
94 | 2007_005149__12
95 | 2007_005173__14
96 | 2007_005173__15
97 | 2007_005281__15
98 | 2007_005304__15
99 | 2007_005331__13
100 | 2007_005331__15
101 | 2007_005354__14
102 | 2007_005354__15
103 | 2007_005509__15
104 | 2007_005547__15
105 | 2007_005608__14
106 | 2007_005608__15
107 | 2007_005696__12
108 | 2007_005759__14
109 | 2007_005803__11
110 | 2007_005844__11
111 | 2007_005845__15
112 | 2007_006028__15
113 | 2007_006076__15
114 | 2007_006086__11
115 | 2007_006117__15
116 | 2007_006171__12
117 | 2007_006171__15
118 | 2007_006241__11
119 | 2007_006364__13
120 | 2007_006364__15
121 | 2007_006373__15
122 | 2007_006444__12
123 | 2007_006444__15
124 | 2007_006560__15
125 | 2007_006647__14
126 | 2007_006647__15
127 | 2007_006698__15
128 | 2007_006802__15
129 | 2007_006841__15
130 | 2007_006864__15
131 | 2007_006866__13
132 | 2007_006866__15
133 | 2007_007007__11
134 | 2007_007007__15
135 | 2007_007109__13
136 | 2007_007109__15
137 | 2007_007195__15
138 | 2007_007203__15
139 | 2007_007211__14
140 | 2007_007235__15
141 | 2007_007417__12
142 | 2007_007493__15
143 | 2007_007498__11
144 | 2007_007498__15
145 | 2007_007651__11
146 | 2007_007651__15
147 | 2007_007688__14
148 | 2007_007748__13
149 | 2007_007748__15
150 | 2007_007795__15
151 | 2007_007810__11
152 | 2007_007810__15
153 | 2007_007815__15
154 | 2007_007836__15
155 | 2007_007849__15
156 | 2007_007996__15
157 | 2007_008110__15
158 | 2007_008204__15
159 | 2007_008222__12
160 | 2007_008256__13
161 | 2007_008256__15
162 | 2007_008260__12
163 | 2007_008374__15
164 | 2007_008415__12
165 | 2007_008430__15
166 | 2007_008596__13
167 | 2007_008596__15
168 | 2007_008708__15
169 | 2007_008802__13
170 | 2007_008897__15
171 | 2007_008944__15
172 | 2007_008964__12
173 | 2007_008964__15
174 | 2007_008980__12
175 | 2007_009068__15
176 | 2007_009084__12
177 | 2007_009084__14
178 | 2007_009251__13
179 | 2007_009251__15
180 | 2007_009258__15
181 | 2007_009320__15
182 | 2007_009331__12
183 | 2007_009331__13
184 | 2007_009331__15
185 | 2007_009413__11
186 | 2007_009413__15
187 | 2007_009521__11
188 | 2007_009562__12
189 | 2007_009592__12
190 | 2007_009654__15
191 | 2007_009655__15
192 | 2007_009684__15
193 | 2007_009687__15
194 | 2007_009691__14
195 | 2007_009691__15
196 | 2007_009706__11
197 | 2007_009750__15
198 | 2007_009756__14
199 | 2007_009756__15
200 | 2007_009841__13
201 | 2007_009938__14
202 | 2008_000080__12
203 | 2008_000213__15
204 | 2008_000215__15
205 | 2008_000223__15
206 | 2008_000233__15
207 | 2008_000234__15
208 | 2008_000239__12
209 | 2008_000270__12
210 | 2008_000270__15
211 | 2008_000271__15
212 | 2008_000359__15
213 | 2008_000474__15
214 | 2008_000510__15
215 | 2008_000573__11
216 | 2008_000573__15
217 | 2008_000602__13
218 | 2008_000630__15
219 | 2008_000661__12
220 | 2008_000661__15
221 | 2008_000662__15
222 | 2008_000666__15
223 | 2008_000673__15
224 | 2008_000700__15
225 | 2008_000725__15
226 | 2008_000731__15
227 | 2008_000763__11
228 | 2008_000763__15
229 | 2008_000765__13
230 | 2008_000782__14
231 | 2008_000795__15
232 | 2008_000811__14
233 | 2008_000811__15
234 | 2008_000863__12
235 | 2008_000943__12
236 | 2008_000992__15
237 | 2008_001013__15
238 | 2008_001028__15
239 | 2008_001070__12
240 | 2008_001074__15
241 | 2008_001076__15
242 | 2008_001150__14
243 | 2008_001170__15
244 | 2008_001231__15
245 | 2008_001249__15
246 | 2008_001283__15
247 | 2008_001308__15
248 | 2008_001379__12
249 | 2008_001404__15
250 | 2008_001478__12
251 | 2008_001491__15
252 | 2008_001504__15
253 | 2008_001531__15
254 | 2008_001547__15
255 | 2008_001629__15
256 | 2008_001682__13
257 | 2008_001821__15
258 | 2008_001874__15
259 | 2008_001895__12
260 | 2008_001895__15
261 | 2008_001992__13
262 | 2008_001992__15
263 | 2008_002212__15
264 | 2008_002239__12
265 | 2008_002240__14
266 | 2008_002241__15
267 | 2008_002379__11
268 | 2008_002383__14
269 | 2008_002495__15
270 | 2008_002536__12
271 | 2008_002588__15
272 | 2008_002775__11
273 | 2008_002775__15
274 | 2008_002835__13
275 | 2008_002835__15
276 | 2008_002859__12
277 | 2008_002864__11
278 | 2008_002864__15
279 | 2008_002904__12
280 | 2008_002929__15
281 | 2008_002936__12
282 | 2008_002942__15
283 | 2008_002958__12
284 | 2008_003034__15
285 | 2008_003076__15
286 | 2008_003108__15
287 | 2008_003141__15
288 | 2008_003210__15
289 | 2008_003238__12
290 | 2008_003238__15
291 | 2008_003330__15
292 | 2008_003333__14
293 | 2008_003333__15
294 | 2008_003379__13
295 | 2008_003451__14
296 | 2008_003451__15
297 | 2008_003461__13
298 | 2008_003461__15
299 | 2008_003477__11
300 | 2008_003492__15
301 | 2008_003511__12
302 | 2008_003511__15
303 | 2008_003546__15
304 | 2008_003576__12
305 | 2008_003676__15
306 | 2008_003733__15
307 | 2008_003782__13
308 | 2008_003856__15
309 | 2008_003874__15
310 | 2008_004101__15
311 | 2008_004140__11
312 | 2008_004140__15
313 | 2008_004175__13
314 | 2008_004345__14
315 | 2008_004396__13
316 | 2008_004399__14
317 | 2008_004399__15
318 | 2008_004575__11
319 | 2008_004575__15
320 | 2008_004624__13
321 | 2008_004654__15
322 | 2008_004687__13
323 | 2008_004705__13
324 | 2008_005049__14
325 | 2008_005089__15
326 | 2008_005145__11
327 | 2008_005197__12
328 | 2008_005197__15
329 | 2008_005245__14
330 | 2008_005245__15
331 | 2008_005399__15
332 | 2008_005422__14
333 | 2008_005445__15
334 | 2008_005525__13
335 | 2008_005637__14
336 | 2008_005642__13
337 | 2008_005691__13
338 | 2008_005738__15
339 | 2008_005812__15
340 | 2008_005915__14
341 | 2008_006008__11
342 | 2008_006036__13
343 | 2008_006108__11
344 | 2008_006108__15
345 | 2008_006130__12
346 | 2008_006216__15
347 | 2008_006219__13
348 | 2008_006254__15
349 | 2008_006275__15
350 | 2008_006341__15
351 | 2008_006408__11
352 | 2008_006408__15
353 | 2008_006526__14
354 | 2008_006526__15
355 | 2008_006554__15
356 | 2008_006722__12
357 | 2008_006722__15
358 | 2008_006874__14
359 | 2008_006874__15
360 | 2008_006981__12
361 | 2008_007048__11
362 | 2008_007219__15
363 | 2008_007378__11
364 | 2008_007378__12
365 | 2008_007392__13
366 | 2008_007392__15
367 | 2008_007402__11
368 | 2008_007402__15
369 | 2008_007513__12
370 | 2008_007737__15
371 | 2008_007828__15
372 | 2008_007945__13
373 | 2008_007994__15
374 | 2008_008051__11
375 | 2008_008127__14
376 | 2008_008127__15
377 | 2008_008221__15
378 | 2008_008335__11
379 | 2008_008335__15
380 | 2008_008362__11
381 | 2008_008362__15
382 | 2008_008392__13
383 | 2008_008393__13
384 | 2008_008421__13
385 | 2008_008469__15
386 | 2009_000012__13
387 | 2009_000074__14
388 | 2009_000074__15
389 | 2009_000156__12
390 | 2009_000219__15
391 | 2009_000309__15
392 | 2009_000412__13
393 | 2009_000418__15
394 | 2009_000421__15
395 | 2009_000457__15
396 | 2009_000704__15
397 | 2009_000705__13
398 | 2009_000727__13
399 | 2009_000730__14
400 | 2009_000730__15
401 | 2009_000825__14
402 | 2009_000825__15
403 | 2009_000839__12
404 | 2009_000892__12
405 | 2009_000931__13
406 | 2009_000935__12
407 | 2009_001215__11
408 | 2009_001215__15
409 | 2009_001299__15
410 | 2009_001433__13
411 | 2009_001433__15
412 | 2009_001535__12
413 | 2009_001663__15
414 | 2009_001687__12
415 | 2009_001687__15
416 | 2009_001718__15
417 | 2009_001768__15
418 | 2009_001854__15
419 | 2009_002012__12
420 | 2009_002042__15
421 | 2009_002097__13
422 | 2009_002155__12
423 | 2009_002165__13
424 | 2009_002185__15
425 | 2009_002239__14
426 | 2009_002239__15
427 | 2009_002317__14
428 | 2009_002317__15
429 | 2009_002346__12
430 | 2009_002346__15
431 | 2009_002372__15
432 | 2009_002382__14
433 | 2009_002382__15
434 | 2009_002415__11
435 | 2009_002445__12
436 | 2009_002487__11
437 | 2009_002539__12
438 | 2009_002571__11
439 | 2009_002584__15
440 | 2009_002649__15
441 | 2009_002651__14
442 | 2009_002651__15
443 | 2009_002732__15
444 | 2009_002975__13
445 | 2009_003003__11
446 | 2009_003003__15
447 | 2009_003063__12
448 | 2009_003065__15
449 | 2009_003071__11
450 | 2009_003071__15
451 | 2009_003123__11
452 | 2009_003196__14
453 | 2009_003217__12
454 | 2009_003241__12
455 | 2009_003269__15
456 | 2009_003323__13
457 | 2009_003323__15
458 | 2009_003466__12
459 | 2009_003481__13
460 | 2009_003494__15
461 | 2009_003507__11
462 | 2009_003576__14
463 | 2009_003576__15
464 | 2009_003756__12
465 | 2009_003804__13
466 | 2009_003810__12
467 | 2009_003849__11
468 | 2009_003849__15
469 | 2009_003903__13
470 | 2009_003928__12
471 | 2009_003991__11
472 | 2009_003991__15
473 | 2009_004033__12
474 | 2009_004043__14
475 | 2009_004043__15
476 | 2009_004140__11
477 | 2009_004221__15
478 | 2009_004455__14
479 | 2009_004497__13
480 | 2009_004507__12
481 | 2009_004507__15
482 | 2009_004581__12
483 | 2009_004592__12
484 | 2009_004738__14
485 | 2009_004738__15
486 | 2009_004848__15
487 | 2009_004859__11
488 | 2009_004859__15
489 | 2009_004942__13
490 | 2009_004987__14
491 | 2009_004987__15
492 | 2009_004994__12
493 | 2009_004994__15
494 | 2009_005038__11
495 | 2009_005038__15
496 | 2009_005078__14
497 | 2009_005087__15
498 | 2009_005217__13
499 | 2009_005217__15
500 | 2010_000003__12
501 | 2010_000038__13
502 | 2010_000038__15
503 | 2010_000087__14
504 | 2010_000087__15
505 | 2010_000110__12
506 | 2010_000110__15
507 | 2010_000159__12
508 | 2010_000174__11
509 | 2010_000174__15
510 | 2010_000216__12
511 | 2010_000238__15
512 | 2010_000256__15
513 | 2010_000422__12
514 | 2010_000530__15
515 | 2010_000559__15
516 | 2010_000639__12
517 | 2010_000666__13
518 | 2010_000666__15
519 | 2010_000738__15
520 | 2010_000788__12
521 | 2010_000874__13
522 | 2010_000904__12
523 | 2010_001024__15
524 | 2010_001124__12
525 | 2010_001251__14
526 | 2010_001264__12
527 | 2010_001313__14
528 | 2010_001313__15
529 | 2010_001367__15
530 | 2010_001376__12
531 | 2010_001451__13
532 | 2010_001553__14
533 | 2010_001563__12
534 | 2010_001563__15
535 | 2010_001579__11
536 | 2010_001579__15
537 | 2010_001692__15
538 | 2010_001699__15
539 | 2010_001734__15
540 | 2010_001767__15
541 | 2010_001851__11
542 | 2010_001908__12
543 | 2010_001956__12
544 | 2010_002017__15
545 | 2010_002137__15
546 | 2010_002161__13
547 | 2010_002161__15
548 | 2010_002228__12
549 | 2010_002251__14
550 | 2010_002251__15
551 | 2010_002271__14
552 | 2010_002336__11
553 | 2010_002396__14
554 | 2010_002396__15
555 | 2010_002480__12
556 | 2010_002623__15
557 | 2010_002691__13
558 | 2010_002763__15
559 | 2010_002792__15
560 | 2010_002902__15
561 | 2010_002929__15
562 | 2010_003014__15
563 | 2010_003060__12
564 | 2010_003187__12
565 | 2010_003207__14
566 | 2010_003239__15
567 | 2010_003325__11
568 | 2010_003325__15
569 | 2010_003381__15
570 | 2010_003409__15
571 | 2010_003446__15
572 | 2010_003506__12
573 | 2010_003531__11
574 | 2010_003532__13
575 | 2010_003597__11
576 | 2010_003597__15
577 | 2010_003746__12
578 | 2010_003746__15
579 | 2010_003947__14
580 | 2010_003971__11
581 | 2010_004042__14
582 | 2010_004165__12
583 | 2010_004165__15
584 | 2010_004219__14
585 | 2010_004219__15
586 | 2010_004337__15
587 | 2010_004355__14
588 | 2010_004432__15
589 | 2010_004472__15
590 | 2010_004479__15
591 | 2010_004519__13
592 | 2010_004550__12
593 | 2010_004559__15
594 | 2010_004628__12
595 | 2010_004697__14
596 | 2010_004697__15
597 | 2010_004795__12
598 | 2010_004815__15
599 | 2010_004825__11
600 | 2010_004828__15
601 | 2010_004856__13
602 | 2010_004941__14
603 | 2010_004951__15
604 | 2010_005046__11
605 | 2010_005046__15
606 | 2010_005118__15
607 | 2010_005159__12
608 | 2010_005160__14
609 | 2010_005166__15
610 | 2010_005174__13
611 | 2010_005206__12
612 | 2010_005245__12
613 | 2010_005245__15
614 | 2010_005252__14
615 | 2010_005252__15
616 | 2010_005284__15
617 | 2010_005366__14
618 | 2010_005433__14
619 | 2010_005501__14
620 | 2010_005575__12
621 | 2010_005582__15
622 | 2010_005606__15
623 | 2010_005626__11
624 | 2010_005626__15
625 | 2010_005644__12
626 | 2010_005709__15
627 | 2010_005871__15
628 | 2010_005991__12
629 | 2010_005991__15
630 | 2010_005992__12
631 | 2011_000045__12
632 | 2011_000051__15
633 | 2011_000054__15
634 | 2011_000178__15
635 | 2011_000226__11
636 | 2011_000248__15
637 | 2011_000338__11
638 | 2011_000396__13
639 | 2011_000435__15
640 | 2011_000438__15
641 | 2011_000455__14
642 | 2011_000455__15
643 | 2011_000479__15
644 | 2011_000512__14
645 | 2011_000526__13
646 | 2011_000536__12
647 | 2011_000566__15
648 | 2011_000585__15
649 | 2011_000598__11
650 | 2011_000618__14
651 | 2011_000618__15
652 | 2011_000638__15
653 | 2011_000780__15
654 | 2011_000809__11
655 | 2011_000809__15
656 | 2011_000843__15
657 | 2011_000953__11
658 | 2011_000953__15
659 | 2011_001014__12
660 | 2011_001060__15
661 | 2011_001069__15
662 | 2011_001071__15
663 | 2011_001159__15
664 | 2011_001276__11
665 | 2011_001276__12
666 | 2011_001276__15
667 | 2011_001346__15
668 | 2011_001416__15
669 | 2011_001447__15
670 | 2011_001530__15
671 | 2011_001567__15
672 | 2011_001619__15
673 | 2011_001642__12
674 | 2011_001665__11
675 | 2011_001674__15
676 | 2011_001714__12
677 | 2011_001714__15
678 | 2011_001722__13
679 | 2011_001745__12
680 | 2011_001794__15
681 | 2011_001862__11
682 | 2011_001862__12
683 | 2011_001868__12
684 | 2011_001984__12
685 | 2011_001988__15
686 | 2011_002002__15
687 | 2011_002040__12
688 | 2011_002075__11
689 | 2011_002075__15
690 | 2011_002098__12
691 | 2011_002110__12
692 | 2011_002110__15
693 | 2011_002121__12
694 | 2011_002124__15
695 | 2011_002156__12
696 | 2011_002200__11
697 | 2011_002200__15
698 | 2011_002247__15
699 | 2011_002279__12
700 | 2011_002298__12
701 | 2011_002308__15
702 | 2011_002317__15
703 | 2011_002322__14
704 | 2011_002322__15
705 | 2011_002343__15
706 | 2011_002358__11
707 | 2011_002358__15
708 | 2011_002371__12
709 | 2011_002498__15
710 | 2011_002509__15
711 | 2011_002532__15
712 | 2011_002575__15
713 | 2011_002578__15
714 | 2011_002589__12
715 | 2011_002623__15
716 | 2011_002641__15
717 | 2011_002675__15
718 | 2011_002951__13
719 | 2011_002997__15
720 | 2011_003019__14
721 | 2011_003019__15
722 | 2011_003085__13
723 | 2011_003114__15
724 | 2011_003240__15
725 | 2011_003256__12
726 |
--------------------------------------------------------------------------------
/model/base/swin_transformer.py:
--------------------------------------------------------------------------------
1 | # --------------------------------------------------------
2 | # Swin Transformer
3 | # Copyright (c) 2021 Microsoft
4 | # Licensed under The MIT License [see LICENSE for details]
5 | # Written by Ze Liu
6 | # --------------------------------------------------------
7 |
8 | import torch
9 | import torch.nn as nn
10 | import torch.utils.checkpoint as checkpoint
11 | from timm.models.layers import DropPath, to_2tuple, trunc_normal_
12 |
13 |
14 | class Mlp(nn.Module):
15 | def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
16 | super().__init__()
17 | out_features = out_features or in_features
18 | hidden_features = hidden_features or in_features
19 | self.fc1 = nn.Linear(in_features, hidden_features)
20 | self.act = act_layer()
21 | self.fc2 = nn.Linear(hidden_features, out_features)
22 | self.drop = nn.Dropout(drop)
23 |
24 | def forward(self, x):
25 | x = self.fc1(x)
26 | x = self.act(x)
27 | x = self.drop(x)
28 | x = self.fc2(x)
29 | x = self.drop(x)
30 | return x
31 |
32 |
33 | def window_partition(x, window_size):
34 | """
35 | Args:
36 | x: (B, H, W, C)
37 | window_size (int): window size
38 |
39 | Returns:
40 | windows: (num_windows*B, window_size, window_size, C)
41 | """
42 | B, H, W, C = x.shape
43 | x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
44 | windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C)
45 | return windows
46 |
47 |
48 | def window_reverse(windows, window_size, H, W):
49 | """
50 | Args:
51 | windows: (num_windows*B, window_size, window_size, C)
52 | window_size (int): Window size
53 | H (int): Height of image
54 | W (int): Width of image
55 |
56 | Returns:
57 | x: (B, H, W, C)
58 | """
59 | B = int(windows.shape[0] / (H * W / window_size / window_size))
60 | x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
61 | x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)
62 | return x
63 |
64 |
65 | class WindowAttention(nn.Module):
66 | r""" Window based multi-head self attention (W-MSA) module with relative position bias.
67 |     It supports both shifted and non-shifted windows.
68 |
69 | Args:
70 | dim (int): Number of input channels.
71 | window_size (tuple[int]): The height and width of the window.
72 | num_heads (int): Number of attention heads.
73 | qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
74 | qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set
75 | attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
76 | proj_drop (float, optional): Dropout ratio of output. Default: 0.0
77 | """
78 |
79 | def __init__(self, dim, window_size, num_heads, qkv_bias=True, qk_scale=None, attn_drop=0., proj_drop=0.):
80 |
81 | super().__init__()
82 | self.dim = dim
83 | self.window_size = window_size # Wh, Ww
84 | self.num_heads = num_heads
85 | head_dim = dim // num_heads
86 | self.scale = qk_scale or head_dim ** -0.5
87 |
88 | # define a parameter table of relative position bias
89 | self.relative_position_bias_table = nn.Parameter(
90 | torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads)) # 2*Wh-1 * 2*Ww-1, nH
91 |
92 | # get pair-wise relative position index for each token inside the window
93 | coords_h = torch.arange(self.window_size[0])
94 | coords_w = torch.arange(self.window_size[1])
95 | coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww
96 | coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww
97 | relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww
98 | relative_coords = relative_coords.permute(1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2
99 | relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0
100 | relative_coords[:, :, 1] += self.window_size[1] - 1
101 | relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1
102 | relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww
103 | self.register_buffer("relative_position_index", relative_position_index)
104 |
105 | self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
106 | self.attn_drop = nn.Dropout(attn_drop)
107 | self.proj = nn.Linear(dim, dim)
108 | self.proj_drop = nn.Dropout(proj_drop)
109 |
110 | trunc_normal_(self.relative_position_bias_table, std=.02)
111 | self.softmax = nn.Softmax(dim=-1)
112 |
113 | def forward(self, x, mask=None):
114 | """
115 | Args:
116 | x: input features with shape of (num_windows*B, N, C)
117 | mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
118 | """
119 | B_, N, C = x.shape
120 | qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)
121 | q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple)
122 |
123 | q = q * self.scale
124 | attn = (q @ k.transpose(-2, -1))
125 |
126 | relative_position_bias = self.relative_position_bias_table[self.relative_position_index.view(-1)].view(
127 | self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1) # Wh*Ww,Wh*Ww,nH
128 | relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww
129 | attn = attn + relative_position_bias.unsqueeze(0)
130 |
131 | if mask is not None:
132 | nW = mask.shape[0]
133 | attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)
134 | attn = attn.view(-1, self.num_heads, N, N)
135 | attn = self.softmax(attn)
136 | else:
137 | attn = self.softmax(attn)
138 |
139 | attn = self.attn_drop(attn)
140 |
141 | x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
142 | x = self.proj(x)
143 | x = self.proj_drop(x)
144 | return x
145 |
146 | def extra_repr(self) -> str:
147 | return f'dim={self.dim}, window_size={self.window_size}, num_heads={self.num_heads}'
148 |
149 | def flops(self, N):
150 | # calculate flops for 1 window with token length of N
151 | flops = 0
152 | # qkv = self.qkv(x)
153 | flops += N * self.dim * 3 * self.dim
154 | # attn = (q @ k.transpose(-2, -1))
155 | flops += self.num_heads * N * (self.dim // self.num_heads) * N
156 | # x = (attn @ v)
157 | flops += self.num_heads * N * N * (self.dim // self.num_heads)
158 | # x = self.proj(x)
159 | flops += N * self.dim * self.dim
160 | return flops
161 |
162 |
163 | class SwinTransformerBlock(nn.Module):
164 | r""" Swin Transformer Block.
165 |
166 | Args:
167 | dim (int): Number of input channels.
168 |         input_resolution (tuple[int]): Input resolution.
169 | num_heads (int): Number of attention heads.
170 | window_size (int): Window size.
171 | shift_size (int): Shift size for SW-MSA.
172 | mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
173 | qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
174 | qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
175 | drop (float, optional): Dropout rate. Default: 0.0
176 | attn_drop (float, optional): Attention dropout rate. Default: 0.0
177 | drop_path (float, optional): Stochastic depth rate. Default: 0.0
178 | act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
179 | norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
180 | """
181 |
182 | def __init__(self, dim, input_resolution, num_heads, window_size=7, shift_size=0,
183 | mlp_ratio=4., qkv_bias=True, qk_scale=None, drop=0., attn_drop=0., drop_path=0.,
184 | act_layer=nn.GELU, norm_layer=nn.LayerNorm):
185 | super().__init__()
186 | self.dim = dim
187 | self.input_resolution = input_resolution
188 | self.num_heads = num_heads
189 | self.window_size = window_size
190 | self.shift_size = shift_size
191 | self.mlp_ratio = mlp_ratio
192 | if min(self.input_resolution) <= self.window_size:
193 | # if window size is larger than input resolution, we don't partition windows
194 | self.shift_size = 0
195 | self.window_size = min(self.input_resolution)
196 |         assert 0 <= self.shift_size < self.window_size, "shift_size must be in [0, window_size)"
197 |
198 | self.norm1 = norm_layer(dim)
199 | self.attn = WindowAttention(
200 | dim, window_size=to_2tuple(self.window_size), num_heads=num_heads,
201 | qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
202 |
203 | self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
204 | self.norm2 = norm_layer(dim)
205 | mlp_hidden_dim = int(dim * mlp_ratio)
206 | self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
207 |
208 | if self.shift_size > 0:
209 | # calculate attention mask for SW-MSA
210 | H, W = self.input_resolution
211 | img_mask = torch.zeros((1, H, W, 1)) # 1 H W 1
212 | h_slices = (slice(0, -self.window_size),
213 | slice(-self.window_size, -self.shift_size),
214 | slice(-self.shift_size, None))
215 | w_slices = (slice(0, -self.window_size),
216 | slice(-self.window_size, -self.shift_size),
217 | slice(-self.shift_size, None))
218 | cnt = 0
219 | for h in h_slices:
220 | for w in w_slices:
221 | img_mask[:, h, w, :] = cnt
222 | cnt += 1
223 |
224 | mask_windows = window_partition(img_mask, self.window_size) # nW, window_size, window_size, 1
225 | mask_windows = mask_windows.view(-1, self.window_size * self.window_size)
226 | attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
227 | attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0))
228 | else:
229 | attn_mask = None
230 |
231 | self.register_buffer("attn_mask", attn_mask)
232 |
233 | def forward(self, x):
234 | H, W = self.input_resolution
235 | B, L, C = x.shape
236 | assert L == H * W, "input feature has wrong size"
237 |
238 | shortcut = x
239 | x = self.norm1(x)
240 | x = x.view(B, H, W, C)
241 |
242 | # cyclic shift
243 | if self.shift_size > 0:
244 | shifted_x = torch.roll(x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2))
245 | else:
246 | shifted_x = x
247 |
248 | # partition windows
249 | x_windows = window_partition(shifted_x, self.window_size) # nW*B, window_size, window_size, C
250 | x_windows = x_windows.view(-1, self.window_size * self.window_size, C) # nW*B, window_size*window_size, C
251 |
252 | # W-MSA/SW-MSA
253 | attn_windows = self.attn(x_windows, mask=self.attn_mask) # nW*B, window_size*window_size, C
254 |
255 | # merge windows
256 | attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
257 | shifted_x = window_reverse(attn_windows, self.window_size, H, W) # B H' W' C
258 |
259 | # reverse cyclic shift
260 | if self.shift_size > 0:
261 | x = torch.roll(shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2))
262 | else:
263 | x = shifted_x
264 | x = x.view(B, H * W, C)
265 |
266 | # FFN
267 | x = shortcut + self.drop_path(x)
268 | x = x + self.drop_path(self.mlp(self.norm2(x)))
269 |
270 | return x
271 |
272 | def extra_repr(self) -> str:
273 | return f"dim={self.dim}, input_resolution={self.input_resolution}, num_heads={self.num_heads}, " \
274 | f"window_size={self.window_size}, shift_size={self.shift_size}, mlp_ratio={self.mlp_ratio}"
275 |
276 | def flops(self):
277 | flops = 0
278 | H, W = self.input_resolution
279 | # norm1
280 | flops += self.dim * H * W
281 | # W-MSA/SW-MSA
282 | nW = H * W / self.window_size / self.window_size
283 | flops += nW * self.attn.flops(self.window_size * self.window_size)
284 | # mlp
285 | flops += 2 * H * W * self.dim * self.dim * self.mlp_ratio
286 | # norm2
287 | flops += self.dim * H * W
288 | return flops
289 |
290 |
291 | class PatchMerging(nn.Module):
292 | r""" Patch Merging Layer.
293 |
294 | Args:
295 | input_resolution (tuple[int]): Resolution of input feature.
296 | dim (int): Number of input channels.
297 | norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
298 | """
299 |
300 | def __init__(self, input_resolution, dim, norm_layer=nn.LayerNorm):
301 | super().__init__()
302 | self.input_resolution = input_resolution
303 | self.dim = dim
304 | self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
305 | self.norm = norm_layer(4 * dim)
306 |
307 | def forward(self, x):
308 | """
309 | x: B, H*W, C
310 | """
311 | H, W = self.input_resolution
312 | B, L, C = x.shape
313 | assert L == H * W, "input feature has wrong size"
314 |         assert H % 2 == 0 and W % 2 == 0, f"x size ({H}*{W}) is not even."
315 |
316 | x = x.view(B, H, W, C)
317 |
318 | x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C
319 | x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C
320 | x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C
321 | x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C
322 | x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C
323 | x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C
324 |
325 | x = self.norm(x)
326 | x = self.reduction(x)
327 |
328 | return x
329 |
330 | def extra_repr(self) -> str:
331 | return f"input_resolution={self.input_resolution}, dim={self.dim}"
332 |
333 | def flops(self):
334 | H, W = self.input_resolution
335 | flops = H * W * self.dim
336 | flops += (H // 2) * (W // 2) * 4 * self.dim * 2 * self.dim
337 | return flops
338 |
339 |
340 | class BasicLayer(nn.Module):
341 | """ A basic Swin Transformer layer for one stage.
342 |
343 | Args:
344 | dim (int): Number of input channels.
345 | input_resolution (tuple[int]): Input resolution.
346 | depth (int): Number of blocks.
347 | num_heads (int): Number of attention heads.
348 | window_size (int): Local window size.
349 | mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
350 | qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
351 | qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
352 | drop (float, optional): Dropout rate. Default: 0.0
353 | attn_drop (float, optional): Attention dropout rate. Default: 0.0
354 | drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0
355 | norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
356 | downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None
357 | use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False.
358 | """
359 |
360 | def __init__(self, dim, input_resolution, depth, num_heads, window_size,
361 | mlp_ratio=4., qkv_bias=True, qk_scale=None, drop=0., attn_drop=0.,
362 | drop_path=0., norm_layer=nn.LayerNorm, downsample=None, use_checkpoint=False):
363 |
364 | super().__init__()
365 | self.dim = dim
366 | self.input_resolution = input_resolution
367 | self.depth = depth
368 | self.use_checkpoint = use_checkpoint
369 |
370 | # build blocks
371 | self.blocks = nn.ModuleList([
372 | SwinTransformerBlock(dim=dim, input_resolution=input_resolution,
373 | num_heads=num_heads, window_size=window_size,
374 | shift_size=0 if (i % 2 == 0) else window_size // 2,
375 | mlp_ratio=mlp_ratio,
376 | qkv_bias=qkv_bias, qk_scale=qk_scale,
377 | drop=drop, attn_drop=attn_drop,
378 | drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,
379 | norm_layer=norm_layer)
380 | for i in range(depth)])
381 |
382 | # patch merging layer
383 | if downsample is not None:
384 | self.downsample = downsample(input_resolution, dim=dim, norm_layer=norm_layer)
385 | else:
386 | self.downsample = None
387 |
388 | def forward(self, x):
389 | feats = []
390 | for blk in self.blocks:
391 | if self.use_checkpoint:
392 | x = checkpoint.checkpoint(blk, x)
393 | else:
394 | x = blk(x)
395 | feats.append(x.clone().detach())
396 | if self.downsample is not None:
397 | x = self.downsample(x)
398 | return feats, x
399 |
400 | def extra_repr(self) -> str:
401 | return f"dim={self.dim}, input_resolution={self.input_resolution}, depth={self.depth}"
402 |
403 | def flops(self):
404 | flops = 0
405 | for blk in self.blocks:
406 | flops += blk.flops()
407 | if self.downsample is not None:
408 | flops += self.downsample.flops()
409 | return flops
410 |
411 |
412 | class PatchEmbed(nn.Module):
413 | r""" Image to Patch Embedding
414 |
415 | Args:
416 | img_size (int): Image size. Default: 224.
417 | patch_size (int): Patch token size. Default: 4.
418 | in_chans (int): Number of input image channels. Default: 3.
419 | embed_dim (int): Number of linear projection output channels. Default: 96.
420 | norm_layer (nn.Module, optional): Normalization layer. Default: None
421 | """
422 |
423 | def __init__(self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None):
424 | super().__init__()
425 | img_size = to_2tuple(img_size)
426 | patch_size = to_2tuple(patch_size)
427 | patches_resolution = [img_size[0] // patch_size[0], img_size[1] // patch_size[1]]
428 | self.img_size = img_size
429 | self.patch_size = patch_size
430 | self.patches_resolution = patches_resolution
431 | self.num_patches = patches_resolution[0] * patches_resolution[1]
432 |
433 | self.in_chans = in_chans
434 | self.embed_dim = embed_dim
435 |
436 | self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size)
437 | if norm_layer is not None:
438 | self.norm = norm_layer(embed_dim)
439 | else:
440 | self.norm = None
441 |
442 | def forward(self, x):
443 | B, C, H, W = x.shape
444 | # FIXME look at relaxing size constraints
445 | assert H == self.img_size[0] and W == self.img_size[1], \
446 | f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})."
447 | x = self.proj(x).flatten(2).transpose(1, 2) # B Ph*Pw C
448 | if self.norm is not None:
449 | x = self.norm(x)
450 | return x
451 |
452 | def flops(self):
453 | Ho, Wo = self.patches_resolution
454 | flops = Ho * Wo * self.embed_dim * self.in_chans * (self.patch_size[0] * self.patch_size[1])
455 | if self.norm is not None:
456 | flops += Ho * Wo * self.embed_dim
457 | return flops
458 |
459 |
460 | class SwinTransformer(nn.Module):
461 | r""" Swin Transformer
462 |     A PyTorch impl of: `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` -
463 | https://arxiv.org/pdf/2103.14030
464 |
465 | Args:
466 | img_size (int | tuple(int)): Input image size. Default 224
467 | patch_size (int | tuple(int)): Patch size. Default: 4
468 | in_chans (int): Number of input image channels. Default: 3
469 | num_classes (int): Number of classes for classification head. Default: 1000
470 | embed_dim (int): Patch embedding dimension. Default: 96
471 | depths (tuple(int)): Depth of each Swin Transformer layer.
472 | num_heads (tuple(int)): Number of attention heads in different layers.
473 | window_size (int): Window size. Default: 7
474 | mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4
475 | qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
476 | qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. Default: None
477 | drop_rate (float): Dropout rate. Default: 0
478 | attn_drop_rate (float): Attention dropout rate. Default: 0
479 | drop_path_rate (float): Stochastic depth rate. Default: 0.1
480 | norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm.
481 | ape (bool): If True, add absolute position embedding to the patch embedding. Default: False
482 | patch_norm (bool): If True, add normalization after patch embedding. Default: True
483 | use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False
484 | """
485 |
486 | def __init__(self, img_size=224, patch_size=4, in_chans=3, num_classes=1000,
487 | embed_dim=96, depths=[2, 2, 6, 2], num_heads=[3, 6, 12, 24],
488 | window_size=7, mlp_ratio=4., qkv_bias=True, qk_scale=None,
489 | drop_rate=0., attn_drop_rate=0., drop_path_rate=0.1,
490 | norm_layer=nn.LayerNorm, ape=False, patch_norm=True,
491 | use_checkpoint=False, feat_ids=[1, 2, 3, 4], **kwargs):
492 | super().__init__()
493 |
494 | self.num_classes = num_classes
495 | self.num_layers = len(depths)
496 | self.embed_dim = embed_dim
497 | self.ape = ape
498 | self.patch_norm = patch_norm
499 | self.num_features = int(embed_dim * 2 ** (self.num_layers - 1))
500 | self.mlp_ratio = mlp_ratio
501 |
502 | # split image into non-overlapping patches
503 | self.patch_embed = PatchEmbed(
504 | img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim,
505 | norm_layer=norm_layer if self.patch_norm else None)
506 | num_patches = self.patch_embed.num_patches
507 | patches_resolution = self.patch_embed.patches_resolution
508 | self.patches_resolution = patches_resolution
509 |
510 | # absolute position embedding
511 | if self.ape:
512 | self.absolute_pos_embed = nn.Parameter(torch.zeros(1, num_patches, embed_dim))
513 | trunc_normal_(self.absolute_pos_embed, std=.02)
514 |
515 | self.pos_drop = nn.Dropout(p=drop_rate)
516 |
517 | # stochastic depth
518 | dpr = [x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))] # stochastic depth decay rule
519 |
520 | # build layers
521 | self.layers = nn.ModuleList()
522 | for i_layer in range(self.num_layers):
523 | layer = BasicLayer(dim=int(embed_dim * 2 ** i_layer),
524 | input_resolution=(patches_resolution[0] // (2 ** i_layer),
525 | patches_resolution[1] // (2 ** i_layer)),
526 | depth=depths[i_layer],
527 | num_heads=num_heads[i_layer],
528 | window_size=window_size,
529 | mlp_ratio=self.mlp_ratio,
530 | qkv_bias=qkv_bias, qk_scale=qk_scale,
531 | drop=drop_rate, attn_drop=attn_drop_rate,
532 | drop_path=dpr[sum(depths[:i_layer]):sum(depths[:i_layer + 1])],
533 | norm_layer=norm_layer,
534 | downsample=PatchMerging if (i_layer < self.num_layers - 1) else None,
535 | use_checkpoint=use_checkpoint)
536 | self.layers.append(layer)
537 |
538 | self.norm = norm_layer(self.num_features)
539 | self.avgpool = nn.AdaptiveAvgPool1d(1)
540 | self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()
541 | self.feat_ids = feat_ids
542 |
543 | self.apply(self._init_weights)
544 |
545 | def _init_weights(self, m):
546 | if isinstance(m, nn.Linear):
547 | trunc_normal_(m.weight, std=.02)
548 | if isinstance(m, nn.Linear) and m.bias is not None:
549 | nn.init.constant_(m.bias, 0)
550 | elif isinstance(m, nn.LayerNorm):
551 | nn.init.constant_(m.bias, 0)
552 | nn.init.constant_(m.weight, 1.0)
553 |
554 | @torch.jit.ignore
555 | def no_weight_decay(self):
556 | return {'absolute_pos_embed'}
557 |
558 | @torch.jit.ignore
559 | def no_weight_decay_keywords(self):
560 | return {'relative_position_bias_table'}
561 |
562 | def forward_features(self, x):
563 | x = self.patch_embed(x)
564 | if self.ape:
565 | x = x + self.absolute_pos_embed
566 | x = self.pos_drop(x)
567 |
568 | self.feat_maps = []
569 | for i, layer in enumerate(self.layers):
570 | feats, x = layer(x)
571 | if i+1 in self.feat_ids:
572 | self.feat_maps += feats
573 |
574 | x = self.norm(x) # B L C
575 | x = self.avgpool(x.transpose(1, 2)) # B C 1
576 | x = torch.flatten(x, 1)
577 | return x
578 |
579 | def forward(self, x):
580 | x = self.forward_features(x)
581 | x = self.head(x)
582 | return x
583 |
584 | def flops(self):
585 | flops = 0
586 | flops += self.patch_embed.flops()
587 | for i, layer in enumerate(self.layers):
588 | flops += layer.flops()
589 | flops += self.num_features * self.patches_resolution[0] * self.patches_resolution[1] // (2 ** self.num_layers)
590 | flops += self.num_features * self.num_classes
591 | return flops
592 |
593 |
594 | if __name__ == '__main__':
595 |     device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
596 |     x = torch.randn(2, 3, 384, 384).to(device)
597 | 
598 |     net = SwinTransformer(img_size=384, patch_size=4, window_size=12, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32))
599 |     net.load_state_dict(torch.load("/apdcephfs/share_1290796/shixinyu/checkpoints/swin_base_patch4_window12_384_22kto1k.pth", map_location='cpu')['model'])
600 |     net.to(device)
601 | 
602 |     out = net.forward_features(x)
603 |     feats = net.feat_maps
604 |     for feat in feats:
605 |         print(feat.shape)
606 |
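607 | # With the Swin-B/384 config above and the default feat_ids=[1, 2, 3, 4], feat_maps
608 | # holds one tensor per block (2 + 2 + 18 + 2 = 24 in total); features are recorded
609 | # before each stage's PatchMerging downsample, so the loop should print
610 | #   torch.Size([2, 9216, 128])  x 2    (96x96 tokens)
611 | #   torch.Size([2, 2304, 256])  x 2    (48x48 tokens)
612 | #   torch.Size([2, 576, 512])   x 18   (24x24 tokens)
613 | #   torch.Size([2, 144, 1024])  x 2    (12x12 tokens)
614 | #
615 | # Quick sanity-check sketch for the window helpers: window_reverse inverts
616 | # window_partition whenever H and W are divisible by window_size, e.g.
617 | #   y = torch.randn(2, 96, 96, 128)
618 | #   assert torch.equal(window_reverse(window_partition(y, 12), 12, 96, 96), y)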
--------------------------------------------------------------------------------
/data/splits/pascal/trn/fold3.txt:
--------------------------------------------------------------------------------
1 | 2007_001149__16
2 | 2007_001420__16
3 | 2007_002361__16
4 | 2007_002967__16
5 | 2007_003189__16
6 | 2007_003778__16
7 | 2007_004081__16
8 | 2007_004707__16
9 | 2007_004948__16
10 | 2007_006303__16
11 | 2007_006605__16
12 | 2007_007890__16
13 | 2007_008043__16
14 | 2007_008140__16
15 | 2007_008821__16
16 | 2007_009630__16
17 | 2008_000188__16
18 | 2008_000196__16
19 | 2008_000274__16
20 | 2008_000287__16
21 | 2008_000491__16
22 | 2008_000564__16
23 | 2008_000790__16
24 | 2008_000841__16
25 | 2008_000857__16
26 | 2008_000916__16
27 | 2008_000923__16
28 | 2008_000960__16
29 | 2008_001133__16
30 | 2008_001451__16
31 | 2008_001460__16
32 | 2008_001467__16
33 | 2008_001784__16
34 | 2008_001862__16
35 | 2008_001865__16
36 | 2008_002026__16
37 | 2008_002191__16
38 | 2008_002317__16
39 | 2008_002653__16
40 | 2008_003225__16
41 | 2008_003320__16
42 | 2008_003434__16
43 | 2008_003466__16
44 | 2008_003523__16
45 | 2008_003547__16
46 | 2008_003636__16
47 | 2008_003665__16
48 | 2008_003726__16
49 | 2008_003767__16
50 | 2008_003768__16
51 | 2008_003815__16
52 | 2008_003838__16
53 | 2008_003978__16
54 | 2008_004002__16
55 | 2008_004003__16
56 | 2008_004092__16
57 | 2008_004171__16
58 | 2008_004380__16
59 | 2008_004428__16
60 | 2008_004435__16
61 | 2008_004497__16
62 | 2008_004615__16
63 | 2008_004619__16
64 | 2008_004634__16
65 | 2008_004661__16
66 | 2008_004756__16
67 | 2008_004977__16
68 | 2008_005001__16
69 | 2008_005040__16
70 | 2008_005042__16
71 | 2008_005111__16
72 | 2008_005146__16
73 | 2008_005214__16
74 | 2008_005345__16
75 | 2008_005417__16
76 | 2008_005501__16
77 | 2008_005511__16
78 | 2008_005519__16
79 | 2008_005608__16
80 | 2008_005794__16
81 | 2008_005798__16
82 | 2008_005847__16
83 | 2008_005874__16
84 | 2008_005897__16
85 | 2008_005914__16
86 | 2008_005954__16
87 | 2008_006068__16
88 | 2008_006112__16
89 | 2008_006203__16
90 | 2008_006207__16
91 | 2008_006262__16
92 | 2008_006295__16
93 | 2008_006337__16
94 | 2008_006441__16
95 | 2008_006524__16
96 | 2008_006534__16
97 | 2008_006543__16
98 | 2008_006562__16
99 | 2008_006751__16
100 | 2008_006796__16
101 | 2008_006807__16
102 | 2008_006816__16
103 | 2008_006828__16
104 | 2008_006864__16
105 | 2008_006881__16
106 | 2008_006950__16
107 | 2008_007042__16
108 | 2008_007108__16
109 | 2008_007223__16
110 | 2008_007226__16
111 | 2008_007281__16
112 | 2008_007388__16
113 | 2008_007461__16
114 | 2008_007525__16
115 | 2008_007621__16
116 | 2008_007701__16
117 | 2008_007823__16
118 | 2008_007831__16
119 | 2008_007835__16
120 | 2008_007973__16
121 | 2008_007977__16
122 | 2008_008024__16
123 | 2008_008070__16
124 | 2008_008096__16
125 | 2008_008184__16
126 | 2008_008208__16
127 | 2008_008235__16
128 | 2008_008237__16
129 | 2008_008310__16
130 | 2008_008330__16
131 | 2008_008331__16
132 | 2008_008341__16
133 | 2008_008363__16
134 | 2008_008517__16
135 | 2008_008531__16
136 | 2008_008608__16
137 | 2008_008621__16
138 | 2008_008641__16
139 | 2008_008689__16
140 | 2009_000158__16
141 | 2009_000198__16
142 | 2009_000297__16
143 | 2009_000419__16
144 | 2009_000526__16
145 | 2009_000590__16
146 | 2009_000624__16
147 | 2009_000635__16
148 | 2009_000760__16
149 | 2009_000867__16
150 | 2009_000926__16
151 | 2009_001085__16
152 | 2009_001100__16
153 | 2009_001137__16
154 | 2009_001229__16
155 | 2009_001249__16
156 | 2009_001417__16
157 | 2009_001440__16
158 | 2009_001514__16
159 | 2009_001627__16
160 | 2009_001667__16
161 | 2009_001704__16
162 | 2009_001806__16
163 | 2009_001864__16
164 | 2009_001888__16
165 | 2009_001922__16
166 | 2009_001934__16
167 | 2009_001975__16
168 | 2009_002088__16
169 | 2009_002123__16
170 | 2009_002386__16
171 | 2009_002433__16
172 | 2009_002444__16
173 | 2009_002628__16
174 | 2009_002670__16
175 | 2009_002688__16
176 | 2009_002698__16
177 | 2009_002741__16
178 | 2009_002755__16
179 | 2009_002817__16
180 | 2009_002935__16
181 | 2009_002952__16
182 | 2009_003039__16
183 | 2009_003074__16
184 | 2009_003077__16
185 | 2009_003238__16
186 | 2009_003288__16
187 | 2009_003301__16
188 | 2009_003351__16
189 | 2009_003384__16
190 | 2009_003440__16
191 | 2009_003476__16
192 | 2009_003642__16
193 | 2009_003654__16
194 | 2009_003677__16
195 | 2009_003679__16
196 | 2009_003697__16
197 | 2009_003815__16
198 | 2009_003888__16
199 | 2009_003929__16
200 | 2009_004025__16
201 | 2009_004050__16
202 | 2009_004051__16
203 | 2009_004093__16
204 | 2009_004191__16
205 | 2009_004263__16
206 | 2009_004274__16
207 | 2009_004283__16
208 | 2009_004394__16
209 | 2009_004404__16
210 | 2009_004426__16
211 | 2009_004438__16
212 | 2009_004514__16
213 | 2009_004532__16
214 | 2009_004537__16
215 | 2009_004554__16
216 | 2009_004631__16
217 | 2009_004642__16
218 | 2009_004645__16
219 | 2009_004745__16
220 | 2009_004794__16
221 | 2009_004869__16
222 | 2009_004885__16
223 | 2009_004901__16
224 | 2009_004919__16
225 | 2009_004983__16
226 | 2009_004990__16
227 | 2009_005008__16
228 | 2009_005069__16
229 | 2009_005070__16
230 | 2009_005165__16
231 | 2009_005170__16
232 | 2009_005278__16
233 | 2009_005286__16
234 | 2009_005310__16
235 | 2010_000015__16
236 | 2010_000027__16
237 | 2010_000132__16
238 | 2010_000202__16
239 | 2010_000399__16
240 | 2010_000470__16
241 | 2010_000567__16
242 | 2010_000601__16
243 | 2010_000669__16
244 | 2010_000737__16
245 | 2010_000773__16
246 | 2010_000800__16
247 | 2010_000871__16
248 | 2010_000876__16
249 | 2010_000973__16
250 | 2010_001111__16
251 | 2010_001134__16
252 | 2010_001219__16
253 | 2010_001310__16
254 | 2010_001479__16
255 | 2010_001544__16
256 | 2010_001717__16
257 | 2010_001743__16
258 | 2010_001787__16
259 | 2010_001843__16
260 | 2010_001864__16
261 | 2010_001940__16
262 | 2010_002193__16
263 | 2010_002195__16
264 | 2010_002316__16
265 | 2010_002366__16
266 | 2010_002379__16
267 | 2010_002462__16
268 | 2010_002537__16
269 | 2010_002605__16
270 | 2010_002661__16
271 | 2010_002676__16
272 | 2010_002742__16
273 | 2010_002830__16
274 | 2010_002982__16
275 | 2010_003017__16
276 | 2010_003101__16
277 | 2010_003103__16
278 | 2010_003203__16
279 | 2010_003218__16
280 | 2010_003390__16
281 | 2010_003556__16
282 | 2010_003651__16
283 | 2010_003667__16
284 | 2010_003674__16
285 | 2010_003728__16
286 | 2010_003729__16
287 | 2010_003910__16
288 | 2010_004062__16
289 | 2010_004072__16
290 | 2010_004224__16
291 | 2010_004358__16
292 | 2010_004466__16
293 | 2010_004683__16
294 | 2010_004908__16
295 | 2010_004910__16
296 | 2010_004944__16
297 | 2010_004974__16
298 | 2010_004982__16
299 | 2010_005158__16
300 | 2010_005274__16
301 | 2010_005279__16
302 | 2010_005455__16
303 | 2010_005536__16
304 | 2010_005593__16
305 | 2010_005758__16
306 | 2010_005830__16
307 | 2010_005930__16
308 | 2010_005932__16
309 | 2010_005975__16
310 | 2011_000072__16
311 | 2011_000145__16
312 | 2011_000196__16
313 | 2011_000361__16
314 | 2011_000388__16
315 | 2011_000468__16
316 | 2011_000514__16
317 | 2011_000530__16
318 | 2011_000572__16
319 | 2011_000731__16
320 | 2011_000743__16
321 | 2011_000823__16
322 | 2011_000875__16
323 | 2011_000885__16
324 | 2011_000919__16
325 | 2011_000934__16
326 | 2011_000957__16
327 | 2011_001009__16
328 | 2011_001011__16
329 | 2011_001022__16
330 | 2011_001034__16
331 | 2011_001055__16
332 | 2011_001221__16
333 | 2011_001226__16
334 | 2011_001360__16
335 | 2011_001369__16
336 | 2011_001382__16
337 | 2011_001440__16
338 | 2011_001456__16
339 | 2011_001600__16
340 | 2011_001611__16
341 | 2011_001689__16
342 | 2011_001766__16
343 | 2011_001820__16
344 | 2011_001845__16
345 | 2011_001946__16
346 | 2011_002022__16
347 | 2011_002031__16
348 | 2011_002318__16
349 | 2011_002386__16
350 | 2011_002443__16
351 | 2011_002614__16
352 | 2011_002808__16
353 | 2011_002810__16
354 | 2011_002924__16
355 | 2011_002978__16
356 | 2011_003002__16
357 | 2011_003047__16
358 | 2011_003162__16
359 | 2007_001416__17
360 | 2007_001872__17
361 | 2007_002845__17
362 | 2007_003190__17
363 | 2007_003593__17
364 | 2007_004423__17
365 | 2007_004768__17
366 | 2007_006136__17
367 | 2007_006832__17
368 | 2007_006899__17
369 | 2007_006944__17
370 | 2007_007048__17
371 | 2007_007230__17
372 | 2007_007621__17
373 | 2008_000084__17
374 | 2008_000099__17
375 | 2008_000669__17
376 | 2008_001601__17
377 | 2008_002061__17
378 | 2008_002150__17
379 | 2008_002343__17
380 | 2008_002430__17
381 | 2008_003147__17
382 | 2008_004007__17
383 | 2008_004629__17
384 | 2008_005447__17
385 | 2008_005494__17
386 | 2008_005505__17
387 | 2008_005635__17
388 | 2008_005706__17
389 | 2008_005736__17
390 | 2008_005938__17
391 | 2008_005987__17
392 | 2008_006059__17
393 | 2008_006070__17
394 | 2008_006100__17
395 | 2008_006221__17
396 | 2008_006339__17
397 | 2008_006477__17
398 | 2008_006570__17
399 | 2008_006892__17
400 | 2008_006939__17
401 | 2008_007069__17
402 | 2008_007070__17
403 | 2008_007245__17
404 | 2008_007334__17
405 | 2008_007430__17
406 | 2008_007693__17
407 | 2008_007806__17
408 | 2008_007890__17
409 | 2008_007909__17
410 | 2008_007985__17
411 | 2008_008109__17
412 | 2008_008319__17
413 | 2008_008322__17
414 | 2008_008323__17
415 | 2008_008601__17
416 | 2008_008613__17
417 | 2008_008623__17
418 | 2008_008665__17
419 | 2008_008666__17
420 | 2008_008714__17
421 | 2008_008744__17
422 | 2009_000168__17
423 | 2009_000223__17
424 | 2009_000289__17
425 | 2009_000356__17
426 | 2009_000670__17
427 | 2009_000725__17
428 | 2009_000750__17
429 | 2009_000837__17
430 | 2009_001172__17
431 | 2009_001177__17
432 | 2009_001203__17
433 | 2009_001236__17
434 | 2009_001263__17
435 | 2009_001349__17
436 | 2009_001403__17
437 | 2009_001537__17
438 | 2009_001618__17
439 | 2009_001643__17
440 | 2009_001699__17
441 | 2009_001732__17
442 | 2009_001738__17
443 | 2009_001783__17
444 | 2009_001959__17
445 | 2009_002133__17
446 | 2009_002245__17
447 | 2009_002282__17
448 | 2009_002391__17
449 | 2009_002719__17
450 | 2009_002921__17
451 | 2009_002988__17
452 | 2009_003076__17
453 | 2009_003249__17
454 | 2009_003254__17
455 | 2009_003271__17
456 | 2009_003425__17
457 | 2009_003430__17
458 | 2009_003460__17
459 | 2009_003541__17
460 | 2009_003618__17
461 | 2009_003624__17
462 | 2009_003784__17
463 | 2009_004164__17
464 | 2009_004171__17
465 | 2009_004181__17
466 | 2009_004222__17
467 | 2009_004513__17
468 | 2009_004547__17
469 | 2009_004706__17
470 | 2009_004768__17
471 | 2009_004805__17
472 | 2009_004834__17
473 | 2009_004943__17
474 | 2009_004945__17
475 | 2009_005005__17
476 | 2009_005193__17
477 | 2010_000002__17
478 | 2010_000052__17
479 | 2010_000089__17
480 | 2010_000117__17
481 | 2010_000139__17
482 | 2010_000189__17
483 | 2010_000190__17
484 | 2010_000307__17
485 | 2010_000327__17
486 | 2010_000390__17
487 | 2010_000436__17
488 | 2010_000483__17
489 | 2010_000527__17
490 | 2010_000562__17
491 | 2010_000641__17
492 | 2010_000667__17
493 | 2010_000727__17
494 | 2010_000735__17
495 | 2010_000822__17
496 | 2010_000831__17
497 | 2010_000847__17
498 | 2010_000866__17
499 | 2010_000920__17
500 | 2010_000970__17
501 | 2010_000978__17
502 | 2010_001076__17
503 | 2010_001082__17
504 | 2010_001160__17
505 | 2010_001175__17
506 | 2010_001245__17
507 | 2010_001257__17
508 | 2010_001286__17
509 | 2010_001356__17
510 | 2010_001607__17
511 | 2010_001746__17
512 | 2010_001796__17
513 | 2010_001881__17
514 | 2010_001987__17
515 | 2010_002039__17
516 | 2010_002041__17
517 | 2010_002058__17
518 | 2010_002113__17
519 | 2010_002124__17
520 | 2010_002130__17
521 | 2010_002176__17
522 | 2010_002215__17
523 | 2010_002294__17
524 | 2010_002346__17
525 | 2010_002353__17
526 | 2010_002378__17
527 | 2010_002501__17
528 | 2010_002507__17
529 | 2010_002518__17
530 | 2010_002582__17
531 | 2010_002624__17
532 | 2010_002628__17
533 | 2010_002665__17
534 | 2010_002705__17
535 | 2010_002736__17
536 | 2010_002737__17
537 | 2010_002821__17
538 | 2010_003013__17
539 | 2010_003028__17
540 | 2010_003074__17
541 | 2010_003094__17
542 | 2010_003102__17
543 | 2010_003153__17
544 | 2010_003253__17
545 | 2010_003343__17
546 | 2010_003372__17
547 | 2010_003376__17
548 | 2010_003429__17
549 | 2010_003491__17
550 | 2010_003567__17
551 | 2010_003725__17
552 | 2010_003742__17
553 | 2010_003754__17
554 | 2010_003761__17
555 | 2010_003774__17
556 | 2010_003792__17
557 | 2010_003806__17
558 | 2010_003865__17
559 | 2010_003919__17
560 | 2010_003939__17
561 | 2010_003954__17
562 | 2010_003996__17
563 | 2010_004061__17
564 | 2010_004065__17
565 | 2010_004067__17
566 | 2010_004074__17
567 | 2010_004089__17
568 | 2010_004105__17
569 | 2010_004188__17
570 | 2010_004225__17
571 | 2010_004259__17
572 | 2010_004332__17
573 | 2010_004428__17
574 | 2010_004431__17
575 | 2010_004436__17
576 | 2010_004499__17
577 | 2010_004514__17
578 | 2010_004560__17
579 | 2010_004629__17
580 | 2010_004659__17
581 | 2010_004694__17
582 | 2010_004704__17
583 | 2010_004710__17
584 | 2010_004812__17
585 | 2010_004868__17
586 | 2010_005002__17
587 | 2010_005026__17
588 | 2010_005066__17
589 | 2010_005098__17
590 | 2010_005120__17
591 | 2010_005183__17
592 | 2010_005260__17
593 | 2010_005285__17
594 | 2010_005310__17
595 | 2010_005385__17
596 | 2010_005416__17
597 | 2010_005466__17
598 | 2010_005514__17
599 | 2010_005519__17
600 | 2010_005566__17
601 | 2010_005567__17
602 | 2010_005601__17
603 | 2010_005635__17
604 | 2010_005688__17
605 | 2010_005736__17
606 | 2010_005740__17
607 | 2010_005767__17
608 | 2010_005986__17
609 | 2011_000102__17
610 | 2011_000332__17
611 | 2011_000404__17
612 | 2011_000450__17
613 | 2011_000454__17
614 | 2011_000641__17
615 | 2011_000759__17
616 | 2011_000829__17
617 | 2011_000834__17
618 | 2011_001240__17
619 | 2011_001246__17
620 | 2011_001329__17
621 | 2011_001373__17
622 | 2011_001549__17
623 | 2011_001757__17
624 | 2011_001822__17
625 | 2011_001986__17
626 | 2011_002027__17
627 | 2011_002119__17
628 | 2011_002169__17
629 | 2011_002447__17
630 | 2011_002464__17
631 | 2011_002553__17
632 | 2011_002571__17
633 | 2011_002817__17
634 | 2011_003023__17
635 | 2011_003223__17
636 | 2007_000584__18
637 | 2007_001027__18
638 | 2007_001149__18
639 | 2007_001901__18
640 | 2007_002055__18
641 | 2007_002368__18
642 | 2007_002545__18
643 | 2007_003451__18
644 | 2007_004166__18
645 | 2007_005212__18
646 | 2007_005266__18
647 | 2007_005647__18
648 | 2007_006066__18
649 | 2007_006530__18
650 | 2007_008203__18
651 | 2007_008468__18
652 | 2007_008821__18
653 | 2007_009435__18
654 | 2007_009554__18
655 | 2008_000093__18
656 | 2008_000128__18
657 | 2008_000321__18
658 | 2008_000419__18
659 | 2008_000421__18
660 | 2008_000465__18
661 | 2008_000493__18
662 | 2008_000541__18
663 | 2008_000636__18
664 | 2008_000648__18
665 | 2008_000704__18
666 | 2008_000857__18
667 | 2008_001030__18
668 | 2008_001092__18
669 | 2008_001133__18
670 | 2008_001238__18
671 | 2008_001333__18
672 | 2008_001366__18
673 | 2008_001390__18
674 | 2008_001399__18
675 | 2008_001461__18
676 | 2008_001589__18
677 | 2008_001660__18
678 | 2008_001694__18
679 | 2008_001781__18
680 | 2008_001787__18
681 | 2008_001838__18
682 | 2008_001869__18
683 | 2008_001896__18
684 | 2008_002082__18
685 | 2008_002092__18
686 | 2008_002119__18
687 | 2008_002434__18
688 | 2008_002508__18
689 | 2008_002533__18
690 | 2008_002776__18
691 | 2008_002801__18
692 | 2008_002916__18
693 | 2008_002920__18
694 | 2008_002922__18
695 | 2008_002948__18
696 | 2008_003271__18
697 | 2008_003393__18
698 | 2008_003562__18
699 | 2008_003607__18
700 | 2008_003814__18
701 | 2008_004269__18
702 | 2008_004271__18
703 | 2008_004321__18
704 | 2008_004416__18
705 | 2008_004435__18
706 | 2008_004492__18
707 | 2008_004497__18
708 | 2008_004632__18
709 | 2008_004661__18
710 | 2008_004670__18
711 | 2008_004697__18
712 | 2008_004774__18
713 | 2008_004881__18
714 | 2008_004887__18
715 | 2008_004938__18
716 | 2008_004964__18
717 | 2008_005090__18
718 | 2008_005323__18
719 | 2008_005395__18
720 | 2008_005444__18
721 | 2008_005623__18
722 | 2008_005627__18
723 | 2008_005788__18
724 | 2008_005850__18
725 | 2008_005882__18
726 | 2008_005926__18
727 | 2008_006038__18
728 | 2008_006117__18
729 | 2008_006276__18
730 | 2008_006370__18
731 | 2008_006389__18
732 | 2008_006436__18
733 | 2008_006616__18
734 | 2008_006665__18
735 | 2008_006737__18
736 | 2008_006773__18
737 | 2008_006843__18
738 | 2008_006868__18
739 | 2008_006979__18
740 | 2008_007021__18
741 | 2008_007043__18
742 | 2008_007050__18
743 | 2008_007169__18
744 | 2008_007182__18
745 | 2008_007218__18
746 | 2008_007282__18
747 | 2008_007285__18
748 | 2008_007511__18
749 | 2008_007682__18
750 | 2008_007733__18
751 | 2008_007837__18
752 | 2008_008029__18
753 | 2008_008106__18
754 | 2008_008162__18
755 | 2008_008190__18
756 | 2008_008206__18
757 | 2008_008271__18
758 | 2008_008276__18
759 | 2008_008313__18
760 | 2008_008410__18
761 | 2008_008433__18
762 | 2008_008470__18
763 | 2008_008517__18
764 | 2008_008522__18
765 | 2008_008526__18
766 | 2008_008538__18
767 | 2008_008550__18
768 | 2008_008554__18
769 | 2008_008560__18
770 | 2008_008567__18
771 | 2008_008574__18
772 | 2008_008578__18
773 | 2008_008588__18
774 | 2008_008590__18
775 | 2008_008606__18
776 | 2008_008608__18
777 | 2008_008621__18
778 | 2008_008622__18
779 | 2008_008628__18
780 | 2008_008642__18
781 | 2008_008649__18
782 | 2008_008658__18
783 | 2008_008772__18
784 | 2009_000014__18
785 | 2009_000016__18
786 | 2009_000142__18
787 | 2009_000189__18
788 | 2009_000217__18
789 | 2009_000251__18
790 | 2009_000300__18
791 | 2009_000316__18
792 | 2009_000342__18
793 | 2009_000375__18
794 | 2009_000379__18
795 | 2009_000416__18
796 | 2009_000422__18
797 | 2009_000449__18
798 | 2009_000474__18
799 | 2009_000505__18
800 | 2009_000563__18
801 | 2009_000577__18
802 | 2009_000615__18
803 | 2009_000653__18
804 | 2009_000672__18
805 | 2009_000674__18
806 | 2009_000779__18
807 | 2009_000925__18
808 | 2009_000926__18
809 | 2009_000937__18
810 | 2009_000939__18
811 | 2009_000973__18
812 | 2009_000995__18
813 | 2009_001021__18
814 | 2009_001081__18
815 | 2009_001107__18
816 | 2009_001146__18
817 | 2009_001190__18
818 | 2009_001212__18
819 | 2009_001241__18
820 | 2009_001243__18
821 | 2009_001249__18
822 | 2009_001268__18
823 | 2009_001313__18
824 | 2009_001343__18
825 | 2009_001357__18
826 | 2009_001376__18
827 | 2009_001437__18
828 | 2009_001440__18
829 | 2009_001446__18
830 | 2009_001470__18
831 | 2009_001577__18
832 | 2009_001581__18
833 | 2009_001605__18
834 | 2009_001608__18
835 | 2009_001631__18
836 | 2009_001719__18
837 | 2009_001743__18
838 | 2009_001746__18
839 | 2009_001774__18
840 | 2009_001871__18
841 | 2009_001874__18
842 | 2009_001888__18
843 | 2009_001906__18
844 | 2009_001908__18
845 | 2009_001961__18
846 | 2009_001980__18
847 | 2009_002083__18
848 | 2009_002192__18
849 | 2009_002208__18
850 | 2009_002253__18
851 | 2009_002325__18
852 | 2009_002370__18
853 | 2009_002408__18
854 | 2009_002522__18
855 | 2009_002523__18
856 | 2009_002558__18
857 | 2009_002611__18
858 | 2009_002612__18
859 | 2009_002663__18
860 | 2009_002673__18
861 | 2009_002681__18
862 | 2009_002683__18
863 | 2009_002713__18
864 | 2009_002717__18
865 | 2009_002893__18
866 | 2009_002972__18
867 | 2009_002998__18
868 | 2009_003087__18
869 | 2009_003129__18
870 | 2009_003156__18
871 | 2009_003208__18
872 | 2009_003373__18
873 | 2009_003377__18
874 | 2009_003394__18
875 | 2009_003409__18
876 | 2009_003441__18
877 | 2009_003459__18
878 | 2009_003581__18
879 | 2009_003605__18
880 | 2009_003613__18
881 | 2009_003642__18
882 | 2009_003646__18
883 | 2009_003656__18
884 | 2009_003671__18
885 | 2009_003695__18
886 | 2009_003711__18
887 | 2009_003785__18
888 | 2009_003795__18
889 | 2009_003819__18
890 | 2009_003835__18
891 | 2009_003843__18
892 | 2009_003848__18
893 | 2009_003965__18
894 | 2009_003966__18
895 | 2009_003995__18
896 | 2009_004073__18
897 | 2009_004076__18
898 | 2009_004088__18
899 | 2009_004091__18
900 | 2009_004161__18
901 | 2009_004165__18
902 | 2009_004177__18
903 | 2009_004180__18
904 | 2009_004188__18
905 | 2009_004193__18
906 | 2009_004264__18
907 | 2009_004283__18
908 | 2009_004291__18
909 | 2009_004301__18
910 | 2009_004322__18
911 | 2009_004419__18
912 | 2009_004426__18
913 | 2009_004449__18
914 | 2009_004452__18
915 | 2009_004456__18
916 | 2009_004457__18
917 | 2009_004464__18
918 | 2009_004560__18
919 | 2009_004582__18
920 | 2009_004588__18
921 | 2009_004593__18
922 | 2009_004674__18
923 | 2009_004701__18
924 | 2009_004718__18
925 | 2009_004782__18
926 | 2009_004823__18
927 | 2009_004839__18
928 | 2009_004901__18
929 | 2009_004983__18
930 | 2009_005070__18
931 | 2009_005240__18
932 | 2009_005299__18
933 | 2010_000018__18
934 | 2010_000097__18
935 | 2010_000329__18
936 | 2010_000344__18
937 | 2010_000588__18
938 | 2010_000671__18
939 | 2010_000691__18
940 | 2010_000694__18
941 | 2010_000821__18
942 | 2010_000830__18
943 | 2010_000922__18
944 | 2010_000968__18
945 | 2010_001051__18
946 | 2010_001066__18
947 | 2010_001111__18
948 | 2010_001127__18
949 | 2010_001148__18
950 | 2010_001189__18
951 | 2010_001277__18
952 | 2010_001287__18
953 | 2010_001434__18
954 | 2010_001514__18
955 | 2010_001547__18
956 | 2010_001586__18
957 | 2010_001636__18
958 | 2010_001743__18
959 | 2010_001763__18
960 | 2010_001933__18
961 | 2010_002015__18
962 | 2010_002045__18
963 | 2010_002191__18
964 | 2010_002193__18
965 | 2010_002312__18
966 | 2010_002337__18
967 | 2010_002461__18
968 | 2010_002527__18
969 | 2010_002659__18
970 | 2010_002710__18
971 | 2010_002811__18
972 | 2010_002817__18
973 | 2010_002860__18
974 | 2010_002962__18
975 | 2010_003027__18
976 | 2010_003114__18
977 | 2010_003143__18
978 | 2010_003149__18
979 | 2010_003169__18
980 | 2010_003174__18
981 | 2010_003203__18
982 | 2010_003278__18
983 | 2010_003305__18
984 | 2010_003331__18
985 | 2010_003401__18
986 | 2010_003451__18
987 | 2010_003556__18
988 | 2010_003613__18
989 | 2010_003717__18
990 | 2010_003804__18
991 | 2010_003822__18
992 | 2010_003861__18
993 | 2010_003864__18
994 | 2010_004025__18
995 | 2010_004043__18
996 | 2010_004062__18
997 | 2010_004095__18
998 | 2010_004109__18
999 | 2010_004111__18
1000 | 2010_004125__18
1001 | 2010_004358__18
1002 | 2010_004409__18
1003 | 2010_004447__18
1004 | 2010_004481__18
1005 | 2010_004741__18
1006 | 2010_004765__18
1007 | 2010_004805__18
1008 | 2010_005049__18
1009 | 2010_005054__18
1010 | 2010_005170__18
1011 | 2010_005193__18
1012 | 2010_005388__18
1013 | 2010_005398__18
1014 | 2010_005532__18
1015 | 2010_005610__18
1016 | 2010_005614__18
1017 | 2010_005681__18
1018 | 2010_005692__18
1019 | 2010_005734__18
1020 | 2010_005770__18
1021 | 2010_005830__18
1022 | 2010_005898__18
1023 | 2010_005937__18
1024 | 2010_005980__18
1025 | 2010_006056__18
1026 | 2010_006073__18
1027 | 2011_000006__18
1028 | 2011_000037__18
1029 | 2011_000082__18
1030 | 2011_000122__18
1031 | 2011_000142__18
1032 | 2011_000146__18
1033 | 2011_000182__18
1034 | 2011_000224__18
1035 | 2011_000304__18
1036 | 2011_000364__18
1037 | 2011_000379__18
1038 | 2011_000386__18
1039 | 2011_000399__18
1040 | 2011_000434__18
1041 | 2011_000457__18
1042 | 2011_000475__18
1043 | 2011_000477__18
1044 | 2011_000499__18
1045 | 2011_000550__18
1046 | 2011_000565__18
1047 | 2011_000572__18
1048 | 2011_000608__18
1049 | 2011_000630__18
1050 | 2011_000646__18
1051 | 2011_000657__18
1052 | 2011_000689__18
1053 | 2011_000765__18
1054 | 2011_000820__18
1055 | 2011_000947__18
1056 | 2011_001027__18
1057 | 2011_001031__18
1058 | 2011_001167__18
1059 | 2011_001175__18
1060 | 2011_001192__18
1061 | 2011_001198__18
1062 | 2011_001215__18
1063 | 2011_001283__18
1064 | 2011_001304__18
1065 | 2011_001330__18
1066 | 2011_001402__18
1067 | 2011_001404__18
1068 | 2011_001412__18
1069 | 2011_001440__18
1070 | 2011_001451__18
1071 | 2011_001518__18
1072 | 2011_001531__18
1073 | 2011_001547__18
1074 | 2011_001600__18
1075 | 2011_001662__18
1076 | 2011_001691__18
1077 | 2011_001733__18
1078 | 2011_001739__18
1079 | 2011_001751__18
1080 | 2011_001811__18
1081 | 2011_001820__18
1082 | 2011_001845__18
1083 | 2011_001856__18
1084 | 2011_001895__18
1085 | 2011_001914__18
1086 | 2011_001922__18
1087 | 2011_001932__18
1088 | 2011_001974__18
1089 | 2011_001977__18
1090 | 2011_001980__18
1091 | 2011_002109__18
1092 | 2011_002184__18
1093 | 2011_002186__18
1094 | 2011_002268__18
1095 | 2011_002291__18
1096 | 2011_002335__18
1097 | 2011_002359__18
1098 | 2011_002395__18
1099 | 2011_002414__18
1100 | 2011_002507__18
1101 | 2011_002554__18
1102 | 2011_002561__18
1103 | 2011_002594__18
1104 | 2011_002714__18
1105 | 2011_002726__18
1106 | 2011_002752__18
1107 | 2011_002756__18
1108 | 2011_002775__18
1109 | 2011_002784__18
1110 | 2011_002810__18
1111 | 2011_002814__18
1112 | 2011_002834__18
1113 | 2011_002852__18
1114 | 2011_002953__18
1115 | 2011_002965__18
1116 | 2011_003038__18
1117 | 2011_003039__18
1118 | 2011_003044__18
1119 | 2011_003049__18
1120 | 2011_003188__18
1121 | 2011_003201__18
1122 | 2011_003212__18
1123 | 2007_000333__19
1124 | 2007_002462__19
1125 | 2007_003178__19
1126 | 2007_003286__19
1127 | 2007_004627__19
1128 | 2007_004663__19
1129 | 2007_004951__19
1130 | 2007_005360__19
1131 | 2007_006254__19
1132 | 2007_006400__19
1133 | 2007_006803__19
1134 | 2007_007387__19
1135 | 2007_007726__19
1136 | 2007_007947__19
1137 | 2007_009436__19
1138 | 2007_009580__19
1139 | 2007_009597__19
1140 | 2007_009950__19
1141 | 2008_000003__19
1142 | 2008_000045__19
1143 | 2008_000343__19
1144 | 2008_000373__19
1145 | 2008_000470__19
1146 | 2008_000916__19
1147 | 2008_001105__19
1148 | 2008_001114__19
1149 | 2008_001118__19
1150 | 2008_001164__19
1151 | 2008_001169__19
1152 | 2008_001358__19
1153 | 2008_001625__19
1154 | 2008_001710__19
1155 | 2008_001850__19
1156 | 2008_001866__19
1157 | 2008_001905__19
1158 | 2008_001926__19
1159 | 2008_001956__19
1160 | 2008_002158__19
1161 | 2008_002193__19
1162 | 2008_002222__19
1163 | 2008_002279__19
1164 | 2008_002325__19
1165 | 2008_002344__19
1166 | 2008_002452__19
1167 | 2008_002457__19
1168 | 2008_002465__19
1169 | 2008_002965__19
1170 | 2008_003025__19
1171 | 2008_003068__19
1172 | 2008_003083__19
1173 | 2008_003263__19
1174 | 2008_003414__19
1175 | 2008_003571__19
1176 | 2008_003578__19
1177 | 2008_003826__19
1178 | 2008_003992__19
1179 | 2008_004110__19
1180 | 2008_004214__19
1181 | 2008_004235__19
1182 | 2008_004357__19
1183 | 2008_004358__19
1184 | 2008_004547__19
1185 | 2008_004663__19
1186 | 2008_004770__19
1187 | 2008_004852__19
1188 | 2008_004869__19
1189 | 2008_004946__19
1190 | 2008_005085__19
1191 | 2008_005185__19
1192 | 2008_005269__19
1193 | 2008_005282__19
1194 | 2008_005354__19
1195 | 2008_005446__19
1196 | 2008_005653__19
1197 | 2008_005742__19
1198 | 2008_005763__19
1199 | 2008_005801__19
1200 | 2008_005825__19
1201 | 2008_005968__19
1202 | 2008_006010__19
1203 | 2008_006158__19
1204 | 2008_006365__19
1205 | 2008_006368__19
1206 | 2008_006655__19
1207 | 2008_006818__19
1208 | 2008_006849__19
1209 | 2008_006865__19
1210 | 2008_006900__19
1211 | 2008_006919__19
1212 | 2008_007011__19
1213 | 2008_007084__19
1214 | 2008_007105__19
1215 | 2008_007115__19
1216 | 2008_007185__19
1217 | 2008_007189__19
1218 | 2008_007201__19
1219 | 2008_007231__19
1220 | 2008_007247__19
1221 | 2008_007280__19
1222 | 2008_007383__19
1223 | 2008_007521__19
1224 | 2008_007648__19
1225 | 2008_007749__19
1226 | 2008_007759__19
1227 | 2008_007760__19
1228 | 2008_007779__19
1229 | 2008_007787__19
1230 | 2008_007829__19
1231 | 2008_007887__19
1232 | 2008_007999__19
1233 | 2008_008001__19
1234 | 2008_008020__19
1235 | 2008_008055__19
1236 | 2008_008074__19
1237 | 2008_008123__19
1238 | 2008_008152__19
1239 | 2008_008192__19
1240 | 2008_008200__19
1241 | 2008_008203__19
1242 | 2008_008223__19
1243 | 2008_008275__19
1244 | 2008_008297__19
1245 | 2008_008302__19
1246 | 2008_008321__19
1247 | 2008_008336__19
1248 | 2008_008342__19
1249 | 2008_008364__19
1250 | 2008_008379__19
1251 | 2008_008382__19
1252 | 2008_008527__19
1253 | 2008_008545__19
1254 | 2008_008583__19
1255 | 2008_008615__19
1256 | 2008_008618__19
1257 | 2008_008632__19
1258 | 2008_008637__19
1259 | 2008_008641__19
1260 | 2008_008662__19
1261 | 2008_008673__19
1262 | 2008_008676__19
1263 | 2008_008681__19
1264 | 2008_008690__19
1265 | 2008_008696__19
1266 | 2008_008697__19
1267 | 2008_008726__19
1268 | 2008_008732__19
1269 | 2008_008735__19
1270 | 2008_008739__19
1271 | 2008_008749__19
1272 | 2008_008751__19
1273 | 2008_008757__19
1274 | 2008_008767__19
1275 | 2008_008770__19
1276 | 2009_000011__19
1277 | 2009_000051__19
1278 | 2009_000073__19
1279 | 2009_000090__19
1280 | 2009_000105__19
1281 | 2009_000137__19
1282 | 2009_000177__19
1283 | 2009_000244__19
1284 | 2009_000283__19
1285 | 2009_000347__19
1286 | 2009_000443__19
1287 | 2009_000476__19
1288 | 2009_000501__19
1289 | 2009_000592__19
1290 | 2009_000597__19
1291 | 2009_000658__19
1292 | 2009_000663__19
1293 | 2009_000689__19
1294 | 2009_000789__19
1295 | 2009_000824__19
1296 | 2009_000890__19
1297 | 2009_000910__19
1298 | 2009_000920__19
1299 | 2009_000974__19
1300 | 2009_001007__19
1301 | 2009_001042__19
1302 | 2009_001078__19
1303 | 2009_001118__19
1304 | 2009_001152__19
1305 | 2009_001164__19
1306 | 2009_001192__19
1307 | 2009_001245__19
1308 | 2009_001259__19
1309 | 2009_001291__19
1310 | 2009_001350__19
1311 | 2009_001359__19
1312 | 2009_001412__19
1313 | 2009_001468__19
1314 | 2009_001493__19
1315 | 2009_001516__19
1316 | 2009_001519__19
1317 | 2009_001534__19
1318 | 2009_001648__19
1319 | 2009_001651__19
1320 | 2009_001671__19
1321 | 2009_001735__19
1322 | 2009_001747__19
1323 | 2009_001802__19
1324 | 2009_001823__19
1325 | 2009_001831__19
1326 | 2009_001853__19
1327 | 2009_001865__19
1328 | 2009_001868__19
1329 | 2009_001904__19
1330 | 2009_001977__19
1331 | 2009_002009__19
1332 | 2009_002116__19
1333 | 2009_002144__19
1334 | 2009_002175__19
1335 | 2009_002197__19
1336 | 2009_002214__19
1337 | 2009_002219__19
1338 | 2009_002225__19
1339 | 2009_002274__19
1340 | 2009_002281__19
1341 | 2009_002377__19
1342 | 2009_002441__19
1343 | 2009_002557__19
1344 | 2009_002616__19
1345 | 2009_002624__19
1346 | 2009_002669__19
1347 | 2009_002676__19
1348 | 2009_002689__19
1349 | 2009_002695__19
1350 | 2009_002712__19
1351 | 2009_002725__19
1352 | 2009_002734__19
1353 | 2009_002774__19
1354 | 2009_002838__19
1355 | 2009_002867__19
1356 | 2009_002938__19
1357 | 2009_002947__19
1358 | 2009_003022__19
1359 | 2009_003054__19
1360 | 2009_003185__19
1361 | 2009_003230__19
1362 | 2009_003233__19
1363 | 2009_003333__19
1364 | 2009_003348__19
1365 | 2009_003407__19
1366 | 2009_003436__19
1367 | 2009_003453__19
1368 | 2009_003492__19
1369 | 2009_003497__19
1370 | 2009_003534__19
1371 | 2009_003543__19
1372 | 2009_003583__19
1373 | 2009_003638__19
1374 | 2009_003650__19
1375 | 2009_003758__19
1376 | 2009_003765__19
1377 | 2009_003790__19
1378 | 2009_003821__19
1379 | 2009_003863__19
1380 | 2009_003892__19
1381 | 2009_003942__19
1382 | 2009_003951__19
1383 | 2009_004019__19
1384 | 2009_004109__19
1385 | 2009_004159__19
1386 | 2009_004163__19
1387 | 2009_004170__19
1388 | 2009_004211__19
1389 | 2009_004213__19
1390 | 2009_004329__19
1391 | 2009_004336__19
1392 | 2009_004371__19
1393 | 2009_004406__19
1394 | 2009_004453__19
1395 | 2009_004468__19
1396 | 2009_004511__19
1397 | 2009_004527__19
1398 | 2009_004559__19
1399 | 2009_004619__19
1400 | 2009_004624__19
1401 | 2009_004669__19
1402 | 2009_004671__19
1403 | 2009_004677__19
1404 | 2009_004708__19
1405 | 2009_004766__19
1406 | 2009_004771__19
1407 | 2009_004804__19
1408 | 2009_004880__19
1409 | 2009_004956__19
1410 | 2009_004958__19
1411 | 2009_004977__19
1412 | 2009_004988__19
1413 | 2009_005024__19
1414 | 2009_005061__19
1415 | 2009_005084__19
1416 | 2009_005126__19
1417 | 2009_005128__19
1418 | 2009_005149__19
1419 | 2009_005246__19
1420 | 2009_005287__19
1421 | 2009_005292__19
1422 | 2009_005303__19
1423 | 2010_000080__19
1424 | 2010_000085__19
1425 | 2010_000136__19
1426 | 2010_000199__19
1427 | 2010_000233__19
1428 | 2010_000249__19
1429 | 2010_000313__19
1430 | 2010_000321__19
1431 | 2010_000406__19
1432 | 2010_000513__19
1433 | 2010_000537__19
1434 | 2010_000581__19
1435 | 2010_000633__19
1436 | 2010_000651__19
1437 | 2010_000740__19
1438 | 2010_000743__19
1439 | 2010_000786__19
1440 | 2010_000860__19
1441 | 2010_000865__19
1442 | 2010_000955__19
1443 | 2010_000959__19
1444 | 2010_000984__19
1445 | 2010_001052__19
1446 | 2010_001105__19
1447 | 2010_001143__19
1448 | 2010_001250__19
1449 | 2010_001272__19
1450 | 2010_001374__19
1451 | 2010_001395__19
1452 | 2010_001405__19
1453 | 2010_001408__19
1454 | 2010_001502__19
1455 | 2010_001515__19
1456 | 2010_001539__19
1457 | 2010_001560__19
1458 | 2010_001586__19
1459 | 2010_001625__19
1460 | 2010_001719__19
1461 | 2010_001748__19
1462 | 2010_001788__19
1463 | 2010_001801__19
1464 | 2010_001941__19
1465 | 2010_002073__19
1466 | 2010_002080__19
1467 | 2010_002179__19
1468 | 2010_002182__19
1469 | 2010_002208__19
1470 | 2010_002223__19
1471 | 2010_002261__19
1472 | 2010_002369__19
1473 | 2010_002420__19
1474 | 2010_002487__19
1475 | 2010_002556__19
1476 | 2010_002618__19
1477 | 2010_002667__19
1478 | 2010_002697__19
1479 | 2010_002722__19
1480 | 2010_002838__19
1481 | 2010_002840__19
1482 | 2010_002851__19
1483 | 2010_002896__19
1484 | 2010_002937__19
1485 | 2010_002946__19
1486 | 2010_003129__19
1487 | 2010_003160__19
1488 | 2010_003274__19
1489 | 2010_003335__19
1490 | 2010_003384__19
1491 | 2010_003470__19
1492 | 2010_003482__19
1493 | 2010_003601__19
1494 | 2010_003609__19
1495 | 2010_003628__19
1496 | 2010_003630__19
1497 | 2010_003788__19
1498 | 2010_003847__19
1499 | 2010_003900__19
1500 | 2010_003944__19
1501 | 2010_004075__19
1502 | 2010_004148__19
1503 | 2010_004168__19
1504 | 2010_004193__19
1505 | 2010_004256__19
1506 | 2010_004313__19
1507 | 2010_004350__19
1508 | 2010_004412__19
1509 | 2010_004469__19
1510 | 2010_004475__19
1511 | 2010_004478__19
1512 | 2010_004536__19
1513 | 2010_004604__19
1514 | 2010_004669__19
1515 | 2010_004677__19
1516 | 2010_004779__19
1517 | 2010_004826__19
1518 | 2010_005055__19
1519 | 2010_005130__19
1520 | 2010_005309__19
1521 | 2010_005463__19
1522 | 2010_005506__19
1523 | 2010_005515__19
1524 | 2010_005559__19
1525 | 2010_005565__19
1526 | 2010_005643__19
1527 | 2010_005768__19
1528 | 2010_005810__19
1529 | 2010_005816__19
1530 | 2010_005934__19
1531 | 2010_005996__19
1532 | 2011_000012__19
1533 | 2011_000058__19
1534 | 2011_000105__19
1535 | 2011_000195__19
1536 | 2011_000197__19
1537 | 2011_000210__19
1538 | 2011_000221__19
1539 | 2011_000241__19
1540 | 2011_000250__19
1541 | 2011_000277__19
1542 | 2011_000346__19
1543 | 2011_000398__19
1544 | 2011_000442__19
1545 | 2011_000491__19
1546 | 2011_000498__19
1547 | 2011_000513__19
1548 | 2011_000558__19
1549 | 2011_000627__19
1550 | 2011_000688__19
1551 | 2011_000819__19
1552 | 2011_000848__19
1553 | 2011_000858__19
1554 | 2011_000895__19
1555 | 2011_000909__19
1556 | 2011_000944__19
1557 | 2011_000979__19
1558 | 2011_000987__19
1559 | 2011_000997__19
1560 | 2011_001019__19
1561 | 2011_001052__19
1562 | 2011_001086__19
1563 | 2011_001126__19
1564 | 2011_001152__19
1565 | 2011_001166__19
1566 | 2011_001217__19
1567 | 2011_001337__19
1568 | 2011_001375__19
1569 | 2011_001381__19
1570 | 2011_001525__19
1571 | 2011_001560__19
1572 | 2011_001602__19
1573 | 2011_001655__19
1574 | 2011_001671__19
1575 | 2011_001741__19
1576 | 2011_001776__19
1577 | 2011_001796__19
1578 | 2011_001827__19
1579 | 2011_001889__19
1580 | 2011_001904__19
1581 | 2011_001927__19
1582 | 2011_001951__19
1583 | 2011_002053__19
1584 | 2011_002105__19
1585 | 2011_002173__19
1586 | 2011_002251__19
1587 | 2011_002278__19
1588 | 2011_002280__19
1589 | 2011_002294__19
1590 | 2011_002385__19
1591 | 2011_002396__19
1592 | 2011_002492__19
1593 | 2011_002516__19
1594 | 2011_002528__19
1595 | 2011_002636__19
1596 | 2011_002738__19
1597 | 2011_002779__19
1598 | 2011_002821__19
1599 | 2011_002917__19
1600 | 2011_002932__19
1601 | 2011_002987__19
1602 | 2011_003028__19
1603 | 2011_003059__19
1604 | 2011_003124__19
1605 | 2011_003132__19
1606 | 2011_003149__19
1607 | 2011_003187__19
1608 | 2011_003228__19
1609 | 2011_003260__19
1610 | 2011_003274__19
1611 | 2007_000039__20
1612 | 2007_000121__20
1613 | 2007_001027__20
1614 | 2007_001149__20
1615 | 2007_001704__20
1616 | 2007_002227__20
1617 | 2007_002953__20
1618 | 2007_003451__20
1619 | 2007_003604__20
1620 | 2007_005210__20
1621 | 2007_005902__20
1622 | 2007_006066__20
1623 | 2007_006704__20
1624 | 2007_007250__20
1625 | 2007_007432__20
1626 | 2007_007530__20
1627 | 2007_008407__20
1628 | 2007_008948__20
1629 | 2007_009216__20
1630 | 2007_009295__20
1631 | 2007_009594__20
1632 | 2008_000002__20
1633 | 2008_000023__20
1634 | 2008_000093__20
1635 | 2008_000145__20
1636 | 2008_000202__20
1637 | 2008_000244__20
1638 | 2008_000305__20
1639 | 2008_000309__20
1640 | 2008_000348__20
1641 | 2008_000383__20
1642 | 2008_000495__20
1643 | 2008_000566__20
1644 | 2008_000578__20
1645 | 2008_000904__20
1646 | 2008_001021__20
1647 | 2008_001073__20
1648 | 2008_001130__20
1649 | 2008_001401__20
1650 | 2008_001428__20
1651 | 2008_001481__20
1652 | 2008_001576__20
1653 | 2008_001641__20
1654 | 2008_001704__20
1655 | 2008_001781__20
1656 | 2008_001815__20
1657 | 2008_001880__20
1658 | 2008_001888__20
1659 | 2008_001896__20
1660 | 2008_001920__20
1661 | 2008_001997__20
1662 | 2008_002066__20
1663 | 2008_002082__20
1664 | 2008_002140__20
1665 | 2008_002218__20
1666 | 2008_002328__20
1667 | 2008_002547__20
1668 | 2008_002650__20
1669 | 2008_002676__20
1670 | 2008_002776__20
1671 | 2008_002817__20
1672 | 2008_002826__20
1673 | 2008_002831__20
1674 | 2008_002954__20
1675 | 2008_003200__20
1676 | 2008_003213__20
1677 | 2008_003248__20
1678 | 2008_003280__20
1679 | 2008_003348__20
1680 | 2008_003432__20
1681 | 2008_003434__20
1682 | 2008_003435__20
1683 | 2008_003466__20
1684 | 2008_003500__20
1685 | 2008_003585__20
1686 | 2008_003589__20
1687 | 2008_003609__20
1688 | 2008_003667__20
1689 | 2008_003712__20
1690 | 2008_003814__20
1691 | 2008_003825__20
1692 | 2008_003883__20
1693 | 2008_003948__20
1694 | 2008_003995__20
1695 | 2008_004004__20
1696 | 2008_004006__20
1697 | 2008_004008__20
1698 | 2008_004093__20
1699 | 2008_004097__20
1700 | 2008_004217__20
1701 | 2008_004259__20
1702 | 2008_004297__20
1703 | 2008_004301__20
1704 | 2008_004321__20
1705 | 2008_004330__20
1706 | 2008_004333__20
1707 | 2008_004501__20
1708 | 2008_004506__20
1709 | 2008_004526__20
1710 | 2008_004541__20
1711 | 2008_004550__20
1712 | 2008_004606__20
1713 | 2008_004719__20
1714 | 2008_004720__20
1715 | 2008_004781__20
1716 | 2008_004807__20
1717 | 2008_004881__20
1718 | 2008_004898__20
1719 | 2008_004908__20
1720 | 2008_004930__20
1721 | 2008_004961__20
1722 | 2008_005006__20
1723 | 2008_005008__20
1724 | 2008_005037__20
1725 | 2008_005064__20
1726 | 2008_005066__20
1727 | 2008_005090__20
1728 | 2008_005094__20
1729 | 2008_005191__20
1730 | 2008_005231__20
1731 | 2008_005255__20
1732 | 2008_005329__20
1733 | 2008_005342__20
1734 | 2008_005393__20
1735 | 2008_005569__20
1736 | 2008_005609__20
1737 | 2008_005625__20
1738 | 2008_005639__20
1739 | 2008_005660__20
1740 | 2008_005678__20
1741 | 2008_005732__20
1742 | 2008_005817__20
1743 | 2008_005877__20
1744 | 2008_005918__20
1745 | 2008_005929__20
1746 | 2008_005954__20
1747 | 2008_005957__20
1748 | 2008_005962__20
1749 | 2008_005967__20
1750 | 2008_005976__20
1751 | 2008_006031__20
1752 | 2008_006047__20
1753 | 2008_006062__20
1754 | 2008_006135__20
1755 | 2008_006136__20
1756 | 2008_006147__20
1757 | 2008_006233__20
1758 | 2008_006267__20
1759 | 2008_006271__20
1760 | 2008_006273__20
1761 | 2008_006288__20
1762 | 2008_006295__20
1763 | 2008_006366__20
1764 | 2008_006373__20
1765 | 2008_006409__20
1766 | 2008_006433__20
1767 | 2008_006591__20
1768 | 2008_006605__20
1769 | 2008_006606__20
1770 | 2008_006617__20
1771 | 2008_006624__20
1772 | 2008_006662__20
1773 | 2008_006668__20
1774 | 2008_006710__20
1775 | 2008_006719__20
1776 | 2008_006733__20
1777 | 2008_006946__20
1778 | 2008_007010__20
1779 | 2008_007038__20
1780 | 2008_007114__20
1781 | 2008_007196__20
1782 | 2008_007217__20
1783 | 2008_007242__20
1784 | 2008_007246__20
1785 | 2008_007324__20
1786 | 2008_007332__20
1787 | 2008_007361__20
1788 | 2008_007446__20
1789 | 2008_007476__20
1790 | 2008_007536__20
1791 | 2008_007561__20
1792 | 2008_007567__20
1793 | 2008_007685__20
1794 | 2008_007696__20
1795 | 2008_007798__20
1796 | 2008_007864__20
1797 | 2008_007916__20
1798 | 2008_007933__20
1799 | 2008_007962__20
1800 | 2008_007987__20
1801 | 2008_008269__20
1802 | 2008_008429__20
1803 | 2008_008439__20
1804 | 2008_008524__20
1805 | 2008_008590__20
1806 | 2008_008608__20
1807 | 2008_008649__20
1808 | 2009_000010__20
1809 | 2009_000014__20
1810 | 2009_000041__20
1811 | 2009_000157__20
1812 | 2009_000214__20
1813 | 2009_000216__20
1814 | 2009_000336__20
1815 | 2009_000398__20
1816 | 2009_000439__20
1817 | 2009_000444__20
1818 | 2009_000544__20
1819 | 2009_000549__20
1820 | 2009_000552__20
1821 | 2009_000585__20
1822 | 2009_000629__20
1823 | 2009_000679__20
1824 | 2009_000722__20
1825 | 2009_000791__20
1826 | 2009_000848__20
1827 | 2009_000895__20
1828 | 2009_000981__20
1829 | 2009_000987__20
1830 | 2009_001069__20
1831 | 2009_001103__20
1832 | 2009_001106__20
1833 | 2009_001111__20
1834 | 2009_001133__20
1835 | 2009_001188__20
1836 | 2009_001357__20
1837 | 2009_001393__20
1838 | 2009_001452__20
1839 | 2009_001526__20
1840 | 2009_001553__20
1841 | 2009_001555__20
1842 | 2009_001608__20
1843 | 2009_001615__20
1844 | 2009_001682__20
1845 | 2009_001779__20
1846 | 2009_001809__20
1847 | 2009_001812__20
1848 | 2009_001839__20
1849 | 2009_001852__20
1850 | 2009_001864__20
1851 | 2009_001874__20
1852 | 2009_001875__20
1853 | 2009_001961__20
1854 | 2009_001964__20
1855 | 2009_002110__20
1856 | 2009_002139__20
1857 | 2009_002232__20
1858 | 2009_002409__20
1859 | 2009_002537__20
1860 | 2009_002652__20
1861 | 2009_002663__20
1862 | 2009_002705__20
1863 | 2009_002733__20
1864 | 2009_002755__20
1865 | 2009_002758__20
1866 | 2009_002820__20
1867 | 2009_002827__20
1868 | 2009_002849__20
1869 | 2009_002872__20
1870 | 2009_002932__20
1871 | 2009_002967__20
1872 | 2009_002970__20
1873 | 2009_002984__20
1874 | 2009_002995__20
1875 | 2009_003078__20
1876 | 2009_003093__20
1877 | 2009_003140__20
1878 | 2009_003191__20
1879 | 2009_003204__20
1880 | 2009_003214__20
1881 | 2009_003316__20
1882 | 2009_003367__20
1883 | 2009_003537__20
1884 | 2009_003554__20
1885 | 2009_003646__20
1886 | 2009_003720__20
1887 | 2009_003753__20
1888 | 2009_003852__20
1889 | 2009_003920__20
1890 | 2009_004062__20
1891 | 2009_004128__20
1892 | 2009_004138__20
1893 | 2009_004176__20
1894 | 2009_004243__20
1895 | 2009_004301__20
1896 | 2009_004341__20
1897 | 2009_004357__20
1898 | 2009_004359__20
1899 | 2009_004478__20
1900 | 2009_004503__20
1901 | 2009_004519__20
1902 | 2009_004631__20
1903 | 2009_004719__20
1904 | 2009_004760__20
1905 | 2009_004763__20
1906 | 2009_004902__20
1907 | 2009_004905__20
1908 | 2009_004922__20
1909 | 2009_004965__20
1910 | 2009_005030__20
1911 | 2009_005042__20
1912 | 2009_005062__20
1913 | 2009_005070__20
1914 | 2009_005221__20
1915 | 2009_005240__20
1916 | 2009_005256__20
1917 | 2010_000053__20
1918 | 2010_000141__20
1919 | 2010_000291__20
1920 | 2010_000375__20
1921 | 2010_000379__20
1922 | 2010_000442__20
1923 | 2010_000449__20
1924 | 2010_000578__20
1925 | 2010_000658__20
1926 | 2010_000669__20
1927 | 2010_000705__20
1928 | 2010_000773__20
1929 | 2010_000787__20
1930 | 2010_000800__20
1931 | 2010_000807__20
1932 | 2010_000931__20
1933 | 2010_000944__20
1934 | 2010_000974__20
1935 | 2010_001099__20
1936 | 2010_001127__20
1937 | 2010_001270__20
1938 | 2010_001277__20
1939 | 2010_001363__20
1940 | 2010_001533__20
1941 | 2010_001562__20
1942 | 2010_001580__20
1943 | 2010_001690__20
1944 | 2010_001780__20
1945 | 2010_001860__20
1946 | 2010_001918__20
1947 | 2010_001939__20
1948 | 2010_002002__20
1949 | 2010_002015__20
1950 | 2010_002094__20
1951 | 2010_002097__20
1952 | 2010_002152__20
1953 | 2010_002167__20
1954 | 2010_002193__20
1955 | 2010_002245__20
1956 | 2010_002247__20
1957 | 2010_002327__20
1958 | 2010_002427__20
1959 | 2010_002513__20
1960 | 2010_002526__20
1961 | 2010_002561__20
1962 | 2010_002567__20
1963 | 2010_002586__20
1964 | 2010_002652__20
1965 | 2010_002686__20
1966 | 2010_002770__20
1967 | 2010_002791__20
1968 | 2010_002843__20
1969 | 2010_002982__20
1970 | 2010_003035__20
1971 | 2010_003103__20
1972 | 2010_003137__20
1973 | 2010_003236__20
1974 | 2010_003241__20
1975 | 2010_003287__20
1976 | 2010_003405__20
1977 | 2010_003437__20
1978 | 2010_003461__20
1979 | 2010_003674__20
1980 | 2010_003688__20
1981 | 2010_003719__20
1982 | 2010_003728__20
1983 | 2010_003770__20
1984 | 2010_003844__20
1985 | 2010_003857__20
1986 | 2010_003864__20
1987 | 2010_003874__20
1988 | 2010_003892__20
1989 | 2010_003942__20
1990 | 2010_004009__20
1991 | 2010_004050__20
1992 | 2010_004095__20
1993 | 2010_004102__20
1994 | 2010_004109__20
1995 | 2010_004137__20
1996 | 2010_004249__20
1997 | 2010_004254__20
1998 | 2010_004295__20
1999 | 2010_004306__20
2000 | 2010_004368__20
2001 | 2010_004460__20
2002 | 2010_004503__20
2003 | 2010_004523__20
2004 | 2010_004545__20
2005 | 2010_004586__20
2006 | 2010_004591__20
2007 | 2010_004816__20
2008 | 2010_004836__20
2009 | 2010_004944__20
2010 | 2010_004982__20
2011 | 2010_005049__20
2012 | 2010_005071__20
2013 | 2010_005133__20
2014 | 2010_005158__20
2015 | 2010_005190__20
2016 | 2010_005239__20
2017 | 2010_005345__20
2018 | 2010_005372__20
2019 | 2010_005450__20
2020 | 2010_005676__20
2021 | 2010_005678__20
2022 | 2010_005744__20
2023 | 2010_005805__20
2024 | 2010_005827__20
2025 | 2010_005841__20
2026 | 2010_006050__20
2027 | 2011_000009__20
2028 | 2011_000036__20
2029 | 2011_000037__20
2030 | 2011_000038__20
2031 | 2011_000061__20
2032 | 2011_000071__20
2033 | 2011_000077__20
2034 | 2011_000192__20
2035 | 2011_000253__20
2036 | 2011_000288__20
2037 | 2011_000290__20
2038 | 2011_000364__20
2039 | 2011_000382__20
2040 | 2011_000399__20
2041 | 2011_000400__20
2042 | 2011_000434__20
2043 | 2011_000444__20
2044 | 2011_000608__20
2045 | 2011_000685__20
2046 | 2011_000755__20
2047 | 2011_000965__20
2048 | 2011_001149__20
2049 | 2011_001223__20
2050 | 2011_001302__20
2051 | 2011_001357__20
2052 | 2011_001387__20
2053 | 2011_001394__20
2054 | 2011_001456__20
2055 | 2011_001466__20
2056 | 2011_001507__20
2057 | 2011_001573__20
2058 | 2011_001689__20
2059 | 2011_001705__20
2060 | 2011_001730__20
2061 | 2011_001833__20
2062 | 2011_001837__20
2063 | 2011_001856__20
2064 | 2011_001926__20
2065 | 2011_001928__20
2066 | 2011_002005__20
2067 | 2011_002154__20
2068 | 2011_002218__20
2069 | 2011_002292__20
2070 | 2011_002418__20
2071 | 2011_002462__20
2072 | 2011_002511__20
2073 | 2011_002514__20
2074 | 2011_002554__20
2075 | 2011_002560__20
2076 | 2011_002656__20
2077 | 2011_002756__20
2078 | 2011_002775__20
2079 | 2011_002802__20
2080 | 2011_002942__20
2081 | 2011_002966__20
2082 | 2011_002970__20
2083 | 2011_003047__20
2084 | 2011_003079__20
2085 | 2011_003194__20
2086 | 2011_003254__20
2087 |
--------------------------------------------------------------------------------