├── data ├── __init__.pyc ├── datamgr.pyc ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-36.pyc │ ├── datamgr.cpython-36.pyc │ ├── dataset.cpython-36.pyc │ ├── feature_loader.cpython-36.pyc │ └── additional_transforms.cpython-36.pyc ├── additional_transforms.py ├── feature_loader.py ├── dataset.py └── datamgr.py ├── methods ├── __pycache__ │ ├── gnn.cpython-36.pyc │ ├── gnnnet.cpython-36.pyc │ ├── backbone.cpython-36.pyc │ ├── baselinetrain.cpython-36.pyc │ ├── learnablemask.cpython-36.pyc │ ├── meta_template.cpython-36.pyc │ ├── student_MED2N.cpython-36.pyc │ ├── studnet_MED2N.cpython-36.pyc │ └── meta_template_student_MED2N.cpython-36.pyc ├── learnablemask.py ├── baselinetrain.py ├── gnnnet.py ├── meta_template.py ├── gnn.py ├── meta_template_student_MED2N.py ├── student_MED2N.py └── backbone.py ├── LICENSE ├── utils.py ├── README.md ├── train_metaTeacher.py ├── train_metaStudent.py ├── options.py ├── test.py ├── test_twoPaths.py └── output ├── labled_base_cars_5.json └── labled_base_plantae_5.json /data/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/__init__.pyc -------------------------------------------------------------------------------- /data/datamgr.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/datamgr.pyc -------------------------------------------------------------------------------- /data/__init__.py: -------------------------------------------------------------------------------- 1 | from . import datamgr 2 | from . import dataset 3 | from . import additional_transforms 4 | from . import feature_loader 5 | -------------------------------------------------------------------------------- /data/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /data/__pycache__/datamgr.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/__pycache__/datamgr.cpython-36.pyc -------------------------------------------------------------------------------- /data/__pycache__/dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/__pycache__/dataset.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/gnn.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/gnn.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/gnnnet.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/gnnnet.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/backbone.cpython-36.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/backbone.cpython-36.pyc -------------------------------------------------------------------------------- /data/__pycache__/feature_loader.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/__pycache__/feature_loader.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/baselinetrain.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/baselinetrain.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/learnablemask.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/learnablemask.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/meta_template.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/meta_template.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/student_MED2N.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/student_MED2N.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/studnet_MED2N.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/studnet_MED2N.cpython-36.pyc -------------------------------------------------------------------------------- /data/__pycache__/additional_transforms.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/data/__pycache__/additional_transforms.cpython-36.pyc -------------------------------------------------------------------------------- /methods/__pycache__/meta_template_student_MED2N.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lovelyqian/ME-D2N_for_CDFSL/HEAD/methods/__pycache__/meta_template_student_MED2N.cpython-36.pyc -------------------------------------------------------------------------------- /data/additional_transforms.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017-present, Facebook, Inc. 2 | # All rights reserved. 3 | # 4 | # This source code is licensed under the license found in the 5 | # LICENSE file in the root directory of this source tree. 6 | 7 | # This is the implementation from https://github.com/facebookresearch/low-shot-shrink-hallucinate. 
8 | 9 | import torch 10 | from PIL import ImageEnhance 11 | 12 | transformtypedict=dict(Brightness=ImageEnhance.Brightness, Contrast=ImageEnhance.Contrast, Sharpness=ImageEnhance.Sharpness, Color=ImageEnhance.Color) 13 | 14 | class ImageJitter(object): 15 | def __init__(self, transformdict): 16 | self.transforms = [(transformtypedict[k], transformdict[k]) for k in transformdict] 17 | 18 | def __call__(self, img): 19 | out = img 20 | randtensor = torch.rand(len(self.transforms)) 21 | 22 | for i, (transformer, alpha) in enumerate(self.transforms): 23 | r = alpha*(randtensor[i]*2.0 -1.0) + 1 24 | out = transformer(out).enhance(r).convert('RGB') 25 | 26 | return out 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Fu Yuqian 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | 4 | def one_hot(y, num_class): 5 | return torch.zeros((len(y), num_class)).scatter_(1, y.unsqueeze(1), 1) 6 | 7 | def DBindex(cl_data_file): 8 | class_list = cl_data_file.keys() 9 | cl_num= len(class_list) 10 | cl_means = [] 11 | stds = [] 12 | DBs = [] 13 | for cl in class_list: 14 | cl_means.append( np.mean(cl_data_file[cl], axis = 0) ) 15 | stds.append( np.sqrt(np.mean( np.sum(np.square( cl_data_file[cl] - cl_means[-1]), axis = 1)))) 16 | 17 | mu_i = np.tile( np.expand_dims( np.array(cl_means), axis = 0), (len(class_list),1,1) ) 18 | mu_j = np.transpose(mu_i,(1,0,2)) 19 | mdists = np.sqrt(np.sum(np.square(mu_i - mu_j), axis = 2)) 20 | 21 | for i in range(cl_num): 22 | DBs.append( np.max([ (stds[i]+ stds[j])/mdists[i,j] for j in range(cl_num) if j != i ]) ) 23 | return np.mean(DBs) 24 | 25 | def sparsity(cl_data_file): 26 | class_list = cl_data_file.keys() 27 | cl_sparsity = [] 28 | for cl in class_list: 29 | cl_sparsity.append(np.mean([np.sum(x!=0) for x in cl_data_file[cl] ]) ) 30 | 31 | return np.mean(cl_sparsity) 32 | -------------------------------------------------------------------------------- /methods/learnablemask.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | 6 | class LearnableMaskLayer(nn.Module): 7 | def __init__(self, feature_dim): 8 | super(LearnableMaskLayer, self).__init__() 9 | self.mask = torch.nn.Parameter(torch.randn((2, feature_dim, 1, 1))) 10 | 11 | def forward(self, x, domain_flag): 12 | # turn the soft mask logits into a hard {0,1} channel mask: Gumbel-softmax (straight-through) during training, thresholded softmax at test time; row 0 serves the source domain ('S'), row 1 the auxiliary target domain ('A') 13 | if self.training: 14 | hard_mask = F.gumbel_softmax(self.mask, hard=True, dim=0) 15 | else: 16 | hard_mask = F.softmax(self.mask, dim=0) 17 | hard_mask = (hard_mask>0.5).float() 18 | #print('S:', torch.sum(hard_mask[0])) 19 | #print('A:', torch.sum(hard_mask[1])) 20 | if(domain_flag=='S'): 21 | hard_mask = hard_mask[0] 22 | elif(domain_flag=='A'): 23 | hard_mask = hard_mask[1] 24 | 25 | hard_mask = hard_mask.unsqueeze(0) 26 | x = x * hard_mask 27 | 28 | return x 29 | 30 | 31 | 32 | if __name__ == '__main__': 33 | myLearnableMaskLayer = LearnableMaskLayer(feature_dim = 8) 34 | x = torch.randn(2,8,64, 64) 35 | out_x = myLearnableMaskLayer(x, domain_flag='S') 36 | print(out_x) 37 | -------------------------------------------------------------------------------- /data/feature_loader.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import h5py 4 | 5 | class SimpleHDF5Dataset: 6 | def __init__(self, file_handle = None): 7 | if file_handle == None: 8 | self.f = '' 9 | self.all_feats_dset = [] 10 | self.all_labels = [] 11 | self.total = 0 12 | else: 13 | self.f = file_handle 14 | self.all_feats_dset = self.f['all_feats'][...] 15 | self.all_labels = self.f['all_labels'][...]
16 | self.total = self.f['count'][0] 17 | def __getitem__(self, i): 18 | return torch.Tensor(self.all_feats_dset[i,:]), int(self.all_labels[i]) 19 | 20 | def __len__(self): 21 | return self.total 22 | 23 | 24 | def init_loader(filename): 25 | with h5py.File(filename, 'r') as f: 26 | fileset = SimpleHDF5Dataset(f) 27 | 28 | feats = fileset.all_feats_dset 29 | labels = fileset.all_labels 30 | while np.sum(feats[-1]) == 0: 31 | feats = np.delete(feats,-1,axis = 0) 32 | labels = np.delete(labels,-1,axis = 0) 33 | 34 | class_list = np.unique(np.array(labels)).tolist() 35 | inds = range(len(labels)) 36 | 37 | cl_data_file = {} 38 | for cl in class_list: 39 | cl_data_file[cl] = [] 40 | for ind in inds: 41 | cl_data_file[labels[ind]].append( feats[ind]) 42 | 43 | return cl_data_file 44 | -------------------------------------------------------------------------------- /methods/baselinetrain.py: -------------------------------------------------------------------------------- 1 | from methods import backbone 2 | 3 | import torch.nn as nn 4 | from tensorboardX import SummaryWriter 5 | 6 | # --- conventional supervised training --- 7 | class BaselineTrain(nn.Module): 8 | def __init__(self, model_func, num_class, tf_path=None, loss_type = 'softmax'): 9 | super(BaselineTrain, self).__init__() 10 | 11 | # feature encoder 12 | self.feature = model_func() 13 | 14 | # loss function: use 'dist' to pre-train the encoder for matchingnet, and 'softmax' for others 15 | if loss_type == 'softmax': 16 | print('num classes:', num_class) 17 | self.classifier = nn.Linear(self.feature.final_feat_dim, num_class) 18 | self.classifier.bias.data.fill_(0) 19 | elif loss_type == 'dist': 20 | self.classifier = backbone.distLinear(self.feature.final_feat_dim, num_class) 21 | self.loss_type = loss_type 22 | self.loss_fn = nn.CrossEntropyLoss() 23 | 24 | self.num_class = num_class 25 | self.tf_writer = SummaryWriter(log_dir=tf_path) if tf_path is not None else None 26 | 27 | def forward(self,x): 28 | x = x.cuda() 29 | out = self.feature.forward(x) 30 | scores = self.classifier.forward(out) 31 | #print('scores:', scores) 32 | return scores 33 | 34 | def forward_loss(self, x, y): 35 | scores = self.forward(x) 36 | y = y.cuda() 37 | return self.loss_fn(scores, y ) 38 | 39 | def train_loop(self, epoch, train_loader, optimizer, total_it): 40 | print_freq = len(train_loader) // 10 41 | avg_loss=0 42 | 43 | for i, (x,y) in enumerate(train_loader): 44 | optimizer.zero_grad() 45 | loss = self.forward_loss(x, y) 46 | loss.backward() 47 | optimizer.step() 48 | 49 | avg_loss = avg_loss+loss.item()#data[0] 50 | 51 | if (i + 1) % print_freq==0: 52 | print('Epoch {:d} | Batch {:d}/{:d} | Loss {:f}'.format(epoch, i + 1, len(train_loader), avg_loss/float(i+1) )) 53 | if (total_it + 1) % 10 == 0: 54 | self.tf_writer.add_scalar('loss', loss.item(), total_it + 1) 55 | total_it += 1 56 | return total_it 57 | 58 | def test_loop(self, val_loader): 59 | return -1 #no validation, just save model during iteration 60 | 61 | -------------------------------------------------------------------------------- /methods/gnnnet.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | from methods.meta_template import MetaTemplate 5 | from methods.gnn import GNN_nl 6 | from methods import backbone 7 | 8 | 9 | class GnnNet(MetaTemplate): 10 | maml=False 11 | def __init__(self, model_func, n_way, n_support, tf_path=None): 12 | super(GnnNet, self).__init__(model_func, 
n_way, n_support, tf_path=tf_path) 13 | 14 | # loss function 15 | self.loss_fn = nn.CrossEntropyLoss() 16 | 17 | # metric function 18 | self.fc = nn.Sequential(nn.Linear(self.feat_dim, 128), nn.BatchNorm1d(128, track_running_stats=False)) if not self.maml else nn.Sequential(backbone.Linear_fw(self.feat_dim, 128), backbone.BatchNorm1d_fw(128, track_running_stats=False)) 19 | self.gnn = GNN_nl(128 + self.n_way, 96, self.n_way) 20 | self.method = 'GnnNet' 21 | 22 | # fix label for training the metric function 1*nw(1 + ns)*nw 23 | support_label = torch.from_numpy(np.repeat(range(self.n_way), self.n_support)).unsqueeze(1) 24 | support_label = torch.zeros(self.n_way*self.n_support, self.n_way).scatter(1, support_label, 1).view(self.n_way, self.n_support, self.n_way) 25 | support_label = torch.cat([support_label, torch.zeros(self.n_way, 1, n_way)], dim=1) 26 | self.support_label = support_label.view(1, -1, self.n_way) 27 | 28 | def cuda(self): 29 | self.feature.cuda() 30 | self.fc.cuda() 31 | self.gnn.cuda() 32 | self.support_label = self.support_label.cuda() 33 | return self 34 | 35 | def set_forward(self,x,is_feature=False): 36 | x = x.cuda() 37 | 38 | if is_feature: 39 | # reshape the feature tensor: n_way * n_s + 15 * f 40 | assert(x.size(1) == self.n_support + 15) 41 | z = self.fc(x.view(-1, *x.size()[2:])) 42 | z = z.view(self.n_way, -1, z.size(1)) 43 | else: 44 | # get feature using encoder 45 | x = x.view(-1, *x.size()[2:]) 46 | z = self.fc(self.feature(x)) 47 | z = z.view(self.n_way, -1, z.size(1)) 48 | # stack the feature for metric function: n_way * n_s + n_q * f -> n_q * [1 * n_way(n_s + 1) * f] 49 | z_stack = [torch.cat([z[:, :self.n_support], z[:, self.n_support + i:self.n_support + i + 1]], dim=1).view(1, -1, z.size(2)) for i in range(self.n_query)] 50 | assert(z_stack[0].size(1) == self.n_way*(self.n_support + 1)) 51 | scores = self.forward_gnn(z_stack) 52 | return scores 53 | 54 | def forward_gnn(self, zs): 55 | # gnn inp: n_q * n_way(n_s + 1) * f 56 | nodes = torch.cat([torch.cat([z, self.support_label], dim=2) for z in zs], dim=0) 57 | scores = self.gnn(nodes) 58 | # n_q * n_way(n_s + 1) * n_way -> (n_way * n_q) * n_way 59 | scores = scores.view(self.n_query, self.n_way, self.n_support + 1, self.n_way)[:, :, -1].permute(1, 0, 2).contiguous().view(-1, self.n_way) 60 | return scores 61 | 62 | def set_forward_loss(self, x): 63 | y_query = torch.from_numpy(np.repeat(range( self.n_way ), self.n_query)) 64 | y_query = y_query.cuda() 65 | scores = self.set_forward(x) 66 | loss = self.loss_fn(scores, y_query) 67 | return scores, loss 68 | -------------------------------------------------------------------------------- /methods/meta_template.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import numpy as np 3 | from abc import abstractmethod 4 | from tensorboardX import SummaryWriter 5 | 6 | class MetaTemplate(nn.Module): 7 | def __init__(self, model_func, n_way, n_support, flatten=True, leakyrelu=False, tf_path=None, change_way=True): 8 | super(MetaTemplate, self).__init__() 9 | self.n_way = n_way 10 | self.n_support = n_support 11 | self.n_query = -1 #(change depends on input) 12 | self.feature = model_func(flatten=flatten, leakyrelu=leakyrelu) 13 | self.feat_dim = self.feature.final_feat_dim 14 | self.change_way = change_way #some methods allow different_way classification during training and test 15 | self.tf_writer = SummaryWriter(log_dir=tf_path) if tf_path is not None else None 16 | 17 | @abstractmethod 18 | def 
set_forward(self,x,is_feature): 19 | pass 20 | 21 | @abstractmethod 22 | def set_forward_loss(self, x): 23 | pass 24 | 25 | def forward(self,x): 26 | out = self.feature.forward(x) 27 | return out 28 | 29 | def parse_feature(self,x,is_feature): 30 | x = x.cuda() 31 | if is_feature: 32 | z_all = x 33 | else: 34 | x = x.contiguous().view( self.n_way * (self.n_support + self.n_query), *x.size()[2:]) 35 | z_all = self.feature.forward(x) 36 | z_all = z_all.view( self.n_way, self.n_support + self.n_query, -1) 37 | z_support = z_all[:, :self.n_support] 38 | z_query = z_all[:, self.n_support:] 39 | 40 | return z_support, z_query 41 | 42 | def correct(self, x): 43 | scores, loss = self.set_forward_loss(x) 44 | y_query = np.repeat(range( self.n_way ), self.n_query ) 45 | 46 | topk_scores, topk_labels = scores.data.topk(1, 1, True, True) 47 | topk_ind = topk_labels.cpu().numpy() 48 | top1_correct = np.sum(topk_ind[:,0] == y_query) 49 | return float(top1_correct), len(y_query), loss.item()*len(y_query) 50 | 51 | def train_loop(self, epoch, train_loader, optimizer, total_it): 52 | print_freq = len(train_loader) // 10 53 | avg_loss=0 54 | for i, (x,_ ) in enumerate(train_loader): 55 | self.n_query = x.size(1) - self.n_support 56 | if self.change_way: 57 | self.n_way = x.size(0) 58 | optimizer.zero_grad() 59 | _, loss = self.set_forward_loss(x) 60 | loss.backward() 61 | optimizer.step() 62 | avg_loss = avg_loss+loss.item() 63 | 64 | if (i + 1) % print_freq==0: 65 | print('Epoch {:d} | Batch {:d}/{:d} | Loss {:f}'.format(epoch, i + 1, len(train_loader), avg_loss/float(i+1))) 66 | if (total_it + 1) % 10 == 0 and self.tf_writer is not None: 67 | self.tf_writer.add_scalar(self.method + '/query_loss', loss.item(), total_it + 1) 68 | total_it += 1 69 | return total_it 70 | 71 | def test_loop(self, test_loader, record = None): 72 | loss = 0. 73 | count = 0 74 | acc_all = [] 75 | 76 | iter_num = len(test_loader) 77 | for i, (x,_) in enumerate(test_loader): 78 | self.n_query = x.size(1) - self.n_support 79 | if self.change_way: 80 | self.n_way = x.size(0) 81 | correct_this, count_this, loss_this = self.correct(x) 82 | acc_all.append(correct_this/ count_this*100 ) 83 | loss += loss_this 84 | count += count_this 85 | 86 | acc_all = np.asarray(acc_all) 87 | acc_mean = np.mean(acc_all) 88 | acc_std = np.std(acc_all) 89 | print('--- %d Loss = %.6f ---' %(iter_num, loss/count)) 90 | print('--- %d Test Acc = %4.2f%% +- %4.2f%% ---' %(iter_num, acc_mean, 1.96* acc_std/np.sqrt(iter_num))) 91 | 92 | return acc_mean 93 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ME-D2N_for_CDFSL 2 | Repository for the paper : 3 | ME-D2N: Multi-Expert Domain Decompositional Network for Cross-Domain Few-Shot Learning (ACM MM 2022) 4 | 5 | [paper link](https://arxiv.org/pdf/2210.05280.pdf) 6 | 7 | [bilibili pre video](https://www.bilibili.com/video/BV1GG4y1p7if/?vd_source=668a0bb77d7d7b855bde68ecea1232e7) 8 | 9 | [youtube pre video](https://www.youtube.com/watch?v=crCoaBLuFeA) 10 | 11 | ![image.png](https://upload-images.jianshu.io/upload_images/9933353-4dbd80537b9d49a9.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) 12 | 13 | If you have any questions/advices/potential ideas, welcome to contact me by fuyq20@fudan.edu.cn. 
14 | 15 | 16 | # 1 Dependencies 17 | An Anaconda environment is recommended: 18 | ``` 19 | conda create --name py36 python=3.6 20 | conda activate py36 21 | conda install pytorch torchvision -c pytorch 22 | pip3 install scipy>=1.3.2 23 | pip3 install tensorboardX>=1.4 24 | pip3 install h5py>=2.9.0 25 | ``` 26 | 27 | # 2 datasets 28 | We evaluate our method on five datasets: mini-Imagenet serves as the source dataset, while cub, cars, places, and plantae serve as the target datasets. 29 | 1. The datasets can be conveniently downloaded and processed as in [FWT](https://github.com/hytseng0509/CrossDomainFewShot). 30 | 2. Remember to set your own dataset dir in 'options.py'. 31 | 3. We follow the same auxiliary target images as in our previous work [meta-FDMixup](https://github.com/lovelyqian/Meta-FDMixup), and the json files used have been provided in the output dir of this repo. 32 | 33 | If you can't find the Plantae dataset, we provide it [here](https://drive.google.com/file/d/1e3TklMlVBCG0XRfEw6DKStJGdmmXgvq5/view?usp=drive_link); please cite its paper. 34 | 35 | # 3 pretraining 36 | As in most previous CD-FSL methods, a pretrained feature extractor `baseline` is used. 37 | - you can directly download it from [this link](https://drive.google.com/file/d/1iYu3lvYDixVNPYjmyi0MON8-X3aRN4n2/view), rename it as 399.tar, and put it in `./output/checkpoints/baseline` 38 | - or you can pretrain it as follows: 39 | ``` 40 | python3 train_metaTeacher.py --modelType pretrain --dataset miniImagenet --name baseline --train_aug 41 | ``` 42 | 43 | # 4 Usages 44 | Our method is target-set specific; we take the cub target set under the 5-way 1-shot setting as an example. 45 | 46 | 1. Training St-Net 47 | ``` 48 | python3 train_metaTeacher.py --modelType St-Net --dataset miniImagenet --name St-Net-1shot --train_aug --warmup baseline --n_shot 1 49 | ``` 50 | 51 | 2. Training Tt-Net 52 | ``` 53 | python3 train_metaTeacher.py --modelType Tt-Net --dataset cub --name Tt-Net-target-set-cub-1shot --train_aug --warmup baseline --n_shot 1 --stop_epoch 100 54 | ``` 55 | - note: as stated in the paper, only Tt-Net under the 1-shot setting is trained for 100 epochs; in all other cases, 400 epochs are adopted. 56 | 57 | 3. Training the ME-D2N student model 58 | ``` 59 | python3 train_metaStudent.py --modelType Student --target_set cub --name ME-D2N-target-set-cub-1shot --train_aug --warmup baseline --n_shot 1 --ckp_S output/checkpoints/St-Net-1shot/399.tar --ckp_A output/checkpoints/Tt-Net-target-set-cub-1shot/99.tar 60 | ``` 61 | 62 | 4. Testing St-Net/Tt-Net 63 | ``` 64 | python test.py --name St-Net-1shot --dataset DATASET --save_epoch 399 --n_shot 1 65 | ``` 66 | - DATASET: miniImagenet/cub/cars/places/plantae 67 | 68 | ``` 69 | python test.py --name Tt-Net-target-set-cub-1shot --dataset DATASET --save_epoch 99 --n_shot 1 70 | ``` 71 | - DATASET: miniImagenet/cub 72 | 73 | 5. Testing ME-D2N 74 | ``` 75 | python test_twoPaths.py --name ME-D2N-target-set-cub-1shot --target_set cub --dataset DATASET --save_epoch 399 --n_shot 1 76 | ``` 77 | - DATASET: miniImagenet/cub 78 | 79 | 80 | # 5 pretrained models 81 | We also provide our pretrained models as follows (coming soon). 82 | 83 | 84 | 85 | - just put them in the right dir.
Take ME-D2N for the 1-shot as an example, rename it as 399.tar, and move it to the `ouput/checkpoints/ME-D2N-target-set-cub-1shot/` 86 | 87 | # 6 citing 88 | If you find our work or codes useful, please consider citing our work ヘ|・∀・|ノ*~● 89 | ``` 90 | @inproceedings{fu2022me, 91 | title={ME-D2N: Multi-Expert Domain Decompositional Network for Cross-Domain Few-Shot Learning}, 92 | author={Fu, Yuqian and Xie, Yu and Fu, Yanwei and Chen, Jingjing and Jiang, Yu-Gang}, 93 | booktitle={Proceedings of the 30th ACM International Conference on Multimedia}, 94 | pages={6609--6617}, 95 | year={2022} 96 | } 97 | ``` 98 | 99 | -------------------------------------------------------------------------------- /train_metaTeacher.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | import torch.optim 4 | import os 5 | import random 6 | 7 | from methods.backbone import model_dict 8 | from data.datamgr import SimpleDataManager, SetDataManager 9 | from methods.baselinetrain import BaselineTrain 10 | from methods.gnnnet import GnnNet 11 | from options import parse_args, get_resume_file, load_warmup_state 12 | 13 | 14 | def train(base_loader, val_loader, model, start_epoch, stop_epoch, params): 15 | # get optimizer and checkpoint path 16 | optimizer = torch.optim.Adam(model.parameters()) 17 | if not os.path.isdir(params.checkpoint_dir): 18 | os.makedirs(params.checkpoint_dir) 19 | 20 | # for validation 21 | max_acc = 0 22 | total_it = 0 23 | 24 | # start 25 | for epoch in range(start_epoch,stop_epoch): 26 | model.train() 27 | total_it = model.train_loop(epoch, base_loader, optimizer, total_it) 28 | model.eval() 29 | 30 | acc = model.test_loop( val_loader) 31 | if acc > max_acc : 32 | print("best model! save...") 33 | max_acc = acc 34 | outfile = os.path.join(params.checkpoint_dir, 'best_model.tar') 35 | torch.save({'epoch':epoch, 'state':model.state_dict()}, outfile) 36 | else: 37 | print("GG! 
best accuracy {:f}".format(max_acc)) 38 | 39 | if ((epoch + 1) % params.save_freq==0) or (epoch==stop_epoch-1): 40 | outfile = os.path.join(params.checkpoint_dir, '{:d}.tar'.format(epoch)) 41 | torch.save({'epoch':epoch, 'state':model.state_dict()}, outfile) 42 | 43 | return model 44 | 45 | 46 | 47 | # --- main function --- 48 | if __name__=='__main__': 49 | # set numpy random seed 50 | seed = 0 51 | print("set seed = %d" % seed) 52 | random.seed(seed) 53 | np.random.seed(seed) 54 | torch.manual_seed(seed) 55 | torch.cuda.manual_seed_all(seed) 56 | torch.backends.cudnn.deterministic = True 57 | torch.backends.cudnn.benchmark = False 58 | 59 | # parser argument 60 | params = parse_args('train') 61 | print(params) 62 | 63 | # output and tensorboard dir 64 | params.tf_dir = '%s/log/%s'%(params.save_dir, params.name) 65 | params.checkpoint_dir = '%s/checkpoints/%s'%(params.save_dir, params.name) 66 | if not os.path.isdir(params.checkpoint_dir): 67 | os.makedirs(params.checkpoint_dir) 68 | 69 | # dataloader and model 70 | print('\n--- prepare dataloader ---') 71 | val_file = os.path.join(params.data_dir, 'miniImagenet', 'val.json') 72 | 73 | image_size = 224 74 | if params.modelType == 'pretrain': 75 | print(' pre-training the feature encoder {}'.format(params.model)) 76 | assert(params.dataset == 'miniImagenet') 77 | base_file = os.path.join(params.data_dir, params.dataset, 'base.json') 78 | base_datamgr = SimpleDataManager(image_size, batch_size=16) 79 | training_loader = base_datamgr.get_data_loader( base_file , aug=params.train_aug ) 80 | val_datamgr = SimpleDataManager(image_size, batch_size=64) 81 | val_loader = val_datamgr.get_data_loader(val_file, aug=False) 82 | model = BaselineTrain(model_dict[params.model], params.num_classes, tf_path=params.tf_dir) 83 | 84 | elif params.modelType == 'St-Net' or params.modelType == 'Tt-Net': 85 | if(params.modelType == 'St-Net'): 86 | print('meta-training the St-Net using {}'.format(params.dataset)) 87 | assert(params.dataset == 'miniImagenet') 88 | training_file = os.path.join(params.data_dir, params.dataset, 'base.json') 89 | elif(params.modelType == 'Tt-Net'): 90 | print('meta-training the Tt-Net using {} with num_target as {}'.format(params.dataset, params.target_num_label)) 91 | assert(params.dataset in ['cub', 'cars', 'places', 'plantae']) 92 | training_file = 'output/labled_base_' + params.dataset + '_' + str(params.target_num_label) + '.json' 93 | n_query = max(1, int(16* params.test_n_way/params.train_n_way)) 94 | train_few_shot_params = dict(n_way = params.train_n_way, n_support = params.n_shot) 95 | base_datamgr = SetDataManager(image_size, n_query = n_query, **train_few_shot_params) 96 | training_loader = base_datamgr.get_data_loader( training_file , aug = params.train_aug ) 97 | test_few_shot_params = dict(n_way = params.test_n_way, n_support = params.n_shot) 98 | val_datamgr = SetDataManager(image_size, n_query = n_query, **test_few_shot_params) 99 | val_loader = val_datamgr.get_data_loader( val_file, aug = False) 100 | model = GnnNet( model_dict[params.model], tf_path=params.tf_dir, **train_few_shot_params) 101 | 102 | model = model.cuda() 103 | 104 | #load model 105 | start_epoch = params.start_epoch 106 | stop_epoch = params.stop_epoch 107 | if params.resume != '': 108 | resume_file = get_resume_file('%s/checkpoints/%s'%(params.save_dir, params.resume), params.resume_epoch) 109 | if resume_file is not None: 110 | tmp = torch.load(resume_file) 111 | start_epoch = tmp['epoch']+1 112 | model.load_state_dict(tmp['state']) 113 | print(' 
resume the training with at {} epoch (model file {})'.format(start_epoch, params.resume)) 114 | elif params.modelType != 'pretrain': 115 | if params.warmup == 'gg3b0': 116 | raise Exception('Must provide the pre-trained feature encoder file using --warmup option!') 117 | state = load_warmup_state('%s/checkpoints/%s'%(params.save_dir, params.warmup)) 118 | model.feature.load_state_dict(state, strict=False) 119 | 120 | # training 121 | print('\n--- start the training ---') 122 | model = train(training_loader, val_loader, model, start_epoch, stop_epoch, params) 123 | -------------------------------------------------------------------------------- /train_metaStudent.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | import torch.optim 4 | import os 5 | import random 6 | 7 | from methods import backbone 8 | from methods.backbone import model_dict 9 | from data.datamgr import SimpleDataManager, SetDataManager 10 | from methods.gnnnet import GnnNet 11 | from methods.student_MED2N import GnnNetStudent 12 | from options import parse_args, get_resume_file, load_warmup_state 13 | 14 | 15 | def train(S_base_loader, A_base_loader, val_loader, model, Expert_S, Expert_A, start_epoch, stop_epoch, params): 16 | # get optimizer and checkpoint path 17 | optimizer = torch.optim.Adam(model.parameters()) 18 | if not os.path.isdir(params.checkpoint_dir): 19 | os.makedirs(params.checkpoint_dir) 20 | 21 | # for validation 22 | max_acc = 0 23 | total_it = 0 24 | 25 | # start 26 | for epoch in range(start_epoch,stop_epoch): 27 | model.train() 28 | total_it = model.train_loop(epoch, S_base_loader, A_base_loader, Expert_S, Expert_A, optimizer, total_it) 29 | model.eval() 30 | 31 | acc = model.test_loop( val_loader) 32 | if acc > max_acc : 33 | print("best model! save...") 34 | max_acc = acc 35 | outfile = os.path.join(params.checkpoint_dir, 'best_model.tar') 36 | torch.save({'epoch':epoch, 'state':model.state_dict()}, outfile) 37 | else: 38 | print("GG! 
best accuracy {:f}".format(max_acc)) 39 | 40 | if ((epoch + 1) % params.save_freq==0) or (epoch==stop_epoch-1): 41 | outfile = os.path.join(params.checkpoint_dir, '{:d}.tar'.format(epoch)) 42 | torch.save({'epoch':epoch, 'state':model.state_dict()}, outfile) 43 | 44 | return model 45 | 46 | 47 | 48 | # --- main function --- 49 | if __name__=='__main__': 50 | # set random seeds 51 | seed = 0 52 | print("set seed = %d" % seed) 53 | random.seed(seed) 54 | np.random.seed(seed) 55 | torch.manual_seed(seed) 56 | torch.cuda.manual_seed_all(seed) 57 | torch.backends.cudnn.deterministic = True 58 | torch.backends.cudnn.benchmark = False 59 | 60 | # parse arguments 61 | params = parse_args('train') 62 | print('--- student training: {} ---\n'.format(params.name)) 63 | print(params) 64 | 65 | # output and tensorboard dir 66 | params.tf_dir = '%s/log/%s'%(params.save_dir, params.name) 67 | params.checkpoint_dir = '%s/checkpoints/%s'%(params.save_dir, params.name) 68 | if not os.path.isdir(params.checkpoint_dir): 69 | os.makedirs(params.checkpoint_dir) 70 | 71 | # dataloader 72 | print('\n--- prepare dataloader ---') 73 | image_size = 224 74 | n_query = max(1, int(16* params.test_n_way/params.train_n_way)) 75 | train_few_shot_params = dict(n_way = params.train_n_way, n_support = params.n_shot) 76 | base_datamgr = SetDataManager(image_size, n_query = n_query, **train_few_shot_params) 77 | 78 | test_few_shot_params = dict(n_way = params.test_n_way, n_support = params.n_shot) 79 | val_datamgr = SetDataManager(image_size, n_query = n_query, **test_few_shot_params) 80 | val_file = os.path.join(params.data_dir, 'miniImagenet', 'val.json') 81 | val_loader = val_datamgr.get_data_loader( val_file, aug = False) 82 | 83 | assert(params.modelType=='Student') 84 | print('meta-training the student model ME-D2N.') 85 | 86 | # source episode 87 | print('base source dataset: miniImagenet') 88 | base_file = os.path.join(params.data_dir, 'miniImagenet', 'base.json') 89 | S_base_loader = base_datamgr.get_data_loader( base_file , aug = params.train_aug ) 90 | 91 | # target episode 92 | print('auxiliary target dataset: {} with num_target as {}'.format(params.target_set, str(params.target_num_label))) 93 | labeled_base_file = 'output/labled_base_' + params.target_set + '_' + str(params.target_num_label) + '.json' 94 | A_base_loader = base_datamgr.get_data_loader( labeled_base_file, aug = params.train_aug) 95 | 96 | 97 | # expert models 98 | print('--loading teacher models--') 99 | # define the expert teacher models 100 | Expert_S = GnnNet( model_dict[params.model], tf_path=params.tf_dir, **train_few_shot_params) 101 | Expert_A = GnnNet( model_dict[params.model], tf_path=params.tf_dir, **train_few_shot_params) 102 | Expert_S = Expert_S.cuda() 103 | Expert_A = Expert_A.cuda() 104 | # load the expert models 105 | ckp_S = torch.load(params.ckp_S) 106 | Expert_S.load_state_dict(ckp_S['state']) 107 | print('Expert_S loaded from:', params.ckp_S) 108 | ckp_A = torch.load(params.ckp_A) 109 | Expert_A.load_state_dict(ckp_A['state']) 110 | print('Expert_A loaded from:', params.ckp_A) 111 | Expert_S.eval() 112 | Expert_A.eval() 113 | 114 | # student model 115 | assert(params.modelType=='Student') 116 | print('--meta-training the student model ME-D2N--') 117 | # define the student model 118 | model = GnnNetStudent( model_dict[params.model], tf_path=params.tf_dir, target_set = params.target_set, **train_few_shot_params) 119 | model = model.cuda() 120 | model.train() 121 | # load student model 122 | start_epoch = params.start_epoch 123 | 
stop_epoch = params.stop_epoch 124 | if params.resume != '': 125 | resume_file = get_resume_file('%s/checkpoints/%s'%(params.save_dir, params.resume), params.resume_epoch) 126 | if resume_file is not None: 127 | tmp = torch.load(resume_file) 128 | start_epoch = tmp['epoch']+1 129 | model.load_state_dict(tmp['state']) 130 | print(' resume the training with at {} epoch (model file {})'.format(start_epoch, params.resume)) 131 | if params.warmup == 'gg3b0': 132 | raise Exception('Must provide the pre-trained feature encoder file using --warmup option!') 133 | state = load_warmup_state('%s/checkpoints/%s'%(params.save_dir, params.warmup)) 134 | model.feature.load_state_dict(state, strict=False) 135 | 136 | # training 137 | print('\n--- start the training ---') 138 | model = train(S_base_loader, A_base_loader, val_loader, model, Expert_S, Expert_A, start_epoch, stop_epoch, params) 139 | -------------------------------------------------------------------------------- /options.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import os 3 | import glob 4 | import torch 5 | import argparse 6 | 7 | def parse_args(script): 8 | parser = argparse.ArgumentParser(description= 'few-shot script %s' %(script)) 9 | parser.add_argument('--dataset', default='miniImagenet', help='miniImagenet/cub/cars/places/plantae') 10 | parser.add_argument('--model', default='ResNet10', help='model: Conv{4|6} / ResNet{10|18|34}') # we use ResNet10 in the paper 11 | parser.add_argument('--train_n_way' , default=5, type=int, help='class num to classify for training') 12 | parser.add_argument('--test_n_way' , default=5, type=int, help='class num to classify for testing (validation) ') 13 | parser.add_argument('--n_shot' , default=5, type=int, help='number of labeled data in each class, same as n_support') 14 | parser.add_argument('--train_aug' , action='store_true', help='perform data augmentation or not during training ') 15 | parser.add_argument('--name' , default='tmp', type=str, help='') 16 | parser.add_argument('--save_dir' , default='./output', type=str, help='') 17 | parser.add_argument('--data_dir' , default='/DATACENTER/2/lovelyqian/CROSS-DOMAIN-FSL-DATASETS', type=str, help='') #TO change your dataset here 18 | 19 | if script == 'train': 20 | parser.add_argument('--num_classes' , default=64, type=int, help='total number of classes in softmax, only used in baseline') 21 | parser.add_argument('--save_freq' , default=200, type=int, help='Save frequency') 22 | parser.add_argument('--target_set', default='cub', help='cub/cars/places/plantae, use the extremely labeled target data') 23 | parser.add_argument('--modelType', default='St-Net', help='pretrain/St-Net/Tt-Net/Student') 24 | parser.add_argument('--ckp_S' , default='', type=str,help='the ckp path of the expert St-Net model') 25 | parser.add_argument('--ckp_A' , default='', type=str,help='the ckp path of the expert Tt-Net model') 26 | parser.add_argument('--target_num_label', default=5, type=int, help='number of labeled target base images per class') 27 | parser.add_argument('--start_epoch' , default=0, type=int,help ='Starting epoch') 28 | parser.add_argument('--stop_epoch' , default=400, type=int, help ='Stopping epoch') 29 | parser.add_argument('--resume' , default='', type=str, help='continue from previous trained model with largest epoch') 30 | parser.add_argument('--resume_epoch', default=-1, type=int, help='') 31 | parser.add_argument('--warmup' , default='gg3b0', type=str, help='continue from 
baseline, neglected if resume is true') 32 | elif script == 'test': 33 | parser.add_argument('--target_set', default='cub', help='cub/cars/places/plantae, use the extremely labeled target data') 34 | parser.add_argument('--split' , default='novel', help='base/val/novel') 35 | parser.add_argument('--save_epoch', default=400, type=int,help ='load the model trained in x epoch, use the best model if x is -1') 36 | parser.add_argument('--warmup' , default='gg3bo', type = str, help = 'just for insert the test function into the training.') 37 | parser.add_argument('--stop_epoch' , default=400, type=int, help ='Stopping epoch') 38 | else: 39 | raise ValueError('Unknown script') 40 | 41 | return parser.parse_args() 42 | 43 | def get_assigned_file(checkpoint_dir,num): 44 | assign_file = os.path.join(checkpoint_dir, '{:d}.tar'.format(num)) 45 | return assign_file 46 | 47 | def get_resume_file(checkpoint_dir, resume_epoch=-1): 48 | filelist = glob.glob(os.path.join(checkpoint_dir, '*.tar')) 49 | print(filelist) 50 | if len(filelist) == 0: 51 | return None 52 | 53 | filelist = [ x for x in filelist if os.path.basename(x) != 'best_model.tar' ] 54 | epochs = np.array([int(os.path.splitext(os.path.basename(x))[0]) for x in filelist]) 55 | max_epoch = np.max(epochs) 56 | epoch = max_epoch if resume_epoch == -1 else resume_epoch 57 | resume_file = os.path.join(checkpoint_dir, '{:d}.tar'.format(epoch)) 58 | return resume_file 59 | 60 | def get_best_file(checkpoint_dir): 61 | best_file = os.path.join(checkpoint_dir, 'best_model.tar') 62 | if os.path.isfile(best_file): 63 | return best_file 64 | else: 65 | return get_resume_file(checkpoint_dir) 66 | 67 | def load_warmup_state(filename): 68 | print(' load pre-trained model file: {}'.format(filename)) 69 | warmup_resume_file = get_resume_file(filename) 70 | print(' warmup_resume_file:', warmup_resume_file) 71 | tmp = torch.load(warmup_resume_file) 72 | if tmp is not None: 73 | state = tmp['state'] 74 | state_keys = list(state.keys()) 75 | for i, key in enumerate(state_keys): 76 | if 'feature.' in key: 77 | newkey = key.replace("feature.","") 78 | state[newkey] = state.pop(key) 79 | else: 80 | state.pop(key) 81 | else: 82 | raise ValueError(' No pre-trained encoder file found!') 83 | return state 84 | 85 | 86 | ''' 87 | def load_warmup_state(filename, method): 88 | print(' load pre-trained model file: {}'.format(filename)) 89 | warmup_resume_file = get_resume_file(filename) 90 | print(' warmup_resume_file:', warmup_resume_file) 91 | tmp = torch.load(warmup_resume_file) 92 | if tmp is not None: 93 | state = tmp['state'] 94 | state_keys = list(state.keys()) 95 | for i, key in enumerate(state_keys): 96 | if 'relationnet' in method and "feature." in key: 97 | newkey = key.replace("feature.","") 98 | state[newkey] = state.pop(key) 99 | elif method == 'gnnnet' and 'feature.' in key: 100 | newkey = key.replace("feature.","") 101 | state[newkey] = state.pop(key) 102 | elif method == 'matchingnet' and 'feature.' in key and '.7.' 
not in key: 103 | newkey = key.replace("feature.","") 104 | state[newkey] = state.pop(key) 105 | else: 106 | state.pop(key) 107 | else: 108 | raise ValueError(' No pre-trained encoder file found!') 109 | return state 110 | ''' 111 | -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import os 3 | import h5py 4 | 5 | from methods import backbone 6 | from methods.backbone import model_dict 7 | from data.datamgr import SimpleDataManager 8 | from options import parse_args, get_best_file, get_assigned_file 9 | from methods.gnnnet import GnnNet 10 | import data.feature_loader as feat_loader 11 | import random 12 | import numpy as np 13 | 14 | # extract and save image features 15 | def save_features(model, data_loader, featurefile): 16 | f = h5py.File(featurefile, 'w') 17 | max_count = len(data_loader)*data_loader.batch_size 18 | all_labels = f.create_dataset('all_labels',(max_count,), dtype='i') 19 | all_feats=None 20 | count=0 21 | for i, (x,y) in enumerate(data_loader): 22 | if (i % 10) == 0: 23 | print(' {:d}/{:d}'.format(i, len(data_loader))) 24 | x = x.cuda() 25 | feats = model(x) 26 | if all_feats is None: 27 | all_feats = f.create_dataset('all_feats', [max_count] + list( feats.size()[1:]) , dtype='f') 28 | all_feats[count:count+feats.size(0)] = feats.data.cpu().numpy() 29 | all_labels[count:count+feats.size(0)] = y.cpu().numpy() 30 | count = count + feats.size(0) 31 | 32 | count_var = f.create_dataset('count', (1,), dtype='i') 33 | count_var[0] = count 34 | f.close() 35 | 36 | # evaluate using features 37 | def feature_evaluation(cl_data_file, model, n_way = 5, n_support = 5, n_query = 15): 38 | class_list = cl_data_file.keys() 39 | select_class = random.sample(class_list,n_way) 40 | z_all = [] 41 | for cl in select_class: 42 | img_feat = cl_data_file[cl] 43 | perm_ids = np.random.permutation(len(img_feat)).tolist() 44 | z_all.append( [ np.squeeze( img_feat[perm_ids[i]]) for i in range(n_support+n_query) ] ) 45 | z_all = torch.from_numpy(np.array(z_all) ) 46 | 47 | model.n_query = n_query 48 | scores = model.set_forward(z_all, is_feature = True) 49 | pred = scores.data.cpu().numpy().argmax(axis = 1) 50 | y = np.repeat(range( n_way ), n_query ) 51 | acc = np.mean(pred == y)*100 52 | return acc 53 | 54 | # --- main --- 55 | if __name__ == '__main__': 56 | # seed 57 | seed = 0 58 | print("set seed = %d" % seed) 59 | random.seed(seed) 60 | np.random.seed(seed) 61 | torch.manual_seed(seed) 62 | torch.cuda.manual_seed_all(seed) 63 | torch.backends.cudnn.deterministic = True 64 | torch.backends.cudnn.benchmark = False 65 | 66 | # parse argument 67 | params = parse_args('test') 68 | print('Testing! 
{} shots on {} dataset with {} epochs of {}'.format(params.n_shot, params.dataset, params.save_epoch, params.name)) 69 | remove_featurefile = True 70 | 71 | print('\nStage 1: saving features') 72 | # dataset 73 | print(' build dataset') 74 | image_size = 224 75 | split = params.split 76 | loadfile = os.path.join(params.data_dir, params.dataset, split + '.json') 77 | print('load file:', loadfile) 78 | datamgr = SimpleDataManager(image_size, batch_size = 64) 79 | data_loader = datamgr.get_data_loader(loadfile, aug = False) 80 | 81 | print(' build feature encoder') 82 | # feature encoder 83 | checkpoint_dir = '%s/checkpoints/%s'%(params.save_dir, params.name) 84 | if params.save_epoch != -1: 85 | modelfile = get_assigned_file(checkpoint_dir,params.save_epoch) 86 | else: 87 | modelfile = get_best_file(checkpoint_dir) 88 | model = model_dict[params.model]() 89 | model = model.cuda() 90 | tmp = torch.load(modelfile) 91 | try: 92 | state = tmp['state'] 93 | except KeyError: 94 | state = tmp['model_state'] 95 | except: 96 | raise 97 | state_keys = list(state.keys()) 98 | print('state_keys:', state_keys, len(state_keys)) 99 | for i, key in enumerate(state_keys): 100 | if "feature." in key and not 'gamma' in key and not 'beta' in key: 101 | newkey = key.replace("feature.","") 102 | state[newkey] = state.pop(key) 103 | else: 104 | state.pop(key) 105 | print('state keys:', list(state.keys()), len(list(state.keys()))) 106 | 107 | model.load_state_dict(state) 108 | model.eval() 109 | 110 | # save feature file 111 | print(' extract and save features...') 112 | if params.save_epoch != -1: 113 | featurefile = os.path.join( checkpoint_dir.replace("checkpoints","features"), split + "_" + str(params.save_epoch)+ ".hdf5") 114 | else: 115 | featurefile = os.path.join( checkpoint_dir.replace("checkpoints","features"), split + ".hdf5") 116 | dirname = os.path.dirname(featurefile) 117 | if not os.path.isdir(dirname): 118 | os.makedirs(dirname) 119 | save_features(model, data_loader, featurefile) 120 | 121 | print('\nStage 2: evaluate') 122 | acc_all = [] 123 | iter_num = 1000 124 | few_shot_params = dict(n_way = params.test_n_way , n_support = params.n_shot) 125 | # model 126 | print(' build metric-based model') 127 | model = GnnNet( model_dict[params.model], **few_shot_params) 128 | model = model.cuda() 129 | model.eval() 130 | 131 | # load model 132 | checkpoint_dir = '%s/checkpoints/%s'%(params.save_dir, params.name) 133 | if params.save_epoch != -1: 134 | modelfile = get_assigned_file(checkpoint_dir, params.save_epoch) 135 | else: 136 | modelfile = get_best_file(checkpoint_dir) 137 | if modelfile is not None: 138 | tmp = torch.load(modelfile) 139 | try: 140 | model.load_state_dict(tmp['state']) 141 | except RuntimeError: 142 | print('warning! 
RuntimeError when load_state_dict()!') 143 | model.load_state_dict(tmp['state'], strict=False) 144 | except KeyError: 145 | for k in tmp['model_state']: ##### revise latter 146 | if 'running' in k: 147 | tmp['model_state'][k] = tmp['model_state'][k].squeeze() 148 | model.load_state_dict(tmp['model_state'], strict=False) 149 | except: 150 | raise 151 | 152 | # load feature file 153 | print(' load saved feature file') 154 | cl_data_file = feat_loader.init_loader(featurefile) 155 | 156 | # start evaluate 157 | print(' evaluate') 158 | for i in range(iter_num): 159 | acc = feature_evaluation(cl_data_file, model, n_query=15, **few_shot_params) 160 | acc_all.append(acc) 161 | 162 | # statics 163 | print(' get statics') 164 | acc_all = np.asarray(acc_all) 165 | acc_mean = np.mean(acc_all) 166 | acc_std = np.std(acc_all) 167 | print(' %d test iterations: Acc = %4.2f%% +- %4.2f%%' %(iter_num, acc_mean, 1.96* acc_std/np.sqrt(iter_num))) 168 | 169 | # remove feature files [optional] 170 | if remove_featurefile: 171 | os.remove(featurefile) 172 | -------------------------------------------------------------------------------- /test_twoPaths.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import os 3 | import h5py 4 | 5 | from methods.backbone import model_dict 6 | from data.datamgr import SimpleDataManager 7 | from options import parse_args, get_best_file, get_assigned_file 8 | 9 | from methods.student_MED2N import GnnNetStudent 10 | import data.feature_loader as feat_loader 11 | import random 12 | import numpy as np 13 | 14 | import torch.nn.functional as F 15 | 16 | # extract and save image features 17 | def save_features(model, model2, data_loader, featurefile, featurefile2, data_flag): 18 | f = h5py.File(featurefile, 'w') 19 | f2 = h5py.File(featurefile2, 'w') 20 | max_count = len(data_loader)*data_loader.batch_size 21 | all_labels = f.create_dataset('all_labels',(max_count,), dtype='i') 22 | all_labels2 = f2.create_dataset('all_labels',(max_count,), dtype='i') 23 | all_feats=None 24 | all_feats2=None 25 | count=0 26 | count2 =0 27 | for i, (x,y) in enumerate(data_loader): 28 | if (i % 10) == 0: 29 | print(' {:d}/{:d}'.format(i, len(data_loader))) 30 | x = x.cuda() 31 | x_fea_block1 = model.feature.forward_block1(x) 32 | x_fea_block2 = model.feature.forward_block2(x_fea_block1) 33 | x_fea_block3 = model.feature.forward_block3(x_fea_block2) 34 | x_fea_block3 = model.mask_layer3(x_fea_block3, data_flag) 35 | x_fea_block4 = model.feature.forward_block4(x_fea_block3) 36 | x_fea_block4 = model.mask_layer4(x_fea_block4, data_flag) 37 | feats = model.feature.forward_rest(x_fea_block4) 38 | 39 | # model2 40 | feats2 = model2(x) 41 | 42 | if all_feats is None: 43 | all_feats = f.create_dataset('all_feats', [max_count] + list( feats.size()[1:]) , dtype='f') 44 | all_feats[count:count+feats.size(0)] = feats.data.cpu().numpy() 45 | all_labels[count:count+feats.size(0)] = y.cpu().numpy() 46 | count = count + feats.size(0) 47 | 48 | if all_feats2 is None: 49 | all_feats2 = f2.create_dataset('all_feats', [max_count] + list( feats2.size()[1:]) , dtype='f') 50 | all_feats2[count2:count2+feats2.size(0)] = feats2.data.cpu().numpy() 51 | all_labels2[count2:count2+feats2.size(0)] = y.cpu().numpy() 52 | count2 = count2 + feats2.size(0) 53 | 54 | count_var = f.create_dataset('count', (1,), dtype='i') 55 | count_var[0] = count 56 | f.close() 57 | 58 | count_var2 = f2.create_dataset('count', (1,), dtype='i') 59 | count_var2[0] = count2 60 | f2.close() 61 | 62 | # 
evaluate using features 63 | def feature_evaluation(cl_data_file, cl_data_file2, model, model2, n_way = 5, n_support = 5, n_query = 15): 64 | #print(cl_data_file.keys(), cl_data_file.keys()) 65 | class_list = cl_data_file.keys() 66 | select_class = random.sample(class_list,n_way) 67 | z_all = [] 68 | z_all2 = [] 69 | for cl in select_class: 70 | img_feat = cl_data_file[cl] 71 | img_feat2 = cl_data_file2[cl] 72 | perm_ids = np.random.permutation(len(img_feat)).tolist() 73 | z_all.append( [ np.squeeze( img_feat[perm_ids[i]]) for i in range(n_support+n_query) ] ) 74 | z_all2.append( [ np.squeeze( img_feat2[perm_ids[i]]) for i in range(n_support+n_query) ] ) 75 | 76 | z_all = torch.from_numpy(np.array(z_all) ) 77 | z_all2 = torch.from_numpy(np.array(z_all2) ) 78 | model.n_query = n_query 79 | model2.n_query = n_query 80 | scores = model.set_forward(z_all, is_feature = True) 81 | scores2 = model2.set_forward(z_all2, is_feature = True) 82 | scores = (scores + scores2)/2.0 83 | pred = scores.data.cpu().numpy().argmax(axis = 1) 84 | y = np.repeat(range( n_way ), n_query ) 85 | acc = np.mean(pred == y)*100 86 | return acc 87 | 88 | # --- main --- 89 | if __name__ == '__main__': 90 | # seed 91 | seed = 0 92 | print("set seed = %d" % seed) 93 | random.seed(seed) 94 | np.random.seed(seed) 95 | torch.manual_seed(seed) 96 | torch.cuda.manual_seed_all(seed) 97 | torch.backends.cudnn.deterministic = True 98 | torch.backends.cudnn.benchmark = False 99 | 100 | # parse argument 101 | params = parse_args('test') 102 | print('Testing! {} shots on {} dataset with {} epochs of {}'.format(params.n_shot, params.dataset, params.save_epoch, params.name)) 103 | remove_featurefile = True 104 | 105 | print('\nStage 1: saving features') 106 | # dataset 107 | print(' build dataset') 108 | image_size = 224 109 | split = params.split 110 | loadfile = os.path.join(params.data_dir, params.dataset, split + '.json') 111 | print('load file:', loadfile) 112 | datamgr = SimpleDataManager(image_size, batch_size = 64) 113 | data_loader = datamgr.get_data_loader(loadfile, aug = False) 114 | 115 | print(' build feature encoder') 116 | checkpoint_dir = '%s/checkpoints/%s'%(params.save_dir, params.name) 117 | if params.save_epoch != -1: 118 | modelfile = get_assigned_file(checkpoint_dir,params.save_epoch) 119 | else: 120 | modelfile = get_best_file(checkpoint_dir) 121 | 122 | # feature encoder 123 | few_shot_params = dict(n_way = params.test_n_way , n_support = params.n_shot) 124 | model = GnnNetStudent(model_dict[params.model], target_set = params.target_set, **few_shot_params) 125 | model = model.cuda() 126 | tmp = torch.load(modelfile) 127 | state = tmp['state'] 128 | model.load_state_dict(state) 129 | model.eval() 130 | 131 | # model2 132 | model2 = GnnNetStudent(model_dict[params.model], target_set = params.target_set, **few_shot_params) 133 | model2 = model2.cuda() 134 | model2.load_state_dict(state) 135 | model2.eval() 136 | 137 | # save feature file 138 | print(' extract and save features...') 139 | if params.save_epoch != -1: 140 | featurefile = os.path.join( checkpoint_dir.replace("checkpoints","features"), split + "_" + str(params.save_epoch)+ ".hdf5") 141 | featurefile2 = os.path.join( checkpoint_dir.replace("checkpoints","features"), split + "_" + str(params.save_epoch)+ "2.hdf5") 142 | else: 143 | featurefile = os.path.join( checkpoint_dir.replace("checkpoints","features"), split + ".hdf5") 144 | dirname = os.path.dirname(featurefile) 145 | if not os.path.isdir(dirname): 146 | os.makedirs(dirname) 147 | 148 | 
if(params.dataset=='miniImagenet'): 149 | domain_flag = 'S' 150 | else: 151 | domain_flag = 'A' 152 | save_features(model, model2, data_loader, featurefile, featurefile2, domain_flag) 153 | 154 | print('\nStage 2: evaluate') 155 | acc_all = [] 156 | iter_num = 1000 157 | # load feature file 158 | print(' load saved feature file') 159 | cl_data_file = feat_loader.init_loader(featurefile) 160 | cl_data_file2 = feat_loader.init_loader(featurefile2) 161 | 162 | # start evaluate 163 | print(' evaluate') 164 | for i in range(iter_num): 165 | acc = feature_evaluation(cl_data_file, cl_data_file2, model,model2, n_query=15, **few_shot_params) 166 | acc_all.append(acc) 167 | 168 | # statics 169 | print(' get statics') 170 | acc_all = np.asarray(acc_all) 171 | acc_mean = np.mean(acc_all) 172 | acc_std = np.std(acc_all) 173 | print(' %d test iterations: Acc = %4.2f%% +- %4.2f%%' %(iter_num, acc_mean, 1.96* acc_std/np.sqrt(iter_num))) 174 | 175 | # remove feature files [optional] 176 | if remove_featurefile: 177 | os.remove(featurefile) 178 | -------------------------------------------------------------------------------- /data/dataset.py: -------------------------------------------------------------------------------- 1 | # This code is modified from https://github.com/facebookresearch/low-shot-shrink-hallucinate 2 | 3 | import torch 4 | from PIL import Image 5 | import json 6 | import numpy as np 7 | import torchvision.transforms as transforms 8 | import os 9 | import random 10 | identity = lambda x:x 11 | 12 | 13 | class SimpleDataset: 14 | def __init__(self, data_file, transform, target_transform=identity): 15 | with open(data_file, 'r') as f: 16 | self.meta = json.load(f) 17 | self.transform = transform 18 | self.target_transform = target_transform 19 | 20 | def __getitem__(self,i): 21 | image_path = os.path.join(self.meta['image_names'][i]) 22 | 23 | image_path = image_path[:12]+'2'+image_path[13:] 24 | img = Image.open(image_path).convert('RGB') 25 | img = self.transform(img) 26 | target = self.target_transform(self.meta['image_labels'][i]) 27 | return img, target 28 | 29 | def __len__(self): 30 | return len(self.meta['image_names']) 31 | 32 | 33 | # added by fuyuqian in 20210108 34 | class RandomLabeledTargetDataset: 35 | def __init__(self, data_file,data_file_miniImagenet, transform, target_transform=identity): 36 | with open(data_file, 'r') as f: 37 | self.meta = json.load(f) 38 | with open(data_file_miniImagenet, 'r') as f_miniI: 39 | self.meta_miniImagenet = json.load(f_miniI) 40 | self.transform = transform 41 | self.target_transform = target_transform 42 | 43 | def __getitem__(self,i): 44 | idx = random.randint(0, len(self.meta['image_names'])-1) 45 | image_path = self.meta['image_names'][idx] 46 | 47 | image_path = image_path[:12]+'2'+image_path[13:] 48 | img = Image.open(image_path).convert('RGB') 49 | img = self.transform(img) 50 | target = self.target_transform(self.meta['image_labels'][idx]) 51 | return img, target 52 | 53 | def __len__(self): 54 | #return len(self.meta['image_names']) 55 | return len(self.meta_miniImagenet['image_names']) 56 | 57 | 58 | class SetDataset: 59 | def __init__(self, data_file, batch_size, transform): 60 | with open(data_file, 'r') as f: 61 | self.meta = json.load(f) 62 | 63 | self.cl_list = np.unique(self.meta['image_labels']).tolist() 64 | #print('dataset:', 'SetDataset:', 'cl_list:', self.cl_list) 65 | 66 | self.sub_meta = {} 67 | for cl in self.cl_list: 68 | self.sub_meta[cl] = [] 69 | 70 | for x,y in 
zip(self.meta['image_names'],self.meta['image_labels']): 71 | self.sub_meta[y].append(x) 72 | 73 | #print('dataset:', 'SetDataset:', 'sub_meta:', len(self.sub_meta)) 74 | #for i in range(len(self.sub_meta)): 75 | #print(i, len(self.sub_meta[i])) 76 | 77 | 78 | self.sub_dataloader = [] 79 | sub_data_loader_params = dict(batch_size = batch_size, 80 | shuffle = True, 81 | num_workers = 0, #use main thread only or may receive multiple batches 82 | pin_memory = False) 83 | for cl in self.cl_list: 84 | #print('dataset:', 'SetDataset:', 'cl:', cl) 85 | sub_dataset = SubDataset(self.sub_meta[cl], cl, transform = transform ) 86 | self.sub_dataloader.append( torch.utils.data.DataLoader(sub_dataset, **sub_data_loader_params) ) 87 | 88 | def __getitem__(self,i): 89 | return next(iter(self.sub_dataloader[i])) 90 | 91 | def __len__(self): 92 | #print('dataset:', 'SetDataset:', 'len:', len(self.cl_list)) 93 | return len(self.cl_list) 94 | 95 | 96 | class MultiSetDataset: 97 | def __init__(self, data_files, batch_size, transform): 98 | self.cl_list = np.array([]) 99 | self.sub_dataloader = [] 100 | self.n_classes = [] 101 | for data_file in data_files: 102 | with open(data_file, 'r') as f: 103 | meta = json.load(f) 104 | cl_list = np.unique(meta['image_labels']).tolist() 105 | self.cl_list = np.concatenate((self.cl_list, cl_list)) 106 | 107 | sub_meta = {} 108 | for cl in cl_list: 109 | sub_meta[cl] = [] 110 | 111 | for x,y in zip(meta['image_names'], meta['image_labels']): 112 | sub_meta[y].append(x) 113 | 114 | sub_data_loader_params = dict(batch_size = batch_size, 115 | shuffle = True, 116 | num_workers = 0, #use main thread only or may receive multiple batches 117 | pin_memory = False) 118 | for cl in cl_list: 119 | sub_dataset = SubDataset(sub_meta[cl], cl, transform = transform, min_size=batch_size) 120 | self.sub_dataloader.append( torch.utils.data.DataLoader(sub_dataset, **sub_data_loader_params) ) 121 | self.n_classes.append(len(cl_list)) 122 | 123 | def __getitem__(self,i): 124 | return next(iter(self.sub_dataloader[i])) 125 | 126 | def __len__(self): 127 | return len(self.cl_list) 128 | 129 | def lens(self): 130 | return self.n_classes 131 | 132 | 133 | class SubDataset: 134 | def __init__(self, sub_meta, cl, transform=transforms.ToTensor(), target_transform=identity, min_size=50): 135 | self.sub_meta = sub_meta 136 | self.cl = cl 137 | self.transform = transform 138 | self.target_transform = target_transform 139 | #print('dataset:', 'SubDatset:', 'sub_meta:', self.sub_meta) 140 | if len(self.sub_meta) < min_size: 141 | #print('dataset:', 'SubDataset:', 'len of self_meta:', len(self.sub_meta),' < 50') 142 | idxs = [i % len(self.sub_meta) for i in range(min_size)] 143 | #print('dataset:', 'SubDataset:', 'idxs:', idxs) 144 | self.sub_meta = np.array(self.sub_meta)[idxs].tolist() 145 | #print('dataset:', 'SubDataset:', 'sub_meat:', self.sub_meta) 146 | 147 | def __getitem__(self,i): 148 | #print('sub dataset:') 149 | image_path = os.path.join( self.sub_meta[i]) 150 | #print(image_path) 151 | image_path = image_path[:12]+'2'+image_path[13:] 152 | 153 | img = Image.open(image_path).convert('RGB') 154 | img = self.transform(img) 155 | target = self.target_transform(self.cl) 156 | #print('img:',img.size(), 'target:', target) 157 | return img, target 158 | 159 | def __len__(self): 160 | return len(self.sub_meta) 161 | 162 | 163 | class EpisodicBatchSampler(object): 164 | def __init__(self, n_classes, n_way, n_episodes): 165 | self.n_classes = n_classes 166 | self.n_way = n_way 167 | self.n_episodes = 
n_episodes 168 | 169 | def __len__(self): 170 | return self.n_episodes 171 | 172 | def __iter__(self): 173 | for i in range(self.n_episodes): 174 | yield torch.randperm(self.n_classes)[:self.n_way] 175 | 176 | 177 | class MultiEpisodicBatchSampler(object): 178 | def __init__(self, n_classes, n_way, n_episodes): 179 | self.n_classes = n_classes 180 | self.n_way = n_way 181 | self.n_episodes = n_episodes 182 | self.n_domains = len(n_classes) 183 | 184 | def __len__(self): 185 | return self.n_episodes 186 | 187 | def __iter__(self): 188 | domain_list = [i%self.n_domains for i in range(self.n_episodes)] 189 | random.shuffle(domain_list) 190 | for i in range(self.n_episodes): 191 | domain_idx = domain_list[i] 192 | start_idx = sum(self.n_classes[:domain_idx]) 193 | yield torch.randperm(self.n_classes[domain_idx])[:self.n_way] + start_idx 194 | -------------------------------------------------------------------------------- /methods/gnn.py: -------------------------------------------------------------------------------- 1 | # This code is modified from https://github.com/vgsatorras/few-shot-gnn/blob/master/models/gnn_iclr.py 2 | 3 | import torch 4 | import torch.nn as nn 5 | from torch.autograd import Variable 6 | import torch.nn.functional as F 7 | from methods.backbone import Linear_fw, Conv2d_fw, BatchNorm2d_fw, BatchNorm1d_fw 8 | 9 | if torch.cuda.is_available(): 10 | dtype = torch.cuda.FloatTensor 11 | dtype_l = torch.cuda.LongTensor 12 | else: 13 | dtype = torch.FloatTensor 14 | dtype_l = torch.cuda.LongTensor 15 | 16 | def gmul(input): 17 | W, x = input 18 | # x is a tensor of size (bs, N, num_features) 19 | # W is a tensor of size (bs, N, N, J) 20 | #x_size = x.size() 21 | W_size = W.size() 22 | N = W_size[-2] 23 | W = W.split(1, 3) 24 | W = torch.cat(W, 1).squeeze(3) # W is now a tensor of size (bs, J*N, N) 25 | output = torch.bmm(W, x) # output has size (bs, J*N, num_features) 26 | output = output.split(N, 1) 27 | output = torch.cat(output, 2) # output has size (bs, N, J*num_features) 28 | return output 29 | 30 | class Gconv(nn.Module): 31 | maml = False 32 | def __init__(self, nf_input, nf_output, J, bn_bool=True): 33 | super(Gconv, self).__init__() 34 | self.J = J 35 | self.num_inputs = J*nf_input 36 | self.num_outputs = nf_output 37 | self.fc = nn.Linear(self.num_inputs, self.num_outputs) if not self.maml else Linear_fw(self.num_inputs, self.num_outputs) 38 | 39 | self.bn_bool = bn_bool 40 | if self.bn_bool: 41 | self.bn = nn.BatchNorm1d(self.num_outputs, track_running_stats=False) if not self.maml else BatchNorm1d_fw(self.num_outputs, track_running_stats=False) 42 | 43 | def forward(self, input): 44 | W = input[0] 45 | x = gmul(input) # out has size (bs, N, num_inputs) 46 | #if self.J == 1: 47 | # x = torch.abs(x) 48 | x_size = x.size() 49 | x = x.contiguous() 50 | x = x.view(-1, self.num_inputs) 51 | x = self.fc(x) # has size (bs*N, num_outputs) 52 | 53 | if self.bn_bool: 54 | x = self.bn(x) 55 | x = x.view(*x_size[:-1], self.num_outputs) 56 | return W, x 57 | 58 | class Wcompute(nn.Module): 59 | maml = False 60 | def __init__(self, input_features, nf, operator='J2', activation='softmax', ratio=[2,2,1,1], num_operators=1, drop=False): 61 | super(Wcompute, self).__init__() 62 | self.num_features = nf 63 | self.operator = operator 64 | self.conv2d_1 = nn.Conv2d(input_features, int(nf * ratio[0]), 1, stride=1) if not self.maml else Conv2d_fw(input_features, int(nf * ratio[0]), 1, stride=1) 65 | self.bn_1 = nn.BatchNorm2d(int(nf * ratio[0]), track_running_stats=False) if not self.maml 
else BatchNorm2d_fw(int(nf * ratio[0]), track_running_stats=False) 66 | self.drop = drop 67 | if self.drop: 68 | self.dropout = nn.Dropout(0.3) 69 | self.conv2d_2 = nn.Conv2d(int(nf * ratio[0]), int(nf * ratio[1]), 1, stride=1) if not self.maml else Conv2d_fw(int(nf * ratio[0]), int(nf * ratio[1]), 1, stride=1) 70 | self.bn_2 = nn.BatchNorm2d(int(nf * ratio[1]), track_running_stats=False) if not self.maml else BatchNorm2d_fw(int(nf * ratio[1]), track_running_stats=False) 71 | self.conv2d_3 = nn.Conv2d(int(nf * ratio[1]), nf*ratio[2], 1, stride=1) if not self.maml else Conv2d_fw(int(nf * ratio[1]), nf*ratio[2], 1, stride=1) 72 | self.bn_3 = nn.BatchNorm2d(nf*ratio[2], track_running_stats=False) if not self.maml else BatchNorm2d_fw(nf*ratio[2], track_running_stats=False) 73 | self.conv2d_4 = nn.Conv2d(nf*ratio[2], nf*ratio[3], 1, stride=1) if not self.maml else Conv2d_fw(nf*ratio[2], nf*ratio[3], 1, stride=1) 74 | self.bn_4 = nn.BatchNorm2d(nf*ratio[3], track_running_stats=False) if not self.maml else BatchNorm2d_fw(nf*ratio[3], track_running_stats=False) 75 | self.conv2d_last = nn.Conv2d(nf, num_operators, 1, stride=1) if not self.maml else Conv2d_fw(nf, num_operators, 1, stride=1) 76 | self.activation = activation 77 | 78 | def forward(self, x, W_id): 79 | W1 = x.unsqueeze(2) 80 | W2 = torch.transpose(W1, 1, 2) #size: bs x N x N x num_features 81 | W_new = torch.abs(W1 - W2) #size: bs x N x N x num_features 82 | W_new = torch.transpose(W_new, 1, 3) #size: bs x num_features x N x N 83 | 84 | W_new = self.conv2d_1(W_new) 85 | W_new = self.bn_1(W_new) 86 | W_new = F.leaky_relu(W_new) 87 | if self.drop: 88 | W_new = self.dropout(W_new) 89 | 90 | W_new = self.conv2d_2(W_new) 91 | W_new = self.bn_2(W_new) 92 | W_new = F.leaky_relu(W_new) 93 | 94 | W_new = self.conv2d_3(W_new) 95 | W_new = self.bn_3(W_new) 96 | W_new = F.leaky_relu(W_new) 97 | 98 | W_new = self.conv2d_4(W_new) 99 | W_new = self.bn_4(W_new) 100 | W_new = F.leaky_relu(W_new) 101 | 102 | W_new = self.conv2d_last(W_new) 103 | W_new = torch.transpose(W_new, 1, 3) #size: bs x N x N x 1 104 | 105 | if self.activation == 'softmax': 106 | W_new = W_new - W_id.expand_as(W_new) * 1e8 107 | W_new = torch.transpose(W_new, 2, 3) 108 | # Applying Softmax 109 | W_new = W_new.contiguous() 110 | W_new_size = W_new.size() 111 | W_new = W_new.view(-1, W_new.size(3)) 112 | W_new = F.softmax(W_new, dim=1) 113 | W_new = W_new.view(W_new_size) 114 | # Softmax applied 115 | W_new = torch.transpose(W_new, 2, 3) 116 | 117 | elif self.activation == 'sigmoid': 118 | W_new = F.sigmoid(W_new) 119 | W_new *= (1 - W_id) 120 | elif self.activation == 'none': 121 | W_new *= (1 - W_id) 122 | else: 123 | raise (NotImplementedError) 124 | 125 | if self.operator == 'laplace': 126 | W_new = W_id - W_new 127 | elif self.operator == 'J2': 128 | W_new = torch.cat([W_id, W_new], 3) 129 | else: 130 | raise(NotImplementedError) 131 | 132 | return W_new 133 | 134 | class GNN_nl(nn.Module): 135 | def __init__(self, input_features, nf, train_N_way): 136 | super(GNN_nl, self).__init__() 137 | self.input_features = input_features 138 | self.nf = nf 139 | self.num_layers = 2 140 | 141 | for i in range(self.num_layers): 142 | if i == 0: 143 | module_w = Wcompute(self.input_features, nf, operator='J2', activation='softmax', ratio=[2, 2, 1, 1]) 144 | module_l = Gconv(self.input_features, int(nf / 2), 2) 145 | else: 146 | module_w = Wcompute(self.input_features + int(nf / 2) * i, nf, operator='J2', activation='softmax', ratio=[2, 2, 1, 1]) 147 | module_l = Gconv(self.input_features + 
int(nf / 2) * i, int(nf / 2), 2) 148 | self.add_module('layer_w{}'.format(i), module_w) 149 | self.add_module('layer_l{}'.format(i), module_l) 150 | 151 | self.w_comp_last = Wcompute(self.input_features + int(self.nf / 2) * self.num_layers, nf, operator='J2', activation='softmax', ratio=[2, 2, 1, 1]) 152 | self.layer_last = Gconv(self.input_features + int(self.nf / 2) * self.num_layers, train_N_way, 2, bn_bool=False) 153 | 154 | def forward(self, x): 155 | W_init = torch.eye(x.size(1), device=x.device).unsqueeze(0).repeat(x.size(0), 1, 1).unsqueeze(3) 156 | 157 | for i in range(self.num_layers): 158 | Wi = self._modules['layer_w{}'.format(i)](x, W_init) 159 | #print('Wi:', Wi.size()) 160 | 161 | x_new = F.leaky_relu(self._modules['layer_l{}'.format(i)]([Wi, x])[1]) 162 | x = torch.cat([x, x_new], 2) 163 | 164 | Wl=self.w_comp_last(x, W_init) 165 | out = self.layer_last([Wl, x])[1] 166 | 167 | return out 168 | -------------------------------------------------------------------------------- /data/datamgr.py: -------------------------------------------------------------------------------- 1 | # This code is modified from https://github.com/facebookresearch/low-shot-shrink-hallucinate 2 | import json 3 | import torch 4 | import random 5 | from PIL import Image 6 | import torchvision.transforms as transforms 7 | import data.additional_transforms as add_transforms 8 | from data.dataset import SimpleDataset, SetDataset, MultiSetDataset, EpisodicBatchSampler, MultiEpisodicBatchSampler, RandomLabeledTargetDataset 9 | from abc import abstractmethod 10 | 11 | class TransformLoader: 12 | def __init__(self, image_size, 13 | normalize_param = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), 14 | jitter_param = dict(Brightness=0.4, Contrast=0.4, Color=0.4)): 15 | self.image_size = image_size 16 | self.normalize_param = normalize_param 17 | self.jitter_param = jitter_param 18 | 19 | def parse_transform(self, transform_type): 20 | if transform_type=='ImageJitter': 21 | method = add_transforms.ImageJitter( self.jitter_param ) 22 | return method 23 | method = getattr(transforms, transform_type) 24 | 25 | if transform_type=='RandomResizedCrop': 26 | return method(self.image_size) 27 | elif transform_type=='CenterCrop': 28 | return method(self.image_size) 29 | elif transform_type=='Resize': 30 | return method([int(self.image_size*1.15), int(self.image_size*1.15)]) 31 | elif transform_type=='Normalize': 32 | return method(**self.normalize_param ) 33 | else: 34 | return method() 35 | 36 | def get_composed_transform(self, aug = False): 37 | if aug: 38 | transform_list = ['RandomResizedCrop', 'ImageJitter', 'RandomHorizontalFlip', 'ToTensor', 'Normalize'] 39 | else: 40 | transform_list = ['Resize','CenterCrop', 'ToTensor', 'Normalize'] 41 | 42 | transform_funcs = [ self.parse_transform(x) for x in transform_list] 43 | transform = transforms.Compose(transform_funcs) 44 | return transform 45 | 46 | 47 | 48 | 49 | # added by fuyuqian in 2021 0107 50 | class LabeledTargetDataset: 51 | def __init__(self, data_file,image_size, batch_size = 16, aug=True): 52 | with open(data_file, 'r') as f: 53 | self.meta = json.load(f) 54 | #print('len of labeled target data:', len(self.meta['image_names'])) 55 | # define transform 56 | self.batch_size = batch_size 57 | self.trans_loader = TransformLoader(image_size) 58 | self.transform = self.trans_loader.get_composed_transform(aug) 59 | 60 | def get_epoch(self): 61 | # return random 62 | idx_list = [i for i in range(len(self.meta['image_names']))] 63 | selected_idx_list = 
random.sample(idx_list, self.batch_size) 64 | 65 | img_list = [] 66 | img_label = [] 67 | 68 | for idx in selected_idx_list: 69 | image_path = self.meta['image_names'][idx] 70 | image_label = self.meta['image_labels'][idx] 71 | img = Image.open(image_path).convert('RGB') 72 | img = self.transform(img) 73 | img_list.append(img) 74 | img_label.append(image_label) 75 | #print(img_label) 76 | img_list = torch.stack(img_list) 77 | #img_label = torch.stack(img_label) 78 | img_label = torch.LongTensor(img_label) 79 | #print('img_list:', img_list.size()) 80 | #print('img_label:', img_label.size()) 81 | return img_list, img_label 82 | 83 | 84 | 85 | class DataManager: 86 | @abstractmethod 87 | def get_data_loader(self, data_file, aug): 88 | pass 89 | 90 | class SimpleDataManager(DataManager): 91 | def __init__(self, image_size, batch_size): 92 | super(SimpleDataManager, self).__init__() 93 | self.batch_size = batch_size 94 | self.trans_loader = TransformLoader(image_size) 95 | 96 | def get_data_loader(self, data_file, aug): #parameters that would change on train/val set 97 | transform = self.trans_loader.get_composed_transform(aug) 98 | dataset = SimpleDataset(data_file, transform) 99 | data_loader_params = dict(batch_size = self.batch_size, shuffle = True, num_workers = 4, pin_memory = True) 100 | data_loader = torch.utils.data.DataLoader(dataset, **data_loader_params) 101 | 102 | return data_loader 103 | 104 | 105 | # added in 20210108 106 | class RandomLabeledTargetDataManager(DataManager): 107 | def __init__(self, image_size, batch_size): 108 | super(RandomLabeledTargetDataManager, self).__init__() 109 | self.batch_size = batch_size 110 | self.trans_loader = TransformLoader(image_size) 111 | 112 | def get_data_loader(self, data_file, data_file_miniImagenet, aug): #parameters that would change on train/val set 113 | transform = self.trans_loader.get_composed_transform(aug) 114 | dataset = RandomLabeledTargetDataset(data_file, data_file_miniImagenet, transform) 115 | data_loader_params = dict(batch_size = self.batch_size, shuffle = True, num_workers = 4, pin_memory = True) 116 | data_loader = torch.utils.data.DataLoader(dataset, **data_loader_params) 117 | 118 | return data_loader 119 | 120 | class SetDataManager(DataManager): 121 | def __init__(self, image_size, n_way, n_support, n_query, n_eposide=100): 122 | super(SetDataManager, self).__init__() 123 | self.image_size = image_size 124 | self.n_way = n_way 125 | self.batch_size = n_support + n_query 126 | self.n_eposide = n_eposide 127 | 128 | self.trans_loader = TransformLoader(image_size) 129 | #print('datamgr:', 'SetDataManager:', 'n_way:', self.n_way, 'batch_size:', self.batch_size) 130 | 131 | def get_data_loader(self, data_file, aug): #parameters that would change on train/val set 132 | transform = self.trans_loader.get_composed_transform(aug) 133 | if isinstance(data_file, list): 134 | dataset = MultiSetDataset( data_file , self.batch_size, transform ) 135 | sampler = MultiEpisodicBatchSampler(dataset.lens(), self.n_way, self.n_eposide ) 136 | else: 137 | dataset = SetDataset( data_file , self.batch_size, transform ) 138 | sampler = EpisodicBatchSampler(len(dataset), self.n_way, self.n_eposide ) 139 | data_loader_params = dict(batch_sampler = sampler, num_workers=4) 140 | data_loader = torch.utils.data.DataLoader(dataset, **data_loader_params) 141 | return data_loader 142 | 143 | ''' 144 | 145 | # added in 20210109 146 | class RandomLabeledTargetSetDataManager(DataManager): 147 | def __init__(self, image_size, n_way, n_support, n_query, 
n_eposide=100): 148 | super(RandomLabeledTargetSetDataManager, self).__init__() 149 | self.image_size = image_size 150 | self.n_way = n_way 151 | self.batch_size = n_support + n_query 152 | self.n_eposide = n_eposide 153 | 154 | self.trans_loader = TransformLoader(image_size) 155 | 156 | def get_data_loader(self, data_file, aug): #parameters that would change on train/val set 157 | transform = self.trans_loader.get_composed_transform(aug) 158 | if isinstance(data_file, list): 159 | dataset = MultiSetDataset( data_file , self.batch_size, transform ) 160 | sampler = MultiEpisodicBatchSampler(dataset.lens(), self.n_way, self.n_eposide ) 161 | else: 162 | dataset = SetDataset( data_file , self.batch_size, transform ) 163 | sampler = EpisodicBatchSampler(len(dataset), self.n_way, self.n_eposide ) 164 | data_loader_params = dict(batch_sampler = sampler, num_workers=4) 165 | data_loader = torch.utils.data.DataLoader(dataset, **data_loader_params) 166 | return data_loader 167 | ''' 168 | -------------------------------------------------------------------------------- /methods/meta_template_student_MED2N.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import numpy as np 3 | from abc import abstractmethod 4 | from tensorboardX import SummaryWriter 5 | import torch.nn.functional as F 6 | 7 | class MetaTemplate(nn.Module): 8 | def __init__(self, model_func, n_way, n_support, flatten=True, leakyrelu=False, tf_path=None, change_way=True): 9 | super(MetaTemplate, self).__init__() 10 | self.n_way = n_way 11 | self.n_support = n_support 12 | self.n_query = -1 #(change depends on input) 13 | self.feature = model_func(flatten=flatten, leakyrelu=leakyrelu) 14 | self.feat_dim = self.feature.final_feat_dim 15 | self.change_way = change_way #some methods allow different_way classification during training and test 16 | self.tf_writer = SummaryWriter(log_dir=tf_path) if tf_path is not None else None 17 | 18 | @abstractmethod 19 | def set_forward(self,x,is_feature): 20 | pass 21 | 22 | @abstractmethod 23 | def set_forward_loss(self, x): 24 | pass 25 | 26 | def forward(self,x): 27 | out = self.feature.forward(x) 28 | return out 29 | 30 | def parse_feature(self,x,is_feature): 31 | x = x.cuda() 32 | if is_feature: 33 | z_all = x 34 | else: 35 | x = x.contiguous().view( self.n_way * (self.n_support + self.n_query), *x.size()[2:]) 36 | z_all = self.feature.forward(x) 37 | z_all = z_all.view( self.n_way, self.n_support + self.n_query, -1) 38 | z_support = z_all[:, :self.n_support] 39 | z_query = z_all[:, self.n_support:] 40 | 41 | return z_support, z_query 42 | 43 | def correct(self, x): 44 | scores, loss = self.set_forward_loss(x) 45 | y_query = np.repeat(range( self.n_way ), self.n_query ) 46 | 47 | topk_scores, topk_labels = scores.data.topk(1, 1, True, True) 48 | topk_ind = topk_labels.cpu().numpy() 49 | top1_correct = np.sum(topk_ind[:,0] == y_query) 50 | return float(top1_correct), len(y_query), loss.item()*len(y_query) 51 | 52 | def train_loop(self, epoch, S_train_loader, A_train_loader, Expert_S, Expert_A, optimizer, total_it): 53 | print_freq = len(S_train_loader) // 10 54 | avg_loss=0 55 | for ((i, (S_x, S_y_global)), (i, (A_x, A_y_global))) in zip(enumerate(S_train_loader), enumerate(A_train_loader)): 56 | self.n_query = S_x.size(1) - self.n_support 57 | if self.change_way: 58 | self.n_way = S_x.size(0) 59 | optimizer.zero_grad() 60 | 61 | #hyper-parameters 62 | k1 = 0.2 #S:A 63 | k2_fsl = 0.05 #loss_fsl 64 | k3_cls = 0.05 #loss-global-cls 65 | 
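# Editorial note: together with k4_std defined just below, these weights combine
# the losses computed later in this loop as
#   S_loss      = S_loss_KD  + k2_fsl * S_loss_fsl  + k3_cls * S_loss_cls   (A_loss analogously)
#   loss_Masked = k1 * S_loss     + (1 - k1) * A_loss
#   loss_STD    = k1 * S_loss_STD + (1 - k1) * A_loss_STD
#   loss        = loss_Masked + k4_std * loss_STD
# i.e. the knowledge-distillation terms carry weight 1, the episodic FSL and
# global-classification terms are down-weighted by k2_fsl / k3_cls, and the
# non-masked (STD) student path is added with weight k4_std.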
k4_std = 0.2 #loss-STD 66 | 67 | # forward Teachers 68 | Expert_S.n_way = self.n_way 69 | Expert_S.n_query = self.n_query 70 | Expert_A.n_way = self.n_way 71 | Expert_A.n_query = self.n_query 72 | Expert_S_scores, Expert_S_loss = Expert_S.set_forward_loss(S_x) 73 | Expert_A_scores, Expert_A_loss = Expert_A.set_forward_loss(A_x) 74 | 75 | # DSG: Forward Student wth Mask 76 | S_scores, S_loss_fsl, S_loss_cls = self.set_forward_loss_withGlobalCls(S_x, S_y_global, data_flag='S') 77 | A_scores, A_loss_fsl, A_loss_cls = self.set_forward_loss_withGlobalCls(A_x, A_y_global, data_flag='A') 78 | 79 | # DSG: KD loss 80 | T = 5.0 81 | lossKD = nn.KLDivLoss() 82 | S_loss_KD = lossKD(F.log_softmax(S_scores / T, dim=1), F.softmax(Expert_S_scores / T, dim=1)) 83 | A_loss_KD = lossKD(F.log_softmax(A_scores / T, dim=1), F.softmax(Expert_A_scores / T, dim=1)) 84 | 85 | # DSG: loss 86 | S_loss = S_loss_KD + k2_fsl * S_loss_fsl + k3_cls * S_loss_cls 87 | A_loss = A_loss_KD + k2_fsl * A_loss_fsl + k3_cls * A_loss_cls 88 | loss_Masked = k1 * S_loss + (1-k1) * A_loss 89 | 90 | # STD: Forward Student with STD (not masked) 91 | S_scores_STD, S_loss_fsl_STD, S_loss_cls_STD = self.set_forward_loss_withGlobalCls_STD(S_x, S_y_global, data_flag='S') 92 | A_scores_STD, A_loss_fsl_STD, A_loss_cls_STD = self.set_forward_loss_withGlobalCls_STD(A_x, A_y_global, data_flag='A') 93 | 94 | # STD: KD loss 95 | T = 5.0 96 | lossKD = nn.KLDivLoss() 97 | S_loss_KD_STD = lossKD(F.log_softmax(S_scores_STD / T, dim=1), F.softmax(Expert_S_scores / T, dim=1)) 98 | A_loss_KD_STD = lossKD(F.log_softmax(A_scores_STD / T, dim=1), F.softmax(Expert_A_scores / T, dim=1)) 99 | 100 | # STD: loss 101 | S_loss_STD = S_loss_KD_STD + k2_fsl * S_loss_fsl_STD + k3_cls * S_loss_cls_STD 102 | A_loss_STD = A_loss_KD_STD + k2_fsl * A_loss_fsl_STD + k3_cls * A_loss_cls_STD 103 | loss_STD = k1 * S_loss_STD + (1-k1) * A_loss_STD 104 | 105 | # final loss 106 | loss = loss_Masked + k4_std * loss_STD 107 | loss.backward() 108 | optimizer.step() 109 | avg_loss = avg_loss+loss.item() 110 | 111 | if (i + 1) % print_freq==0: 112 | print('Epoch {:d} | Batch {:d}/{:d} | Loss {:f}'.format(epoch, i + 1, len(S_train_loader), avg_loss/float(i+1))) 113 | if (total_it + 1) % 10 == 0 and self.tf_writer is not None: 114 | self.tf_writer.add_scalar(self.method + '/total_loss', loss.item(), total_it + 1) 115 | self.tf_writer.add_scalar(self.method + '/loss_Masked', loss_Masked.item(), total_it + 1) 116 | self.tf_writer.add_scalar(self.method + '/loss_STD', loss_STD.item(), total_it + 1) 117 | self.tf_writer.add_scalar(self.method + '/S_loss', S_loss.item(), total_it + 1) 118 | self.tf_writer.add_scalar(self.method + '/A_loss', A_loss.item(), total_it + 1) 119 | self.tf_writer.add_scalar(self.method + '/S_loss_FSL', S_loss_fsl.item(), total_it + 1) 120 | self.tf_writer.add_scalar(self.method + '/S_loss_KD', S_loss_KD.item(), total_it + 1) 121 | self.tf_writer.add_scalar(self.method + '/S_loss_cls', S_loss_cls.item(), total_it + 1) 122 | self.tf_writer.add_scalar(self.method + '/A_loss_FSL', A_loss_fsl.item(), total_it + 1) 123 | self.tf_writer.add_scalar(self.method + '/A_loss_KD', A_loss_KD.item(), total_it + 1) 124 | self.tf_writer.add_scalar(self.method + '/A_loss_cls', A_loss_cls.item(), total_it + 1) 125 | self.tf_writer.add_scalar(self.method + '/S_loss_STD', S_loss_STD.item(), total_it + 1) 126 | self.tf_writer.add_scalar(self.method + '/A_loss_STD', A_loss_STD.item(), total_it + 1) 127 | self.tf_writer.add_scalar(self.method + '/S_loss_FSL_STD', S_loss_fsl_STD.item(), 
total_it + 1) 128 | self.tf_writer.add_scalar(self.method + '/S_loss_KD_STD', S_loss_KD_STD.item(), total_it + 1) 129 | self.tf_writer.add_scalar(self.method + '/S_loss_cls_STD', S_loss_cls_STD.item(), total_it + 1) 130 | self.tf_writer.add_scalar(self.method + '/A_loss_FSL_STD', A_loss_fsl_STD.item(), total_it + 1) 131 | self.tf_writer.add_scalar(self.method + '/A_loss_KD_STD', A_loss_KD_STD.item(), total_it + 1) 132 | self.tf_writer.add_scalar(self.method + '/A_loss_cls_STD', A_loss_cls_STD.item(), total_it + 1) 133 | total_it += 1 134 | return total_it 135 | 136 | def test_loop(self, test_loader, record = None): 137 | loss = 0. 138 | count = 0 139 | acc_all = [] 140 | 141 | iter_num = len(test_loader) 142 | for i, (x,_) in enumerate(test_loader): 143 | self.n_query = x.size(1) - self.n_support 144 | if self.change_way: 145 | self.n_way = x.size(0) 146 | correct_this, count_this, loss_this = self.correct(x) 147 | acc_all.append(correct_this/ count_this*100 ) 148 | loss += loss_this 149 | count += count_this 150 | 151 | acc_all = np.asarray(acc_all) 152 | acc_mean = np.mean(acc_all) 153 | acc_std = np.std(acc_all) 154 | print('--- %d Loss = %.6f ---' %(iter_num, loss/count)) 155 | print('--- %d Test Acc = %4.2f%% +- %4.2f%% ---' %(iter_num, acc_mean, 1.96* acc_std/np.sqrt(iter_num))) 156 | 157 | return acc_mean 158 | -------------------------------------------------------------------------------- /methods/student_MED2N.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | from methods.meta_template_student_MED2N import MetaTemplate 5 | from methods.gnn import GNN_nl 6 | from methods import backbone 7 | from methods.learnablemask import LearnableMaskLayer 8 | 9 | class_categories={} 10 | class_categories['source']=64 11 | class_categories['cub']=99 12 | class_categories['cars']=97 13 | class_categories['places']=182 14 | class_categories['plantae']=99 15 | EPS=0.00001 16 | 17 | 18 | class GnnNetStudent(MetaTemplate): 19 | maml=False 20 | def __init__(self, model_func, n_way, n_support, tf_path=None, target_set='None'): 21 | super(GnnNetStudent, self).__init__(model_func, n_way, n_support, tf_path=tf_path) 22 | 23 | # loss function 24 | self.loss_fn = nn.CrossEntropyLoss() 25 | 26 | # metric function 27 | self.fc = nn.Sequential(nn.Linear(self.feat_dim, 128), nn.BatchNorm1d(128, track_running_stats=False)) if not self.maml else nn.Sequential(backbone.Linear_fw(self.feat_dim, 128), backbone.BatchNorm1d_fw(128, track_running_stats=False)) 28 | self.gnn = GNN_nl(128 + self.n_way, 96, self.n_way) 29 | self.method = 'GnnNet' 30 | 31 | # define global fc classifiers 32 | self.classifier_source = nn.Linear(self.feat_dim, class_categories['source']) 33 | self.classifier_target = nn.Linear(self.feat_dim, class_categories[target_set]) 34 | 35 | # define learnablMaskLayers 36 | mask_thred = 0.0 37 | self.mask_layer1 = LearnableMaskLayer(64) 38 | self.mask_layer2 = LearnableMaskLayer(128) 39 | self.mask_layer3 = LearnableMaskLayer(256) 40 | self.mask_layer4 = LearnableMaskLayer(512) 41 | 42 | # fix label for training the metric function 1*nw(1 + ns)*nw 43 | support_label = torch.from_numpy(np.repeat(range(self.n_way), self.n_support)).unsqueeze(1) 44 | support_label = torch.zeros(self.n_way*self.n_support, self.n_way).scatter(1, support_label, 1).view(self.n_way, self.n_support, self.n_way) 45 | support_label = torch.cat([support_label, torch.zeros(self.n_way, 1, n_way)], dim=1) 46 | self.support_label 
= support_label.view(1, -1, self.n_way) 47 | 48 | def cuda(self): 49 | self.feature.cuda() 50 | self.fc.cuda() 51 | self.gnn.cuda() 52 | self.support_label = self.support_label.cuda() 53 | self.classifier_source.cuda() 54 | self.classifier_target.cuda() 55 | self.mask_layer1.cuda() 56 | self.mask_layer2.cuda() 57 | self.mask_layer3.cuda() 58 | self.mask_layer4.cuda() 59 | return self 60 | 61 | def set_forward(self,x,is_feature=False): 62 | x = x.cuda() 63 | 64 | if is_feature: 65 | # reshape the feature tensor: n_way * n_s + 15 * f 66 | assert(x.size(1) == self.n_support + 15) 67 | z = self.fc(x.view(-1, *x.size()[2:])) 68 | z = z.view(self.n_way, -1, z.size(1)) 69 | else: 70 | # get feature using encoder 71 | x = x.view(-1, *x.size()[2:]) 72 | z = self.fc(self.feature(x)) 73 | z = z.view(self.n_way, -1, z.size(1)) 74 | #print('z:', z.size()) 75 | # stack the feature for metric function: n_way * n_s + n_q * f -> n_q * [1 * n_way(n_s + 1) * f] 76 | z_stack = [torch.cat([z[:, :self.n_support], z[:, self.n_support + i:self.n_support + i + 1]], dim=1).view(1, -1, z.size(2)) for i in range(self.n_query)] 77 | assert(z_stack[0].size(1) == self.n_way*(self.n_support + 1)) 78 | #print('z_stack:', 'len:', len(z_stack), 'z_stack[0]:', z_stack[0].size()) 79 | scores = self.forward_gnn(z_stack) 80 | return scores 81 | 82 | 83 | def get_classification_scores(self, z, classifier): 84 | z_norm = torch.norm(z, p=2, dim=1).unsqueeze(1).expand_as(z) 85 | z_normalized = z.div(z_norm + EPS) 86 | L_norm = torch.norm(classifier.weight.data, p=2, dim=1).unsqueeze(1).expand_as(classifier.weight.data) 87 | classifier.weight.data = classifier.weight.data.div(L_norm + EPS) 88 | cos_dist = classifier(z_normalized) 89 | cos_fac = 1.0 90 | scores = cos_fac * cos_dist 91 | return scores 92 | 93 | 94 | def set_forward_withGlobalCls(self,x,data_flag, is_feature=False): 95 | x = x.cuda() 96 | 97 | if is_feature: 98 | # reshape the feature tensor: n_way * n_s + 15 * f 99 | assert(x.size(1) == self.n_support + 15) 100 | z = self.fc(x.view(-1, *x.size()[2:])) 101 | z = z.view(self.n_way, -1, z.size(1)) 102 | else: 103 | # get feature using encoder 104 | x = x.view(-1, *x.size()[2:]) 105 | x_fea_block1 = self.feature.forward_block1(x) 106 | x_fea_block2 = self.feature.forward_block2(x_fea_block1) 107 | x_fea_block3 = self.feature.forward_block3(x_fea_block2) 108 | #mask 109 | x_fea_block3 = self.mask_layer3(x_fea_block3, data_flag) 110 | x_fea_block4 = self.feature.forward_block4(x_fea_block3) 111 | #mask 112 | x_fea_block4 = self.mask_layer4(x_fea_block4, data_flag) 113 | x_fea = self.feature.forward_rest(x_fea_block4) 114 | z = self.fc(x_fea) 115 | z = z.view(self.n_way, -1, z.size(1)) 116 | # for FSL- GNN classifer 117 | # stack the feature for metric function: n_way * n_s + n_q * f -> n_q * [1 * n_way(n_s + 1) * f] 118 | z_stack = [torch.cat([z[:, :self.n_support], z[:, self.n_support + i:self.n_support + i + 1]], dim=1).view(1, -1, z.size(2)) for i in range(self.n_query)] 119 | assert(z_stack[0].size(1) == self.n_way*(self.n_support + 1)) 120 | fsl_scores = self.forward_gnn(z_stack) 121 | 122 | # for FC - global FC classifier 123 | classifier_mode = 'v2' 124 | if(data_flag=='S'): 125 | if(classifier_mode == 'v1'): 126 | cls_scores = self.classifier_source(x_fea) 127 | elif(classifier_mode == 'v2'): 128 | cls_scores = self.get_classification_scores(x_fea, self.classifier_source) 129 | elif(data_flag=='A'): 130 | if(classifier_mode == 'v1'): 131 | cls_scores = self.classifier_target(x_fea) 132 | elif(classifier_mode == 
'v2'): 133 | cls_scores = self.get_classification_scores(x_fea, self.classifier_target) 134 | return fsl_scores, cls_scores 135 | 136 | 137 | def set_forward_withGlobalCls_STD(self,x, data_flag, is_feature=False): 138 | x = x.cuda() 139 | 140 | if is_feature: 141 | # reshape the feature tensor: n_way * n_s + 15 * f 142 | assert(x.size(1) == self.n_support + 15) 143 | z = self.fc(x.view(-1, *x.size()[2:])) 144 | z = z.view(self.n_way, -1, z.size(1)) 145 | else: 146 | # get feature using encoder 147 | x = x.view(-1, *x.size()[2:]) 148 | x_fea = self.feature(x) 149 | z = self.fc(x_fea) 150 | z = z.view(self.n_way, -1, z.size(1)) 151 | #for FSL- GNN classifer 152 | # stack the feature for metric function: n_way * n_s + n_q * f -> n_q * [1 * n_way(n_s + 1) * f] 153 | z_stack = [torch.cat([z[:, :self.n_support], z[:, self.n_support + i:self.n_support + i + 1]], dim=1).view(1, -1, z.size(2)) for i in range(self.n_query)] 154 | assert(z_stack[0].size(1) == self.n_way*(self.n_support + 1)) 155 | fsl_scores = self.forward_gnn(z_stack) 156 | 157 | # for FC - global FC classifier 158 | classifier_mode = 'v2' 159 | if(data_flag=='S'): 160 | if(classifier_mode == 'v1'): 161 | cls_scores = self.classifier_source(x_fea) 162 | elif(classifier_mode == 'v2'): 163 | cls_scores = self.get_classification_scores(x_fea, self.classifier_source) 164 | elif(data_flag=='A'): 165 | if(classifier_mode == 'v1'): 166 | cls_scores = self.classifier_target(x_fea) 167 | elif(classifier_mode == 'v2'): 168 | cls_scores = self.get_classification_scores(x_fea, self.classifier_target) 169 | return fsl_scores, cls_scores 170 | 171 | 172 | def forward_gnn(self, zs): 173 | # gnn inp: n_q * n_way(n_s + 1) * f 174 | nodes = torch.cat([torch.cat([z, self.support_label], dim=2) for z in zs], dim=0) 175 | scores = self.gnn(nodes) 176 | # n_q * n_way(n_s + 1) * n_way -> (n_way * n_q) * n_way 177 | scores = scores.view(self.n_query, self.n_way, self.n_support + 1, self.n_way)[:, :, -1].permute(1, 0, 2).contiguous().view(-1, self.n_way) 178 | return scores 179 | 180 | def set_forward_loss(self, x): 181 | y_query = torch.from_numpy(np.repeat(range( self.n_way ), self.n_query)) 182 | y_query = y_query.cuda() 183 | scores = self.set_forward(x) 184 | loss = self.loss_fn(scores, y_query) 185 | return scores, loss 186 | 187 | def set_forward_loss_withGlobalCls(self, x, y_cls, data_flag): 188 | y_query = torch.from_numpy(np.repeat(range( self.n_way ), self.n_query)) 189 | y_query = y_query.cuda() 190 | fsl_scores, cls_scores = self.set_forward_withGlobalCls(x, data_flag) 191 | fsl_loss = self.loss_fn(fsl_scores, y_query) 192 | y_cls = y_cls.view(cls_scores.size()[0]).cuda() 193 | cls_loss = self.loss_fn(cls_scores, y_cls) 194 | return fsl_scores, fsl_loss, cls_loss 195 | 196 | def set_forward_loss_withGlobalCls_STD(self, x, y_cls, data_flag): 197 | y_query = torch.from_numpy(np.repeat(range( self.n_way ), self.n_query)) 198 | y_query = y_query.cuda() 199 | fsl_scores, cls_scores = self.set_forward_withGlobalCls_STD(x, data_flag) 200 | fsl_loss = self.loss_fn(fsl_scores, y_query) 201 | y_cls = y_cls.view(cls_scores.size()[0]).cuda() 202 | cls_loss = self.loss_fn(cls_scores, y_cls) 203 | return fsl_scores, fsl_loss, cls_loss 204 | 205 | 206 | -------------------------------------------------------------------------------- /methods/backbone.py: -------------------------------------------------------------------------------- 1 | # This code is modified from https://github.com/facebookresearch/low-shot-shrink-hallucinate 2 | 3 | import torch 4 | 
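# Editorial note: backbone.py collects the feature encoders used by the methods
# above: fast-weight Linear/Conv2d/BatchNorm variants for MAML-style forward
# passes, a feature-wise transformation BatchNorm layer, plain ConvNet backbones,
# and ResNet10/18/34. The ResNet additionally exposes forward_block1..forward_block4
# and forward_rest, which student_MED2N.py uses to insert its learnable mask
# layers after residual stages 3 and 4.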
import torch.nn as nn 5 | import math 6 | import torch.nn.functional as F 7 | from torch.nn.utils import weight_norm 8 | 9 | # --- gaussian initialize --- 10 | def init_layer(L): 11 | # Initialization using fan-in 12 | if isinstance(L, nn.Conv2d): 13 | n = L.kernel_size[0]*L.kernel_size[1]*L.out_channels 14 | L.weight.data.normal_(0,math.sqrt(2.0/float(n))) 15 | elif isinstance(L, nn.BatchNorm2d): 16 | L.weight.data.fill_(1) 17 | L.bias.data.fill_(0) 18 | 19 | class distLinear(nn.Module): 20 | def __init__(self, indim, outdim): 21 | super(distLinear, self).__init__() 22 | self.L = weight_norm(nn.Linear(indim, outdim, bias=False), name='weight', dim=0) 23 | self.relu = nn.ReLU() 24 | 25 | def forward(self, x): 26 | x_norm = torch.norm(x, p=2, dim =1).unsqueeze(1).expand_as(x) 27 | x_normalized = x.div(x_norm + 0.00001) 28 | L_norm = torch.norm(self.L.weight.data, p=2, dim =1).unsqueeze(1).expand_as(self.L.weight.data) 29 | self.L.weight.data = self.L.weight.data.div(L_norm + 0.00001) 30 | cos_dist = self.L(x_normalized) 31 | scores = 10 * cos_dist 32 | return scores 33 | 34 | # --- flatten tensor --- 35 | class Flatten(nn.Module): 36 | def __init__(self): 37 | super(Flatten, self).__init__() 38 | 39 | def forward(self, x): 40 | return x.view(x.size(0), -1) 41 | 42 | # --- LSTMCell module for matchingnet --- 43 | class LSTMCell(nn.Module): 44 | maml = False 45 | def __init__(self, input_size, hidden_size, bias=True): 46 | super(LSTMCell, self).__init__() 47 | self.input_size = input_size 48 | self.hidden_size = hidden_size 49 | self.bias = bias 50 | if self.maml: 51 | self.x2h = Linear_fw(input_size, 4 * hidden_size, bias=bias) 52 | self.h2h = Linear_fw(hidden_size, 4 * hidden_size, bias=bias) 53 | else: 54 | self.x2h = nn.Linear(input_size, 4 * hidden_size, bias=bias) 55 | self.h2h = nn.Linear(hidden_size, 4 * hidden_size, bias=bias) 56 | self.reset_parameters() 57 | 58 | def reset_parameters(self): 59 | std = 1.0 / math.sqrt(self.hidden_size) 60 | for w in self.parameters(): 61 | w.data.uniform_(-std, std) 62 | 63 | def forward(self, x, hidden=None): 64 | if hidden is None: 65 | hx = torch.zeors_like(x) 66 | cx = torch.zeros_like(x) 67 | else: 68 | hx, cx = hidden 69 | 70 | gates = self.x2h(x) + self.h2h(hx) 71 | ingate, forgetgate, cellgate, outgate = torch.split(gates, self.hidden_size, dim=1) 72 | 73 | ingate = torch.sigmoid(ingate) 74 | forgetgate = torch.sigmoid(forgetgate) 75 | cellgate = torch.tanh(cellgate) 76 | outgate = torch.sigmoid(outgate) 77 | 78 | cy = torch.mul(cx, forgetgate) + torch.mul(ingate, cellgate) 79 | hy = torch.mul(outgate, torch.tanh(cy)) 80 | return (hy, cy) 81 | 82 | # --- LSTM module for matchingnet --- 83 | class LSTM(nn.Module): 84 | def __init__(self, input_size, hidden_size, num_layers=1, bias=True, batch_first=False, bidirectional=False): 85 | super(LSTM, self).__init__() 86 | 87 | self.input_size = input_size 88 | self.hidden_size = hidden_size 89 | self.num_layers = num_layers 90 | self.bias = bias 91 | self.batch_first = batch_first 92 | self.num_directions = 2 if bidirectional else 1 93 | assert(self.num_layers == 1) 94 | 95 | self.lstm = LSTMCell(input_size, hidden_size, self.bias) 96 | 97 | def forward(self, x, hidden=None): 98 | # swap axis if batch first 99 | if self.batch_first: 100 | x = x.permute(1, 0 ,2) 101 | 102 | # hidden state 103 | if hidden is None: 104 | h0 = torch.zeros(self.num_directions, x.size(1), self.hidden_size, dtype=x.dtype, device=x.device) 105 | c0 = torch.zeros(self.num_directions, x.size(1), self.hidden_size, 
dtype=x.dtype, device=x.device) 106 | else: 107 | h0, c0 = hidden 108 | 109 | # forward 110 | outs = [] 111 | hn = h0[0] 112 | cn = c0[0] 113 | for seq in range(x.size(0)): 114 | hn, cn = self.lstm(x[seq], (hn, cn)) 115 | outs.append(hn.unsqueeze(0)) 116 | outs = torch.cat(outs, dim=0) 117 | 118 | # reverse foward 119 | if self.num_directions == 2: 120 | outs_reverse = [] 121 | hn = h0[1] 122 | cn = c0[1] 123 | for seq in range(x.size(0)): 124 | seq = x.size(1) - 1 - seq 125 | hn, cn = self.lstm(x[seq], (hn, cn)) 126 | outs_reverse.append(hn.unsqueeze(0)) 127 | outs_reverse = torch.cat(outs_reverse, dim=0) 128 | outs = torch.cat([outs, outs_reverse], dim=2) 129 | 130 | # swap axis if batch first 131 | if self.batch_first: 132 | outs = outs.permute(1, 0, 2) 133 | return outs 134 | 135 | # --- Linear module --- 136 | class Linear_fw(nn.Linear): #used in MAML to forward input with fast weight 137 | def __init__(self, in_features, out_features, bias=True): 138 | super(Linear_fw, self).__init__(in_features, out_features, bias=bias) 139 | self.weight.fast = None #Lazy hack to add fast weight link 140 | self.bias.fast = None 141 | 142 | def forward(self, x): 143 | if self.weight.fast is not None and self.bias.fast is not None: 144 | out = F.linear(x, self.weight.fast, self.bias.fast) 145 | else: 146 | out = super(Linear_fw, self).forward(x) 147 | return out 148 | 149 | # --- Conv2d module --- 150 | class Conv2d_fw(nn.Conv2d): #used in MAML to forward input with fast weight 151 | def __init__(self, in_channels, out_channels, kernel_size, stride=1,padding=0, bias = True): 152 | super(Conv2d_fw, self).__init__(in_channels, out_channels, kernel_size, stride=stride, padding=padding, bias=bias) 153 | self.weight.fast = None 154 | if not self.bias is None: 155 | self.bias.fast = None 156 | 157 | def forward(self, x): 158 | if self.bias is None: 159 | if self.weight.fast is not None: 160 | out = F.conv2d(x, self.weight.fast, None, stride= self.stride, padding=self.padding) 161 | else: 162 | out = super(Conv2d_fw, self).forward(x) 163 | else: 164 | if self.weight.fast is not None and self.bias.fast is not None: 165 | out = F.conv2d(x, self.weight.fast, self.bias.fast, stride= self.stride, padding=self.padding) 166 | else: 167 | out = super(Conv2d_fw, self).forward(x) 168 | return out 169 | 170 | # --- softplus module --- 171 | def softplus(x): 172 | return torch.nn.functional.softplus(x, beta=100) 173 | 174 | # --- feature-wise transformation layer --- 175 | class FeatureWiseTransformation2d_fw(nn.BatchNorm2d): 176 | feature_augment = False 177 | def __init__(self, num_features, momentum=0.1, track_running_stats=True): 178 | super(FeatureWiseTransformation2d_fw, self).__init__(num_features, momentum=momentum, track_running_stats=track_running_stats) 179 | self.weight.fast = None 180 | self.bias.fast = None 181 | if self.track_running_stats: 182 | self.register_buffer('running_mean', torch.zeros(num_features)) 183 | self.register_buffer('running_var', torch.zeros(num_features)) 184 | if self.feature_augment: # initialize {gamma, beta} with {0.3, 0.5} 185 | self.gamma = torch.nn.Parameter(torch.ones(1, num_features, 1, 1)*0.3) 186 | self.beta = torch.nn.Parameter(torch.ones(1, num_features, 1, 1)*0.5) 187 | self.reset_parameters() 188 | 189 | def reset_running_stats(self): 190 | if self.track_running_stats: 191 | self.running_mean.zero_() 192 | self.running_var.fill_(1) 193 | 194 | def forward(self, x, step=0): 195 | if self.weight.fast is not None and self.bias.fast is not None: 196 | weight = 
self.weight.fast 197 | bias = self.bias.fast 198 | else: 199 | weight = self.weight 200 | bias = self.bias 201 | if self.track_running_stats: 202 | out = F.batch_norm(x, self.running_mean, self.running_var, weight, bias, training=self.training, momentum=self.momentum) 203 | else: 204 | out = F.batch_norm(x, torch.zeros_like(x), torch.ones_like(x), weight, bias, training=True, momentum=1) 205 | 206 | # apply feature-wise transformation 207 | if self.feature_augment and self.training: 208 | gamma = (1 + torch.randn(1, self.num_features, 1, 1, dtype=self.gamma.dtype, device=self.gamma.device)*softplus(self.gamma)).expand_as(out) 209 | beta = (torch.randn(1, self.num_features, 1, 1, dtype=self.beta.dtype, device=self.beta.device)*softplus(self.beta)).expand_as(out) 210 | out = gamma*out + beta 211 | return out 212 | 213 | # --- BatchNorm2d --- 214 | class BatchNorm2d_fw(nn.BatchNorm2d): 215 | def __init__(self, num_features, momentum=0.1, track_running_stats=True): 216 | super(BatchNorm2d_fw, self).__init__(num_features, momentum=momentum, track_running_stats=track_running_stats) 217 | self.weight.fast = None 218 | self.bias.fast = None 219 | if self.track_running_stats: 220 | self.register_buffer('running_mean', torch.zeros(num_features)) 221 | self.register_buffer('running_var', torch.zeros(num_features)) 222 | self.reset_parameters() 223 | 224 | def reset_running_stats(self): 225 | if self.track_running_stats: 226 | self.running_mean.zero_() 227 | self.running_var.fill_(1) 228 | 229 | def forward(self, x, step=0): 230 | if self.weight.fast is not None and self.bias.fast is not None: 231 | weight = self.weight.fast 232 | bias = self.bias.fast 233 | else: 234 | weight = self.weight 235 | bias = self.bias 236 | if self.track_running_stats: 237 | out = F.batch_norm(x, self.running_mean, self.running_var, weight, bias, training=self.training, momentum=self.momentum) 238 | else: 239 | out = F.batch_norm(x, torch.zeros(x.size(1), dtype=x.dtype, device=x.device), torch.ones(x.size(1), dtype=x.dtype, device=x.device), weight, bias, training=True, momentum=1) 240 | return out 241 | 242 | # --- BatchNorm1d --- 243 | class BatchNorm1d_fw(nn.BatchNorm1d): 244 | def __init__(self, num_features, momentum=0.1, track_running_stats=True): 245 | super(BatchNorm1d_fw, self).__init__(num_features, momentum=momentum, track_running_stats=track_running_stats) 246 | self.weight.fast = None 247 | self.bias.fast = None 248 | if self.track_running_stats: 249 | self.register_buffer('running_mean', torch.zeros(num_features)) 250 | self.register_buffer('running_var', torch.zeros(num_features)) 251 | self.reset_parameters() 252 | 253 | def reset_running_stats(self): 254 | if self.track_running_stats: 255 | self.running_mean.zero_() 256 | self.running_var.fill_(1) 257 | 258 | def forward(self, x, step=0): 259 | if self.weight.fast is not None and self.bias.fast is not None: 260 | weight = self.weight.fast 261 | bias = self.bias.fast 262 | else: 263 | weight = self.weight 264 | bias = self.bias 265 | if self.track_running_stats: 266 | out = F.batch_norm(x, self.running_mean, self.running_var, weight, bias, training=self.training, momentum=self.momentum) 267 | else: 268 | out = F.batch_norm(x, torch.zeros(x.size(1), dtype=x.dtype, device=x.device), torch.ones(x.size(1), dtype=x.dtype, device=x.device), weight, bias, training=True, momentum=1) 269 | return out 270 | 271 | # --- Simple Conv Block --- 272 | class ConvBlock(nn.Module): 273 | maml = False 274 | def __init__(self, indim, outdim, pool = True, padding = 1): 275 | 
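        # Editorial note: a ConvBlock is a 3x3 convolution followed by BatchNorm2d
        # (or FeatureWiseTransformation2d_fw when maml=True), ReLU, and an optional
        # MaxPool2d(2); each parametrized layer is passed through init_layer()
        # before being wrapped into self.trunk.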
super(ConvBlock, self).__init__() 276 | self.indim = indim 277 | self.outdim = outdim 278 | if self.maml: 279 | self.C = Conv2d_fw(indim, outdim, 3, padding = padding) 280 | self.BN = FeatureWiseTransformation2d_fw(outdim) 281 | else: 282 | self.C = nn.Conv2d(indim, outdim, 3, padding= padding) 283 | self.BN = nn.BatchNorm2d(outdim) 284 | self.relu = nn.ReLU(inplace=True) 285 | 286 | self.parametrized_layers = [self.C, self.BN, self.relu] 287 | if pool: 288 | self.pool = nn.MaxPool2d(2) 289 | self.parametrized_layers.append(self.pool) 290 | 291 | for layer in self.parametrized_layers: 292 | init_layer(layer) 293 | self.trunk = nn.Sequential(*self.parametrized_layers) 294 | 295 | def forward(self,x): 296 | out = self.trunk(x) 297 | return out 298 | 299 | # --- Simple ResNet Block --- 300 | class SimpleBlock(nn.Module): 301 | maml = False 302 | def __init__(self, indim, outdim, half_res, leaky=False): 303 | super(SimpleBlock, self).__init__() 304 | self.indim = indim 305 | self.outdim = outdim 306 | if self.maml: 307 | self.C1 = Conv2d_fw(indim, outdim, kernel_size=3, stride=2 if half_res else 1, padding=1, bias=False) 308 | self.BN1 = BatchNorm2d_fw(outdim) 309 | self.C2 = Conv2d_fw(outdim, outdim,kernel_size=3, padding=1,bias=False) 310 | self.BN2 = FeatureWiseTransformation2d_fw(outdim) # feature-wise transformation at the end of each residual block 311 | else: 312 | self.C1 = nn.Conv2d(indim, outdim, kernel_size=3, stride=2 if half_res else 1, padding=1, bias=False) 313 | self.BN1 = nn.BatchNorm2d(outdim) 314 | self.C2 = nn.Conv2d(outdim, outdim,kernel_size=3, padding=1,bias=False) 315 | self.BN2 = nn.BatchNorm2d(outdim) 316 | self.relu1 = nn.ReLU(inplace=True) if not leaky else nn.LeakyReLU(0.2, inplace=True) 317 | self.relu2 = nn.ReLU(inplace=True) if not leaky else nn.LeakyReLU(0.2, inplace=True) 318 | 319 | self.parametrized_layers = [self.C1, self.C2, self.BN1, self.BN2] 320 | 321 | self.half_res = half_res 322 | 323 | # if the input number of channels is not equal to the output, then need a 1x1 convolution 324 | if indim!=outdim: 325 | if self.maml: 326 | self.shortcut = Conv2d_fw(indim, outdim, 1, 2 if half_res else 1, bias=False) 327 | self.BNshortcut = FeatureWiseTransformation2d_fw(outdim) 328 | else: 329 | self.shortcut = nn.Conv2d(indim, outdim, 1, 2 if half_res else 1, bias=False) 330 | self.BNshortcut = nn.BatchNorm2d(outdim) 331 | 332 | self.parametrized_layers.append(self.shortcut) 333 | self.parametrized_layers.append(self.BNshortcut) 334 | self.shortcut_type = '1x1' 335 | else: 336 | self.shortcut_type = 'identity' 337 | 338 | for layer in self.parametrized_layers: 339 | init_layer(layer) 340 | 341 | def forward(self, x): 342 | out = self.C1(x) 343 | out = self.BN1(out) 344 | out = self.relu1(out) 345 | out = self.C2(out) 346 | out = self.BN2(out) 347 | short_out = x if self.shortcut_type == 'identity' else self.BNshortcut(self.shortcut(x)) 348 | out = out + short_out 349 | out = self.relu2(out) 350 | return out 351 | 352 | # --- ConvNet module --- 353 | class ConvNet(nn.Module): 354 | def __init__(self, depth, flatten = True): 355 | super(ConvNet,self).__init__() 356 | self.grads = [] 357 | self.fmaps = [] 358 | trunk = [] 359 | for i in range(depth): 360 | indim = 3 if i == 0 else 64 361 | outdim = 64 362 | B = ConvBlock(indim, outdim, pool = ( i <4 ) ) #only pooling for fist 4 layers 363 | trunk.append(B) 364 | 365 | if flatten: 366 | trunk.append(Flatten()) 367 | 368 | self.trunk = nn.Sequential(*trunk) 369 | self.final_feat_dim = 1600 370 | 371 | def 
forward(self,x): 372 | out = self.trunk(x) 373 | return out 374 | 375 | # --- ConvNetNopool module --- 376 | class ConvNetNopool(nn.Module): #Relation net use a 4 layer conv with pooling in only first two layers, else no pooling 377 | def __init__(self, depth): 378 | super(ConvNetNopool,self).__init__() 379 | self.grads = [] 380 | self.fmaps = [] 381 | trunk = [] 382 | for i in range(depth): 383 | indim = 3 if i == 0 else 64 384 | outdim = 64 385 | B = ConvBlock(indim, outdim, pool = ( i in [0,1] ), padding = 0 if i in[0,1] else 1 ) #only first two layer has pooling and no padding 386 | trunk.append(B) 387 | 388 | self.trunk = nn.Sequential(*trunk) 389 | self.final_feat_dim = [64,19,19] 390 | 391 | def forward(self,x): 392 | out = self.trunk(x) 393 | return out 394 | 395 | # --- ResNet module --- 396 | class ResNet(nn.Module): 397 | maml = False 398 | def __init__(self,block,list_of_num_layers, list_of_out_dims, flatten=True, leakyrelu=False): 399 | # list_of_num_layers specifies number of layers in each stage 400 | # list_of_out_dims specifies number of output channel for each stage 401 | super(ResNet,self).__init__() 402 | self.grads = [] 403 | self.fmaps = [] 404 | assert len(list_of_num_layers)==4, 'Can have only four stages' 405 | if self.maml: 406 | conv1 = Conv2d_fw(3, 64, kernel_size=7, stride=2, padding=3, bias=False) 407 | bn1 = BatchNorm2d_fw(64) 408 | else: 409 | conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False) 410 | bn1 = nn.BatchNorm2d(64) 411 | 412 | relu = nn.ReLU(inplace=True) if not leakyrelu else nn.LeakyReLU(0.2, inplace=True) 413 | pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) 414 | 415 | init_layer(conv1) 416 | init_layer(bn1) 417 | 418 | trunk = [conv1, bn1, relu, pool1] 419 | 420 | indim = 64 421 | for i in range(4): 422 | for j in range(list_of_num_layers[i]): 423 | half_res = (i>=1) and (j==0) 424 | B = block(indim, list_of_out_dims[i], half_res, leaky=leakyrelu) 425 | trunk.append(B) 426 | indim = list_of_out_dims[i] 427 | 428 | if flatten: 429 | avgpool = nn.AvgPool2d(7) 430 | trunk.append(avgpool) 431 | trunk.append(Flatten()) 432 | self.final_feat_dim = indim 433 | else: 434 | self.final_feat_dim = [ indim, 7, 7] 435 | 436 | self.trunk = nn.Sequential(*trunk) 437 | 438 | def forward(self,x): 439 | out = self.trunk(x) 440 | return out 441 | 442 | 443 | def forward_block1(self, x): 444 | out = self.trunk[:5](x) 445 | return out 446 | 447 | def forward_block2(self, x): 448 | out = self.trunk[5:6](x) 449 | return out 450 | 451 | def forward_block3(self, x): 452 | out = self.trunk[6:7](x) 453 | return out 454 | 455 | def forward_block4(self, x): 456 | out = self.trunk[7:8](x) 457 | return out 458 | 459 | def forward_rest(self,x): 460 | out = self.trunk[8:](x) 461 | return out 462 | 463 | # --- Conv networks --- 464 | def Conv4(): 465 | return ConvNet(4) 466 | def Conv6(): 467 | return ConvNet(6) 468 | def Conv4NP(): 469 | return ConvNetNopool(4) 470 | def Conv6NP(): 471 | return ConvNetNopool(6) 472 | 473 | # --- ResNet networks --- 474 | def ResNet10(flatten=True, leakyrelu=False): 475 | print('backbone:', 'return resnet10') 476 | return ResNet(SimpleBlock, [1,1,1,1],[64,128,256,512], flatten, leakyrelu) 477 | def ResNet18(flatten=True, leakyrelu=False): 478 | return ResNet(SimpleBlock, [2,2,2,2],[64,128,256,512], flatten, leakyrelu) 479 | def ResNet34(flatten=True, leakyrelu=False): 480 | return ResNet(SimpleBlock, [3,4,6,3],[64,128,256,512], flatten, leakyrelu) 481 | 482 | model_dict = dict(Conv4 = Conv4, 483 | Conv6 = Conv6, 
484 | ResNet10 = ResNet10, 485 | ResNet18 = ResNet18, 486 | ResNet34 = ResNet34) 487 | 488 | 489 | if __name__ == '__main__': 490 | model_func = model_dict['ResNet10'] 491 | net = model_func(flatten = True, leakyrelu= False) 492 | from torch.autograd import Variable 493 | x = Variable(torch.randn([16,3,224,224])) 494 | out = net(x) 495 | print(out.size()) 496 | 497 | print('------------------') 498 | model_func = model_dict['ResNet10'] 499 | net = model_func(flatten = True, leakyrelu= False) 500 | from torch.autograd import Variable 501 | x = Variable(torch.randn([16,3,224,224])) 502 | out = net(x) 503 | print(out.size()) 504 | 505 | print(net) 506 | block1 = net.forward_block1(x) 507 | print('block1:', block1.size()) 508 | 509 | block2 = net.forward_block2(block1) 510 | print('block2:', block2.size()) 511 | 512 | block3 = net.forward_block3(block2) 513 | print('block3:', block3.size()) 514 | 515 | block4 = net.forward_block4(block3) 516 | print('block4:', block4.size()) 517 | 518 | -------------------------------------------------------------------------------- /output/labled_base_cars_5.json: -------------------------------------------------------------------------------- 1 | {"image_names": ["/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05657.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06174.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04578.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03008.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04669.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01716.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00002.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05967.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03152.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04816.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04392.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07450.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01392.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04484.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02004.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01537.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04825.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02438.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07769.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01322.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06539.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03662.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03239.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03659.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02378.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07610.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01581.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03184.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00660.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03208.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03221.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03756.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01755.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05083.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03233.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04155.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01157.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07103.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04241.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02625.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08144.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05968.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05177.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08137.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03253.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03383.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02267.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00723.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06071.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07142.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05114.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06716.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06678.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06969.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03644.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03824.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00825.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05085.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02059.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01022.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01948.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03422.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03830.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06593.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05146.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06078.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03162.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02451.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01838.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07689.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00125.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07669.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02787.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06486.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00358.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04472.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07994.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03957.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04138.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04966.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05058.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07417.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05314.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07767.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01580.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03399.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03951.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05834.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05087.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07051.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08082.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02336.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06308.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01079.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00099.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06035.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03676.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05278.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03860.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01324.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05722.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06729.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03285.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02718.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00589.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05068.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01399.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06222.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03975.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04924.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01077.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07451.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07296.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05760.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07867.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04870.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01433.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00759.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05374.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06575.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03872.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05468.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00439.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07951.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00011.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07029.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07017.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04619.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05483.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07665.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05738.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01198.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05564.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04668.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05702.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00265.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07570.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00751.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08125.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07431.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02891.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03606.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01316.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04875.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07831.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06801.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05726.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04280.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00607.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05152.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02744.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03831.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00968.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06618.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02436.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07302.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03304.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00346.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03811.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01827.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05368.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04653.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02107.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05309.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04713.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02442.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02308.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04626.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03411.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06512.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00264.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00273.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07019.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03959.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01884.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01114.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01712.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05334.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05032.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02564.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04259.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03216.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00925.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06178.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01542.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02775.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02079.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02808.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02871.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06091.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02016.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02075.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01241.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04377.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02614.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07643.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07561.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03962.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01057.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05080.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07429.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07267.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03657.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07264.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05524.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02479.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06353.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06477.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07216.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07397.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06119.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02956.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03529.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03384.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07782.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01732.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02512.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01789.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00867.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01836.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06062.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04161.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00585.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00349.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05291.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05826.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06913.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00051.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07818.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03683.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02719.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01058.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02281.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00469.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03047.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07883.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03110.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06750.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05252.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03105.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03160.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05879.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02574.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01153.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04522.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05709.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02384.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01611.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01407.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07253.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05608.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03907.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02118.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00637.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00332.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03911.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06162.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00801.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06521.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03658.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08076.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03013.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07840.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06303.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05136.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00440.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00220.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06148.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07655.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03029.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07937.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03378.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00121.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05612.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07222.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00810.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07813.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00226.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01238.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02298.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05732.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06464.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01131.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02444.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01175.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00703.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01296.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00441.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04064.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02119.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03018.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07612.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04536.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07041.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08126.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00361.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04559.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00862.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08131.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04558.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03134.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07738.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01376.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06642.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04222.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03365.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06943.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02182.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04139.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03431.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05242.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02962.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02031.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03097.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03843.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06932.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00309.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07178.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07072.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06607.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00729.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00504.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04400.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02986.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05828.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04740.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07662.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04974.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07436.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01695.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00986.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02555.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07015.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04850.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05137.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07336.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01035.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05523.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00013.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03913.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05940.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00744.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02228.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00704.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01304.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07470.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02356.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06877.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04855.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02469.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06389.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06936.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04202.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07742.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00531.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01445.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02187.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07946.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00443.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06789.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02218.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06621.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06311.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03586.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/08060.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06753.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00394.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03727.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02779.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02403.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05154.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01657.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05110.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02514.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06808.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05990.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04983.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02471.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01196.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05675.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07571.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03983.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06341.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04295.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01090.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01896.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03530.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03801.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01740.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03939.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01633.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01350.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04781.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07959.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05505.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05823.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01231.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05999.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07255.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07127.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06352.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06840.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03001.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01976.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03005.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00828.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05430.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06232.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06329.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07505.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01565.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03302.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02631.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01463.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03067.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00009.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04135.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07936.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05582.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00506.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05678.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02983.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05785.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02358.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05703.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00939.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01713.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04542.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05147.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05635.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07777.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00277.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03540.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01569.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01237.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06207.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06580.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00655.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03720.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07827.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02572.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07807.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00298.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02263.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03333.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04276.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02236.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05629.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07984.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02208.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06672.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02198.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07414.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00984.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01759.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05215.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04582.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04153.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06123.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06247.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01104.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07435.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06867.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02525.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05459.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04726.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01930.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01139.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06730.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07358.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07770.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01111.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/04801.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01449.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/05417.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03003.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00786.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/00042.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/07779.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01140.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/02262.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/01464.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06743.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/06442.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/cars/source/cars_train/03759.jpg"], "image_labels": [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 30, 30, 30, 30, 30, 31, 31, 31, 31, 31, 32, 32, 32, 32, 32, 33, 33, 33, 33, 33, 34, 34, 34, 34, 34, 35, 35, 35, 35, 35, 36, 36, 36, 36, 36, 37, 37, 37, 37, 37, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 40, 40, 40, 40, 40, 41, 41, 41, 41, 41, 42, 42, 42, 42, 42, 43, 43, 43, 43, 43, 44, 44, 44, 44, 44, 45, 45, 45, 45, 45, 46, 46, 46, 46, 46, 47, 47, 47, 47, 47, 48, 48, 48, 48, 48, 49, 49, 49, 49, 49, 50, 50, 50, 50, 50, 51, 51, 51, 51, 51, 52, 52, 52, 52, 52, 53, 53, 53, 53, 53, 54, 54, 54, 54, 54, 55, 55, 55, 55, 55, 56, 56, 56, 56, 56, 57, 57, 57, 57, 57, 58, 58, 58, 58, 58, 59, 59, 59, 59, 59, 60, 60, 60, 60, 60, 61, 61, 61, 61, 61, 62, 62, 62, 62, 62, 63, 63, 63, 63, 63, 64, 64, 64, 64, 64, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 96, 96, 96, 
96, 96]} -------------------------------------------------------------------------------- /output/labled_base_plantae_5.json: -------------------------------------------------------------------------------- 1 | {"image_names": ["/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5431/08fbdb71002b5f6e1d7e08eb3e62a091.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5431/90274ea5ca69d5fdae7b9bef2573a42c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5431/01df554bc353d71cf82992d2e6876174.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5431/7d3c48624950aba187d5e07fec2e7d72.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5431/b7d7d89c736ca52bccf5c9138b6eb98b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6869/a320d0551a093f4167d978be02351de0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6869/0a0abd851a8d5b51984350d4e8ba2d8d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6869/6193d46f2cbbf5cba472e71459bd033e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6869/711b4ef868cdddf50b241ae8061d34ec.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6869/d9b5ceb623ede16138367ec3f3a36528.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7053/4c8a0bb6c38913e7632c0638804ac3d2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7053/8c8d58f9c0c93634c8b3be9591f69faa.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7053/78760b54cf5ad6c50e123a5bf49300ff.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7053/765666a6acb5ae745b4cea2573257ed9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7053/f906f59bf83e183a323374f6fb97452e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7836/bccd95aab14abddc59fa35fed056eaca.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7836/0cbc0402a0d71e68dcd5c9f24916b956.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7836/5b79dad636707b40533425f7eea59bc9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7836/f5acc1ce5a7b11e35cf596fc12b135fa.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7836/56052205c179861f5eafc23c6f2eb088.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6524/d144d39bc76cbe61ecb26afb64b21027.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6524/d2011b52fbc1836b4bdef3c27d41e244.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6524/f18c70dee69cd152647475c8106145ea.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6524/2fe6daf7247c6133362275a778cd002a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6524/d2cf27725066e24f2506e72cfd6185c0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5569/77d0aa0556d13d536da1af6a2c7112e7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5569/89dd92f479df7d87954347a217c3b7f4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5569/4b95c1b005192350d5388dddfbc22938.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5569/7a738a21a762019f5a2240249163bb8d.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5569/daeb3a727960e8dc6f759c0c3387f827.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6680/b8b59a1ac33891d21ba7490ae526de0c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6680/550bbf1330af009bcbe2717b3cb1802b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6680/e1e96cd4b20cc06f5c658b20fa9ea405.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6680/4089b452657d2383c7398f1149534b96.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6680/6260a310c473b4c0177569c2f9a1e883.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7451/99ae3a97153050ee9f4451b7592f8b25.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7451/4a2996e2bc768ed5008a62ecf5f6654a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7451/a8e9315b59ff80b8426e0634f91001cf.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7451/93041f4a4fbfc2f36c1215b6be6cf210.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7451/186100c29c6d8fc0db8b6a71774de79e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6572/1113c695f4777eaa3f8a1f623d963d0c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6572/0fd0a172dd007584fef6f907a32862b9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6572/6435028e26ae4b97bba10c7e8b8604a8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6572/86955805e777704582abe428d4bfbfe6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6572/b8b7829ef980afe6b4f29ded3b7b797e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7250/efc4845b74e7a5d71a562e74890458a5.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7250/d605dcfe0c5a59bda0f83ef8d67b014d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7250/21b3f24f7a4789ab0651d62fc8a84889.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7250/97ca15d39c5b8063725621dfef9896c1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7250/4ee393c2bad0b7567b327393bfcbfc49.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5388/5b6133c889bfe2ee42cbb2e05b653b38.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5388/3bd19b10f3b4a6a281e32d7c62732f19.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5388/e80097a04f348aa12f6209c390db9151.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5388/c9bd636babc38b36f6d7b706b0b13a69.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5388/036d2c2043afbfdd6994726e796989c7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7234/bdbc620c07176161a2e4170afdfa7705.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7234/e4248728d55a1dcdff4df4f43e33c484.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7234/8fb79ffde5f612c117217fcd06371691.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7234/f450273ba3c7f7f9b2547aaf7920d10f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7234/1bfd17b5799bc4cd77227ec737cc65e2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7522/98b72e92cbc0b2ff7fa437514e35d963.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7522/a813a6073587e77c33a9e4dffdc9a02a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7522/7f3f4d71826d2e43c5403f93f4bbae17.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7522/6c690d1dd15502c75bd93f8139a2155f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7522/3f5cea6eecda1bf5ef8a3123fa8bc43b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5519/6d3c380c75d7b93f9dc336a112412833.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5519/57221616ecab1a918f9a44ecc103c7bf.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5519/a656c7e0f74e481f0cce91217725ab70.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5519/3dffd986cdc3c1f93aa498e76b51ff9a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5519/d5ae18a111507e51ec43a87c84a64f89.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5567/250b7181395e37081350898268afa3a8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5567/416f2c9ecb6830f8bc1b2a9ead48bcbb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5567/3241ea6df454fedb5243a48846838b9e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5567/fa53db3dfea096041525c7f5c6d94c14.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5567/655ea0fb548cebf02c10de1cc06f5d28.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5498/6791189c78264994b2f39961c1b4d3de.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5498/9ff2046ffa77c463a95d369a59c13201.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5498/4b9ef195384142f84d5f88008aaa48b6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5498/de6bb88cc908f778c78588d1dd1f2b60.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5498/8f326f96a4e66a2624cae764d3adc429.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6781/6677eac070872b4bf1a70b1c2c296e28.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6781/d793e2d400accef347c431ad8d6d56cf.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6781/43c9732268a1652897e6f76752006f8b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6781/8c9a12f90405bf80c0eeccd1fa56ccbc.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6781/03bbab1e3d484568c28d196be49f926b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7008/76a44797ac7b7d8c2373ccf00c00c233.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7008/a2b33b0470500ad0cc27733d48866106.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7008/087af321847fa1381274d6727d084e15.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7008/07b3879f1f69563eb0bbd71f6e3a0742.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7008/65b0cfb846117420c32481f6d16da583.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6528/f9c1e72b3e2d6e22d29747e5ba5d2be4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6528/8a96d48001938c695374014b2c23af5e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6528/8c6b7a0fa8e97a552fa9628d206970d4.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6528/e395124864f0f4975e3937a5ab3abb97.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6528/2e8c8dd60b53766c0366d5e3a5f99354.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5840/de388a23611e96a617d49c102705aec0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5840/a25b2c3c5da2e8865e1d96e18fe5feb3.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5840/a867cbfc141366feb8217ecd8d2d9670.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5840/325c3ec60afa070539f2d2c98fa03a3d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5840/a8e32e7b7941e56efa036aea930a0835.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5824/19f55b494abbb3b423ce181a0be5d920.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5824/fd6842267d47bb453465322d06857388.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5824/5113631d39c869632c77eebf95b3b56a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5824/038c85b46da4976cb99cb49552a18a02.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5824/9b9afaec705bda2a4e058a6e5ac0a30e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8006/a354c813310408caf5fda2a7f56ea7de.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8006/2e89b463979f2bb4df01c8ca7b234c11.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8006/a36fdb7e01aaa39993b19102d8497fde.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8006/c7d2ff44f1cfd0c893727e14b3a0262a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8006/976e02b1a2280911c386662bb4f60834.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7034/ea61e22fec57656c6cb9e538a18a2dcf.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7034/6af1f258314a356c2ef2652832bbaf03.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7034/6ea616c5f87d87bb764d8c515c28c869.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7034/a1a60c98bfebb97b0eb94199bcfb65e1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7034/47e0a98e91b0e769c65c56451c59290e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6766/35a36260929da229bb78db658d6b92f9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6766/a8f4ca0c02df68328e41611b207c1f41.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6766/cdfae2037109a4a19e69657b3cddc28f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6766/992842e0171c016edc26b1cb04ff7147.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6766/0c6bc9060e9483dc7ed9e43ca478756c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7605/9622a68988f26918f9d4fa843a34f46d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7605/bfd4aa16f6a0e8c3d8eb23b5bc156712.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7605/638fa3e10ec67fcaf516ae9935fe70d7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7605/ec18fdff3c491ac03324e3a8d3706873.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7605/da123b5b48e6f9c5d6794e93412b61ce.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6017/e5df437c7c5a7f04b04dda45133df9fd.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6017/dd299b6c5e1614a7fc98fcc731ef6bd8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6017/13218a3f759b1b456be25168fe084523.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6017/7270608eff2accaedcaad29ab6d583d8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6017/7b5dfbc1dba172b75ad48b337975a3d1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8005/9eaed39d6de14ea40003d9ff981bab38.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8005/fdddd566938533a63e5e8d35d3708b52.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8005/858799f2c0a2f5ce4913437d141a02a3.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8005/9bd229047d4df7682d7e611623611e79.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8005/528e098b4d496cfbb22e61f4eaa2163b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7166/1f159cc842cd1e08415111300d36d42d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7166/a100c56ad353bd076af9f0c9c0e65b77.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7166/612395ea5e45b2db758ac7c444b6fc25.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7166/476bdb55e8040ec614aa5a50edda785c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7166/8c9b31394185c22929b04edc7953754b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6499/40bc0b6c9ee2bb817000ac865207dad4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6499/4ebb0f8e9887ca138289e755cece5572.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6499/4ef7d3bb352938d4b7b345c8068f65d8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6499/b667b8efd7e66c84fe657e2a4fc413b8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6499/435e3b179dab1c41b1623c05c44527c5.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6786/e5e9ada7ae9519647c65f05c6c9bedee.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6786/489c7eb50e454f66cf2da278d4077d9d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6786/1e5f9095e26c541b8bae0a0d627ddda7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6786/2244c679deb921ede6a913fffbc14d3f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6786/d20e6b7521ab64b86f7ac0d30dff613d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7469/e5c486fb05de4b4158355bb397ebb98c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7469/70d1117b49a3c0b71295fdf82ada9b44.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7469/a6cdc862dde8946bfac7550108c1c471.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7469/c630735e008c968a7928332f309ba253.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7469/5406c125aee8cddf326021760176bd9a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6711/dca0d2909b15743cf4f590da677afee7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6711/4d646af31e0641781cdb810e4e3fb25d.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6711/a88212e1bd90e47e00d1b28824a8b700.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6711/47cf35ec31b380204e58b0b8c24436d1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6711/131b5bfcd0a780784ad02fb5669beb7c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7913/3e65ee9bd67e0d0d37578085f7818f7a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7913/49ac087ce6a37a857d893f5a72d0f05b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7913/13dc97e8ad12526319affaf1387f8c5e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7913/603157c90a1b740d1b347ee08bc129f4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7913/f62a06bacba3cd1bc49fac4b86f800fd.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7886/899791c5a5ca3b28a1b42ff93022ace8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7886/d8edf05730eb98dd3a9cb0f0c273ab7c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7886/c5d77945ee74a422e9eccfa33cd8af9c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7886/57221e80b4200c735f24c50c8c6a6ffb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7886/802ed2d68645b9801b651afa7a527f94.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5743/fe514acd29e30565cac69b3c443992d1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5743/8768c5f91f90bee8dba94d8634b3de1b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5743/9bb56fba3f057af8a2b476be4c0274e9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5743/686b6b3769e914a2d578fc5a48a4dbe1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5743/3f6d56149b7e8037b47c16bbebccb72d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7948/bb80060391f1e4392e82e6b7ee64bb31.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7948/99409748c78a006f6e3fdf7fea2822a7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7948/18d07bbf3ee077cdec6f2ec9ff313224.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7948/c2768bb8acbd3feb904fd9148ebd0839.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7948/547e3b00c5c62a3614ea53e294a3c9fc.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6665/965c8a549b8d2c6926bd96f2f374278c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6665/accf7b4167661803bb3dae8b236ab4e6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6665/93276a938989fc54a901b853010ed8f6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6665/ba2dfe012df7023b1167c1b8e4814c61.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6665/adfc8e5a57e5d25b27dc297c368dc106.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7508/d196f2807a517f130e2a8ba36808494d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7508/ab2d6d6f1cf87462aea91efe23135f5f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7508/bebfe9be629fe8c8e16d8a78b5c1da73.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7508/ea5f4aeaccb85dbed11c264c93d3f0f1.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7508/428da5eb7f30729c3eb9d91746cca7c5.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5857/ce3041099bf147c6e7b29258ed9b3147.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5857/255f8702eaec51cbb45335d181029d93.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5857/6f4ec01bb13c99d82caef2aadeb945ec.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5857/10e1ff95c6cbe485fc25255116fc929c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5857/b1d5642a74966ff086de8ff17ce1ca08.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6995/86111b1165dd66c002f645e460e3a6aa.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6995/59a11a03ac1bfadda1e90d272359a1ac.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6995/3400abdb58149cb576f4dcf8ea1736dc.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6995/20247039367ed286af933f47b94dc67e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6995/15e3f77436c7c9216362547137de459e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7534/4f7b752bf7676f938d5baab3e12f0bc2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7534/8a8fd7177fbe413de525fc3ca85b6021.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7534/ea35d47cfee65454f237a864bb81741d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7534/d1b1a7b89328f802eb3cab244ed51e6c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7534/c3af91cea40051a399f6dc9be659dd5c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7491/6ac3ddef15d5cba5248c9e9b8b633714.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7491/b78435df08882d3c2203e816fca1132f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7491/af6cb06defdd14214a7389fad98739a8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7491/6f02b60c925d3fa2951ef8cbb2ce217e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7491/a053f24231939915b63eb4b2adf9c0ca.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5687/dca14dae00a4ccf858301f49799693d0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5687/e3325d4677a0c369fe06b5b38d68000e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5687/5d890bb3567eca75c2b562ecf92f9a91.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5687/dce6a77d393e110a117099a5812f5d29.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5687/24a4b98e5b2c07b9ef432d7b934929b2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7938/7aba11165f545c52333df94583a13c19.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7938/af76844f328c6eaa25013f0d966a2031.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7938/06cbd48b86e89453ee95cc25214367f0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7938/feb9f9e3ac27773f2422567f27d833a8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7938/ab06391db20aefe89fc9ed020b2c4ed4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7677/a49c5bfd0a3c922a17b561f9ff993334.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7677/ae96295b274217694e6a04383e457bb8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7677/c96c11e8aa458fdd44334f4a6c499b70.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7677/eb07a13f6a27af488f655aa3849226f6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7677/817e1450a26071876167e2f89917b77e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7618/d8401a7e69a64bd58ff119d1d7345725.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7618/1c9ca19ecbf8a0ee18732c96cd340ea7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7618/e777f054abb7be71816561e669589c1e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7618/0d7849db00e4ed6bcf0e520bc6e1ef01.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7618/c08e901e05ffcc8878544e9d0cf60f1e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7537/7bb66801ff4900b8f23e67ea894ea9a2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7537/df607a93d2d7115248dcd9bd9097ec59.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7537/83ffbd4687becaa9476337a91a562890.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7537/594d61583bcbbcd3a4676dca4b357d41.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7537/87bf2b383881ffb548cf94b1707ccfce.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7197/b0cb98a2fe814e217a7bbc3528249758.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7197/384cbbc437641a037ca2573d1a702e38.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7197/c33d8b9318203fa7bec226796c9bf294.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7197/8849f72185dc2281b38cdd8882b97b8d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7197/07842da043fe26497ded4a213efa5499.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7226/1967e3e8b1630cc64a6b99ffa92af5f6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7226/7b0ff41b81b9012b81a0e446b513b5ff.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7226/96f3c2b473ebdf7950c67e790807b401.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7226/7ee9e54b23af37017f29308174c6bd0f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7226/e25f8f4e33d1e264659bb7d5385be00a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7478/b4f4245476aac4f541776561b585200d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7478/e96917f829d3de781f4a651f21a979d0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7478/3b2e01d0b71bddc6b8f539c28ff7bae0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7478/920f7b53e6f5cba7219181b5cbee4b7a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7478/2bdf8fe42e642c172ef9943c3296ee37.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7062/740eeeef8d8e8f373c57d91b13c281df.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7062/ec1664b801b50b5e2c69c4d219d39ce2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7062/0694650ed526f2ae8edac5e657101571.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7062/3e4ba01c768d8111b7a8ed0f11b3dc2a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7062/bb42b59207ca7052185277bd960db90c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7448/2f286f082c421d40bbd7454959dc319f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7448/c4f85117ce16f5bd91eaaab8275bec2c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7448/30127acd4353841c3dc4721d6330a775.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7448/cce24f69e84ae890a52b74f08c55b45c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7448/f2bb0f35e75fc33b950c502d471cd664.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6494/b99eefc94eb13236ee3c102ad4123f20.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6494/188557187d4595ebc6366697076e7611.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6494/8566e9b8b880314cf6d16472184a43d0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6494/dd9df2e61f6d3c52f83e9e3760f06181.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6494/f0d41d9701d0a3000aca0d1304ebf3c8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5395/8c9f6a882b0d20858872b875a110aa4b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5395/b624b7eed335060b4e4c4df9760b68dd.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5395/9def13b11095a453629662dcd6ecb0c2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5395/d5040a034694afccdc39584a615ffb40.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5395/b411ff5916ef2740276f7aea68d5964d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6911/a7a7257c257facea497d70831ce6d491.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6911/c39a7196db8c8c8d53e5b8214030f23e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6911/fa8e3c71af7bb039b193768151a6bd10.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6911/b979d0f127018f0ff02be86d9de45785.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6911/caac2c86610dea4d70d5e11cc6f3da1c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7932/c241ec815b3ee302937afc84a172220b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7932/7d45bb7130d8c368ccd34caeee8f7834.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7932/23fff00f9d7bb319ed443b5ff3ab32f2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7932/2272245b56f578839c8f8250816d75af.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7932/87f690661d6f5c50509c03fa7fb1a480.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6144/6e3c10a92cada3e118ce360870546765.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6144/9685d725a384d755aefecc9850224f11.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6144/6739617a465d43ed43f0140e0c2dad42.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6144/71cdb3765714c4e194bc43e785f52b90.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6144/de9b93c68a48fdf413f1ea7cc70dd63d.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7539/7f9115ed2a7213279a2baec2f799861b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7539/085a5e2664b2abe0c3a63c2969cd3d30.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7539/a234de08f309953675b311d65228484e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7539/c0feccc08a6310d95fb967468641ed3d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7539/739a2e808bc289b67757ac84015a8a6c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7760/f04c47df53a59035c0f96c0997506134.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7760/c9756e917fe27514e0aeda58c818c847.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7760/fcc069c1cc4555e7fdb4f44e968296c4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7760/2923fda56aa0d6dd1289188811866909.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7760/25ddb1a2c1c0a069e2292bab577b6a26.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6005/1d331eb5dac5d74339bd9b4a9ec98789.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6005/532ba55dffed37c1a24c8b66eca88281.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6005/1e8795639cbcf97d7fbc4e7fedee9f1b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6005/bbb38fe90ca615dc1e91a887973279cb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6005/1ba08c427c56942ae0677f4005c7dbea.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5419/444c93b416a43260b19245ceb7fba076.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5419/25147be93adf9734cd6dd31c34c3e6f2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5419/df50504ca5b3dbebb1e5097a25e2c3ce.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5419/2b3b2724c75a695e2305e9f3c66b2b27.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5419/38d62ee05befc3124319d32cc7e292a4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7611/8e9fb58b7b4fe18acc5a7b22197c163a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7611/da97f986a107a0e1968b00a831a32669.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7611/85d7e4f5f5d965deb7d1aa06db21c3be.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7611/5c7c46531031d14b15e569ca57927a93.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7611/8748cf7570c08379b8765c7b79b9ba41.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7767/bd49be7fb79d95a14d27817f126f1040.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7767/dddd73971dbd0f0c8a874f952e362857.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7767/b84aa3e855c248d03341bef7b481a6d9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7767/ecbfeaac44c7fb6d5f2db43c83c0a6a6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7767/570cd917e645868af8c1f0e86f14f1d9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6968/516f65eb185e7b7590121f22b28310f5.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6968/f5f60ee14fa29cf7542c16ed72119307.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6968/f0bd307ee1ab6bc7081ab14e087c0ad4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6968/67fb964efcfc0047b37a814bc2de6029.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6968/299e30c6b4ba27417b3c63e2fc226ad9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6980/569fc4127be9c109ef38cdd9518daab2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6980/ae582b97c34226c72dd4bc5a526f6152.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6980/169691dd61095f81c4a244618d403528.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6980/29597f5112ec4353745f285d2bcc3a2a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6980/7898c08408c77ead8c30181ad684fca9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7245/fa857ca718e709f51a34d333a8490d27.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7245/df6b20139f4a14e31e93b66a288b362d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7245/4b2b4cfd36c9abdf1ac73f02b4c075ce.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7245/b6cd67912180d5d7c07bc6024cc6fcb9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7245/80e063164ab0a83a9c2847d8c6834dee.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6741/80591e34ad4e62b97f72c85ae1b0f244.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6741/de2cf19402c824d5cc026f4bd66d86c4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6741/98a852755ec8cf22de79307841d8898f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6741/03a2cc44dea2407fb36fc5fff32f3ded.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6741/2783501668546957311c8f875b237ec3.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5956/0277f003f9b05dbf04ae7d9546938431.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5956/4d5f772612b3e1b0207a9159e46401af.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5956/aabd8525612429ecbfa3e2ffd190c25f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5956/542635d3ba5718df72c80487d7320cd8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5956/2855fbec02c658f663e828a6fd83304c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6568/e9f2e3cf8474df1910b85088cb991bc9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6568/a223db199caf2d508b6e5d0ca7bd0524.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6568/ed5f63b6a17ea91e671b768b3fd0d9f7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6568/d4a762fef7bf068c0910af3e46538787.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6568/b54796ecd638469154995f65901e7b23.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6068/a09f0070ed49a0ada7827965033fd951.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6068/c085e2f14aea02cf3e74a51c41ab05fb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6068/12b3fbd6500e29a88ec6a5a6b29a4f01.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6068/a70ba0f4a9a84bb3a5e3d1965356faa3.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6068/0c737fa8d52a9270869550f43e03cfc1.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7150/a1f0b24aa212941c9946b751573fea59.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7150/1c3cbb3a6d75b65719290f221e13dc89.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7150/d5ec6704bd0bcdda23976ca4be69dedc.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7150/fbaa49ad0cb531a46b1798cfae91a3ac.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7150/23f17af713421abfe8355c87cf728419.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6709/af01ef13a5423b07ede2481eaf2f0573.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6709/a605ae72a6cc9cd036f70e27cf2df6d9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6709/b03b5c870c03465cca94a94cbfcb63d6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6709/c57dfebdf20091549d5b71d0f1d3ea14.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6709/3fd8206b819f857f61e11c1b738f43b8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7410/82fb29d2eff6b50af9fd56cf169ae8bf.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7410/24a3bec3bd08ea6bb6899ab4a86f8a46.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7410/90cad0fed4f2882514e1585b339c604b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7410/3f51b3320d1ba1f83effc66dbd19be03.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7410/0b5cb81672a7e8f314be829752be1520.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5544/215c9ffbd0888d3510febf36a67958c4.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5544/457102f5003d6ca3d264eb8d5cc102cb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5544/e0a30419fa725286b40b0c1af1e1aaec.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5544/d73572b6704653bab0aad7099666f5f7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5544/c1757e7a1a32a223ee8b29dac62cf582.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7302/fe96a39984995f499d8040d50369b56e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7302/8018635235eccbb4a2414e78ed98d5ea.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7302/f8be584e952c1ea4a09b45d359a83d8b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7302/5d33e0562f4ce71d013268d2805e1f38.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7302/cc5e631c333c650286276b2451d9877e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6839/27d496c66806550b5198a2f1056192b9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6839/e57916869c3b19d4a76703999ec19f67.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6839/834d2504e120eb4b29140ca88719cff0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6839/0a7f0d58101e77940e49b6fb90636659.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6839/b7b04c9c7b8818815520d1155cd84423.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5838/55a4879cfc3f83585f2673836144fbfd.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5838/2a19b341102413ca411ad410dcf42a07.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5838/717d3866ed6af92a89cb04e58a4f48ad.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5838/8a6f66c18c470a6870d42986c48280c9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5838/863370c465fa76bb31fb4b0947686a9b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6523/f0a2dca87a54bbe6b0cf3072fbb5b451.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6523/e0aaf333c5188f8cb547880dc7c97c52.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6523/08c4ab690e76b32deeb90755cf27f98e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6523/d2fbeb3d1a7abed2365f218195ed202e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6523/cdccd7d2da499f3de005d4aebf56a5d7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6828/97aaf3857d54e3c8b70ca89debd57a30.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6828/9b294ce03d167293e54342ed337f5257.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6828/699a99cb6735c68e5194acf49b47fd20.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6828/309cb2e0e94eefe861e6c2dae97cb76b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6828/56805961e6901e4fb3305e3e8abeb04b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7758/dae4df12aa0e97f71b716bfe96f2d6c5.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7758/e3400618e69731214a5587e843745de7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7758/0f888b6e78cb15ecad98c64123bef37d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7758/976b1a6efb365da766d178b466c410eb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7758/58790e08c5df0013cf76ca98d7743347.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7251/9ff5bc659a28746a90b32c3341a5fede.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7251/ba2eab2c901cc3ca03cc6e2200686f4a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7251/448eb51dd5fbe6db2b3516a9f9c01148.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7251/7335007a03baba99a753dab101b52318.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7251/0540d181cef9d08b57a998e94c5f5ceb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6803/ac874562d05471c48b26f3a90407f5b0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6803/403ed1497151fdb35e6d891cf44d7127.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6803/71093230fbfe0564e1b70d8ac47dd0f9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6803/316873a1cac53fb6c2e8e4a53111f483.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6803/1339366761c84f40a70db3f82fad3440.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5894/b87556a2af95f6269c27c17bebab0dec.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5894/e5382ff92e87c7b874d41b918088738c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5894/e1a2d87dbab7a53c45b68af62de1e143.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5894/3ff701036ee5cd27d6a3cec77132df67.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5894/80554390974465402726a1970f125e44.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8049/ba197657c58d3eecc4db76f860f990ba.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8049/ea84f5bcbe1d36cfd5366843aae16f38.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8049/e5c2886ef2fbcb4858f23c926b0c831e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8049/56f7113a7fc625423cfaac39fb55f7f7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/8049/fe8b706f5a3123ab95598aadb0ad0b63.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5850/2b1a858038bd9ea85ddcc9818efb7d6c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5850/add31df5881b4fe162bc3a5f99268874.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5850/ff2c79b4f153a10e569906ff70a8a1cd.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5850/491dc49b8794b1aa9bd10f2405b4ff35.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5850/ed1d8c661eaaa68226270cd0eee0a291.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7935/77fa4cc29eb53b5de54f0f50b0033f89.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7935/f4ff169ed8b51c0b8d0ecb182e0daa3c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7935/5393c37aa9eb7d21e461f043bec15655.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7935/256638a301cc6b9521676a9593e8aec7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7935/8c9d16e16e6893396593146bafe1c1f6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5774/54c568a37487ed181dbf88f46f21a691.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5774/4430b460aefef1697cb6d6f1e7d345af.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5774/62d9887e7d032e87172467fa9952c116.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5774/576bb67b0205b08df7dce1cc2f7e8a3a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5774/601e5d7fc9e094e8eb01f2716234d95b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6453/0c3a37f9c499d7a5ae95f857e9948cd9.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6453/a7f11d30933435adc4d33874ee2e7b5b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6453/332efde419f8cd2fb92c3fb20ef0518a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6453/c6049422029becd8442f6fa0bcf65804.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6453/0f17757856c228d4d4e1da518d0a9704.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7645/c8963c9e53ff9a5e6daf51831952e451.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7645/37d3e16b228445cb74933b5d7d19be25.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7645/6dd9d3b740d1e8cab0b056f305ff9dbb.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7645/aa3ac10727d5933516565200afbecaf3.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7645/8572941f0b20d9d10ec791c919395ee7.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7496/f0fda515321b7c439a845b83a20393ef.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7496/6d5cc3cbf4172fb4138a1757004e4b81.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7496/2e48a4dc43fe73f86547d5722d7471bf.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7496/4b3ffcb4beebd4490d2373d3aa3fff5d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7496/40777d371dfa9968200f9ce24db49bae.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6859/317bad81a0ab42f66be8a54d437e52a8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6859/dccdc23b8ccb1173a94ce8895cb1ebf7.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6859/87b87756d51f2a332e2ae9ca91d36f22.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6859/3eb6e8daa1949a0387624b2d0b3e761e.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6859/c82dd750f673701197b7cafd4b624f96.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6559/ba5193cef5ac71511e722682bb672436.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6559/48ac3878394a2ecddcc052f44c28b4ba.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6559/50142fb61e0f4f3017e9b557fe6878ac.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6559/2b2b171cd34672cf593a7683da864b90.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6559/b8fe71e7319396e18cb5b802d14972e8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6436/c032094e093542b99ad0a524f067786b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6436/6fe4cb649657c6cfa18fb9698750d436.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6436/2a0e311bf64e6499613342ac013604b0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6436/bdd95062fdeaac4f41198e14a689bc7c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6436/e751b966ca7d6d9ec2698e7d8cc46e0a.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7145/073a5f0b8cdc679b52ec8f1eb95240ce.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7145/98d6bc921a58716f7a53a00b74922ee6.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7145/b4fd4ccd7fd93058196973d513d2a632.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7145/0facf32629cf4c0e7351b03ba33c5852.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7145/43a5062f38f9820ea9d50d0253d8d4b8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6804/db1ae2e899569be9bdc4b0fc4ff3fd8f.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6804/70dadadcd793f61557bfbf307887ef58.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6804/d7fa0ad0a1c89c01bcb707958dbc8b32.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6804/12bdae8b332138c993db1ba43ec9bec2.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6804/371edd395950062d19748d2af803d49c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6909/6888d7aae4259448212c754443d14d81.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6909/04db7a9fd44fead6e789b656c821243d.jpg", 
"/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6909/9bcde7a9bb799a2c1dc16cbbd634ce5c.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6909/01f7664da60dbb900feddf4032ef88e8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/6909/acc1741b4013879aa3672dde1d94bba5.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5265/3457f0777ba15e33a581d6f7891581c0.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5265/40853fa12876e3e3b37498a11ced6506.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5265/b24d0667e168773f5a4066b4a6f3dacd.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5265/15e673c483be7b0918aba329e987d619.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/5265/77973485d202a3e6b4cb24ca1fee313b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7967/1f25021ff6882c2bfcc73f270637b308.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7967/0447cd3adff9b7511a2cee692cf8db72.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7967/b563035b3ad734c80946f101aae5ecc8.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7967/9bcf1317855d4f2518e16e9a90f730ce.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7967/51d9e1390a4238454f3a2d032739c34b.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7742/d9ce2be8a1c195cf974eb41aaf4f5041.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7742/100d547d53d144917947c3522c118c42.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7742/0503dd65bd4058426d38e6362f16940d.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7742/f4f58c883df4083cac68b314ee5399cc.jpg", "/DATACENTER/4/lovelyqian/CROSS-DOMAIN-FSL-DATASETS/plantae/images/7742/5c0609cfb73c62af0f527625929dc6b1.jpg"], "image_labels": [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 30, 30, 30, 30, 30, 31, 31, 31, 31, 31, 32, 32, 32, 32, 32, 33, 33, 33, 33, 33, 34, 34, 34, 34, 34, 35, 35, 35, 35, 35, 36, 36, 36, 36, 36, 37, 37, 37, 37, 37, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 40, 40, 40, 40, 40, 41, 41, 41, 41, 41, 42, 42, 42, 42, 42, 43, 43, 43, 43, 43, 44, 44, 44, 44, 44, 45, 45, 45, 45, 45, 46, 46, 46, 46, 46, 47, 47, 47, 47, 47, 48, 48, 48, 48, 48, 49, 49, 49, 49, 49, 50, 50, 50, 50, 50, 51, 51, 51, 51, 51, 52, 52, 52, 52, 52, 53, 53, 53, 53, 53, 54, 54, 54, 54, 54, 55, 55, 55, 55, 55, 56, 56, 56, 56, 56, 57, 57, 57, 57, 57, 58, 58, 58, 58, 58, 59, 59, 59, 59, 59, 60, 60, 60, 60, 60, 61, 61, 61, 61, 61, 62, 62, 62, 62, 62, 63, 63, 63, 63, 63, 64, 64, 64, 64, 64, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 79, 79, 
79, 79, 79, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98]} --------------------------------------------------------------------------------
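Note on the split file above: it pairs a flat list of absolute image paths with a parallel "image_labels" list (here, 5 images per class for labels 0-98). The sketch below is only an illustration of how such a filelist could be grouped by class and sampled from; the `load_labeled_base` helper, the `image_names` key name, and the episode-sampling logic are assumptions for demonstration, not the repository's own loader (see data/dataset.py and data/datamgr.py for that).

```python
import json
import random
from collections import defaultdict

def load_labeled_base(json_path):
    """Group parallel path/label lists from a filelist JSON by class label.

    ASSUMPTION: the paths are stored under an "image_names" key alongside
    the "image_labels" key visible above; adjust if the split file differs.
    """
    with open(json_path) as f:
        meta = json.load(f)
    by_class = defaultdict(list)
    for path, label in zip(meta["image_names"], meta["image_labels"]):
        by_class[label].append(path)
    return by_class

if __name__ == "__main__":
    # Illustrative only: sample a 5-way, 1-shot support set from the split.
    by_class = load_labeled_base("output/labled_base_plantae_5.json")
    ways = random.sample(sorted(by_class), 5)
    support = {c: random.choice(by_class[c]) for c in ways}
    print(support)
```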