├── RN.png
├── other_VS_ours.png
├── examples
│   ├── test.sh
│   ├── v-run.sh
│   ├── run_A-C.sh
│   ├── run_C-R.sh
│   ├── run_A-P.sh
│   ├── run_A-R.sh
│   ├── run_C-P.sh
│   └── run_P-R.sh
├── lr_schedule.py
├── loss.py
├── README.md
├── option.py
├── data_list.py
├── pre_process.py
├── resnet_rn.py
├── data
│   └── ImageCLEF
│       ├── cList.txt
│       ├── pList.txt
│       ├── iList.txt
│       └── bList.txt
├── test_image.py
├── network.py
├── train_image.py
└── reciprocal_norm.py
/RN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Openning07/reciprocal-normalization-for-DA/HEAD/RN.png
--------------------------------------------------------------------------------
/other_VS_ours.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Openning07/reciprocal-normalization-for-DA/HEAD/other_VS_ours.png
--------------------------------------------------------------------------------
/examples/test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## VisDA: train -> val
6 | # mTN
7 | s_name="train"
8 | t_name="val"
9 | echo "--- ${s_name} to ${t_name} | 1"
10 | python test_image.py --gpu_id ${gpu_id} --method RN --net ResNet50 --dset visda --seed 2021
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/examples/v-run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## VisDA: train -> val
6 | # mTN
7 |
8 |
9 |
10 | s_name="train"
11 | t_name="val"
12 | echo "--- ${s_name} to ${t_name} | 1"
13 | python train_image.py --gpu_id ${gpu_id} --net ResNet50 --method RN --num_iteration 20004 --dset visda --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 2021 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn_50_visda
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/lr_schedule.py:
--------------------------------------------------------------------------------
1 | def inv_lr_scheduler(optimizer, iter_num, gamma, power, lr=0.001, weight_decay=0.0005):
2 |     """Inverse decay schedule: lr = lr0 * (1 + gamma * iter_num) ** (-power)."""
3 |     lr = lr * (1 + gamma * iter_num) ** (-power)
4 |     for param_group in optimizer.param_groups:
5 |         param_group['lr'] = lr * param_group['lr_mult']
6 |         param_group['weight_decay'] = weight_decay * param_group['decay_mult']
7 |
8 |     return optimizer
9 |
10 |
11 | schedule_dict = {"inv": inv_lr_scheduler}
12 |
--------------------------------------------------------------------------------
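For reference, a minimal usage sketch of `inv_lr_scheduler`. The real parameter groups are built elsewhere in the repository (network.py / train_image.py); the `lr_mult`/`decay_mult` values and the `gamma`/`power` settings below are illustrative assumptions, not the repository's exact configuration:
```
import torch
import torch.nn as nn
from lr_schedule import inv_lr_scheduler

model = nn.Linear(256, 65)  # stand-in module for the real network
# Each param group must carry 'lr_mult' and 'decay_mult'; the scheduler reads both.
optimizer = torch.optim.SGD(
    [{'params': model.parameters(), 'lr_mult': 1.0, 'decay_mult': 2.0}],
    lr=0.001, momentum=0.9, weight_decay=0.0005)

for iter_num in range(0, 20004, 5000):
    optimizer = inv_lr_scheduler(optimizer, iter_num, gamma=0.001, power=0.75)
    print(iter_num, optimizer.param_groups[0]['lr'])
```
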
/examples/run_A-C.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## A -> C
6 | # mTN
7 | s_name="A"
8 | t_name="C"
9 | echo "--- ${s_name} to ${t_name} | 1"
10 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 164 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
11 |
12 | ## C -> A
13 | # mTN
14 | s_name="C"
15 | t_name="A"
16 | echo "--- ${s_name} to ${t_name} | 1"
17 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir NO/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 4661 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_l2
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/examples/run_C-R.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## R -> C
6 | # mTN
7 | s_name="R"
8 | t_name="C"
9 | echo "--- ${s_name} to ${t_name} | 1"
10 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 8346 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
11 |
12 | ## C -> R
13 | # mTN
14 | s_name="C"
15 | t_name="R"
16 | echo "--- ${s_name} to ${t_name} | 1"
17 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir NO/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 1209 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn --dist l2
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/examples/run_A-P.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## A -> P
6 | # mTN
7 | #s_dset_path="${art_txt}"    # unused; train_image.py derives paths from --source/--target
8 | #t_dset_path="${clipart_txt}"
9 | s_name="A"
10 | t_name="P"
11 | echo "--- ${s_name} to ${t_name} | 1"
12 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 6659 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
13 |
14 | ## P -> A
15 | # mTN
16 | #s_dset_path="${clipart_txt}"    # unused; train_image.py derives paths from --source/--target
17 | #t_dset_path="${art_txt}"
18 | s_name="P"
19 | t_name="A"
20 | echo "--- ${s_name} to ${t_name} | 1"
21 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir NO/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 4556 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/examples/run_A-R.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## R -> A
6 | # mTN
7 | #s_dset_path="${art_txt}"    # unused; train_image.py derives paths from --source/--target
8 | #t_dset_path="${clipart_txt}"
9 | s_name="R"
10 | t_name="A"
11 | echo "--- ${s_name} to ${t_name} | 1"
12 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 8981 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
13 |
14 | ## A -> R
15 | # mTN
16 | #s_dset_path="${clipart_txt}"
17 | #t_dset_path="${art_txt}"
18 | s_name="A"
19 | t_name="R"
20 | echo "--- ${s_name} to ${t_name} | 1"
21 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir NO/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 3904 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_l2_1
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/examples/run_C-P.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## C -> P
6 | # mTN
7 | #s_dset_path="${art_txt}"    # unused; train_image.py derives paths from --source/--target
8 | #t_dset_path="${clipart_txt}"
9 | s_name="C"
10 | t_name="P"
11 | echo "--- ${s_name} to ${t_name} | 1"
12 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 5000 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
13 |
14 | ## P -> C
15 | # mTN
16 | #s_dset_path="${clipart_txt}"    # unused; train_image.py derives paths from --source/--target
17 | #t_dset_path="${art_txt}"
18 | s_name="P"
19 | t_name="C"
20 | echo "--- ${s_name} to ${t_name} | 1"
21 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir NO/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 1380 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/examples/run_P-R.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | gpu_id=$1
3 | date_info="200911"
4 |
5 | ## P -> R
6 | # mTN
7 | #s_dset_path="${art_txt}"    # unused; train_image.py derives paths from --source/--target
8 | #t_dset_path="${clipart_txt}"
9 | s_name="P"
10 | t_name="R"
11 | echo "--- ${s_name} to ${t_name} | 1"
12 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir RN/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 8542 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
13 |
14 | ## R -> P
15 | # mTN
16 | #s_dset_path="${clipart_txt}"    # unused; train_image.py derives paths from --source/--target
17 | #t_dset_path="${art_txt}"
18 | s_name="R"
19 | t_name="P"
20 | echo "--- ${s_name} to ${t_name} | 1"
21 | python train_image.py --gpu_id ${gpu_id} --method RN --num_iteration 20004 --dset office-home --output_dir NO/${s_name}2${t_name} --source ${s_name} --target ${t_name} --norm_type rn --seed 3947 --run_num rn_offhome_${s_name}2${t_name}_${date_info}_rn
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/loss.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 | import torch.nn as nn
4 |
5 |
6 | def Entropy(input_):
7 |     """Per-sample entropy of a batch of softmax outputs."""
8 |     epsilon = 1e-5
9 |     entropy = -input_ * torch.log(input_ + epsilon)
10 |     entropy = torch.sum(entropy, dim=1)
11 |     return entropy
12 |
13 |
14 | def grl_hook(coeff):
15 |     """Gradient reversal hook: negates and scales the gradient by coeff."""
16 |     def fun1(grad):
17 |         return -coeff * grad.clone()
18 |     return fun1
19 |
20 |
21 | def CDAN(input_list, ad_net, entropy=None, coeff=None, random_layer=None):
22 |     """Conditional adversarial loss of CDAN.
23 |
24 |     input_list = [feature, softmax_output]; the first half of the batch is
25 |     assumed to be source samples and the second half target samples.
26 |     """
27 |     softmax_output = input_list[1].detach()
28 |     feature = input_list[0]
29 |
30 |     if random_layer is None:
31 |         # multilinear conditioning: outer product of predictions and features
32 |         op_out = torch.bmm(softmax_output.unsqueeze(2), feature.unsqueeze(1))
33 |         ad_out = ad_net(op_out.view(-1, softmax_output.size(1) * feature.size(1)))
34 |     else:
35 |         # randomized multilinear map for high-dimensional inputs
36 |         random_out = random_layer.forward([feature, softmax_output])
37 |         ad_out = ad_net(random_out.view(-1, random_out.size(1)))
38 |     ad_out = nn.Sigmoid()(ad_out)
39 |     batch_size = softmax_output.size(0) // 2
40 |     dc_target = torch.from_numpy(np.array([[1]] * batch_size + [[0]] * batch_size)).float().cuda()
41 |     if entropy is not None:
42 |         # entropy conditioning (CDAN+E): emphasize confident, transferable samples
43 |         entropy.register_hook(grl_hook(coeff))
44 |         entropy = 1.0 + torch.exp(-entropy)
45 |         source_mask = torch.ones_like(entropy)
46 |         source_mask[feature.size(0) // 2:] = 0
47 |         source_weight = entropy * source_mask
48 |         target_mask = torch.ones_like(entropy)
49 |         target_mask[0:feature.size(0) // 2] = 0
50 |         target_weight = entropy * target_mask
51 |         weight = source_weight / torch.sum(source_weight).detach().item() + \
52 |                  target_weight / torch.sum(target_weight).detach().item()
53 |         return torch.sum(weight.view(-1, 1) * nn.BCELoss(reduction='none')(ad_out, dc_target)) / torch.sum(weight).detach().item()
54 |     else:
55 |         return nn.BCELoss()(ad_out, dc_target)
--------------------------------------------------------------------------------
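For reference, a hedged sketch of how `CDAN` is typically invoked on a concatenated source+target batch. The real adversarial network comes from network.py, which is not part of this listing; the shapes, the stand-in `ad_net`, and the CUDA requirement below are assumptions (the loss itself hard-codes `.cuda()` on its domain targets):
```
import torch
import torch.nn as nn
import loss as loss_module

# Assumed shapes: 36 source + 36 target samples, 256-d features, 65 classes.
features = torch.randn(72, 256).cuda()
logits = torch.randn(72, 65, requires_grad=True)   # grad needed for the entropy hook
softmax_out = torch.softmax(logits, dim=1).cuda()

# Stand-in domain discriminator over the 65*256-d multilinear map.
ad_net = nn.Sequential(nn.Linear(65 * 256, 1024), nn.ReLU(), nn.Linear(1024, 1)).cuda()

transfer_loss = loss_module.CDAN([features, softmax_out], ad_net)   # plain CDAN
entropy = loss_module.Entropy(softmax_out)                          # CDAN+E
transfer_loss_e = loss_module.CDAN([features, softmax_out], ad_net,
                                   entropy=entropy, coeff=1.0)
```
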
/README.md:
--------------------------------------------------------------------------------
1 | # reciprocal-normalization-for-DA [[pdf]](https://arxiv.org/pdf/2112.10474.pdf)
2 | - Batch normalization (BN) is widely used in modern deep neural networks and has been shown to encode domain-related knowledge, which makes it ineffective for cross-domain tasks such as unsupervised domain adaptation (UDA). Existing BN variants aggregate source- and target-domain knowledge within the same channel of the normalization module. However, the misalignment between the features of corresponding channels across domains often leads to suboptimal transferability. In this paper, we exploit the cross-domain relation and propose a novel normalization method, Reciprocal Normalization (RN). Specifically, RN first presents a Reciprocal Compensation (RC) module to acquire the compensatory component for each channel in both domains, based on the cross-domain channel-wise correlation. RN then develops a Reciprocal Aggregation (RA) module to adaptively aggregate each feature with its cross-domain compensatory components. As an alternative to BN, RN is more suitable for UDA problems and can be easily integrated into popular domain adaptation methods. Experiments show that the proposed RN outperforms existing normalization counterparts by a large margin and helps state-of-the-art adaptation approaches achieve better results.
3 |
4 | ## The problem and the motivation
5 | - In the context of **domain adaptation**, the misalignment of visual features between the source and target domains can lead to poor adaptation performance.
6 | - TODO.
7 |
8 | ## Compared with the existing normalization methods
9 | - TODO.
10 |
11 |
12 |
13 |
14 |
15 | ## Reciprocal Normalization
16 | - TODO.
17 |
18 |
19 |
20 |
21 |
22 | ## About the code
23 | - Requirements
24 | - python == 3.6.2
25 | - pytorch == 0.4.0
26 | - torchvision == 0.2.2
27 | - numpy == 1.18.1
28 | - CUDA == 10.1.105
29 |
30 | - Data
31 | - Office-Home.
32 |
33 | - The example shell files.
34 |   - `examples/run_*.sh` for the Office-Home tasks (e.g., `bash examples/run_A-C.sh <gpu_id>`) and `examples/v-run.sh` for VisDA.
35 |
36 |
37 | - The expected outputs.
38 |
39 | ## Citation
40 | Please cite the following paper if you use this repository in your research~ Thank you ^ . ^
41 | ```
42 | @article{huang2021reciprocal,
43 | title={Reciprocal Normalization for Domain Adaptation},
44 | author={Huang, Zhiyong and Sheng, Kekai and Li, Ke and Liang, Jian and Yao, Taiping and Dong, Weiming and Zhou, Dengwen and Sun, Xing},
45 | journal={arXiv preprint arXiv:2112.10474},
46 | year={2021}
47 | }
48 | ```
49 |
--------------------------------------------------------------------------------
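The RN1d/RN2d modules themselves live in reciprocal_norm.py, which is not reproduced in this listing. As a rough, illustrative sketch of the idea summarized in the abstract (Reciprocal Compensation builds a cross-domain channel-wise affinity and borrows a compensatory component from the other domain; Reciprocal Aggregation mixes it back into the feature), one might write the following. All names, the outer-product affinity, and the fixed mixing weight `lam` are simplifying assumptions rather than the paper's implementation; `lam=0.1` only echoes the `'lamda'` initialization seen in resnet_rn.py:
```
import torch
import torch.nn.functional as F

def reciprocal_compensation(f_s, f_t):
    """Toy RC module: cross-domain channel attention (illustrative only)."""
    # Channel descriptors via global average pooling: (N, C, H, W) -> (C,)
    d_s = f_s.mean(dim=(0, 2, 3))
    d_t = f_t.mean(dim=(0, 2, 3))
    affinity = torch.outer(d_s, d_t)                  # (C, C) channel-wise correlation
    w_s = F.softmax(affinity, dim=1)                  # source channels attend to target
    w_t = F.softmax(affinity.t(), dim=1)              # target channels attend to source
    comp_s = torch.einsum('ij,njhw->nihw', w_s, f_t)  # compensation for source features
    comp_t = torch.einsum('ij,njhw->nihw', w_t, f_s)  # compensation for target features
    return comp_s, comp_t

def reciprocal_aggregation(f, comp, lam=0.1):
    """Toy RA module: convex mix of a feature with its compensation."""
    return (1.0 - lam) * f + lam * comp

f_s, f_t = torch.randn(8, 64, 56, 56), torch.randn(8, 64, 56, 56)
comp_s, comp_t = reciprocal_compensation(f_s, f_t)
out_s = reciprocal_aggregation(f_s, comp_s)
out_t = reciprocal_aggregation(f_t, comp_t)
```
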
/option.py:
--------------------------------------------------------------------------------
1 | import argparse
2 |
3 |
4 | parser = argparse.ArgumentParser(description='Conditional Domain Adversarial Network')
5 | parser.add_argument('--CDAN', type=str, default='CDAN+E', choices=['CDAN', 'CDAN+E'])
6 | parser.add_argument('--method', type=str, default='RN', choices=['ENT', 'NO', 'RN'])
7 | parser.add_argument('--gpu_id', type=str, nargs='?', default='0', help="device id to run")
8 | parser.add_argument('--net', type=str, default='ResNet50', choices=["ResNet18", "ResNet34", "ResNet50", "ResNet101", "ResNet152", "VGG11", "VGG13", "VGG16", "VGG19", "VGG11BN", "VGG13BN", "VGG16BN", "VGG19BN"])
9 | parser.add_argument('--dset', type=str, default='office-home', choices=['office31', 'image-clef', 'visda', 'office-home'], help="The dataset or source dataset used")
10 | parser.add_argument('--s_dset_path', type=str, default='./data/office-home/Art.txt', help="The source dataset path list")
11 | parser.add_argument('--t_dset_path', type=str, default='./data/office-home/Clipart.txt', help="The target dataset path list")
12 | parser.add_argument('--test_interval', type=int, default=500, help="iterations between two consecutive test phases")
13 | parser.add_argument('--print_num', type=int, default=100, help="iterations between two consecutive loss printouts")
14 | parser.add_argument('--batch_size', type=int, default=36, help="batch size")
15 | parser.add_argument('--num_iterations', type=int, default=20004, help="total number of training iterations")
16 | parser.add_argument('--snapshot_interval', type=int, default=5000, help="iterations between two consecutive model snapshots")
17 | parser.add_argument('--bottle_dim', type=int, default=256, help="the dim of the bottleneck in the FC")
18 | parser.add_argument('--output_dir', type=str, default='RN', help="output directory of our model (in ../snapshot directory)")
19 | parser.add_argument('--run_num', type=str, default='', help="the name of the output files")
20 | parser.add_argument('--lr', type=float, default=0.001, help="learning rate")
21 | parser.add_argument('--trade_off', type=float, default=1.0, help="parameter for CDAN")
22 | parser.add_argument('--lambda_method', type=float, default=0.1, help="parameter for method")
23 | parser.add_argument('--random', action='store_true', default=False, help="whether to use random projection")
24 | parser.add_argument('--show', type=bool, default=True, help="whether show the loss functions")
25 | parser.add_argument('--norm_type', type=str, default='rn', help="the type of normalization")
26 | parser.add_argument('--source', type=str, default='P', help="the name of source domain")
27 | parser.add_argument('--target', type=str, default='RW', help="the name of target domain")
28 | parser.add_argument('--dist', type=str, default='l2', help="the measure of edge strength")
29 | parser.add_argument('--root', type=str, default='', help="the root path of data")
30 | parser.add_argument('--lr_mult', type=float, default=1, help="parameter for rn")
31 | parser.add_argument('--seed', type=int, default=None, help="seed")
32 | parser.add_argument('--ent', action='store_true', default=False, help="whether use the entropy")
33 |
34 |
35 | args = parser.parse_args()
36 |
37 |
--------------------------------------------------------------------------------
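Note that the example shell scripts pass `--num_iteration` while option.py declares `--num_iterations`; this works because argparse accepts unambiguous prefixes of long option names by default. A quick self-contained check:
```
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--num_iterations', type=int, default=20004)
# argparse resolves the unambiguous prefix '--num_iteration' to '--num_iterations'
args = parser.parse_args(['--num_iteration', '1000'])
print(args.num_iterations)  # 1000
```
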
/data_list.py:
--------------------------------------------------------------------------------
1 | #from __future__ import print_function, division
2 |
3 | import torch
4 | import numpy as np
5 | import random
6 | from PIL import Image
7 | from torch.utils.data import Dataset
8 | import os
9 | import os.path
10 | from option import args
11 |
12 |
13 | def make_dataset(image_list, labels):
14 | if labels:
15 | len_ = len(image_list)
16 | images = [(image_list[i].strip(), labels[i, :]) for i in range(len_)]
17 | else:
18 | if len(image_list[0].split()) > 2:
19 | images = [(val.split()[0], np.array([int(la) for la in val.split()[1:]])) for val in image_list]
20 | else:
21 | images = [(val.split()[0], int(val.split()[1])) for val in image_list]
22 | return images
23 |
24 |
25 | def rgb_loader(path):
26 | with open(path, 'rb') as f:
27 | with Image.open(f) as img:
28 | return img.convert('RGB')
29 |
30 | def l_loader(path):
31 | with open(path, 'rb') as f:
32 | with Image.open(f) as img:
33 | return img.convert('L')
34 |
35 | class ImageList(Dataset):
36 | def __init__(self, image_list, labels=None, transform=None, target_transform=None, mode='RGB'):
37 | imgs = make_dataset(image_list, labels)
38 | if len(imgs) == 0:
39 |             raise RuntimeError("Found 0 images in the given image list")
40 |
41 |
42 | self.imgs = imgs
43 | self.transform = transform
44 | self.target_transform = target_transform
45 | self.root = args.root
46 |
47 | if mode == 'RGB':
48 | self.loader = rgb_loader
49 | elif mode == 'L':
50 | self.loader = l_loader
51 |
52 | def __getitem__(self, index):
53 | path, target = self.imgs[index]
54 | if len(self.root) > 0:
55 | path = self.root + path
56 | img = self.loader(path)
57 | if self.transform is not None:
58 | img = self.transform(img)
59 | if self.target_transform is not None:
60 | target = self.target_transform(target)
61 |
62 | return img, target
63 |
64 | def __len__(self):
65 | return len(self.imgs)
66 |
67 | class ImageValueList(Dataset):
68 | def __init__(self, image_list, labels=None, transform=None, target_transform=None,
69 | loader=rgb_loader):
70 | imgs = make_dataset(image_list, labels)
71 | if len(imgs) == 0:
72 |             raise RuntimeError("Found 0 images in the given image list")
73 |
74 |
75 | self.imgs = imgs
76 | self.values = [1.0] * len(imgs)
77 | self.transform = transform
78 | self.target_transform = target_transform
79 | self.loader = loader
80 |
81 | def set_values(self, values):
82 | self.values = values
83 |
84 | def __getitem__(self, index):
85 | path, target = self.imgs[index]
86 | img = self.loader(path)
87 | if self.transform is not None:
88 | img = self.transform(img)
89 | if self.target_transform is not None:
90 | target = self.target_transform(target)
91 |
92 | return img, target
93 |
94 | def __len__(self):
95 | return len(self.imgs)
96 |
97 |
--------------------------------------------------------------------------------
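For reference, a minimal sketch of feeding one of the list files into `ImageList`. The list format is `relative/path.jpg label`, one entry per line (as in data/ImageCLEF/*.txt; entries with more than two fields are parsed as multi-label arrays), and `args.root` from option.py is prepended to each path, so the paths below are assumptions about the local layout:
```
from torch.utils.data import DataLoader
import pre_process as prep
from data_list import ImageList

# Skip blank lines: make_dataset() would fail on an empty trailing line.
with open('./data/ImageCLEF/cList.txt') as f:
    image_lines = [line for line in f if line.strip()]

dataset = ImageList(image_lines, transform=prep.image_train())
loader = DataLoader(dataset, batch_size=36, shuffle=True, num_workers=4, drop_last=True)

imgs, labels = next(iter(loader))
print(imgs.shape, labels.shape)  # torch.Size([36, 3, 224, 224]) torch.Size([36])
```
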
/pre_process.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from torchvision import transforms
3 | import os
4 | from PIL import Image, ImageOps
5 | import numbers
6 | import torch
7 |
8 | class ResizeImage():
9 | def __init__(self, size):
10 | if isinstance(size, int):
11 | self.size = (int(size), int(size))
12 | else:
13 | self.size = size
14 | def __call__(self, img):
15 | th, tw = self.size
16 | return img.resize((th, tw))
17 |
18 | class RandomSizedCrop(object):
19 |     """Crop the given CHW tensor at a random location to a fixed square size.
20 |
21 |     Note: despite the name, this performs a fixed-size random crop on a
22 |     tensor rather than the Inception-style random-size/random-aspect crop;
23 |     use torchvision.transforms.RandomResizedCrop for that behavior.
24 |     Args:
25 |         size: side length of the square crop
26 |         interpolation: unused; kept for interface compatibility
27 |     """
28 |
29 | def __init__(self, size, interpolation=Image.BILINEAR):
30 | self.size = size
31 | self.interpolation = interpolation
32 |
33 | def __call__(self, img):
34 |         h_off = np.random.randint(0, img.shape[1] - self.size + 1)
35 |         w_off = np.random.randint(0, img.shape[2] - self.size + 1)
36 | img = img[:, h_off:h_off+self.size, w_off:w_off+self.size]
37 | return img
38 |
39 |
40 | class Normalize(object):
41 |     """Normalize a tensor image with mean and standard deviation.
42 | Given mean: (R, G, B),
43 | will normalize each channel of the torch.*Tensor, i.e.
44 | channel = channel - mean
45 | Args:
46 |         mean (sequence): Sequence of means for R, G, B channels respectively.
47 | """
48 |
49 | def __init__(self, mean=None, meanfile=None):
50 | if mean:
51 | self.mean = mean
52 | else:
53 | arr = np.load(meanfile)
54 | self.mean = torch.from_numpy(arr.astype('float32')/255.0)[[2,1,0],:,:]
55 |
56 | def __call__(self, tensor):
57 | """
58 | Args:
59 | tensor (Tensor): Tensor image of size (C, H, W) to be normalized.
60 | Returns:
61 | Tensor: Normalized image.
62 | """
63 | # TODO: make efficient
64 | for t, m in zip(tensor, self.mean):
65 | t.sub_(m)
66 | return tensor
67 |
68 |
69 |
70 | class PlaceCrop(object):
71 | """Crops the given PIL.Image at the particular index.
72 | Args:
73 | size (sequence or int): Desired output size of the crop. If size is an
74 | int instead of sequence like (w, h), a square crop (size, size) is
75 | made.
76 | """
77 |
78 | def __init__(self, size, start_x, start_y):
79 | if isinstance(size, int):
80 | self.size = (int(size), int(size))
81 | else:
82 | self.size = size
83 | self.start_x = start_x
84 | self.start_y = start_y
85 |
86 | def __call__(self, img):
87 | """
88 | Args:
89 | img (PIL.Image): Image to be cropped.
90 | Returns:
91 | PIL.Image: Cropped image.
92 | """
93 | th, tw = self.size
94 | return img.crop((self.start_x, self.start_y, self.start_x + tw, self.start_y + th))
95 |
96 |
97 | class ForceFlip(object):
98 | """Horizontally flip the given PIL.Image randomly with a probability of 0.5."""
99 |
100 | def __call__(self, img):
101 | """
102 | Args:
103 | img (PIL.Image): Image to be flipped.
104 | Returns:
105 | PIL.Image: Randomly flipped image.
106 | """
107 | return img.transpose(Image.FLIP_LEFT_RIGHT)
108 |
109 | class CenterCrop(object):
110 |     """Crops the given CHW tensor at the center.
111 | Args:
112 | size (sequence or int): Desired output size of the crop. If size is an
113 | int instead of sequence like (h, w), a square crop (size, size) is
114 | made.
115 | """
116 |
117 | def __init__(self, size):
118 | if isinstance(size, numbers.Number):
119 | self.size = (int(size), int(size))
120 | else:
121 | self.size = size
122 |
123 | def __call__(self, img):
124 | """
125 | Args:
126 | img (PIL.Image): Image to be cropped.
127 | Returns:
128 | PIL.Image: Cropped image.
129 | """
130 |         h, w = (img.shape[1], img.shape[2])
131 | th, tw = self.size
132 | w_off = int((w - tw) / 2.)
133 | h_off = int((h - th) / 2.)
134 | img = img[:, h_off:h_off+th, w_off:w_off+tw]
135 | return img
136 |
137 | def image_train31(resize_size=256, crop_size=224, alexnet=False):
138 | if not alexnet:
139 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
140 | std=[0.229, 0.224, 0.225])
141 | else:
142 | normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')
143 | return transforms.Compose([
144 | transforms.Resize((resize_size,resize_size)),
145 | transforms.RandomResizedCrop(crop_size),
146 | transforms.RandomHorizontalFlip(),
147 | transforms.ToTensor(),
148 | normalize
149 | ])
150 |
151 | def image_train(resize_size=256, crop_size=224, alexnet=False):
152 | if not alexnet:
153 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
154 | std=[0.229, 0.224, 0.225])
155 | else:
156 | normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')
157 | return transforms.Compose([
158 | transforms.Resize((resize_size,resize_size)),
159 | transforms.RandomCrop(crop_size),
160 | transforms.RandomHorizontalFlip(),
161 | transforms.ToTensor(),
162 | normalize
163 | ])
164 |
165 | def image_test(resize_size=256, crop_size=224, alexnet=False):
166 | if not alexnet:
167 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
168 | std=[0.229, 0.224, 0.225])
169 | else:
170 | normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')
171 | start_first = 0
172 | start_center = (resize_size - crop_size - 1) / 2
173 | start_last = resize_size - crop_size - 1
174 |
175 | return transforms.Compose([
176 | transforms.Resize((resize_size,resize_size)),
177 |         transforms.CenterCrop(crop_size),
178 | transforms.ToTensor(),
179 | normalize
180 | ])
181 |
182 | def image_test_10crop(resize_size=256, crop_size=224, alexnet=False):
183 | if not alexnet:
184 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
185 | std=[0.229, 0.224, 0.225])
186 | else:
187 | normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')
188 | start_first = 0
189 | start_center = (resize_size - crop_size - 1) / 2
190 | start_last = resize_size - crop_size - 1
191 | data_transforms = [
192 | transforms.Compose([
193 | ResizeImage(resize_size),ForceFlip(),
194 | PlaceCrop(crop_size, start_first, start_first),
195 | transforms.ToTensor(),
196 | normalize
197 | ]),
198 | transforms.Compose([
199 | ResizeImage(resize_size),ForceFlip(),
200 | PlaceCrop(crop_size, start_last, start_last),
201 | transforms.ToTensor(),
202 | normalize
203 | ]),
204 | transforms.Compose([
205 | ResizeImage(resize_size),ForceFlip(),
206 | PlaceCrop(crop_size, start_last, start_first),
207 | transforms.ToTensor(),
208 | normalize
209 | ]),
210 | transforms.Compose([
211 | ResizeImage(resize_size),ForceFlip(),
212 | PlaceCrop(crop_size, start_first, start_last),
213 | transforms.ToTensor(),
214 | normalize
215 | ]),
216 | transforms.Compose([
217 | ResizeImage(resize_size),ForceFlip(),
218 | PlaceCrop(crop_size, start_center, start_center),
219 | transforms.ToTensor(),
220 | normalize
221 | ]),
222 | transforms.Compose([
223 | ResizeImage(resize_size),
224 | PlaceCrop(crop_size, start_first, start_first),
225 | transforms.ToTensor(),
226 | normalize
227 | ]),
228 | transforms.Compose([
229 | ResizeImage(resize_size),
230 | PlaceCrop(crop_size, start_last, start_last),
231 | transforms.ToTensor(),
232 | normalize
233 | ]),
234 | transforms.Compose([
235 | ResizeImage(resize_size),
236 | PlaceCrop(crop_size, start_last, start_first),
237 | transforms.ToTensor(),
238 | normalize
239 | ]),
240 | transforms.Compose([
241 | ResizeImage(resize_size),
242 | PlaceCrop(crop_size, start_first, start_last),
243 | transforms.ToTensor(),
244 | normalize
245 | ]),
246 | transforms.Compose([
247 | ResizeImage(resize_size),
248 | PlaceCrop(crop_size, start_center, start_center),
249 | transforms.ToTensor(),
250 | normalize
251 | ])
252 | ]
253 | return data_transforms
254 |
--------------------------------------------------------------------------------
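For reference, `image_test_10crop` returns ten deterministic transforms (four corners and the center, each with and without a horizontal flip). A hedged sketch of averaging predictions over the ten crops, which is what ten-crop evaluation amounts to (`model` is any classifier returning logits; the dataset wiring mirrors the sketch after data_list.py and the file path is an assumption):
```
import torch
import pre_process as prep
from data_list import ImageList

with open('./data/ImageCLEF/cList.txt') as f:
    image_lines = [line for line in f if line.strip()]

# One dataset per crop, all indexing the same underlying images.
datasets = [ImageList(image_lines, transform=t) for t in prep.image_test_10crop()]

def ten_crop_predict(model, index):
    """Average the softmax output of one image over its 10 crops."""
    model.eval()
    with torch.no_grad():
        views = torch.stack([ds[index][0] for ds in datasets])  # (10, 3, 224, 224)
        return torch.softmax(model(views), dim=1).mean(dim=0)
```
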
/resnet_rn.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import math
3 | import torch.utils.model_zoo as model_zoo
4 | from option import args
5 | import torch
6 | from torch.nn.parameter import Parameter
7 | from torch.autograd import Variable
8 | import torch.nn.functional as F
9 | from reciprocal_norm import RN1d, RN2d
10 |
11 | __all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152']
12 |
13 |
14 | model_urls = {
15 | 'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
16 | 'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
17 | 'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
18 | 'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
19 | 'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
20 | }
21 |
22 |
23 | def conv3x3(in_planes, out_planes, stride=1):
24 | """3x3 convolution with padding"""
25 | return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
26 | padding=1, bias=False)
27 |
28 | def init_weights(m):
29 | classname = m.__class__.__name__
30 | if classname.find('Conv2d') != -1 or classname.find('ConvTranspose2d') != -1:
31 | nn.init.kaiming_uniform_(m.weight)
32 | nn.init.zeros_(m.bias)
33 | elif classname.find('BatchNorm') != -1:
34 | nn.init.normal_(m.weight, 1.0, 0.02)
35 | nn.init.zeros_(m.bias)
36 | elif classname.find('Linear') != -1:
37 | nn.init.xavier_normal_(m.weight)
38 | nn.init.zeros_(m.bias)
39 |
40 |
41 | def init_weights_c(m):
42 | if isinstance(m, nn.Conv2d):
43 | nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
44 | elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
45 | nn.init.constant_(m.weight, 1)
46 | nn.init.constant_(m.bias, 0)
47 |
48 |
49 | class BasicBlock(nn.Module):
50 | expansion = 1
51 |
52 | def __init__(self, inplanes, planes, stride=1, downsample=None):
53 | super(BasicBlock, self).__init__()
54 | self.conv1 = conv3x3(inplanes, planes, stride)
55 | self.bn1 = RN2d(planes)
56 | self.relu = nn.ReLU(inplace=True)
57 | self.conv2 = conv3x3(planes, planes)
58 | self.bn2 = RN2d(planes)
59 | self.downsample = downsample
60 | self.stride = stride
61 |
62 | def forward(self, x):
63 | residual = x
64 |
65 | out = self.conv1(x)
66 | out = self.bn1(out)
67 | out = self.relu(out)
68 | out = self.conv2(out)
69 | out = self.bn2(out)
70 | if self.downsample is not None:
71 | residual = self.downsample(x)
72 |
73 | out += residual
74 | out = self.relu(out)
75 |
76 | return out
77 |
78 |
79 | class Bottleneck(nn.Module):
80 | expansion = 4
81 |
82 | def __init__(self, inplanes, planes, stride=1, downsample=None):
83 | super(Bottleneck, self).__init__()
84 |
85 | self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
86 | self.bn1 = RN2d(planes) ## replace
87 | self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
88 | padding=1, bias=False)
89 | self.bn2 = RN2d(planes) ## replace
90 | self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
91 | self.bn3 = RN2d(planes * 4) ## replace
92 | self.relu = nn.ReLU(inplace=True)
93 | self.downsample = downsample
94 | self.stride = stride
95 |
96 | def forward(self, x):
97 | residual = x # 0: [36,64,56,56]
98 |
99 | out = self.conv1(x)
100 | out = self.bn1(out)
101 | out = self.relu(out)
102 |
103 | out = self.conv2(out)
104 | out = self.bn2(out)
105 | out = self.relu(out)
106 |
107 | out = self.conv3(out)
108 | out = self.bn3(out)
109 |
110 | if self.downsample is not None:
111 | residual = self.downsample(x)
112 |
113 | out += residual
114 | out = self.relu(out)
115 |
116 | return out
117 |
118 | class ResNet(nn.Module):
119 |
120 | def __init__(self, block, layers, num_classes=1000):
121 | self.inplanes = 64
122 | super(ResNet, self).__init__()
123 | self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
124 | bias=False)
125 |
126 | self.bn1 = RN2d(64)
127 | self.relu = nn.ReLU(inplace=True)
128 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
129 | self.layer1 = self._make_layer(block, 64, layers[0])
130 | self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
131 | self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
132 | self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
133 | self.avgpool = nn.AvgPool2d(7, stride=1)
134 | self.fc = nn.Linear(512 * block.expansion, num_classes)
135 |
136 | for m in self.modules():
137 | if isinstance(m, nn.Conv2d):
138 | n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
139 | m.weight.data.normal_(0, math.sqrt(2. / n))
140 | elif isinstance(m, nn.BatchNorm2d) or isinstance(m, RN2d):
141 | m.weight.data.fill_(1)
142 | m.bias.data.zero_()
143 |
144 | def _make_layer(self, block, planes, blocks, stride=1,double=False):
145 | downsample = None
146 | if stride != 1 or self.inplanes != planes * block.expansion:
147 | downsample = nn.Sequential(
148 | nn.Conv2d(self.inplanes, planes * block.expansion,
149 | kernel_size=1, stride=stride, bias=False),
150 | RN2d(planes * block.expansion)
151 | )
152 |
153 |
154 | layers = []
155 | layers.append(block(self.inplanes, planes, stride, downsample))
156 | self.inplanes = planes * block.expansion
157 | for i in range(1, blocks):
158 | layers.append(block(self.inplanes, planes))
159 |
160 | return nn.Sequential(*layers)
161 |
162 | def forward(self, x):
163 | x = self.conv1(x)
164 | x = self.bn1(x)
165 | x = self.relu(x)
166 | x = self.maxpool(x)
167 | x = self.layer1(x)
168 | x = self.layer2(x)
169 | x = self.layer3(x)
170 | x = self.layer4(x)
171 |
172 | x = self.avgpool(x)
173 | x = x.view(x.size(0), -1)
174 | x = self.fc(x)
175 |
176 | return x
177 |
178 |
179 | def resnet18(pretrained=False, **kwargs):
180 | """Constructs a ResNet-18 model.
181 |
182 | Args:
183 | pretrained (bool): If True, returns a model pre-trained on ImageNet
184 | """
185 | model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
186 | if pretrained:
187 | model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))
188 | return model
189 |
190 |
191 | def resnet34(pretrained=False, **kwargs):
192 | """Constructs a ResNet-34 model.
193 |
194 | Args:
195 | pretrained (bool): If True, returns a model pre-trained on ImageNet
196 | """
197 | model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
198 | if pretrained:
199 | model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))
200 | return model
201 |
202 |
203 | def resnet50(pretrained=False, **kwargs):
204 | """Constructs a ResNet-50 model.
205 |
206 | Args:
207 | pretrained (bool): If True, returns a model pre-trained on ImageNet
208 | """
209 | model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
210 | if pretrained:
211 | model.load_state_dict(model_zoo.load_url(model_urls['resnet50']), strict=False)
212 | model_dict = model.state_dict()
213 | return model
214 |
215 |
216 | def resnet101(pretrained=False, **kwargs):
217 | """Constructs a ResNet-101 model.
218 |
219 | Args:
220 | pretrained (bool): If True, returns a model pre-trained on ImageNet
221 | """
222 | model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
223 | if pretrained:
224 | print('resnet101')
225 | model.load_state_dict(model_zoo.load_url(model_urls['resnet101']), strict=False)
226 | return model
227 |
228 |
229 | def resnet152(pretrained=False, **kwargs):
230 | """Constructs a ResNet-152 model.
231 |
232 | Args:
233 | pretrained (bool): If True, returns a model pre-trained on ImageNet
234 | """
235 | model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
236 | if pretrained:
237 | model.load_state_dict(model_zoo.load_url(model_urls['resnet152']))
238 | return model
239 |
240 |
241 | def _update_initial_weights_mtn(state_dict, num_classes=1000, num_domains=2, dsbn_type='all'):
242 | new_state_dict = state_dict.copy()
243 |
244 | for key, val in state_dict.items():
245 | update_dict = False
246 |         if (('bn' in key or 'downsample.1' in key) and dsbn_type == 'mtn'):
247 | update_dict = True
248 |
249 | if (update_dict):
250 | if 'gate' in key:
251 | print(key)
252 | # for d in range(num_domains):
253 |             new_state_dict[key[0:-6] + 'bn.{}.weight'.format('lamda')] = torch.tensor([0.1], dtype=torch.float)
254 |
255 | # elif 'bias' in key:
256 | # for d in range(num_domains):
257 | # new_state_dict[key[0:-4] + 'bns.{}.bias'.format(d)] = val.data.clone()
258 |
259 | # if 'running_mean' in key:
260 | # for d in range(num_domains):
261 | # new_state_dict[key[0:-12] + 'bns.{}.running_mean'.format(d)] = val.data.clone()
262 |
263 | # if 'running_var' in key:
264 | # for d in range(num_domains):
265 | # new_state_dict[key[0:-11] + 'bns.{}.running_var'.format(d)] = val.data.clone()
266 |
267 | # if 'num_batches_tracked' in key:
268 | # for d in range(num_domains):
269 | # new_state_dict[
270 | # key[0:-len('num_batches_tracked')] + 'bns.{}.num_batches_tracked'.format(d)] = val.data.clone()
271 |
272 | if num_classes != 1000 or len([key for key in new_state_dict.keys() if 'fc' in key]) > 1:
273 | key_list = list(new_state_dict.keys())
274 | for key in key_list:
275 | if 'fc' in key:
276 | print('pretrained {} are not used as initial params.'.format(key))
277 | del new_state_dict[key]
278 |
279 | return new_state_dict
280 |
--------------------------------------------------------------------------------
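For reference, a small sketch confirming that every BatchNorm site of the standard architecture is replaced by `RN2d` in this file. `RN2d` comes from reciprocal_norm.py, which is not part of this listing, so its runtime behavior is an assumption; counting modules only requires construction. Loading ImageNet weights uses `strict=False` above precisely because the RN-specific parameters have no pretrained counterpart:
```
import resnet_rn
from reciprocal_norm import RN2d

model = resnet_rn.resnet50(pretrained=False, num_classes=65)  # 65 = Office-Home classes
rn_layers = [m for m in model.modules() if isinstance(m, RN2d)]
print(len(rn_layers))  # expect 53: one per BN site of a standard ResNet-50
```
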
/data/ImageCLEF/cList.txt:
--------------------------------------------------------------------------------
1 | c/113_0114.jpg 2
2 | c/113_0025.jpg 2
3 | c/113_0003.jpg 2
4 | c/113_0034.jpg 2
5 | c/113_0058.jpg 2
6 | c/113_0042.jpg 2
7 | c/113_0057.jpg 2
8 | c/113_0083.jpg 2
9 | c/113_0055.jpg 2
10 | c/113_0109.jpg 2
11 | c/113_0090.jpg 2
12 | c/113_0060.jpg 2
13 | c/113_0074.jpg 2
14 | c/113_0012.jpg 2
15 | c/113_0030.jpg 2
16 | c/113_0099.jpg 2
17 | c/113_0115.jpg 2
18 | c/113_0054.jpg 2
19 | c/113_0062.jpg 2
20 | c/113_0035.jpg 2
21 | c/113_0103.jpg 2
22 | c/113_0036.jpg 2
23 | c/113_0029.jpg 2
24 | c/113_0037.jpg 2
25 | c/113_0010.jpg 2
26 | c/113_0020.jpg 2
27 | c/113_0048.jpg 2
28 | c/113_0051.jpg 2
29 | c/113_0061.jpg 2
30 | c/113_0068.jpg 2
31 | c/113_0108.jpg 2
32 | c/113_0014.jpg 2
33 | c/113_0031.jpg 2
34 | c/113_0095.jpg 2
35 | c/113_0066.jpg 2
36 | c/113_0002.jpg 2
37 | c/113_0013.jpg 2
38 | c/113_0001.jpg 2
39 | c/113_0022.jpg 2
40 | c/113_0016.jpg 2
41 | c/113_0116.jpg 2
42 | c/113_0011.jpg 2
43 | c/113_0046.jpg 2
44 | c/113_0071.jpg 2
45 | c/113_0092.jpg 2
46 | c/113_0072.jpg 2
47 | c/113_0050.jpg 2
48 | c/113_0070.jpg 2
49 | c/113_0089.jpg 2
50 | c/113_0064.jpg 2
51 | c/224_0053.jpg 1
52 | c/146_0021.jpg 1
53 | c/146_0061.jpg 1
54 | c/146_0057.jpg 1
55 | c/224_0015.jpg 1
56 | c/146_0072.jpg 1
57 | c/224_0096.jpg 1
58 | c/146_0063.jpg 1
59 | c/224_0063.jpg 1
60 | c/224_0035.jpg 1
61 | c/146_0041.jpg 1
62 | c/224_0110.jpg 1
63 | c/224_0082.jpg 1
64 | c/224_0023.jpg 1
65 | c/146_0011.jpg 1
66 | c/146_0071.jpg 1
67 | c/224_0044.jpg 1
68 | c/224_0066.jpg 1
69 | c/224_0051.jpg 1
70 | c/146_0074.jpg 1
71 | c/224_0040.jpg 1
72 | c/224_0098.jpg 1
73 | c/224_0007.jpg 1
74 | c/224_0008.jpg 1
75 | c/146_0062.jpg 1
76 | c/224_0107.jpg 1
77 | c/224_0027.jpg 1
78 | c/224_0021.jpg 1
79 | c/224_0087.jpg 1
80 | c/224_0046.jpg 1
81 | c/224_0004.jpg 1
82 | c/224_0097.jpg 1
83 | c/146_0073.jpg 1
84 | c/146_0006.jpg 1
85 | c/224_0070.jpg 1
86 | c/224_0092.jpg 1
87 | c/146_0054.jpg 1
88 | c/224_0072.jpg 1
89 | c/224_0039.jpg 1
90 | c/224_0068.jpg 1
91 | c/146_0080.jpg 1
92 | c/224_0016.jpg 1
93 | c/146_0045.jpg 1
94 | c/224_0078.jpg 1
95 | c/224_0059.jpg 1
96 | c/224_0025.jpg 1
97 | c/146_0017.jpg 1
98 | c/224_0067.jpg 1
99 | c/224_0022.jpg 1
100 | c/224_0003.jpg 1
101 | c/105_0250.jpg 8
102 | c/105_0022.jpg 8
103 | c/105_0010.jpg 8
104 | c/105_0229.jpg 8
105 | c/105_0248.jpg 8
106 | c/105_0132.jpg 8
107 | c/105_0055.jpg 8
108 | c/105_0069.jpg 8
109 | c/105_0067.jpg 8
110 | c/105_0268.jpg 8
111 | c/105_0192.jpg 8
112 | c/105_0068.jpg 8
113 | c/105_0140.jpg 8
114 | c/105_0218.jpg 8
115 | c/105_0044.jpg 8
116 | c/105_0100.jpg 8
117 | c/105_0222.jpg 8
118 | c/105_0003.jpg 8
119 | c/105_0175.jpg 8
120 | c/105_0242.jpg 8
121 | c/105_0247.jpg 8
122 | c/105_0023.jpg 8
123 | c/105_0061.jpg 8
124 | c/105_0223.jpg 8
125 | c/105_0065.jpg 8
126 | c/105_0238.jpg 8
127 | c/105_0062.jpg 8
128 | c/105_0105.jpg 8
129 | c/105_0191.jpg 8
130 | c/105_0073.jpg 8
131 | c/105_0173.jpg 8
132 | c/105_0212.jpg 8
133 | c/105_0082.jpg 8
134 | c/105_0030.jpg 8
135 | c/105_0092.jpg 8
136 | c/105_0138.jpg 8
137 | c/105_0113.jpg 8
138 | c/105_0117.jpg 8
139 | c/105_0129.jpg 8
140 | c/105_0252.jpg 8
141 | c/105_0141.jpg 8
142 | c/105_0094.jpg 8
143 | c/105_0115.jpg 8
144 | c/105_0116.jpg 8
145 | c/105_0179.jpg 8
146 | c/105_0060.jpg 8
147 | c/105_0139.jpg 8
148 | c/105_0133.jpg 8
149 | c/105_0221.jpg 8
150 | c/105_0204.jpg 8
151 | c/159_0068.jpg 11
152 | c/159_0017.jpg 11
153 | c/159_0046.jpg 11
154 | c/159_0158.jpg 11
155 | c/159_0074.jpg 11
156 | c/159_0199.jpg 11
157 | c/159_0137.jpg 11
158 | c/159_0060.jpg 11
159 | c/159_0048.jpg 11
160 | c/159_0200.jpg 11
161 | c/159_0098.jpg 11
162 | c/159_0180.jpg 11
163 | c/159_0160.jpg 11
164 | c/159_0178.jpg 11
165 | c/159_0134.jpg 11
166 | c/159_0022.jpg 11
167 | c/159_0186.jpg 11
168 | c/159_0129.jpg 11
169 | c/159_0170.jpg 11
170 | c/159_0179.jpg 11
171 | c/159_0196.jpg 11
172 | c/159_0126.jpg 11
173 | c/159_0182.jpg 11
174 | c/159_0108.jpg 11
175 | c/159_0055.jpg 11
176 | c/159_0197.jpg 11
177 | c/159_0149.jpg 11
178 | c/159_0176.jpg 11
179 | c/159_0094.jpg 11
180 | c/159_0096.jpg 11
181 | c/159_0141.jpg 11
182 | c/159_0120.jpg 11
183 | c/159_0030.jpg 11
184 | c/159_0114.jpg 11
185 | c/159_0009.jpg 11
186 | c/159_0163.jpg 11
187 | c/159_0039.jpg 11
188 | c/159_0045.jpg 11
189 | c/159_0021.jpg 11
190 | c/159_0131.jpg 11
191 | c/159_0041.jpg 11
192 | c/159_0066.jpg 11
193 | c/159_0153.jpg 11
194 | c/159_0181.jpg 11
195 | c/159_0024.jpg 11
196 | c/159_0143.jpg 11
197 | c/159_0043.jpg 11
198 | c/159_0166.jpg 11
199 | c/159_0040.jpg 11
200 | c/159_0083.jpg 11
201 | c/046_0076.jpg 9
202 | c/046_0087.jpg 9
203 | c/046_0100.jpg 9
204 | c/046_0072.jpg 9
205 | c/046_0069.jpg 9
206 | c/046_0025.jpg 9
207 | c/046_0124.jpg 9
208 | c/046_0105.jpg 9
209 | c/046_0128.jpg 9
210 | c/046_0005.jpg 9
211 | c/046_0001.jpg 9
212 | c/046_0021.jpg 9
213 | c/046_0023.jpg 9
214 | c/046_0093.jpg 9
215 | c/046_0058.jpg 9
216 | c/046_0106.jpg 9
217 | c/046_0050.jpg 9
218 | c/046_0060.jpg 9
219 | c/046_0026.jpg 9
220 | c/046_0014.jpg 9
221 | c/046_0054.jpg 9
222 | c/046_0077.jpg 9
223 | c/046_0048.jpg 9
224 | c/046_0066.jpg 9
225 | c/046_0110.jpg 9
226 | c/046_0020.jpg 9
227 | c/046_0024.jpg 9
228 | c/046_0008.jpg 9
229 | c/046_0038.jpg 9
230 | c/046_0096.jpg 9
231 | c/046_0121.jpg 9
232 | c/046_0102.jpg 9
233 | c/046_0051.jpg 9
234 | c/046_0006.jpg 9
235 | c/046_0062.jpg 9
236 | c/046_0052.jpg 9
237 | c/046_0073.jpg 9
238 | c/046_0012.jpg 9
239 | c/046_0118.jpg 9
240 | c/046_0114.jpg 9
241 | c/046_0092.jpg 9
242 | c/046_0067.jpg 9
243 | c/046_0079.jpg 9
244 | c/046_0113.jpg 9
245 | c/046_0085.jpg 9
246 | c/046_0101.jpg 9
247 | c/046_0040.jpg 9
248 | c/046_0104.jpg 9
249 | c/046_0013.jpg 9
250 | c/046_0017.jpg 9
251 | c/246_0100.jpg 4
252 | c/246_0048.jpg 4
253 | c/246_0070.jpg 4
254 | c/246_0011.jpg 4
255 | c/246_0038.jpg 4
256 | c/246_0029.jpg 4
257 | c/246_0086.jpg 4
258 | c/246_0044.jpg 4
259 | c/246_0037.jpg 4
260 | c/246_0056.jpg 4
261 | c/246_0083.jpg 4
262 | c/246_0042.jpg 4
263 | c/246_0055.jpg 4
264 | c/246_0039.jpg 4
265 | c/246_0018.jpg 4
266 | c/246_0064.jpg 4
267 | c/246_0053.jpg 4
268 | c/246_0052.jpg 4
269 | c/246_0033.jpg 4
270 | c/246_0058.jpg 4
271 | c/246_0041.jpg 4
272 | c/246_0067.jpg 4
273 | c/246_0008.jpg 4
274 | c/246_0093.jpg 4
275 | c/246_0045.jpg 4
276 | c/246_0002.jpg 4
277 | c/246_0101.jpg 4
278 | c/246_0043.jpg 4
279 | c/246_0020.jpg 4
280 | c/246_0031.jpg 4
281 | c/246_0096.jpg 4
282 | c/246_0049.jpg 4
283 | c/246_0074.jpg 4
284 | c/246_0005.jpg 4
285 | c/246_0071.jpg 4
286 | c/246_0057.jpg 4
287 | c/246_0085.jpg 4
288 | c/246_0082.jpg 4
289 | c/246_0034.jpg 4
290 | c/246_0062.jpg 4
291 | c/246_0010.jpg 4
292 | c/246_0061.jpg 4
293 | c/246_0017.jpg 4
294 | c/246_0028.jpg 4
295 | c/246_0035.jpg 4
296 | c/246_0084.jpg 4
297 | c/246_0030.jpg 4
298 | c/246_0098.jpg 4
299 | c/246_0088.jpg 4
300 | c/246_0032.jpg 4
301 | c/145_0167.jpg 10
302 | c/145_0511.jpg 10
303 | c/145_0555.jpg 10
304 | c/145_0758.jpg 10
305 | c/145_0692.jpg 10
306 | c/145_0074.jpg 10
307 | c/145_0155.jpg 10
308 | c/145_0267.jpg 10
309 | c/145_0314.jpg 10
310 | c/145_0127.jpg 10
311 | c/145_0069.jpg 10
312 | c/145_0635.jpg 10
313 | c/145_0234.jpg 10
314 | c/145_0338.jpg 10
315 | c/145_0462.jpg 10
316 | c/145_0422.jpg 10
317 | c/145_0233.jpg 10
318 | c/145_0697.jpg 10
319 | c/145_0084.jpg 10
320 | c/145_0266.jpg 10
321 | c/145_0413.jpg 10
322 | c/145_0552.jpg 10
323 | c/145_0450.jpg 10
324 | c/145_0284.jpg 10
325 | c/145_0442.jpg 10
326 | c/145_0636.jpg 10
327 | c/145_0185.jpg 10
328 | c/145_0451.jpg 10
329 | c/145_0476.jpg 10
330 | c/145_0087.jpg 10
331 | c/145_0776.jpg 10
332 | c/145_0734.jpg 10
333 | c/145_0719.jpg 10
334 | c/145_0223.jpg 10
335 | c/145_0533.jpg 10
336 | c/145_0513.jpg 10
337 | c/145_0780.jpg 10
338 | c/145_0560.jpg 10
339 | c/145_0458.jpg 10
340 | c/145_0494.jpg 10
341 | c/145_0798.jpg 10
342 | c/145_0559.jpg 10
343 | c/145_0277.jpg 10
344 | c/145_0742.jpg 10
345 | c/145_0322.jpg 10
346 | c/145_0191.jpg 10
347 | c/145_0165.jpg 10
348 | c/145_0715.jpg 10
349 | c/145_0433.jpg 10
350 | c/145_0032.jpg 10
351 | c/178_0050.jpg 5
352 | c/178_0091.jpg 5
353 | c/178_0062.jpg 5
354 | c/178_0068.jpg 5
355 | c/178_0045.jpg 5
356 | c/178_0031.jpg 5
357 | c/178_0079.jpg 5
358 | c/178_0046.jpg 5
359 | c/178_0037.jpg 5
360 | c/178_0013.jpg 5
361 | c/178_0074.jpg 5
362 | c/178_0051.jpg 5
363 | c/178_0034.jpg 5
364 | c/178_0040.jpg 5
365 | c/178_0010.jpg 5
366 | c/178_0041.jpg 5
367 | c/178_0017.jpg 5
368 | c/178_0016.jpg 5
369 | c/178_0005.jpg 5
370 | c/178_0025.jpg 5
371 | c/178_0097.jpg 5
372 | c/178_0078.jpg 5
373 | c/178_0058.jpg 5
374 | c/178_0067.jpg 5
375 | c/178_0027.jpg 5
376 | c/178_0012.jpg 5
377 | c/178_0044.jpg 5
378 | c/178_0060.jpg 5
379 | c/178_0043.jpg 5
380 | c/178_0087.jpg 5
381 | c/178_0085.jpg 5
382 | c/178_0004.jpg 5
383 | c/178_0047.jpg 5
384 | c/178_0029.jpg 5
385 | c/178_0080.jpg 5
386 | c/178_0039.jpg 5
387 | c/178_0065.jpg 5
388 | c/178_0083.jpg 5
389 | c/178_0002.jpg 5
390 | c/178_0061.jpg 5
391 | c/178_0095.jpg 5
392 | c/178_0054.jpg 5
393 | c/178_0022.jpg 5
394 | c/178_0053.jpg 5
395 | c/178_0030.jpg 5
396 | c/178_0071.jpg 5
397 | c/178_0090.jpg 5
398 | c/178_0092.jpg 5
399 | c/178_0077.jpg 5
400 | c/178_0064.jpg 5
401 | c/251_0041.jpg 0
402 | c/251_0095.jpg 0
403 | c/251_0069.jpg 0
404 | c/251_0561.jpg 0
405 | c/251_0326.jpg 0
406 | c/251_0478.jpg 0
407 | c/251_0121.jpg 0
408 | c/251_0390.jpg 0
409 | c/251_0419.jpg 0
410 | c/251_0519.jpg 0
411 | c/251_0090.jpg 0
412 | c/251_0238.jpg 0
413 | c/251_0366.jpg 0
414 | c/251_0231.jpg 0
415 | c/251_0714.jpg 0
416 | c/251_0545.jpg 0
417 | c/251_0587.jpg 0
418 | c/251_0115.jpg 0
419 | c/251_0769.jpg 0
420 | c/251_0061.jpg 0
421 | c/251_0381.jpg 0
422 | c/251_0126.jpg 0
423 | c/251_0248.jpg 0
424 | c/251_0800.jpg 0
425 | c/251_0023.jpg 0
426 | c/251_0371.jpg 0
427 | c/251_0646.jpg 0
428 | c/251_0679.jpg 0
429 | c/251_0618.jpg 0
430 | c/251_0779.jpg 0
431 | c/251_0741.jpg 0
432 | c/251_0481.jpg 0
433 | c/251_0425.jpg 0
434 | c/251_0642.jpg 0
435 | c/251_0377.jpg 0
436 | c/251_0447.jpg 0
437 | c/251_0361.jpg 0
438 | c/251_0743.jpg 0
439 | c/251_0215.jpg 0
440 | c/251_0746.jpg 0
441 | c/251_0682.jpg 0
442 | c/251_0614.jpg 0
443 | c/251_0599.jpg 0
444 | c/251_0048.jpg 0
445 | c/251_0389.jpg 0
446 | c/251_0766.jpg 0
447 | c/251_0189.jpg 0
448 | c/251_0404.jpg 0
449 | c/251_0183.jpg 0
450 | c/251_0570.jpg 0
451 | c/056_0021.jpg 7
452 | c/056_0043.jpg 7
453 | c/056_0100.jpg 7
454 | c/056_0055.jpg 7
455 | c/056_0078.jpg 7
456 | c/056_0031.jpg 7
457 | c/056_0095.jpg 7
458 | c/056_0101.jpg 7
459 | c/056_0076.jpg 7
460 | c/056_0037.jpg 7
461 | c/056_0009.jpg 7
462 | c/056_0013.jpg 7
463 | c/056_0048.jpg 7
464 | c/056_0057.jpg 7
465 | c/056_0046.jpg 7
466 | c/056_0072.jpg 7
467 | c/056_0014.jpg 7
468 | c/056_0050.jpg 7
469 | c/056_0023.jpg 7
470 | c/056_0039.jpg 7
471 | c/056_0024.jpg 7
472 | c/056_0069.jpg 7
473 | c/056_0038.jpg 7
474 | c/056_0093.jpg 7
475 | c/056_0058.jpg 7
476 | c/056_0063.jpg 7
477 | c/056_0082.jpg 7
478 | c/056_0077.jpg 7
479 | c/056_0099.jpg 7
480 | c/056_0052.jpg 7
481 | c/056_0070.jpg 7
482 | c/056_0054.jpg 7
483 | c/056_0064.jpg 7
484 | c/056_0081.jpg 7
485 | c/056_0056.jpg 7
486 | c/056_0028.jpg 7
487 | c/056_0004.jpg 7
488 | c/056_0090.jpg 7
489 | c/056_0006.jpg 7
490 | c/056_0033.jpg 7
491 | c/056_0025.jpg 7
492 | c/056_0029.jpg 7
493 | c/056_0080.jpg 7
494 | c/056_0079.jpg 7
495 | c/056_0065.jpg 7
496 | c/056_0088.jpg 7
497 | c/056_0061.jpg 7
498 | c/056_0001.jpg 7
499 | c/056_0016.jpg 7
500 | c/056_0091.jpg 7
501 | c/252_0059.jpg 6
502 | c/252_0092.jpg 6
503 | c/252_0085.jpg 6
504 | c/252_0068.jpg 6
505 | c/252_0098.jpg 6
506 | c/252_0087.jpg 6
507 | c/252_0034.jpg 6
508 | c/252_0003.jpg 6
509 | c/252_0077.jpg 6
510 | c/252_0062.jpg 6
511 | c/252_0045.jpg 6
512 | c/252_0043.jpg 6
513 | c/252_0073.jpg 6
514 | c/252_0002.jpg 6
515 | c/252_0005.jpg 6
516 | c/252_0114.jpg 6
517 | c/252_0042.jpg 6
518 | c/252_0113.jpg 6
519 | c/252_0089.jpg 6
520 | c/252_0037.jpg 6
521 | c/252_0052.jpg 6
522 | c/252_0028.jpg 6
523 | c/252_0083.jpg 6
524 | c/252_0056.jpg 6
525 | c/252_0109.jpg 6
526 | c/252_0078.jpg 6
527 | c/252_0039.jpg 6
528 | c/252_0093.jpg 6
529 | c/252_0026.jpg 6
530 | c/252_0076.jpg 6
531 | c/252_0071.jpg 6
532 | c/252_0044.jpg 6
533 | c/252_0040.jpg 6
534 | c/252_0091.jpg 6
535 | c/252_0027.jpg 6
536 | c/252_0115.jpg 6
537 | c/252_0074.jpg 6
538 | c/252_0025.jpg 6
539 | c/252_0104.jpg 6
540 | c/252_0033.jpg 6
541 | c/252_0105.jpg 6
542 | c/252_0116.jpg 6
543 | c/252_0007.jpg 6
544 | c/252_0111.jpg 6
545 | c/252_0064.jpg 6
546 | c/252_0006.jpg 6
547 | c/252_0070.jpg 6
548 | c/252_0082.jpg 6
549 | c/252_0080.jpg 6
550 | c/252_0029.jpg 6
551 | c/197_0019.jpg 3
552 | c/197_0087.jpg 3
553 | c/197_0059.jpg 3
554 | c/197_0038.jpg 3
555 | c/197_0064.jpg 3
556 | c/197_0043.jpg 3
557 | c/197_0012.jpg 3
558 | c/197_0037.jpg 3
559 | c/197_0010.jpg 3
560 | c/197_0013.jpg 3
561 | c/197_0074.jpg 3
562 | c/197_0079.jpg 3
563 | c/197_0014.jpg 3
564 | c/197_0091.jpg 3
565 | c/197_0068.jpg 3
566 | c/197_0060.jpg 3
567 | c/197_0077.jpg 3
568 | c/197_0011.jpg 3
569 | c/197_0051.jpg 3
570 | c/197_0052.jpg 3
571 | c/197_0057.jpg 3
572 | c/197_0007.jpg 3
573 | c/197_0085.jpg 3
574 | c/197_0058.jpg 3
575 | c/197_0016.jpg 3
576 | c/197_0001.jpg 3
577 | c/197_0082.jpg 3
578 | c/197_0028.jpg 3
579 | c/197_0030.jpg 3
580 | c/197_0034.jpg 3
581 | c/197_0055.jpg 3
582 | c/197_0073.jpg 3
583 | c/197_0031.jpg 3
584 | c/197_0065.jpg 3
585 | c/197_0070.jpg 3
586 | c/197_0099.jpg 3
587 | c/197_0081.jpg 3
588 | c/197_0035.jpg 3
589 | c/197_0090.jpg 3
590 | c/197_0025.jpg 3
591 | c/197_0047.jpg 3
592 | c/197_0040.jpg 3
593 | c/197_0084.jpg 3
594 | c/197_0054.jpg 3
595 | c/197_0046.jpg 3
596 | c/197_0004.jpg 3
597 | c/197_0053.jpg 3
598 | c/197_0061.jpg 3
599 | c/197_0020.jpg 3
600 | c/197_0049.jpg 3
601 |
--------------------------------------------------------------------------------
/data/ImageCLEF/pList.txt:
--------------------------------------------------------------------------------
1 | p/2008_006463.jpg 0
2 | p/2011_000163.jpg 0
3 | p/2009_003635.jpg 0
4 | p/2009_002888.jpg 0
5 | p/2010_002695.jpg 0
6 | p/2008_003423.jpg 0
7 | p/2008_007970.jpg 0
8 | p/2008_005834.jpg 0
9 | p/2009_002199.jpg 0
10 | p/2009_000387.jpg 0
11 | p/2008_003703.jpg 0
12 | p/2009_000801.jpg 0
13 | p/2010_003559.jpg 0
14 | p/2009_000545.jpg 0
15 | p/2010_004312.jpg 0
16 | p/2008_006951.jpg 0
17 | p/2009_002432.jpg 0
18 | p/2010_003737.jpg 0
19 | p/2010_003933.jpg 0
20 | p/2011_001044.jpg 0
21 | p/2010_005224.jpg 0
22 | p/2011_002993.jpg 0
23 | p/2009_003199.jpg 0
24 | p/2009_002752.jpg 0
25 | p/2008_003575.jpg 0
26 | p/2010_000437.jpg 0
27 | p/2010_001139.jpg 0
28 | p/2009_004414.jpg 0
29 | p/2008_005905.jpg 0
30 | p/2009_002914.jpg 0
31 | p/2009_002714.jpg 0
32 | p/2009_000734.jpg 0
33 | p/2010_004063.jpg 0
34 | p/2009_005215.jpg 0
35 | p/2011_002675.jpg 0
36 | p/2008_007758.jpg 0
37 | p/2009_001199.jpg 0
38 | p/2011_001489.jpg 0
39 | p/2008_003033.jpg 0
40 | p/2010_000418.jpg 0
41 | p/2010_001413.jpg 0
42 | p/2011_001858.jpg 0
43 | p/2009_002999.jpg 0
44 | p/2010_000270.jpg 0
45 | p/2009_004653.jpg 0
46 | p/2008_000291.jpg 0
47 | p/2010_004917.jpg 0
48 | p/2009_000661.jpg 0
49 | p/2008_007442.jpg 0
50 | p/2011_002751.jpg 0
51 | p/2009_001585.jpg 11
52 | p/2008_002947.jpg 11
53 | p/2008_008231.jpg 11
54 | p/2011_001030.jpg 11
55 | p/2009_001011.jpg 11
56 | p/2008_000689.jpg 11
57 | p/2009_004205.jpg 11
58 | p/2008_000568.jpg 11
59 | p/2008_004814.jpg 11
60 | p/2009_004794.jpg 11
61 | p/2008_001408.jpg 11
62 | p/2011_000559.jpg 11
63 | p/2009_001690.jpg 11
64 | p/2009_004285.jpg 11
65 | p/2008_008745.jpg 11
66 | p/2008_001223.jpg 11
67 | p/2008_002499.jpg 11
68 | p/2011_002519.jpg 11
69 | p/2008_002809.jpg 11
70 | p/2010_000324.jpg 11
71 | p/2010_004222.jpg 11
72 | p/2011_002987.jpg 11
73 | p/2008_008659.jpg 11
74 | p/2008_005168.jpg 11
75 | p/2009_003071.jpg 11
76 | p/2010_004666.jpg 11
77 | p/2009_001952.jpg 11
78 | p/2010_004708.jpg 11
79 | p/2008_002758.jpg 11
80 | p/2008_006145.jpg 11
81 | p/2011_002097.jpg 11
82 | p/2009_001581.jpg 11
83 | p/2008_008162.jpg 11
84 | p/2009_002285.jpg 11
85 | p/2008_002675.jpg 11
86 | p/2008_002908.jpg 11
87 | p/2008_000552.jpg 11
88 | p/2008_000203.jpg 11
89 | p/2009_000529.jpg 11
90 | p/2008_002700.jpg 11
91 | p/2008_006554.jpg 11
92 | p/2008_006991.jpg 11
93 | p/2009_004032.jpg 11
94 | p/2008_000965.jpg 11
95 | p/2010_004160.jpg 11
96 | p/2011_003020.jpg 11
97 | p/2008_008705.jpg 11
98 | p/2008_002067.jpg 11
99 | p/2010_002346.jpg 11
100 | p/2008_005825.jpg 11
101 | p/2009_004197.jpg 5
102 | p/2009_000199.jpg 5
103 | p/2008_005196.jpg 5
104 | p/2010_002203.jpg 5
105 | p/2009_002436.jpg 5
106 | p/2010_005959.jpg 5
107 | p/2010_003293.jpg 5
108 | p/2008_008343.jpg 5
109 | p/2010_000603.jpg 5
110 | p/2009_000335.jpg 5
111 | p/2008_004794.jpg 5
112 | p/2010_001403.jpg 5
113 | p/2011_000267.jpg 5
114 | p/2009_001967.jpg 5
115 | p/2009_002066.jpg 5
116 | p/2009_001822.jpg 5
117 | p/2010_004637.jpg 5
118 | p/2011_001105.jpg 5
119 | p/2009_002566.jpg 5
120 | p/2008_003673.jpg 5
121 | p/2011_002330.jpg 5
122 | p/2009_002105.jpg 5
123 | p/2011_000007.jpg 5
124 | p/2010_001646.jpg 5
125 | p/2011_000138.jpg 5
126 | p/2010_000947.jpg 5
127 | p/2011_003114.jpg 5
128 | p/2010_002263.jpg 5
129 | p/2009_002398.jpg 5
130 | p/2008_006483.jpg 5
131 | p/2010_003173.jpg 5
132 | p/2010_005118.jpg 5
133 | p/2009_001847.jpg 5
134 | p/2009_001098.jpg 5
135 | p/2011_001536.jpg 5
136 | p/2010_005063.jpg 5
137 | p/2011_001341.jpg 5
138 | p/2010_005080.jpg 5
139 | p/2009_000140.jpg 5
140 | p/2009_004040.jpg 5
141 | p/2011_001110.jpg 5
142 | p/2010_001771.jpg 5
143 | p/2010_002102.jpg 5
144 | p/2008_008080.jpg 5
145 | p/2009_005279.jpg 5
146 | p/2009_001675.jpg 5
147 | p/2009_001590.jpg 5
148 | p/2010_003534.jpg 5
149 | p/2010_004997.jpg 5
150 | p/2009_002052.jpg 5
151 | p/2010_004766.jpg 10
152 | p/2008_004345.jpg 10
153 | p/2011_000927.jpg 10
154 | p/2009_004845.jpg 10
155 | p/2010_003947.jpg 10
156 | p/2008_005637.jpg 10
157 | p/2009_002894.jpg 10
158 | p/2010_000495.jpg 10
159 | p/2010_002408.jpg 10
160 | p/2008_005427.jpg 10
161 | p/2011_000496.jpg 10
162 | p/2008_000545.jpg 10
163 | p/2009_001135.jpg 10
164 | p/2010_005252.jpg 10
165 | p/2010_005199.jpg 10
166 | p/2009_001805.jpg 10
167 | p/2009_004502.jpg 10
168 | p/2008_004615.jpg 10
169 | p/2011_000512.jpg 10
170 | p/2011_001040.jpg 10
171 | p/2008_003249.jpg 10
172 | p/2008_003618.jpg 10
173 | p/2008_004371.jpg 10
174 | p/2008_002191.jpg 10
175 | p/2011_000034.jpg 10
176 | p/2008_003429.jpg 10
177 | p/2010_005997.jpg 10
178 | p/2008_005893.jpg 10
179 | p/2009_001646.jpg 10
180 | p/2010_000695.jpg 10
181 | p/2008_001150.jpg 10
182 | p/2009_003560.jpg 10
183 | p/2011_002598.jpg 10
184 | p/2010_004143.jpg 10
185 | p/2010_001828.jpg 10
186 | p/2009_003020.jpg 10
187 | p/2009_003090.jpg 10
188 | p/2009_004117.jpg 10
189 | p/2008_008218.jpg 10
190 | p/2009_003747.jpg 10
191 | p/2009_004738.jpg 10
192 | p/2008_008097.jpg 10
193 | p/2008_007558.jpg 10
194 | p/2008_002115.jpg 10
195 | p/2008_007075.jpg 10
196 | p/2008_004084.jpg 10
197 | p/2008_001119.jpg 10
198 | p/2008_001631.jpg 10
199 | p/2009_003639.jpg 10
200 | p/2008_008227.jpg 10
201 | p/2009_002453.jpg 3
202 | p/2011_002871.jpg 3
203 | p/2011_000086.jpg 3
204 | p/2011_001221.jpg 3
205 | p/2008_007750.jpg 3
206 | p/2008_004969.jpg 3
207 | p/2008_005923.jpg 3
208 | p/2008_007643.jpg 3
209 | p/2011_003025.jpg 3
210 | p/2009_001289.jpg 3
211 | p/2011_001591.jpg 3
212 | p/2010_001916.jpg 3
213 | p/2008_003858.jpg 3
214 | p/2010_005198.jpg 3
215 | p/2008_007841.jpg 3
216 | p/2009_002662.jpg 3
217 | p/2011_000435.jpg 3
218 | p/2009_002308.jpg 3
219 | p/2008_004291.jpg 3
220 | p/2009_003652.jpg 3
221 | p/2011_000286.jpg 3
222 | p/2011_001901.jpg 3
223 | p/2008_004053.jpg 3
224 | p/2009_003936.jpg 3
225 | p/2009_002449.jpg 3
226 | p/2010_002150.jpg 3
227 | p/2009_001406.jpg 3
228 | p/2011_001001.jpg 3
229 | p/2010_005705.jpg 3
230 | p/2008_004124.jpg 3
231 | p/2009_004224.jpg 3
232 | p/2010_002104.jpg 3
233 | p/2009_000686.jpg 3
234 | p/2010_003640.jpg 3
235 | p/2008_007305.jpg 3
236 | p/2008_002773.jpg 3
237 | p/2010_005192.jpg 3
238 | p/2009_002543.jpg 3
239 | p/2008_005517.jpg 3
240 | p/2009_005015.jpg 3
241 | p/2011_002854.jpg 3
242 | p/2009_003019.jpg 3
243 | p/2011_001991.jpg 3
244 | p/2008_001858.jpg 3
245 | p/2011_001310.jpg 3
246 | p/2009_003136.jpg 3
247 | p/2010_004714.jpg 3
248 | p/2010_003815.jpg 3
249 | p/2008_003362.jpg 3
250 | p/2008_003480.jpg 3
251 | p/2009_002975.jpg 8
252 | p/2008_003782.jpg 8
253 | p/2008_008279.jpg 8
254 | p/2009_003128.jpg 8
255 | p/2008_000765.jpg 8
256 | p/2008_006317.jpg 8
257 | p/2011_001956.jpg 8
258 | p/2009_004134.jpg 8
259 | p/2009_004942.jpg 8
260 | p/2009_001837.jpg 8
261 | p/2009_000409.jpg 8
262 | p/2008_004624.jpg 8
263 | p/2009_000412.jpg 8
264 | p/2008_007625.jpg 8
265 | p/2011_002583.jpg 8
266 | p/2008_007004.jpg 8
267 | p/2009_002697.jpg 8
268 | p/2008_001934.jpg 8
269 | p/2010_000749.jpg 8
270 | p/2008_006219.jpg 8
271 | p/2010_001759.jpg 8
272 | p/2008_003272.jpg 8
273 | p/2008_003800.jpg 8
274 | p/2010_002168.jpg 8
275 | p/2010_003714.jpg 8
276 | p/2008_000076.jpg 8
277 | p/2010_004228.jpg 8
278 | p/2008_007576.jpg 8
279 | p/2008_001218.jpg 8
280 | p/2009_004790.jpg 8
281 | p/2009_001323.jpg 8
282 | p/2008_006758.jpg 8
283 | p/2008_007588.jpg 8
284 | p/2008_002589.jpg 8
285 | p/2011_002341.jpg 8
286 | p/2011_000771.jpg 8
287 | p/2010_001675.jpg 8
288 | p/2008_004705.jpg 8
289 | p/2008_001542.jpg 8
290 | p/2008_004195.jpg 8
291 | p/2008_003802.jpg 8
292 | p/2008_005525.jpg 8
293 | p/2009_001433.jpg 8
294 | p/2009_000001.jpg 8
295 | p/2009_002882.jpg 8
296 | p/2008_007531.jpg 8
297 | p/2008_004764.jpg 8
298 | p/2008_003055.jpg 8
299 | p/2011_000022.jpg 8
300 | p/2011_000210.jpg 8
301 | p/2009_003551.jpg 9
302 | p/2011_001726.jpg 9
303 | p/2010_002513.jpg 9
304 | p/2008_000244.jpg 9
305 | p/2009_002758.jpg 9
306 | p/2009_001553.jpg 9
307 | p/2008_004665.jpg 9
308 | p/2010_005744.jpg 9
309 | p/2008_003264.jpg 9
310 | p/2008_007038.jpg 9
311 | p/2008_006624.jpg 9
312 | p/2009_005030.jpg 9
313 | p/2008_002464.jpg 9
314 | p/2010_005410.jpg 9
315 | p/2010_002247.jpg 9
316 | p/2011_000253.jpg 9
317 | p/2010_004009.jpg 9
318 | p/2010_005345.jpg 9
319 | p/2008_007361.jpg 9
320 | p/2010_002843.jpg 9
321 | p/2010_005305.jpg 9
322 | p/2010_003236.jpg 9
323 | p/2008_003885.jpg 9
324 | p/2008_000305.jpg 9
325 | p/2008_005817.jpg 9
326 | p/2011_001910.jpg 9
327 | p/2008_005066.jpg 9
328 | p/2008_007536.jpg 9
329 | p/2008_003667.jpg 9
330 | p/2011_002418.jpg 9
331 | p/2008_005954.jpg 9
332 | p/2009_005288.jpg 9
333 | p/2009_001608.jpg 9
334 | p/2010_000435.jpg 9
335 | p/2008_005064.jpg 9
336 | p/2008_004807.jpg 9
337 | p/2008_006591.jpg 9
338 | p/2011_001705.jpg 9
339 | p/2009_003140.jpg 9
340 | p/2010_002152.jpg 9
341 | p/2010_003035.jpg 9
342 | p/2008_007446.jpg 9
343 | p/2009_001106.jpg 9
344 | p/2008_000070.jpg 9
345 | p/2008_003435.jpg 9
346 | p/2008_006233.jpg 9
347 | p/2008_001481.jpg 9
348 | p/2009_003078.jpg 9
349 | p/2008_002066.jpg 9
350 | p/2009_000981.jpg 9
351 | p/2008_005325.jpg 6
352 | p/2008_006310.jpg 6
353 | p/2010_002807.jpg 6
354 | p/2011_001901.jpg 6
355 | p/2008_005865.jpg 6
356 | p/2008_003647.jpg 6
357 | p/2009_003031.jpg 6
358 | p/2009_004290.jpg 6
359 | p/2009_004271.jpg 6
360 | p/2010_003139.jpg 6
361 | p/2009_000631.jpg 6
362 | p/2008_004411.jpg 6
363 | p/2009_000390.jpg 6
364 | p/2008_001717.jpg 6
365 | p/2009_000720.jpg 6
366 | p/2008_002746.jpg 6
367 | p/2008_007171.jpg 6
368 | p/2008_006833.jpg 6
369 | p/2008_007739.jpg 6
370 | p/2008_007090.jpg 6
371 | p/2008_002199.jpg 6
372 | p/2008_001681.jpg 6
373 | p/2008_007877.jpg 6
374 | p/2011_002598.jpg 6
375 | p/2008_001007.jpg 6
376 | p/2008_006989.jpg 6
377 | p/2010_004349.jpg 6
378 | p/2011_003114.jpg 6
379 | p/2009_004518.jpg 6
380 | p/2011_002585.jpg 6
381 | p/2009_000886.jpg 6
382 | p/2008_006483.jpg 6
383 | p/2010_001403.jpg 6
384 | p/2010_003562.jpg 6
385 | p/2008_006872.jpg 6
386 | p/2008_004711.jpg 6
387 | p/2009_002380.jpg 6
388 | p/2010_003129.jpg 6
389 | p/2011_001271.jpg 6
390 | p/2010_003450.jpg 6
391 | p/2008_000176.jpg 6
392 | p/2009_000737.jpg 6
393 | p/2010_000947.jpg 6
394 | p/2010_005733.jpg 6
395 | p/2008_001432.jpg 6
396 | p/2010_004690.jpg 6
397 | p/2010_005804.jpg 6
398 | p/2008_007364.jpg 6
399 | p/2008_000719.jpg 6
400 | p/2008_004923.jpg 6
401 | p/2010_005671.jpg 7
402 | p/2010_000655.jpg 7
403 | p/2009_004688.jpg 7
404 | p/2010_002398.jpg 7
405 | p/2009_001687.jpg 7
406 | p/2011_000317.jpg 7
407 | p/2008_007286.jpg 7
408 | p/2010_002674.jpg 7
409 | p/2010_005109.jpg 7
410 | p/2010_001838.jpg 7
411 | p/2008_003801.jpg 7
412 | p/2010_001626.jpg 7
413 | p/2010_001432.jpg 7
414 | p/2010_004345.jpg 7
415 | p/2009_004772.jpg 7
416 | p/2010_001768.jpg 7
417 | p/2010_001422.jpg 7
418 | p/2010_004227.jpg 7
419 | p/2010_001421.jpg 7
420 | p/2008_001420.jpg 7
421 | p/2011_000981.jpg 7
422 | p/2008_005379.jpg 7
423 | p/2009_002605.jpg 7
424 | p/2008_007583.jpg 7
425 | p/2008_007358.jpg 7
426 | p/2009_002865.jpg 7
427 | p/2008_006724.jpg 7
428 | p/2010_000263.jpg 7
429 | p/2008_005333.jpg 7
430 | p/2011_002239.jpg 7
431 | p/2008_006032.jpg 7
432 | p/2010_003351.jpg 7
433 | p/2010_003056.jpg 7
434 | p/2008_000419.jpg 7
435 | p/2010_001756.jpg 7
436 | p/2008_006716.jpg 7
437 | p/2008_000162.jpg 7
438 | p/2010_003554.jpg 7
439 | p/2009_000874.jpg 7
440 | p/2010_002644.jpg 7
441 | p/2011_002900.jpg 7
442 | p/2010_004417.jpg 7
443 | p/2010_002129.jpg 7
444 | p/2010_003879.jpg 7
445 | p/2008_000931.jpg 7
446 | p/2008_007208.jpg 7
447 | p/2008_006220.jpg 7
448 | p/2009_001538.jpg 7
449 | p/2008_001550.jpg 7
450 | p/2009_003735.jpg 7
451 | p/2008_008724.jpg 1
452 | p/2010_003828.jpg 1
453 | p/2009_000820.jpg 1
454 | p/2009_001642.jpg 1
455 | p/2009_000445.jpg 1
456 | p/2008_004758.jpg 1
457 | p/2010_001119.jpg 1
458 | p/2011_000453.jpg 1
459 | p/2010_003701.jpg 1
460 | p/2010_005000.jpg 1
461 | p/2011_000087.jpg 1
462 | p/2010_005374.jpg 1
463 | p/2011_001937.jpg 1
464 | p/2009_005154.jpg 1
465 | p/2009_003175.jpg 1
466 | p/2010_001753.jpg 1
467 | p/2008_002714.jpg 1
468 | p/2010_002927.jpg 1
469 | p/2010_002497.jpg 1
470 | p/2010_006070.jpg 1
471 | p/2008_008758.jpg 1
472 | p/2011_002913.jpg 1
473 | p/2010_005226.jpg 1
474 | p/2009_001384.jpg 1
475 | p/2008_008337.jpg 1
476 | p/2009_003075.jpg 1
477 | p/2011_000505.jpg 1
478 | p/2010_005782.jpg 1
479 | p/2008_000133.jpg 1
480 | p/2009_004876.jpg 1
481 | p/2008_002883.jpg 1
482 | p/2010_005556.jpg 1
483 | p/2010_003912.jpg 1
484 | p/2008_005175.jpg 1
485 | p/2009_003860.jpg 1
486 | p/2008_007510.jpg 1
487 | p/2008_008629.jpg 1
488 | p/2009_000015.jpg 1
489 | p/2008_000803.jpg 1
490 | p/2008_002679.jpg 1
491 | p/2010_000893.jpg 1
492 | p/2011_002406.jpg 1
493 | p/2009_004797.jpg 1
494 | p/2008_005412.jpg 1
495 | p/2008_004995.jpg 1
496 | p/2011_000485.jpg 1
497 | p/2008_002787.jpg 1
498 | p/2010_005848.jpg 1
499 | p/2009_004848.jpg 1
500 | p/2009_002983.jpg 1
501 | p/2010_002457.jpg 4
502 | p/2010_004198.jpg 4
503 | p/2008_003467.jpg 4
504 | p/2011_003038.jpg 4
505 | p/2010_005512.jpg 4
506 | p/2008_008072.jpg 4
507 | p/2008_007219.jpg 4
508 | p/2011_001538.jpg 4
509 | p/2009_002980.jpg 4
510 | p/2008_003591.jpg 4
511 | p/2008_005991.jpg 4
512 | p/2008_004979.jpg 4
513 | p/2010_002858.jpg 4
514 | p/2008_006145.jpg 4
515 | p/2011_002186.jpg 4
516 | p/2008_000801.jpg 4
517 | p/2008_000290.jpg 4
518 | p/2009_005302.jpg 4
519 | p/2008_001593.jpg 4
520 | p/2010_003912.jpg 4
521 | p/2008_007146.jpg 4
522 | p/2009_003588.jpg 4
523 | p/2008_000703.jpg 4
524 | p/2010_000089.jpg 4
525 | p/2008_000691.jpg 4
526 | p/2008_003635.jpg 4
527 | p/2008_000522.jpg 4
528 | p/2010_005663.jpg 4
529 | p/2008_000953.jpg 4
530 | p/2009_004247.jpg 4
531 | p/2009_000464.jpg 4
532 | p/2009_002649.jpg 4
533 | p/2008_005758.jpg 4
534 | p/2008_000851.jpg 4
535 | p/2009_002594.jpg 4
536 | p/2011_002590.jpg 4
537 | p/2009_003214.jpg 4
538 | p/2010_005540.jpg 4
539 | p/2009_000494.jpg 4
540 | p/2009_001908.jpg 4
541 | p/2009_001106.jpg 4
542 | p/2009_003154.jpg 4
543 | p/2008_004016.jpg 4
544 | p/2011_000747.jpg 4
545 | p/2010_002455.jpg 4
546 | p/2009_005278.jpg 4
547 | p/2008_001638.jpg 4
548 | p/2008_008266.jpg 4
549 | p/2011_002814.jpg 4
550 | p/2008_005140.jpg 4
551 | p/2008_007870.jpg 2
552 | p/2008_004671.jpg 2
553 | p/2008_006802.jpg 2
554 | p/2010_000621.jpg 2
555 | p/2008_008523.jpg 2
556 | p/2009_005040.jpg 2
557 | p/2008_006164.jpg 2
558 | p/2008_007120.jpg 2
559 | p/2008_001673.jpg 2
560 | p/2008_007486.jpg 2
561 | p/2009_000930.jpg 2
562 | p/2009_003285.jpg 2
563 | p/2008_004551.jpg 2
564 | p/2008_002784.jpg 2
565 | p/2008_003426.jpg 2
566 | p/2010_000918.jpg 2
567 | p/2008_008309.jpg 2
568 | p/2008_007003.jpg 2
569 | p/2009_003531.jpg 2
570 | p/2008_007465.jpg 2
571 | p/2008_003484.jpg 2
572 | p/2008_004689.jpg 2
573 | p/2008_000103.jpg 2
574 | p/2011_001967.jpg 2
575 | p/2008_002255.jpg 2
576 | p/2009_000317.jpg 2
577 | p/2008_008197.jpg 2
578 | p/2009_001128.jpg 2
579 | p/2009_000664.jpg 2
580 | p/2010_002023.jpg 2
581 | p/2008_006090.jpg 2
582 | p/2011_002463.jpg 2
583 | p/2009_002286.jpg 2
584 | p/2009_001660.jpg 2
585 | p/2010_004187.jpg 2
586 | p/2011_001134.jpg 2
587 | p/2008_002970.jpg 2
588 | p/2008_007656.jpg 2
589 | p/2009_000790.jpg 2
590 | p/2008_007854.jpg 2
591 | p/2009_002629.jpg 2
592 | p/2010_001992.jpg 2
593 | p/2008_005757.jpg 2
594 | p/2010_005428.jpg 2
595 | p/2009_001397.jpg 2
596 | p/2008_001415.jpg 2
597 | p/2009_002877.jpg 2
598 | p/2010_005993.jpg 2
599 | p/2010_003771.jpg 2
600 | p/2008_004147.jpg 2
601 |
--------------------------------------------------------------------------------
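The ImageCLEF lists shown here share one layout: 600 lines of "relative/path label" (50 images for each of the 12 classes, matching the class_num = 12 that test_image.py sets for image-clef), plus one trailing blank line. A quick sanity-check sketch under that assumption; count_labels is a hypothetical helper, not part of this repo:

# Hypothetical sanity check for the list files above: expects 12 classes
# with 50 entries each. Illustration only, not repo code.
from collections import Counter

def count_labels(list_path):
    with open(list_path) as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    return Counter(int(ln.rsplit(' ', 1)[1]) for ln in lines)

# count_labels('./data/ImageCLEF/pList.txt')
# -> Counter({11: 50, 5: 50, 10: 50, ...}) if the layout holds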
/data/ImageCLEF/iList.txt:
--------------------------------------------------------------------------------
1 | i/n02691156_7563.JPEG 0
2 | i/n02691156_30456.JPEG 0
3 | i/n02691156_4273.JPEG 0
4 | i/n02691156_21342.JPEG 0
5 | i/n02691156_11944.JPEG 0
6 | i/n02691156_8007.JPEG 0
7 | i/n02691156_2510.JPEG 0
8 | i/n02691156_1682.JPEG 0
9 | i/n02691156_30294.JPEG 0
10 | i/n02691156_6973.JPEG 0
11 | i/n02691156_2351.JPEG 0
12 | i/n02691156_12350.JPEG 0
13 | i/n02691156_6382.JPEG 0
14 | i/n02691156_10045.JPEG 0
15 | i/n02691156_1663.JPEG 0
16 | i/n02691156_12546.JPEG 0
17 | i/n02691156_240.JPEG 0
18 | i/n02691156_49198.JPEG 0
19 | i/n02691156_7515.JPEG 0
20 | i/n02691156_2701.JPEG 0
21 | i/n02691156_10027.JPEG 0
22 | i/n02691156_29561.JPEG 0
23 | i/n02691156_11169.JPEG 0
24 | i/n02691156_10494.JPEG 0
25 | i/n02691156_54301.JPEG 0
26 | i/n02691156_1806.JPEG 0
27 | i/n02691156_38031.JPEG 0
28 | i/n02691156_38968.JPEG 0
29 | i/n02691156_3107.JPEG 0
30 | i/n02691156_11788.JPEG 0
31 | i/n02691156_3581.JPEG 0
32 | i/n02691156_7304.JPEG 0
33 | i/n02691156_12581.JPEG 0
34 | i/n02691156_11550.JPEG 0
35 | i/n02691156_31395.JPEG 0
36 | i/n02691156_52254.JPEG 0
37 | i/n02691156_7196.JPEG 0
38 | i/n02691156_6587.JPEG 0
39 | i/n02691156_1015.JPEG 0
40 | i/n02691156_38570.JPEG 0
41 | i/n02691156_6673.JPEG 0
42 | i/n02691156_9079.JPEG 0
43 | i/n02691156_5975.JPEG 0
44 | i/n02691156_6122.JPEG 0
45 | i/n02691156_7027.JPEG 0
46 | i/n02691156_11729.JPEG 0
47 | i/n02691156_2289.JPEG 0
48 | i/n02691156_89.JPEG 0
49 | i/n02691156_4662.JPEG 0
50 | i/n02691156_14912.JPEG 0
51 | i/n02084071_12460.JPEG 7
52 | i/n02084071_766.JPEG 7
53 | i/n02084071_25409.JPEG 7
54 | i/n02084071_1389.JPEG 7
55 | i/n02084071_1445.JPEG 7
56 | i/n02084071_15074.JPEG 7
57 | i/n02084071_18746.JPEG 7
58 | i/n02084071_7940.JPEG 7
59 | i/n02084071_23498.JPEG 7
60 | i/n02084071_23239.JPEG 7
61 | i/n02084071_661.JPEG 7
62 | i/n02084071_525.JPEG 7
63 | i/n02084071_11564.JPEG 7
64 | i/n02084071_32835.JPEG 7
65 | i/n02084071_32500.JPEG 7
66 | i/n02084071_9138.JPEG 7
67 | i/n02084071_4657.JPEG 7
68 | i/n02084071_27936.JPEG 7
69 | i/n02084071_760.JPEG 7
70 | i/n02084071_32569.JPEG 7
71 | i/n02084071_17496.JPEG 7
72 | i/n02084071_2103.JPEG 7
73 | i/n02084071_11287.JPEG 7
74 | i/n02084071_31709.JPEG 7
75 | i/n02084071_5662.JPEG 7
76 | i/n02084071_5128.JPEG 7
77 | i/n02084071_182.JPEG 7
78 | i/n02084071_24007.JPEG 7
79 | i/n02084071_22620.JPEG 7
80 | i/n02084071_29560.JPEG 7
81 | i/n02084071_27730.JPEG 7
82 | i/n02084071_34874.JPEG 7
83 | i/n02084071_25631.JPEG 7
84 | i/n02084071_17180.JPEG 7
85 | i/n02084071_2732.JPEG 7
86 | i/n02084071_7098.JPEG 7
87 | i/n02084071_16131.JPEG 7
88 | i/n02084071_1848.JPEG 7
89 | i/n02084071_19134.JPEG 7
90 | i/n02084071_545.JPEG 7
91 | i/n02084071_243.JPEG 7
92 | i/n02084071_22252.JPEG 7
93 | i/n02084071_8803.JPEG 7
94 | i/n02084071_22378.JPEG 7
95 | i/n02084071_31703.JPEG 7
96 | i/n02084071_108.JPEG 7
97 | i/n02084071_26796.JPEG 7
98 | i/n02084071_27298.JPEG 7
99 | i/n02084071_29282.JPEG 7
100 | i/n02084071_28769.JPEG 7
101 | i/n02858304_9777.JPEG 3
102 | i/n02858304_837.JPEG 3
103 | i/n02858304_9962.JPEG 3
104 | i/n02858304_576.JPEG 3
105 | i/n02858304_236.JPEG 3
106 | i/n02858304_1610.JPEG 3
107 | i/n02858304_1921.JPEG 3
108 | i/n02858304_3759.JPEG 3
109 | i/n02858304_3730.JPEG 3
110 | i/n02858304_20876.JPEG 3
111 | i/n02858304_7454.JPEG 3
112 | i/n02858304_9208.JPEG 3
113 | i/n02858304_169.JPEG 3
114 | i/n02858304_312.JPEG 3
115 | i/n02858304_3137.JPEG 3
116 | i/n02858304_676.JPEG 3
117 | i/n02858304_3574.JPEG 3
118 | i/n02858304_1249.JPEG 3
119 | i/n02858304_1883.JPEG 3
120 | i/n02858304_9938.JPEG 3
121 | i/n02858304_3268.JPEG 3
122 | i/n02858304_3030.JPEG 3
123 | i/n02858304_917.JPEG 3
124 | i/n02858304_3175.JPEG 3
125 | i/n02858304_2400.JPEG 3
126 | i/n02858304_416.JPEG 3
127 | i/n02858304_746.JPEG 3
128 | i/n02858304_4082.JPEG 3
129 | i/n02858304_454.JPEG 3
130 | i/n02858304_3112.JPEG 3
131 | i/n02858304_9362.JPEG 3
132 | i/n02858304_8049.JPEG 3
133 | i/n02858304_9973.JPEG 3
134 | i/n02858304_19915.JPEG 3
135 | i/n02858304_2818.JPEG 3
136 | i/n02858304_4052.JPEG 3
137 | i/n02858304_2348.JPEG 3
138 | i/n02858304_379.JPEG 3
139 | i/n02858304_8479.JPEG 3
140 | i/n02858304_10332.JPEG 3
141 | i/n02858304_1612.JPEG 3
142 | i/n02858304_10526.JPEG 3
143 | i/n02858304_8155.JPEG 3
144 | i/n02858304_4545.JPEG 3
145 | i/n02858304_2126.JPEG 3
146 | i/n02858304_391.JPEG 3
147 | i/n02858304_2049.JPEG 3
148 | i/n02858304_7231.JPEG 3
149 | i/n02858304_2175.JPEG 3
150 | i/n02858304_9844.JPEG 3
151 | i/n02924116_64004.JPEG 5
152 | i/n02924116_84238.JPEG 5
153 | i/n02924116_53329.JPEG 5
154 | i/n02924116_9012.JPEG 5
155 | i/n02924116_73751.JPEG 5
156 | i/n02924116_56702.JPEG 5
157 | i/n02924116_85589.JPEG 5
158 | i/n02924116_34753.JPEG 5
159 | i/n02924116_20498.JPEG 5
160 | i/n02924116_57267.JPEG 5
161 | i/n02924116_38704.JPEG 5
162 | i/n02924116_93395.JPEG 5
163 | i/n02924116_14503.JPEG 5
164 | i/n02924116_51663.JPEG 5
165 | i/n02924116_33800.JPEG 5
166 | i/n02924116_73378.JPEG 5
167 | i/n02924116_39743.JPEG 5
168 | i/n02924116_10156.JPEG 5
169 | i/n02924116_78093.JPEG 5
170 | i/n02924116_18209.JPEG 5
171 | i/n02924116_73434.JPEG 5
172 | i/n02924116_3470.JPEG 5
173 | i/n02924116_61109.JPEG 5
174 | i/n02924116_61339.JPEG 5
175 | i/n02924116_35958.JPEG 5
176 | i/n02924116_84781.JPEG 5
177 | i/n02924116_33755.JPEG 5
178 | i/n02924116_43906.JPEG 5
179 | i/n02924116_67006.JPEG 5
180 | i/n02924116_73904.JPEG 5
181 | i/n02924116_44980.JPEG 5
182 | i/n02924116_78812.JPEG 5
183 | i/n02924116_37730.JPEG 5
184 | i/n02924116_82999.JPEG 5
185 | i/n02924116_37402.JPEG 5
186 | i/n02924116_73389.JPEG 5
187 | i/n02924116_3941.JPEG 5
188 | i/n02924116_77225.JPEG 5
189 | i/n02924116_8054.JPEG 5
190 | i/n02924116_50926.JPEG 5
191 | i/n02924116_68762.JPEG 5
192 | i/n02924116_95090.JPEG 5
193 | i/n02924116_82453.JPEG 5
194 | i/n02924116_37624.JPEG 5
195 | i/n02924116_65903.JPEG 5
196 | i/n02924116_33400.JPEG 5
197 | i/n02924116_647.JPEG 5
198 | i/n02924116_86629.JPEG 5
199 | i/n02924116_3352.JPEG 5
200 | i/n02924116_30382.JPEG 5
201 | i/n03782190_12209.JPEG 9
202 | i/n03782190_2426.JPEG 9
203 | i/n03782190_5658.JPEG 9
204 | i/n03782190_2152.JPEG 9
205 | i/n03782190_11158.JPEG 9
206 | i/n03782190_13975.JPEG 9
207 | i/n03782190_225.JPEG 9
208 | i/n03782190_15774.JPEG 9
209 | i/n03782190_14408.JPEG 9
210 | i/n03782190_12880.JPEG 9
211 | i/n03782190_5464.JPEG 9
212 | i/n03782190_7494.JPEG 9
213 | i/n03782190_4205.JPEG 9
214 | i/n03782190_12034.JPEG 9
215 | i/n03782190_4945.JPEG 9
216 | i/n03782190_4268.JPEG 9
217 | i/n03782190_10175.JPEG 9
218 | i/n03782190_14532.JPEG 9
219 | i/n03782190_2218.JPEG 9
220 | i/n03782190_9627.JPEG 9
221 | i/n03782190_8233.JPEG 9
222 | i/n03782190_9951.JPEG 9
223 | i/n03782190_6762.JPEG 9
224 | i/n03782190_14178.JPEG 9
225 | i/n03782190_4015.JPEG 9
226 | i/n03782190_1982.JPEG 9
227 | i/n03782190_1626.JPEG 9
228 | i/n03782190_1214.JPEG 9
229 | i/n03782190_15928.JPEG 9
230 | i/n03782190_2513.JPEG 9
231 | i/n03782190_5220.JPEG 9
232 | i/n03782190_220.JPEG 9
233 | i/n03782190_19506.JPEG 9
234 | i/n03782190_1776.JPEG 9
235 | i/n03782190_11918.JPEG 9
236 | i/n03782190_20760.JPEG 9
237 | i/n03782190_11999.JPEG 9
238 | i/n03782190_7533.JPEG 9
239 | i/n03782190_15896.JPEG 9
240 | i/n03782190_2523.JPEG 9
241 | i/n03782190_7111.JPEG 9
242 | i/n03782190_14888.JPEG 9
243 | i/n03782190_6319.JPEG 9
244 | i/n03782190_2154.JPEG 9
245 | i/n03782190_17955.JPEG 9
246 | i/n03782190_4211.JPEG 9
247 | i/n03782190_3964.JPEG 9
248 | i/n03782190_11648.JPEG 9
249 | i/n03782190_7286.JPEG 9
250 | i/n03782190_17779.JPEG 9
251 | i/n07942152_33397.JPEG 11
252 | i/n07942152_27123.JPEG 11
253 | i/n07942152_19556.JPEG 11
254 | i/n07942152_56.JPEG 11
255 | i/n07942152_12547.JPEG 11
256 | i/n07942152_76119.JPEG 11
257 | i/n07942152_22807.JPEG 11
258 | i/n07942152_32300.JPEG 11
259 | i/n07942152_33090.JPEG 11
260 | i/n07942152_13151.JPEG 11
261 | i/n07942152_56835.JPEG 11
262 | i/n07942152_36134.JPEG 11
263 | i/n07942152_16845.JPEG 11
264 | i/n07942152_9563.JPEG 11
265 | i/n07942152_16633.JPEG 11
266 | i/n07942152_8261.JPEG 11
267 | i/n07942152_27105.JPEG 11
268 | i/n07942152_4183.JPEG 11
269 | i/n07942152_34434.JPEG 11
270 | i/n07942152_18551.JPEG 11
271 | i/n07942152_413.JPEG 11
272 | i/n07942152_510.JPEG 11
273 | i/n07942152_27529.JPEG 11
274 | i/n07942152_31584.JPEG 11
275 | i/n07942152_38507.JPEG 11
276 | i/n07942152_37969.JPEG 11
277 | i/n07942152_30118.JPEG 11
278 | i/n07942152_3594.JPEG 11
279 | i/n07942152_5568.JPEG 11
280 | i/n07942152_35239.JPEG 11
281 | i/n07942152_5588.JPEG 11
282 | i/n07942152_42078.JPEG 11
283 | i/n07942152_34679.JPEG 11
284 | i/n07942152_8418.JPEG 11
285 | i/n07942152_24975.JPEG 11
286 | i/n07942152_28165.JPEG 11
287 | i/n07942152_8542.JPEG 11
288 | i/n07942152_5592.JPEG 11
289 | i/n07942152_4777.JPEG 11
290 | i/n07942152_33797.JPEG 11
291 | i/n07942152_14349.JPEG 11
292 | i/n07942152_36072.JPEG 11
293 | i/n07942152_40587.JPEG 11
294 | i/n07942152_37972.JPEG 11
295 | i/n07942152_33751.JPEG 11
296 | i/n07942152_14138.JPEG 11
297 | i/n07942152_27703.JPEG 11
298 | i/n07942152_27649.JPEG 11
299 | i/n07942152_38584.JPEG 11
300 | i/n07942152_37885.JPEG 11
301 | i/n01503061_1868.JPEG 2
302 | i/n01503061_11612.JPEG 2
303 | i/n01503061_5453.JPEG 2
304 | i/n01503061_8646.JPEG 2
305 | i/n01503061_4167.JPEG 2
306 | i/n01503061_17521.JPEG 2
307 | i/n01503061_1387.JPEG 2
308 | i/n01503061_12696.JPEG 2
309 | i/n01503061_2819.JPEG 2
310 | i/n01503061_2744.JPEG 2
311 | i/n01503061_2378.JPEG 2
312 | i/n01503061_6413.JPEG 2
313 | i/n01503061_688.JPEG 2
314 | i/n01503061_3094.JPEG 2
315 | i/n01503061_2076.JPEG 2
316 | i/n01503061_1457.JPEG 2
317 | i/n01503061_2839.JPEG 2
318 | i/n01503061_13213.JPEG 2
319 | i/n01503061_13929.JPEG 2
320 | i/n01503061_14761.JPEG 2
321 | i/n01503061_1202.JPEG 2
322 | i/n01503061_1780.JPEG 2
323 | i/n01503061_9799.JPEG 2
324 | i/n01503061_5215.JPEG 2
325 | i/n01503061_1227.JPEG 2
326 | i/n01503061_6456.JPEG 2
327 | i/n01503061_3028.JPEG 2
328 | i/n01503061_6454.JPEG 2
329 | i/n01503061_552.JPEG 2
330 | i/n01503061_9622.JPEG 2
331 | i/n01503061_1825.JPEG 2
332 | i/n01503061_12735.JPEG 2
333 | i/n01503061_14886.JPEG 2
334 | i/n01503061_10665.JPEG 2
335 | i/n01503061_12662.JPEG 2
336 | i/n01503061_14700.JPEG 2
337 | i/n01503061_4379.JPEG 2
338 | i/n01503061_13119.JPEG 2
339 | i/n01503061_3862.JPEG 2
340 | i/n01503061_5054.JPEG 2
341 | i/n01503061_2167.JPEG 2
342 | i/n01503061_11048.JPEG 2
343 | i/n01503061_4150.JPEG 2
344 | i/n01503061_13495.JPEG 2
345 | i/n01503061_1306.JPEG 2
346 | i/n01503061_7783.JPEG 2
347 | i/n01503061_13307.JPEG 2
348 | i/n01503061_9390.JPEG 2
349 | i/n01503061_9081.JPEG 2
350 | i/n01503061_2386.JPEG 2
351 | i/n02374451_12030.JPEG 8
352 | i/n02374451_151.JPEG 8
353 | i/n02374451_15002.JPEG 8
354 | i/n02374451_9975.JPEG 8
355 | i/n02374451_2978.JPEG 8
356 | i/n02374451_4722.JPEG 8
357 | i/n02374451_8251.JPEG 8
358 | i/n02374451_13125.JPEG 8
359 | i/n02374451_6254.JPEG 8
360 | i/n02374451_4795.JPEG 8
361 | i/n02374451_1284.JPEG 8
362 | i/n02374451_16310.JPEG 8
363 | i/n02374451_6017.JPEG 8
364 | i/n02374451_13929.JPEG 8
365 | i/n02374451_5598.JPEG 8
366 | i/n02374451_5013.JPEG 8
367 | i/n02374451_16603.JPEG 8
368 | i/n02374451_15685.JPEG 8
369 | i/n02374451_14586.JPEG 8
370 | i/n02374451_17474.JPEG 8
371 | i/n02374451_816.JPEG 8
372 | i/n02374451_15098.JPEG 8
373 | i/n02374451_9897.JPEG 8
374 | i/n02374451_14233.JPEG 8
375 | i/n02374451_15663.JPEG 8
376 | i/n02374451_13953.JPEG 8
377 | i/n02374451_11173.JPEG 8
378 | i/n02374451_12024.JPEG 8
379 | i/n02374451_12086.JPEG 8
380 | i/n02374451_19198.JPEG 8
381 | i/n02374451_20103.JPEG 8
382 | i/n02374451_8301.JPEG 8
383 | i/n02374451_16785.JPEG 8
384 | i/n02374451_15633.JPEG 8
385 | i/n02374451_14329.JPEG 8
386 | i/n02374451_597.JPEG 8
387 | i/n02374451_820.JPEG 8
388 | i/n02374451_2944.JPEG 8
389 | i/n02374451_9264.JPEG 8
390 | i/n02374451_11057.JPEG 8
391 | i/n02374451_13159.JPEG 8
392 | i/n02374451_4240.JPEG 8
393 | i/n02374451_11483.JPEG 8
394 | i/n02374451_4479.JPEG 8
395 | i/n02374451_2846.JPEG 8
396 | i/n02374451_18419.JPEG 8
397 | i/n02374451_12376.JPEG 8
398 | i/n02374451_18141.JPEG 8
399 | i/n02374451_16449.JPEG 8
400 | i/n02374451_1062.JPEG 8
401 | i/n02834778_5790.JPEG 1
402 | i/n02834778_8639.JPEG 1
403 | i/n02834778_10647.JPEG 1
404 | i/n02834778_6733.JPEG 1
405 | i/n02834778_5206.JPEG 1
406 | i/n02834778_10288.JPEG 1
407 | i/n02834778_763.JPEG 1
408 | i/n02834778_1533.JPEG 1
409 | i/n02834778_3090.JPEG 1
410 | i/n02834778_8949.JPEG 1
411 | i/n02834778_5311.JPEG 1
412 | i/n02834778_6571.JPEG 1
413 | i/n02834778_12206.JPEG 1
414 | i/n02834778_2196.JPEG 1
415 | i/n02834778_4013.JPEG 1
416 | i/n02834778_5247.JPEG 1
417 | i/n02834778_8507.JPEG 1
418 | i/n02834778_9153.JPEG 1
419 | i/n02834778_43697.JPEG 1
420 | i/n02834778_5420.JPEG 1
421 | i/n02834778_11280.JPEG 1
422 | i/n02834778_716.JPEG 1
423 | i/n02834778_31006.JPEG 1
424 | i/n02834778_8044.JPEG 1
425 | i/n02834778_7161.JPEG 1
426 | i/n02834778_9223.JPEG 1
427 | i/n02834778_11400.JPEG 1
428 | i/n02834778_5314.JPEG 1
429 | i/n02834778_10781.JPEG 1
430 | i/n02834778_187.JPEG 1
431 | i/n02834778_3345.JPEG 1
432 | i/n02834778_10227.JPEG 1
433 | i/n02834778_817.JPEG 1
434 | i/n02834778_700.JPEG 1
435 | i/n02834778_46418.JPEG 1
436 | i/n02834778_6597.JPEG 1
437 | i/n02834778_4695.JPEG 1
438 | i/n02834778_3311.JPEG 1
439 | i/n02834778_8918.JPEG 1
440 | i/n02834778_3735.JPEG 1
441 | i/n02834778_3119.JPEG 1
442 | i/n02834778_32473.JPEG 1
443 | i/n02834778_2737.JPEG 1
444 | i/n02834778_673.JPEG 1
445 | i/n02834778_6375.JPEG 1
446 | i/n02834778_9232.JPEG 1
447 | i/n02834778_12188.JPEG 1
448 | i/n02834778_4302.JPEG 1
449 | i/n02834778_12386.JPEG 1
450 | i/n02834778_12754.JPEG 1
451 | i/n02876657_13023.JPEG 4
452 | i/n02876657_5177.JPEG 4
453 | i/n02876657_7612.JPEG 4
454 | i/n02876657_7965.JPEG 4
455 | i/n02876657_3827.JPEG 4
456 | i/n02876657_12118.JPEG 4
457 | i/n02876657_7993.JPEG 4
458 | i/n02876657_7201.JPEG 4
459 | i/n02876657_4709.JPEG 4
460 | i/n02876657_15101.JPEG 4
461 | i/n02876657_9504.JPEG 4
462 | i/n02876657_3720.JPEG 4
463 | i/n02876657_2631.JPEG 4
464 | i/n02876657_8629.JPEG 4
465 | i/n02876657_2245.JPEG 4
466 | i/n02876657_7018.JPEG 4
467 | i/n02876657_2856.JPEG 4
468 | i/n02876657_13959.JPEG 4
469 | i/n02876657_6293.JPEG 4
470 | i/n02876657_10725.JPEG 4
471 | i/n02876657_7886.JPEG 4
472 | i/n02876657_5324.JPEG 4
473 | i/n02876657_11443.JPEG 4
474 | i/n02876657_5481.JPEG 4
475 | i/n02876657_12301.JPEG 4
476 | i/n02876657_7718.JPEG 4
477 | i/n02876657_7244.JPEG 4
478 | i/n02876657_7535.JPEG 4
479 | i/n02876657_11695.JPEG 4
480 | i/n02876657_2625.JPEG 4
481 | i/n02876657_198.JPEG 4
482 | i/n02876657_668.JPEG 4
483 | i/n02876657_4138.JPEG 4
484 | i/n02876657_5822.JPEG 4
485 | i/n02876657_10178.JPEG 4
486 | i/n02876657_5094.JPEG 4
487 | i/n02876657_14142.JPEG 4
488 | i/n02876657_10103.JPEG 4
489 | i/n02876657_7576.JPEG 4
490 | i/n02876657_5276.JPEG 4
491 | i/n02876657_6246.JPEG 4
492 | i/n02876657_8209.JPEG 4
493 | i/n02876657_5612.JPEG 4
494 | i/n02876657_2191.JPEG 4
495 | i/n02876657_16228.JPEG 4
496 | i/n02876657_4991.JPEG 4
497 | i/n02876657_12397.JPEG 4
498 | i/n02876657_12206.JPEG 4
499 | i/n02876657_11898.JPEG 4
500 | i/n02876657_6550.JPEG 4
501 | i/n02958343_4976.JPEG 6
502 | i/n02958343_80662.JPEG 6
503 | i/n02958343_9147.JPEG 6
504 | i/n02958343_4414.JPEG 6
505 | i/n02958343_12811.JPEG 6
506 | i/n02958343_80339.JPEG 6
507 | i/n02958343_8796.JPEG 6
508 | i/n02958343_8490.JPEG 6
509 | i/n02958343_3541.JPEG 6
510 | i/n02958343_14006.JPEG 6
511 | i/n02958343_9672.JPEG 6
512 | i/n02958343_8811.JPEG 6
513 | i/n02958343_75694.JPEG 6
514 | i/n02958343_44103.JPEG 6
515 | i/n02958343_11718.JPEG 6
516 | i/n02958343_67754.JPEG 6
517 | i/n02958343_11586.JPEG 6
518 | i/n02958343_14713.JPEG 6
519 | i/n02958343_4478.JPEG 6
520 | i/n02958343_6654.JPEG 6
521 | i/n02958343_939.JPEG 6
522 | i/n02958343_4881.JPEG 6
523 | i/n02958343_13459.JPEG 6
524 | i/n02958343_10892.JPEG 6
525 | i/n02958343_5281.JPEG 6
526 | i/n02958343_3098.JPEG 6
527 | i/n02958343_13760.JPEG 6
528 | i/n02958343_10580.JPEG 6
529 | i/n02958343_9561.JPEG 6
530 | i/n02958343_52439.JPEG 6
531 | i/n02958343_8307.JPEG 6
532 | i/n02958343_50886.JPEG 6
533 | i/n02958343_9154.JPEG 6
534 | i/n02958343_13352.JPEG 6
535 | i/n02958343_4524.JPEG 6
536 | i/n02958343_5126.JPEG 6
537 | i/n02958343_9290.JPEG 6
538 | i/n02958343_4643.JPEG 6
539 | i/n02958343_9081.JPEG 6
540 | i/n02958343_8712.JPEG 6
541 | i/n02958343_61122.JPEG 6
542 | i/n02958343_12116.JPEG 6
543 | i/n02958343_55286.JPEG 6
544 | i/n02958343_65338.JPEG 6
545 | i/n02958343_14347.JPEG 6
546 | i/n02958343_8994.JPEG 6
547 | i/n02958343_77917.JPEG 6
548 | i/n02958343_11703.JPEG 6
549 | i/n02958343_2032.JPEG 6
550 | i/n02958343_11693.JPEG 6
551 | i/n03790512_722.JPEG 10
552 | i/n03790512_9326.JPEG 10
553 | i/n03790512_13080.JPEG 10
554 | i/n03790512_7537.JPEG 10
555 | i/n03790512_2370.JPEG 10
556 | i/n03790512_37212.JPEG 10
557 | i/n03790512_11812.JPEG 10
558 | i/n03790512_13431.JPEG 10
559 | i/n03790512_10965.JPEG 10
560 | i/n03790512_10260.JPEG 10
561 | i/n03790512_2068.JPEG 10
562 | i/n03790512_9022.JPEG 10
563 | i/n03790512_11545.JPEG 10
564 | i/n03790512_17139.JPEG 10
565 | i/n03790512_40609.JPEG 10
566 | i/n03790512_8982.JPEG 10
567 | i/n03790512_12426.JPEG 10
568 | i/n03790512_10023.JPEG 10
569 | i/n03790512_9187.JPEG 10
570 | i/n03790512_8654.JPEG 10
571 | i/n03790512_7308.JPEG 10
572 | i/n03790512_11298.JPEG 10
573 | i/n03790512_14972.JPEG 10
574 | i/n03790512_12946.JPEG 10
575 | i/n03790512_12093.JPEG 10
576 | i/n03790512_25915.JPEG 10
577 | i/n03790512_14967.JPEG 10
578 | i/n03790512_5081.JPEG 10
579 | i/n03790512_9328.JPEG 10
580 | i/n03790512_4460.JPEG 10
581 | i/n03790512_8100.JPEG 10
582 | i/n03790512_38917.JPEG 10
583 | i/n03790512_6136.JPEG 10
584 | i/n03790512_5722.JPEG 10
585 | i/n03790512_11613.JPEG 10
586 | i/n03790512_14709.JPEG 10
587 | i/n03790512_6062.JPEG 10
588 | i/n03790512_9794.JPEG 10
589 | i/n03790512_6410.JPEG 10
590 | i/n03790512_7851.JPEG 10
591 | i/n03790512_10451.JPEG 10
592 | i/n03790512_8955.JPEG 10
593 | i/n03790512_310.JPEG 10
594 | i/n03790512_7743.JPEG 10
595 | i/n03790512_6050.JPEG 10
596 | i/n03790512_10888.JPEG 10
597 | i/n03790512_5892.JPEG 10
598 | i/n03790512_248.JPEG 10
599 | i/n03790512_1579.JPEG 10
600 | i/n03790512_13338.JPEG 10
601 |
--------------------------------------------------------------------------------
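test_image.py below passes these raw lines directly to data_list.ImageList. For reference, a minimal Dataset sketch that consumes the same "path label" format; this is a hypothetical re-implementation for illustration, not the class defined in data_list.py:

# Minimal sketch of a list-file dataset in the spirit of data_list.ImageList.
# Hypothetical re-implementation; see data_list.py for the repo's version.
from PIL import Image
from torch.utils.data import Dataset

class ListFileDataset(Dataset):
    def __init__(self, list_path, root='.', transform=None):
        with open(list_path) as f:
            lines = [ln.strip() for ln in f if ln.strip()]
        # each line: "<relative/image/path> <integer label>"
        self.samples = [(p, int(l)) for p, l in (ln.rsplit(' ', 1) for ln in lines)]
        self.root = root
        self.transform = transform

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, idx):
        path, label = self.samples[idx]
        img = Image.open('%s/%s' % (self.root, path)).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)
        return img, label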
/test_image.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import os.path as osp
4 |
5 | import numpy as np
6 | import torch
7 | import torch.nn as nn
8 | import torch.optim as optim
9 | import loss
10 | import pre_process as prep
11 | from torch.utils.data import DataLoader
12 | import lr_schedule
13 | import data_list
14 | from data_list import ImageList
15 | from torch.autograd import Variable
16 | import random
17 | import pdb
18 | import math
19 | from option import args
20 | from sklearn.metrics import fbeta_score, precision_score, recall_score, precision_recall_fscore_support
21 | import network
22 |
23 | if args.norm_type == 'dsbn':
24 | import resnetdsbn
25 | from tensorboardX import SummaryWriter
26 | import time
27 |
28 |
29 |
30 | def image_classification_test(loader, model, best_avg=0):  # evaluate on loader["test"]; for VisDA, track the best mean per-class precision
31 | start_test = True
32 | # start_time2 = 0
33 | end_time = 0
34 | end_time2 = 0
35 | times = []
36 | with torch.no_grad():
37 |
38 | iter_test = iter(loader["test"])
39 | for i in range(len(loader['test'])):
40 | data = next(iter_test)  # iterator .next() is Python-2 era; next() works across Python/PyTorch versions
41 | inputs = data[0]
42 | batch_size = inputs.shape[0]
43 | labels = data[1]
44 | inputs = inputs.cuda()
45 | labels = labels.cuda()
46 | domain_labels = torch.from_numpy(np.array([[0]] * batch_size)).long().cuda()
47 | if args.norm_type == 'dsbn':
48 | # start_time = time.time()
49 | features, outputs = model(inputs, domain_labels)
50 | # end_time += (time.time() - start_time)
51 | # times.append(time.time() - start_time)
52 |
53 | else:
54 | # start_time = time.time()
55 | features, outputs = model(inputs)
56 | # end_time += (time.time() - start_time)
57 | # times.append(time.time() - start_time)
58 | # _, outputs = model(inputs)
59 | if start_test:
60 | all_output = outputs.float()
61 | all_label = labels.float()
62 | start_test = False
63 | else:
64 | all_output = torch.cat((all_output, outputs.float()), 0)
65 | all_label = torch.cat((all_label, labels.float()), 0)
66 |
67 | _, predict = torch.max(all_output, 1)
68 |
69 | if args.dset != 'visda':
70 | accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])
71 |
72 | if args.dset == 'visda':
73 | # best_avg = 0
74 | best_per = 0
75 | gt = all_label.cpu().numpy()
76 | pred = predict.cpu().numpy()
77 | labels = np.unique(gt).tolist()
78 | macro_precision = precision_score(gt, pred, average='macro', labels=labels)
79 | prec, recall, f1, _ = precision_recall_fscore_support(gt, pred, average=None, labels=labels)
80 | prec_list = []
81 | precs = []
82 | for lab, p, r, f in zip(labels, prec, recall, f1):
83 | precs.append(p)
84 | p = '{:d}({:.4f})'.format(int(lab), p)
85 | prec_list.append(p)
86 |
87 | per_lab_p = 'per label precision: {}'.format(prec_list)
88 | avg_lab_p = 'avg label precision: {}'.format(np.mean(precs))
89 |
90 | cur_avg = np.mean(precs)
91 | if cur_avg > best_avg:
92 | best_avg = cur_avg
93 | best_per = per_lab_p
94 | best_avg_p = 'best avg label precision: {}'.format(best_avg)
95 |
96 | print(per_lab_p)
97 | print(avg_lab_p)
98 | print(best_avg_p)
99 | accuracy = 0
100 |
101 |
102 |
103 | return accuracy, best_avg
104 |
105 |
106 | def train(config):  # despite its name, this entry point only loads a checkpoint and evaluates it
107 | ## set pre-process
108 | logger = SummaryWriter()
109 |
110 | prep_dict = {}
111 | dsets = {}
112 | dset_loaders = {}
113 | data_config = config["data"]
114 | prep_config = config["prep"]
115 |
116 | prep_dict["test"] = prep.image_test(**config["prep"]['params'])
117 |
118 | ## prepare data
119 | train_bs = data_config["source"]["batch_size"]
120 | test_bs = data_config["test"]["batch_size"]
121 |
122 |
123 | if "webcam" in data_config["source"]["list_path"] or "dslr" in data_config["source"]["list_path"]:
124 | prep_dict["source"] = prep.image_train31(**config["prep"]['params'])
125 | else:
126 | prep_dict["source"] = prep.image_train(**config["prep"]['params'])
127 |
128 | if "webcam" in data_config["target"]["list_path"] or "dslr" in data_config["target"]["list_path"]:
129 | prep_dict["target"] = prep.image_train31(**config["prep"]['params'])
130 | else:
131 | prep_dict["target"] = prep.image_train(**config["prep"]['params'])
132 |
133 |
134 | dsets["source"] = ImageList(open(data_config["source"]["list_path"]).readlines(), \
135 | transform=prep_dict["source"])
136 | dset_loaders["source"] = DataLoader(dsets["source"], batch_size=train_bs, \
137 | shuffle=True, num_workers=4, drop_last=True)
138 | dsets["target"] = ImageList(open(data_config["target"]["list_path"]).readlines(), \
139 | transform=prep_dict["target"])
140 | dset_loaders["target"] = DataLoader(dsets["target"], batch_size=train_bs, \
141 | shuffle=True, num_workers=4, drop_last=True)
142 |
143 | if prep_config["test_10crop"]:
144 | prep_dict["test"] = prep.image_test_10crop(**config["prep"]['params'])
145 | for i in range(10):  # note: this outer loop is redundant; the comprehensions below already cover all 10 crops
146 | dsets["test"] = [ImageList(open(data_config["test"]["list_path"]).readlines(), \
147 | transform=prep_dict["test"][i]) for i in range(10)]
148 | dset_loaders["test"] = [DataLoader(dset, batch_size=test_bs, \
149 | shuffle=False, num_workers=4) for dset in dsets['test']]
150 | else:
151 | dsets["test"] = ImageList(open(data_config["test"]["list_path"]).readlines(), \
152 | transform=prep_dict["test"])
153 | dset_loaders["test"] = DataLoader(dsets["test"], batch_size=test_bs, \
154 | shuffle=False, num_workers=4)
155 |
156 | class_num = config["network"]["params"]["class_num"]
157 |
158 | ## set base network
159 |
160 | net_config = config["network"]
161 | base_network = net_config["name"](**net_config["params"])
162 | base_network = base_network.cuda()
163 | ## add additional network for some CDANs
164 | if config["loss"]["random"]:
165 | random_layer = network.RandomLayer([base_network.output_num(), class_num], config["loss"]["random_dim"])
166 | ad_net = network.AdversarialNetwork(config["loss"]["random_dim"], 1024)
167 | else:
168 | random_layer = None
169 | if "DANN" in args.CDAN:
170 | print('DANN')
171 | ad_net = network.AdversarialNetwork(base_network.output_num(), 512)
172 | else:
173 | ad_net = network.AdversarialNetwork(base_network.output_num() * class_num, 1024)
174 | if config["loss"]["random"]:
175 | random_layer.cuda()
176 | ad_net = ad_net.cuda()
177 |
178 | parameter_list = base_network.get_parameters() + ad_net.get_parameters()
179 | # p = base_network.feature_layers
180 | # my_p = [v for k,v in p.named_parameters() if 'my' in k]
181 | if args.norm_type == 'rn':
182 | p = base_network.get_parameters()
183 | my_p = [v for k, v in base_network.named_parameters() if 'my' in k]
184 | else:
185 | my_p = None
186 |
187 | ## set optimizer
188 | optimizer_config = config["optimizer"]
189 | optimizer = optimizer_config["type"](parameter_list, \
190 | **(optimizer_config["optim_params"]))
191 | param_lr = []
192 | for param_group in optimizer.param_groups:
193 | param_lr.append(param_group["lr"])
194 | schedule_param = optimizer_config["lr_param"]
195 | lr_scheduler = lr_schedule.schedule_dict[optimizer_config["lr_type"]]
196 |
197 | gpus = config['gpu'].split(',')
198 | if len(gpus) > 1:
199 | ad_net = nn.DataParallel(ad_net, device_ids=[int(i) for i in gpus])
200 | base_network = nn.DataParallel(base_network, device_ids=[int(i) for i in gpus])
201 |
202 | ## train
203 | len_train_source = len(dset_loaders["source"])
204 | len_train_target = len(dset_loaders["target"])
205 | transfer_loss_value = classifier_loss_value = total_loss_value = 0.0
206 | best_acc = 0.0
207 | best_avg_visda = 0.0
208 |
209 |
210 |
211 | root = r'./visda/RN/train2val/'
212 | path = r'best_model_rn_visda_train2val_resnet50_rn.pth.tar'
213 | #path = r'best_model_rn_visda_train2val_200911_rn.pth.tar'
214 | print(path)
215 | base_network = torch.load(osp.join(root, path))
216 | print(base_network)
217 | base_network.train(False)
218 | temp_acc, best_avg = image_classification_test(dset_loaders, \
219 | base_network, best_avg=best_avg_visda)
220 |
221 | print('acc:', temp_acc)
222 | # log_str = "iter: {:05d}, precision: {:.5f}, best_acc:{:.5f}".format(i, temp_acc, best_acc)
223 | # print(log_str)
224 |
225 | return temp_acc
226 |
227 |
228 | if __name__ == "__main__":
229 | os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_id
230 | # train config
231 | config = {}
232 | config['CDAN'] = args.CDAN
233 | config['method'] = args.method
234 | config["gpu"] = args.gpu_id
235 | config["num_iterations"] = args.num_iterations
236 | config["print_num"] = args.print_num
237 | config["test_interval"] = args.test_interval
238 | config["snapshot_interval"] = args.snapshot_interval
239 | config["output_for_test"] = True
240 | config["show"] = args.show
241 | config["output_path"] = args.dset + '/' + args.output_dir
242 | config["run_num"] = args.run_num
243 | config["record_file"] = "record/%s/" % args.method + '%s_net_%s_%s_to_%s_num_%s.txt' % (
244 | args.method, args.net, args.source, args.target, args.run_num)
245 | config["record_file_loss"] = "record/%s/" % args.method + '%s_net_%s_%s_to_%s_num_%s_loss.txt' % (
246 | args.method, args.net, args.source, args.target, args.run_num)
247 |
248 |
249 | if not osp.exists(config["output_path"]):
250 | os.makedirs(config["output_path"], exist_ok=True)  # portable replacement for os.system('mkdir -p ...')
251 | config["out_file"] = open(osp.join(config["output_path"], "log.txt"), "w")
252 | if not osp.exists(config["output_path"]):
253 | os.mkdir(config["output_path"])
254 |
255 | config["prep"] = {"test_10crop": False, 'params': {"resize_size": 256, "crop_size": 224, 'alexnet': False}}
256 | config["loss"] = {"trade_off": args.trade_off, "lambda_method": args.lambda_method}
257 | if "AlexNet" in args.net:
258 | config["prep"]['params']['alexnet'] = True
259 | config["prep"]['params']['crop_size'] = 227
260 | config["network"] = {"name": network.AlexNetFc, \
261 | "params": {"use_bottleneck": True, "bottleneck_dim": 256, "new_cls": True}}
262 | elif "ResNet" in args.net:
263 | # exit(0)
264 | if args.norm_type == 'dsbn':
265 | config["network"] = {"name": resnetdsbn.resnet50dsbn, \
266 | "params": {"use_bottleneck": True, "bottleneck_dim": args.bottle_dim, "new_cls": True}}
267 | else:
268 | config["network"] = {"name": network.ResNetFc, \
269 | "params": {"resnet_name": args.net, "use_bottleneck": True,
270 | "bottleneck_dim": args.bottle_dim, "new_cls": True}}
271 | elif "VGG" in args.net:
272 | config["network"] = {"name": network.VGGFc, \
273 | "params": {"vgg_name": args.net, "use_bottleneck": True, "bottleneck_dim": 256,
274 | "new_cls": True}}
275 | elif "DANN" in args.net or "DANN" in args.CDAN:
276 | config["network"] = {"name": network.ResNetFc, \
277 | "params": {"resnet_name": args.net, "use_bottleneck": True, "bottleneck_dim": 256,
278 | "new_cls": True}}
279 | config["loss"]["random"] = args.random
280 | config["loss"]["random_dim"] = 1024
281 |
282 | config["optimizer"] = {"type": optim.SGD, "optim_params": {'lr': args.lr, "momentum": 0.9, \
283 | "weight_decay": 0.0005, "nesterov": True},
284 | "lr_type": "inv", \
285 | "lr_param": {"lr": args.lr, "gamma": 0.001, "power": 0.75}}
286 |
287 | if args.dset == 'office-home':
288 | art_txt = "./data/office-home/Art.txt"
289 | clipart_txt = "./data/office-home/Clipart.txt"
290 | realworld_txt = "./data/office-home/Real_World.txt"
291 | product_txt = "./data/office-home/Product.txt"
292 | if args.source == 'R': s_dset_path = realworld_txt
293 | elif args.source == 'C': s_dset_path = clipart_txt
294 | elif args.source == 'A': s_dset_path = art_txt
295 | elif args.source == 'P': s_dset_path = product_txt
296 | if args.target == 'R': t_dset_path = realworld_txt
297 | elif args.target == 'C': t_dset_path = clipart_txt
298 | elif args.target == 'A': t_dset_path = art_txt
299 | elif args.target == 'P': t_dset_path = product_txt
300 |
301 | elif args.dset == 'office31':
302 | amazon_txt = "./data/office/amazon_list.txt"
303 | dslr_txt = "./data/office/dslr_list.txt"
304 | webcam_txt = "./data/office/webcam_list.txt"
305 | if args.source == 'A': s_dset_path = amazon_txt
306 | elif args.source == 'D': s_dset_path = dslr_txt
307 | elif args.source == 'W': s_dset_path = webcam_txt
308 | if args.target == 'A': t_dset_path = amazon_txt
309 | elif args.target == 'D': t_dset_path = dslr_txt
310 | elif args.target == 'W': t_dset_path = webcam_txt
311 | elif args.dset == 'image-clef':
312 | p_txt = r'./data/ImageCLEF/pList.txt'  # directory is data/ImageCLEF in this repo, not image-Clef
313 | i_txt = r'./data/ImageCLEF/iList.txt'
314 | c_txt = r'./data/ImageCLEF/cList.txt'
315 | if args.source == 'I': s_dset_path = i_txt
316 | elif args.source == 'C': s_dset_path = c_txt
317 | elif args.source == 'P': s_dset_path = p_txt
318 | if args.target == 'I': t_dset_path = i_txt
319 | elif args.target == 'C': t_dset_path = c_txt
320 | elif args.target == 'P': t_dset_path = p_txt
321 | print(s_dset_path, t_dset_path)
322 | elif args.dset == 'visda':
323 | s_dset_path = r'./data/visda/train_list.txt'
324 | t_dset_path = r'./data/visda/validation_list.txt'
325 | else:
326 | s_dset_path = args.s_dset_path
327 | t_dset_path = args.t_dset_path
328 | config["dataset"] = args.dset
329 | config["data"] = {"source": {"list_path": s_dset_path, "batch_size": args.batch_size}, \
330 | "target": {"list_path": t_dset_path, "batch_size": args.batch_size}, \
331 | "test": {"list_path": t_dset_path, "batch_size": args.batch_size}}
332 |
333 | if config["dataset"] == "office31":
334 | if ("webcam" in args.s_dset_path and "dslr" in args.t_dset_path) or \
335 | ("webcam" in args.s_dset_path and "amazon" in args.t_dset_path) or \
336 | ("dslr" in args.s_dset_path and "amazon" in args.t_dset_path):
337 | config["optimizer"]["lr_param"]["lr"] = 0.001 # optimal parameters
338 | elif ("amazon" in args.s_dset_path and "dslr" in args.t_dset_path) or \
339 | ("amazon" in args.s_dset_path and "webcam" in args.t_dset_path) or \
340 | ("dslr" in args.s_dset_path and "webcam" in args.t_dset_path):
341 | config["optimizer"]["lr_param"]["lr"] = 0.0003 # optimal parameters
342 | config["network"]["params"]["class_num"] = 31
343 | elif config["dataset"] == "image-clef":
344 | config["optimizer"]["lr_param"]["lr"] = 0.001 # optimal parameters
345 | config["network"]["params"]["class_num"] = 12
346 | elif config["dataset"] == "visda":
347 | config["optimizer"]["lr_param"]["lr"] = 0.0003 # optimal parameters
348 | config["network"]["params"]["class_num"] = 12
349 | config['loss']["trade_off"] = 1.0
350 | elif config["dataset"] == "office-home":
351 | config["optimizer"]["lr_param"]["lr"] = 0.001 # optimal parameters
352 | config["network"]["params"]["class_num"] = 65
353 | else:
354 | raise ValueError('Dataset cannot be recognized. Please define your own dataset here.')
355 |
356 | if args.seed is not None:
357 | seed = args.seed
358 |
359 | else:
360 | seed = random.randint(1, 10000)
361 | print(seed)
362 | torch.manual_seed(seed)
363 | torch.cuda.manual_seed(seed)
364 | torch.cuda.manual_seed_all(seed)
365 | np.random.seed(seed)
366 | random.seed(seed)
367 | os.environ['PYTHONHASHSEED'] = str(seed)
368 |
369 | # uncomment the following two lines for full reproducibility
370 | # torch.backends.cudnn.deterministic = True
371 | # torch.backends.cudnn.benchmark = False
372 | config["out_file"].write(str(config))
373 | config["out_file"].flush()
374 | train(config)
375 |
--------------------------------------------------------------------------------
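For VisDA, image_classification_test above scores the model by the mean of per-class precisions and carries the best mean across evaluations. A self-contained sketch of that metric; the gt/pred arrays below are illustrative placeholders, not model outputs:

# Sketch of the per-class precision averaging used in the VisDA branch above.
import numpy as np
from sklearn.metrics import precision_recall_fscore_support

gt = np.array([0, 0, 1, 1, 2, 2])    # ground-truth labels (placeholder)
pred = np.array([0, 1, 1, 1, 2, 0])  # predicted labels (placeholder)
labels = np.unique(gt).tolist()

prec, recall, f1, _ = precision_recall_fscore_support(gt, pred, average=None, labels=labels)
print('per label precision:', {int(l): round(float(p), 4) for l, p in zip(labels, prec)})
print('avg label precision:', float(np.mean(prec)))  # the quantity tracked as best_avg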
/network.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 | import torch.nn as nn
4 | import torchvision
5 | from torchvision import models
6 | from torch.autograd import Variable
7 | import math
8 | import pdb
9 | from option import args
10 | import resnet_rn
11 | from reciprocal_norm import RN1d, RN2d
12 |
13 | def calc_coeff(iter_num, high=1.0, low=0.0, alpha=10.0, max_iter=10000.0):  # GRL coefficient: ramps smoothly from low to high as iter_num approaches max_iter
14 | return float(2.0 * (high - low) / (1.0 + np.exp(-alpha*iter_num / max_iter)) - (high - low) + low)  # np.float was removed in NumPy 1.24; the builtin float is equivalent
15 |
16 | def init_weights(m):
17 | classname = m.__class__.__name__
18 | if classname.find('Conv2d') != -1 or classname.find('ConvTranspose2d') != -1:
19 | nn.init.kaiming_uniform_(m.weight)
20 | nn.init.zeros_(m.bias)
21 | elif classname.find('BatchNorm') != -1:
22 | nn.init.normal_(m.weight, 1.0, 0.02)
23 | nn.init.zeros_(m.bias)
24 | elif classname.find('Linear') != -1:
25 | nn.init.xavier_normal_(m.weight)
26 | nn.init.zeros_(m.bias)
27 |
28 | def zero_weights(m):
29 | classname = m.__class__.__name__
30 | if classname.find('Conv2d') != -1 or classname.find('ConvTranspose2d') != -1:
31 | nn.init.kaiming_uniform_(m.weight)
32 | nn.init.zeros_(m.bias)
33 | elif classname.find('BatchNorm') != -1:
34 | nn.init.normal_(m.weight, 1.0, 0.02)
35 | nn.init.zeros_(m.bias)
36 | elif classname.find('Linear') != -1:
37 | nn.init.zeros_(m.weight)
38 | nn.init.zeros_(m.bias)
39 |
40 | class RandomLayer(nn.Module):
41 | def __init__(self, input_dim_list=[], output_dim=1024):
42 | super(RandomLayer, self).__init__()
43 | self.input_num = len(input_dim_list)
44 | self.output_dim = output_dim
45 | self.random_matrix = [torch.randn(input_dim_list[i], output_dim) for i in range(self.input_num)]
46 |
47 | def forward(self, input_list):
48 | return_list = [torch.mm(input_list[i], self.random_matrix[i]) for i in range(self.input_num)]
49 | return_tensor = return_list[0] / math.pow(float(self.output_dim), 1.0/len(return_list))
50 | for single in return_list[1:]:
51 | return_tensor = torch.mul(return_tensor, single)
52 | return return_tensor
53 |
54 | def cuda(self):
55 | super(RandomLayer, self).cuda()
56 | self.random_matrix = [val.cuda() for val in self.random_matrix]
57 |
58 | class LRN(nn.Module):
59 | def __init__(self, local_size=1, alpha=1.0, beta=0.75, ACROSS_CHANNELS=True):
60 | super(LRN, self).__init__()
61 | self.ACROSS_CHANNELS = ACROSS_CHANNELS
62 | if ACROSS_CHANNELS:
63 | self.average=nn.AvgPool3d(kernel_size=(local_size, 1, 1),
64 | stride=1,
65 | padding=(int((local_size-1.0)/2), 0, 0))
66 | else:
67 | self.average=nn.AvgPool2d(kernel_size=local_size,
68 | stride=1,
69 | padding=int((local_size-1.0)/2))
70 | self.alpha = alpha
71 | self.beta = beta
72 |
73 |
74 | def forward(self, x):
75 | if self.ACROSS_CHANNELS:
76 | div = x.pow(2).unsqueeze(1)
77 | div = self.average(div).squeeze(1)
78 | div = div.mul(self.alpha).add(1.0).pow(self.beta)
79 | else:
80 | div = x.pow(2)
81 | div = self.average(div)
82 | div = div.mul(self.alpha).add(1.0).pow(self.beta)
83 | x = x.div(div)
84 | return x
85 |
86 | class AlexNet(nn.Module):
87 |
88 | def __init__(self, num_classes=1000):
89 | super(AlexNet, self).__init__()
90 | self.features = nn.Sequential(
91 | nn.Conv2d(3, 96, kernel_size=11, stride=4, padding=0),
92 | nn.ReLU(inplace=True),
93 | LRN(local_size=5, alpha=0.0001, beta=0.75),
94 | nn.MaxPool2d(kernel_size=3, stride=2),
95 | nn.Conv2d(96, 256, kernel_size=5, padding=2, groups=2),
96 | nn.ReLU(inplace=True),
97 | LRN(local_size=5, alpha=0.0001, beta=0.75),
98 | nn.MaxPool2d(kernel_size=3, stride=2),
99 | nn.Conv2d(256, 384, kernel_size=3, padding=1),
100 | nn.ReLU(inplace=True),
101 | nn.Conv2d(384, 384, kernel_size=3, padding=1, groups=2),
102 | nn.ReLU(inplace=True),
103 | nn.Conv2d(384, 256, kernel_size=3, padding=1, groups=2),
104 | nn.ReLU(inplace=True),
105 | nn.MaxPool2d(kernel_size=3, stride=2),
106 | )
107 | self.classifier = nn.Sequential(
108 | nn.Linear(256 * 6 * 6, 4096),
109 | nn.ReLU(inplace=True),
110 | nn.Dropout(),
111 | nn.Linear(4096, 4096),
112 | nn.ReLU(inplace=True),
113 | nn.Dropout(),
114 | nn.Linear(4096, num_classes),
115 | )
116 |
117 | def forward(self, x):
118 | x = self.features(x)
119 | # print(x.size())  # debug print disabled; it fired on every forward pass
120 | x = x.view(x.size(0), 256 * 6 * 6)
121 | x = self.classifier(x)
122 | return x
123 |
124 |
125 | def alexnet(pretrained=False, **kwargs):
126 | r"""AlexNet model architecture from the
127 | `"One weird trick..." `_ paper.
128 | Args:
129 | pretrained (bool): If True, returns a model pre-trained on ImageNet
130 | """
131 | model = AlexNet(**kwargs)
132 | if pretrained:
133 | model_path = './alexnet.pth.tar'
134 | pretrained_model = torch.load(model_path)
135 | model.load_state_dict(pretrained_model['state_dict'])
136 | return model
137 |
138 | # convnet without the last layer
139 | class AlexNetFc(nn.Module):
140 | def __init__(self, use_bottleneck=True, bottleneck_dim=256, new_cls=False, class_num=1000):
141 | super(AlexNetFc, self).__init__()
142 | model_alexnet = alexnet(pretrained=True)
143 | self.features = model_alexnet.features
144 | self.classifier = nn.Sequential()
145 | for i in range(6):
146 | self.classifier.add_module("classifier"+str(i), model_alexnet.classifier[i])
147 | self.feature_layers = nn.Sequential(self.features, self.classifier)
148 |
149 | self.use_bottleneck = use_bottleneck
150 | self.new_cls = new_cls
151 | if new_cls:
152 | if self.use_bottleneck:
153 | self.bottleneck = nn.Linear(4096, bottleneck_dim)
154 | self.fc = nn.Linear(bottleneck_dim, class_num)
155 | self.bottleneck.apply(init_weights)
156 | self.fc.apply(init_weights)
157 | self.__in_features = bottleneck_dim
158 | else:
159 | self.fc = nn.Linear(4096, class_num)
160 | self.fc.apply(init_weights)
161 | self.__in_features = 4096
162 | else:
163 | self.fc = model_alexnet.classifier[6]
164 | self.__in_features = 4096
165 |
166 | def forward(self, x):
167 | x = self.features(x)
168 | x = x.view(x.size(0), -1)
169 | x = self.classifier(x)
170 | if self.use_bottleneck and self.new_cls:
171 | x = self.bottleneck(x)
172 | y = self.fc(x)
173 | return x, y
174 |
175 | def output_num(self):
176 | return self.__in_features
177 |
178 | def get_parameters(self):
179 | if self.new_cls:
180 | if self.use_bottleneck:
181 | parameter_list = [{"params":self.features.parameters(), "lr_mult":1, 'decay_mult':2}, \
182 | {"params":self.classifier.parameters(), "lr_mult":1, 'decay_mult':2}, \
183 | {"params":self.bottleneck.parameters(), "lr_mult":10, 'decay_mult':2}, \
184 | {"params":self.fc.parameters(), "lr_mult":10, 'decay_mult':2}]
185 | else:
186 | parameter_list = [{"params":self.feature_layers.parameters(), "lr_mult":1, 'decay_mult':2}, \
187 | {"params":self.classifier.parameters(), "lr_mult":1, 'decay_mult':2}, \
188 | {"params":self.fc.parameters(), "lr_mult":10, 'decay_mult':2}]
189 | else:
190 | parameter_list = [{"params":self.parameters(), "lr_mult":1, 'decay_mult':2}]
191 | return parameter_list
192 |
193 |
194 | resnet_rn_dict = {"ResNet18": resnet_rn.resnet18, "ResNet34": resnet_rn.resnet34, "ResNet50": resnet_rn.resnet50,
195 | "ResNet101": resnet_rn.resnet101, "ResNet152": resnet_rn.resnet152}
196 |
197 | def grl_hook(coeff):  # gradient reversal layer hook: negates and scales gradients in the backward pass
198 | def fun1(grad):
199 | return -coeff*grad.clone()
200 | return fun1
201 |
202 | class ResNetFc(nn.Module):
203 | def __init__(self, resnet_name, use_bottleneck=True, bottleneck_dim=256, new_cls=False, class_num=1000):
204 | super(ResNetFc, self).__init__()
205 |
206 | model_resnet = resnet_rn_dict[resnet_name](pretrained=True)
207 | self.conv1 = model_resnet.conv1
208 | self.bn1 = model_resnet.bn1
209 | self.relu = model_resnet.relu
210 | self.maxpool = model_resnet.maxpool
211 | self.layer1 = model_resnet.layer1
212 | self.layer2 = model_resnet.layer2
213 | self.layer3 = model_resnet.layer3
214 | self.layer4 = model_resnet.layer4
215 | self.avgpool = model_resnet.avgpool
216 | self.feature_layers = nn.Sequential(self.conv1, self.bn1, self.relu, self.maxpool, \
217 | self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool)
218 |
219 |
220 | self.use_bottleneck = use_bottleneck
221 | self.new_cls = new_cls
222 | if new_cls:
223 | if self.use_bottleneck:
224 | if args.dset == 'visda' and args.net == 'ResNet50':
225 | self.bottleneck = nn.Sequential(nn.Linear(model_resnet.fc.in_features, bottleneck_dim))
226 | else:
227 | self.bottleneck = nn.Sequential(nn.Linear(model_resnet.fc.in_features, bottleneck_dim),
228 | RN1d(bottleneck_dim),
229 | nn.ReLU(inplace=True))
230 | self.fc = nn.Linear(bottleneck_dim, class_num)
231 | self.bottleneck.apply(init_weights)
232 | self.fc.apply(init_weights)
233 | self.__in_features = bottleneck_dim
234 | else:
235 | self.fc = nn.Linear(model_resnet.fc.in_features, class_num)
236 | self.fc.apply(init_weights)
237 | self.__in_features = model_resnet.fc.in_features
238 | else:
239 | self.fc = model_resnet.fc
240 | self.__in_features = model_resnet.fc.in_features
241 |
242 | def forward(self, x):
243 | x = self.feature_layers(x)
244 | x = x.view(x.size(0), -1)
245 | if self.use_bottleneck and self.new_cls:
246 | x = self.bottleneck(x)
247 | y = self.fc(x)
248 | return x, y
249 | def output_num(self):
250 | return self.__in_features
251 |
252 | def get_parameters(self):
253 | if self.new_cls:
254 | if self.use_bottleneck:
255 | if args.norm_type == 'rn':
256 | my_weight = [v for k, v in self.feature_layers.named_parameters() if 'my' in k]
257 | features = [v for k, v in self.feature_layers.named_parameters() if 'my' not in k]
258 | parameter_list = [{"params": features, "lr_mult": 1, 'decay_mult': 2}, \
259 | {"params": my_weight, "lr_mult": args.lr_mult, 'decay_mult': 2}, \
260 | {"params": self.bottleneck.parameters(), "lr_mult": 10, 'decay_mult': 2}, \
261 | {"params": self.fc.parameters(), "lr_mult": 10, 'decay_mult': 2}]
262 |
263 | else:
264 | parameter_list = [{"params": self.feature_layers.parameters(), "lr_mult": 1, 'decay_mult': 2}, \
265 | {"params": self.bottleneck.parameters(), "lr_mult": 10, 'decay_mult': 2}, \
266 | {"params": self.fc.parameters(), "lr_mult": 10, 'decay_mult': 2}]
267 | else:
268 | if args.norm_type == 'rn':
269 | my_weight = [v for k, v in self.feature_layers.named_parameters() if 'my' in k]
270 | features = [v for k, v in self.feature_layers.named_parameters() if 'my' not in k]
271 | parameter_list = [{"params": features, "lr_mult": 1, 'decay_mult': 2}, \
272 | {"params": my_weight, "lr_mult": args.lr_mult, 'decay_mult': 2}, \
273 | {"params": self.fc.parameters(), "lr_mult": 10, 'decay_mult': 2}]
274 | else:
275 | parameter_list = [{"params":self.feature_layers.parameters(), "lr_mult":1, 'decay_mult':2}, \
276 | {"params":self.fc.parameters(), "lr_mult":10, 'decay_mult':2}]
277 | else:
278 | parameter_list = [{"params":self.parameters(), "lr_mult":1, 'decay_mult':2}]
279 | return parameter_list
280 |
281 | vgg_dict = {"VGG11":models.vgg11, "VGG13":models.vgg13, "VGG16":models.vgg16, "VGG19":models.vgg19, "VGG11BN":models.vgg11_bn, "VGG13BN":models.vgg13_bn, "VGG16BN":models.vgg16_bn, "VGG19BN":models.vgg19_bn}
282 | class VGGFc(nn.Module):
283 | def __init__(self, vgg_name, use_bottleneck=True, bottleneck_dim=256, new_cls=False, class_num=1000):
284 | super(VGGFc, self).__init__()
285 | model_vgg = vgg_dict[vgg_name](pretrained=True)
286 | self.features = model_vgg.features
287 | self.classifier = nn.Sequential()
288 | for i in range(6):
289 | self.classifier.add_module("classifier"+str(i), model_vgg.classifier[i])
290 | self.feature_layers = nn.Sequential(self.features, self.classifier)
291 |
292 | self.use_bottleneck = use_bottleneck
293 | self.new_cls = new_cls
294 | if new_cls:
295 | if self.use_bottleneck:
296 | self.bottleneck = nn.Linear(4096, bottleneck_dim)
297 | self.fc = nn.Linear(bottleneck_dim, class_num)
298 | self.bridge = nn.Linear(bottleneck_dim, class_num)
299 | self.bottleneck.apply(init_weights)
300 | self.fc.apply(init_weights)
301 | self.bridge.apply(init_weights)
302 | self.__in_features = bottleneck_dim
303 | else:
304 | self.fc = nn.Linear(4096, class_num)
305 | self.fc.apply(init_weights)
306 | self.__in_features = 4096
307 | else:
308 | self.fc = model_vgg.classifier[6]
309 | self.__in_features = 4096
310 |
311 | def forward(self, x):
312 | x = self.features(x)
313 | x = x.view(x.size(0), -1)
314 | x = self.classifier(x)
315 | if self.use_bottleneck and self.new_cls:
316 | x = self.bottleneck(x)
317 | y = self.fc(x)
318 | z = self.bridge(x)
319 | return x, y, z
320 |
321 | def output_num(self):
322 | return self.__in_features
323 |
324 | def get_parameters(self):
325 | if self.new_cls:
326 | if self.use_bottleneck:
327 | parameter_list = [{"params":self.features.parameters(), "lr_mult":1, 'decay_mult':2}, \
328 | {"params":self.classifier.parameters(), "lr_mult":1, 'decay_mult':2}, \
329 | {"params":self.bottleneck.parameters(), "lr_mult":10, 'decay_mult':2}, \
330 | {"params":self.fc.parameters(), "lr_mult":10, 'decay_mult':2}, \
331 | {"params":self.bridge.parameters(), "lr_mult":10, 'decay_mult':2}]
332 |             else:
333 |                 parameter_list = [{"params":self.features.parameters(), "lr_mult":1, 'decay_mult':2},   # not feature_layers, which wraps classifier too and would duplicate params across groups
334 |                             {"params":self.classifier.parameters(), "lr_mult":1, 'decay_mult':2}, \
335 |                             {"params":self.fc.parameters(), "lr_mult":10, 'decay_mult':2}]
336 | else:
337 | parameter_list = [{"params":self.parameters(), "lr_mult":1, 'decay_mult':2}]
338 | return parameter_list
339 |
340 | # For SVHN dataset
341 | class DTN(nn.Module):
342 | def __init__(self):
343 | super(DTN, self).__init__()
344 |         self.conv_params = nn.Sequential(
345 | nn.Conv2d(3, 64, kernel_size=5, stride=2, padding=2),
346 | nn.BatchNorm2d(64),
347 | nn.Dropout2d(0.1),
348 | nn.ReLU(),
349 | nn.Conv2d(64, 128, kernel_size=5, stride=2, padding=2),
350 | nn.BatchNorm2d(128),
351 | nn.Dropout2d(0.3),
352 | nn.ReLU(),
353 | nn.Conv2d(128, 256, kernel_size=5, stride=2, padding=2),
354 | nn.BatchNorm2d(256),
355 | nn.Dropout2d(0.5),
356 | nn.ReLU()
357 | )
358 |
359 |         self.fc_params = nn.Sequential(
360 | nn.Linear(256*4*4, 512),
361 | nn.BatchNorm1d(512),
362 | nn.ReLU(),
363 | nn.Dropout()
364 | )
365 |
366 | self.classifier = nn.Linear(512, 10)
367 | self.__in_features = 512
368 |
369 | def forward(self, x):
370 | x = self.conv_params(x)
371 | x = x.view(x.size(0), -1)
372 | x = self.fc_params(x)
373 | y = self.classifier(x)
374 | return x, y
375 |
376 | def output_num(self):
377 | return self.__in_features
378 |
379 | class LeNet(nn.Module):
380 | def __init__(self):
381 | super(LeNet, self).__init__()
382 | self.conv_params = nn.Sequential(
383 | nn.Conv2d(1, 20, kernel_size=5),
384 | nn.MaxPool2d(2),
385 | nn.ReLU(),
386 | nn.Conv2d(20, 50, kernel_size=5),
387 | nn.Dropout2d(p=0.5),
388 | nn.MaxPool2d(2),
389 | nn.ReLU(),
390 | )
391 |
392 | self.fc_params = nn.Sequential(nn.Linear(50*4*4, 500), nn.ReLU(), nn.Dropout(p=0.5))
393 | self.classifier = nn.Linear(500, 10)
394 | self.__in_features = 500
395 |
396 |
397 | def forward(self, x):
398 | x = self.conv_params(x)
399 | x = x.view(x.size(0), -1)
400 | x = self.fc_params(x)
401 | y = self.classifier(x)
402 | return x, y
403 |
404 | def output_num(self):
405 | return self.__in_features
406 |
407 | class AdversarialNetwork(nn.Module):
408 | def __init__(self, in_feature, hidden_size):
409 | super(AdversarialNetwork, self).__init__()
410 | self.ad_layer1 = nn.Linear(in_feature, hidden_size)
411 | self.ad_layer2 = nn.Linear(hidden_size, hidden_size)
412 | self.ad_layer3 = nn.Linear(hidden_size, 1)
413 | self.relu1 = nn.ReLU()
414 | self.relu2 = nn.ReLU()
415 | self.dropout1 = nn.Dropout(0.5)
416 | self.dropout2 = nn.Dropout(0.5)
417 | self.sigmoid = nn.Sigmoid()
418 | self.apply(init_weights)
419 | self.iter_num = 0
420 | self.alpha = 10
421 | self.low = 0.0
422 | self.high = 1.0
423 | self.max_iter = 10000.0
424 |
425 | def forward(self, x):
426 | if self.training:
427 | self.iter_num += 1
428 | coeff = calc_coeff(self.iter_num, self.high, self.low, self.alpha, self.max_iter)
429 | x = x * 1.0
430 |         if self.training and x.requires_grad:  # was 'self.train', a bound method that is always truthy
431 | x.register_hook(grl_hook(coeff))
432 | x = self.ad_layer1(x)
433 | x = self.relu1(x)
434 | x = self.dropout1(x)
435 | y = self.ad_layer2(x)
436 | y = self.relu2(y)
437 | y = self.dropout2(y)
438 | y = self.ad_layer3(y)
439 | return y
440 |
441 | def output_num(self):
442 | return 1
443 | def get_parameters(self):
444 | return [{"params":self.parameters(), "lr_mult":10, 'decay_mult':2}]
445 |
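# === Editor's note (added; not part of the original file) ==================
# AdversarialNetwork ramps the gradient-reversal coefficient from `low` to
# `high` over `max_iter` steps via calc_coeff. Assuming the standard CDAN
# schedule, the ramp is
#
#     coeff(i) = 2*(high - low) / (1 + exp(-alpha * i / max_iter)) - (high - low) + low
#
# i.e. a scaled sigmoid that starts at `low` (weak reversed gradients while
# the discriminator is still untrained) and saturates at `high`.
# ===========================================================================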
446 |
447 | class Domain_Classifier(nn.Module):
448 | def __init__(self, in_feature):
449 | super(Domain_Classifier, self).__init__()
450 |         self.domain_classifier = nn.Linear(in_feature, 2)
451 |
452 | def forward(self, x):
453 |         out = self.domain_classifier(x)
454 | return out
455 |
456 | def get_parameters(self):
457 | return [{"params": self.parameters(), "lr_mult": 10, 'decay_mult': 2}]
458 |
--------------------------------------------------------------------------------
/train_image.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import os.path as osp
4 |
5 | import numpy as np
6 | import torch
7 | import torch.nn as nn
8 | import torch.optim as optim
9 | import loss
10 | import pre_process as prep
11 | from torch.utils.data import DataLoader
12 | import lr_schedule
13 | import data_list
14 | from data_list import ImageList
15 | from torch.autograd import Variable
16 | import random
17 | import pdb
18 | import math
19 | from option import args
20 | from sklearn.metrics import fbeta_score, precision_score, recall_score, precision_recall_fscore_support
21 | import network
22 | if args.norm_type == 'dsbn':
23 | import resnetdsbn
24 | from tensorboardX import SummaryWriter
25 |
26 | def image_classification_test(loader, model, best_avg=0, test_10crop=True):
27 | start_test = True
28 | with torch.no_grad():
29 | iter_test = iter(loader["test"])
30 | for i in range(len(loader['test'])):
31 |             data = next(iter_test)  # built-in next(); the '.next()' method is Python 2 / old-PyTorch style
32 | inputs = data[0]
33 | batch_size = inputs.shape[0]
34 | labels = data[1]
35 | inputs = inputs.cuda()
36 | labels = labels.cuda()
37 | domain_labels = torch.from_numpy(np.array([[0]] * batch_size)).long().cuda()
38 | if args.norm_type == 'dsbn':
39 | #start_time = time.time()
40 | features, outputs = model(inputs, domain_labels)
41 | #end_time += (time.time() - start_time)
42 | else:
43 | #start_time = time.time()
44 | features, outputs = model(inputs)
45 | #end_time += (time.time() - start_time)
46 | # _, outputs = model(inputs)
47 | if start_test:
48 | all_output = outputs.float()
49 | all_label = labels.float()
50 | start_test = False
51 | else:
52 | all_output = torch.cat((all_output, outputs.float()), 0)
53 | all_label = torch.cat((all_label, labels.float()), 0)
54 |
55 | _, predict = torch.max(all_output, 1)
56 | accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])
57 | if args.dset == 'visda':
58 | # best_avg = 0
59 | best_per = 0
60 | gt = all_label.cpu().numpy()
61 | pred = predict.cpu().numpy()
62 | labels = np.unique(gt).tolist()
63 | macro_precision = precision_score(gt, pred, average='macro', labels=labels)
64 | prec, recall, f1, _ = precision_recall_fscore_support(gt, pred, average=None, labels=labels)
65 | prec_list = []
66 | precs = []
67 | for lab, p, r, f in zip(labels, prec, recall, f1):
68 | precs.append(p)
69 | p = '{:d}({:.4f})'.format(int(lab), p)
70 | prec_list.append(p)
71 |
72 | per_lab_p = 'per label precision: {}'.format(prec_list)
73 | avg_lab_p = 'avg label precision: {}'.format(np.mean(precs))
74 |
75 | cur_avg = np.mean(precs)
76 | if cur_avg > best_avg:
77 | best_avg = cur_avg
78 | best_per = per_lab_p
79 |         best_avg_p = 'best avg label precision: {}'.format(best_avg)  # dedented: it is printed unconditionally below
80 |
81 | print(per_lab_p)
82 | print(avg_lab_p)
83 | print(best_avg_p)
84 | # print(best_avg_p)
85 |
86 | return accuracy, best_avg
87 |
88 |
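# === Editor's note (added; not part of the original file) ==================
# Model selection in train() below is dataset-dependent: VisDA checkpoints on
# best_avg, the mean per-class precision computed above, while the other
# benchmarks checkpoint on plain overall accuracy (best_acc).
# ===========================================================================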
89 | def train(config):
90 | ## set pre-process
91 | logger = SummaryWriter()
92 | prep_dict = {}
93 | dsets = {}
94 | dset_loaders = {}
95 | data_config = config["data"]
96 | prep_config = config["prep"]
97 |
98 | prep_dict["source"] = prep.image_train(**config["prep"]['params'])
99 | prep_dict["target"] = prep.image_train(**config["prep"]['params'])
100 | prep_dict["test"] = prep.image_test(**config["prep"]['params'])
101 |
102 |     ## prepare data loaders
103 | train_bs = data_config["source"]["batch_size"]
104 | test_bs = data_config["test"]["batch_size"]
105 |
106 | dsets["source"] = ImageList(open(data_config["source"]["list_path"]).readlines(), \
107 | transform=prep_dict["source"])
108 | dset_loaders["source"] = DataLoader(dsets["source"], batch_size=train_bs, \
109 | shuffle=True, num_workers=32, drop_last=True)
110 | dsets["target"] = ImageList(open(data_config["target"]["list_path"]).readlines(), \
111 | transform=prep_dict["target"])
112 | dset_loaders["target"] = DataLoader(dsets["target"], batch_size=train_bs, \
113 | shuffle=True, num_workers=32, drop_last=True)
114 |
115 | dsets["test"] = ImageList(open(data_config["test"]["list_path"]).readlines(), \
116 | transform=prep_dict["test"])
117 | dset_loaders["test"] = DataLoader(dsets["test"], batch_size=test_bs, \
118 | shuffle=False, num_workers=32)
119 |
120 | class_num = config["network"]["params"]["class_num"]
121 |
122 | ## set base network
123 | net_config = config["network"]
124 | base_network = net_config["name"](**net_config["params"])
125 | base_network = base_network.cuda()
126 | ## add additional network for some CDANs
127 | if config["loss"]["random"]:
128 | random_layer = network.RandomLayer([base_network.output_num(), class_num], config["loss"]["random_dim"])
129 | ad_net = network.AdversarialNetwork(config["loss"]["random_dim"], 1024)
130 | else:
131 | random_layer = None
132 | ad_net = network.AdversarialNetwork(base_network.output_num() * class_num, 1024)
133 | if config["loss"]["random"]:
134 | random_layer.cuda()
135 | ad_net = ad_net.cuda()
136 | parameter_list = base_network.get_parameters() + ad_net.get_parameters()
137 |
138 |     if args.norm_type == 'rn':
139 |         # collect the RN mixture ('my') weights so they can be clamped after each update
140 |         my_p = [v for k, v in base_network.named_parameters() if 'my' in k]
141 |     else:
142 |         my_p = None
143 |
144 | ## set optimizer
145 | optimizer_config = config["optimizer"]
146 | optimizer = optimizer_config["type"](parameter_list, \
147 | **(optimizer_config["optim_params"]))
148 | param_lr = []
149 | for param_group in optimizer.param_groups:
150 | param_lr.append(param_group["lr"])
151 | schedule_param = optimizer_config["lr_param"]
152 | lr_scheduler = lr_schedule.schedule_dict[optimizer_config["lr_type"]]
153 |
154 | gpus = config['gpu'].split(',')
155 | if len(gpus) > 1:
156 | ad_net = nn.DataParallel(ad_net, device_ids=[int(i) for i in gpus])
157 | base_network = nn.DataParallel(base_network, device_ids=[int(i) for i in gpus])
158 |
159 | ## train
160 | len_train_source = len(dset_loaders["source"])
161 | len_train_target = len(dset_loaders["target"])
162 | transfer_loss_value = classifier_loss_value = total_loss_value = 0.0
163 | best_acc = 0.0
164 | best_avg_visda = 0.0
165 |
166 | for i in range(config["num_iterations"]):
167 | if i % config["test_interval"] == config["test_interval"]-1:
168 | base_network.train(False)
169 | temp_acc, best_avg = image_classification_test(dset_loaders, \
170 | base_network, best_avg=best_avg_visda)
171 | temp_model = nn.Sequential(base_network)
172 |
173 | if temp_acc > best_acc:
174 | best_acc = temp_acc
175 | best_model = temp_model
176 | torch.save(base_network, osp.join(config["output_path"], \
177 | "best_model_%s.pth.tar") % config["run_num"])
178 | torch.save(ad_net, osp.join(config["output_path"], \
179 | "best_adnet_%s_%s.pth.tar") % (args.CDAN, config["run_num"]))
180 |
181 | if best_avg > best_avg_visda:
182 | best_avg_visda = best_avg
183 | torch.save(base_network, osp.join(config["output_path"], \
184 | "best_model_%s.pth.tar") % config["run_num"])
185 |
186 | if 'visda' not in args.dset:
187 |                 log_str = "iter: {:05d}, accuracy: {:.5f}, best_acc: {:.5f}".format(i, temp_acc, best_acc)
188 | config["out_file"].write(log_str+"\n")
189 | config["out_file"].flush()
190 | print(log_str)
191 | with open(config["record_file"], 'a') as f:
192 | f.write(log_str + '\n')
193 | if i % config["snapshot_interval"] == 0:
194 | torch.save(base_network, osp.join(config["output_path"], \
195 | "latest_model_%s.pth.tar") % config["run_num"])
196 | loss_params = config["loss"]
197 | ## train one iter
198 | base_network.train(True)
199 | ad_net.train(True)
200 | optimizer = lr_scheduler(optimizer, i, **schedule_param)
201 | optimizer.zero_grad()
202 | if i % len_train_source == 0:
203 | iter_source = iter(dset_loaders["source"])
204 | if i % len_train_target == 0:
205 | iter_target = iter(dset_loaders["target"])
206 |
207 |         inputs_source, labels_source = next(iter_source)
208 |         inputs_target, labels_target = next(iter_target)
209 | inputs_source, inputs_target, labels_source = inputs_source.cuda(), inputs_target.cuda(), labels_source.cuda()
210 | batch_size = args.batch_size
211 | inputs = torch.cat((inputs_source,inputs_target),dim=0)
212 | if args.norm_type == 'dsbn':
213 | domain_labels = torch.from_numpy(np.array([[1]] * batch_size + [[0]] * batch_size)).long().cuda()
214 | features, outputs = base_network(inputs, domain_labels)
215 | else:
216 | features, outputs = base_network(inputs)
217 | outputs_source, outputs_target = outputs[:batch_size], outputs[batch_size:]
218 | softmax_src = nn.Softmax(dim=1)(outputs_source)
219 | softmax_tgt = nn.Softmax(dim=1)(outputs_target)
220 | softmax_out = torch.cat((softmax_src, softmax_tgt), dim=0)
221 |
222 | if config['CDAN'] == 'CDAN+E':
223 | entropy = loss.Entropy(softmax_out)
224 | # transfer_loss = 0
225 | transfer_loss = loss.CDAN([features, softmax_out], ad_net, entropy, network.calc_coeff(i), random_layer)
226 | elif config['CDAN'] == 'CDAN':
227 | transfer_loss = loss.CDAN([features, softmax_out], ad_net, None, None, random_layer)
228 | else:
229 | raise ValueError('Method cannot be recognized.')
230 |
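        # Editor's note (added): loss.CDAN conditions the domain discriminator
        # on the (optionally randomized) outer product of the features and the
        # softmax predictions; the 'CDAN+E' variant additionally down-weights
        # samples with high prediction entropy, following Long et al.,
        # "Conditional Adversarial Domain Adaptation" (NeurIPS 2018).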
231 | if args.ent:
232 | ent_loss = -0.1*torch.mean(torch.sum(softmax_tgt*torch.log(softmax_tgt+1e-8),dim=1))
233 | else:
234 | ent_loss = 0
235 |
236 | classifier_loss = nn.CrossEntropyLoss()(outputs_source, labels_source)
237 | total_loss = loss_params["trade_off"] * transfer_loss + classifier_loss + ent_loss
238 | total_loss.backward()
239 | optimizer.step()
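        # Editor's note (added): the RN mixture weights ('my_*' parameters in
        # reciprocal_norm.py) are clamped to [0.5, 1] after every update, so a
        # domain's own statistics always carry at least half the weight in the
        # mixed mean/variance.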
240 | if my_p is not None:
241 | for p in my_p:
242 | p.data.clamp_(min=0.5, max=1)
243 |
244 | if i % config['print_num'] == 0:
245 |             log_str = "iter: {:05d}, classification: {:.5f}, transfer: {:.5f}, entropy: {:.5f}".format(i, classifier_loss, transfer_loss, ent_loss)
246 | config["out_file"].write(log_str+"\n")
247 | config["out_file"].flush()
248 | with open(config["record_file_loss"], 'a') as f:
249 | f.write(log_str + '\n')
250 | if config['show']:
251 | print(log_str)
252 | torch.save(best_model, osp.join(config["output_path"], "best_model.pth.tar"))
253 | return best_acc
254 |
255 | if __name__ == "__main__":
256 | os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_id
257 | # train config
258 | config = {}
259 | config['CDAN'] = args.CDAN
260 | config['method'] = args.method
261 | config["gpu"] = args.gpu_id
262 | config["num_iterations"] = args.num_iterations
263 | config["print_num"] = args.print_num
264 | config["test_interval"] = args.test_interval
265 | config["snapshot_interval"] = args.snapshot_interval
266 | config["output_for_test"] = True
267 | config["show"] = args.show
268 | config["output_path"] = args.dset + '/' + args.output_dir
269 | config["record"] = 'record/%s' % args.method
270 | config["run_num"] = args.run_num
271 | config["record_file"] = "record/%s/" % args.method + '%s_net_%s_%s_to_%s_num_%s.txt' % (args.method, args.net, args.source, args.target, args.run_num)
272 | config["record_file_loss"] = "record/%s/" % args.method + '%s_net_%s_%s_to_%s_num_%s_loss.txt' % (args.method, args.net, args.source, args.target, args.run_num)
273 |
274 |     if not osp.exists(config["output_path"]):
275 |         os.makedirs(config["output_path"], exist_ok=True)
276 |     config["out_file"] = open(osp.join(config["output_path"], "log.txt"), "w")
277 |     if not osp.exists(config["record"]):
278 |         os.makedirs(config["record"], exist_ok=True)
279 |
280 | config["prep"] = {"test_10crop":False, 'params':{"resize_size":256, "crop_size":224, 'alexnet':False}}
281 | config["loss"] = {"trade_off":args.trade_off, "lambda_method":args.lambda_method}
282 | if "AlexNet" in args.net:
283 | config["prep"]['params']['alexnet'] = True
284 | config["prep"]['params']['crop_size'] = 227
285 | config["network"] = {"name":network.AlexNetFc, \
286 | "params":{"use_bottleneck":True, "bottleneck_dim":256, "new_cls":True} }
287 | elif "ResNet" in args.net:
288 | # exit(0)
289 | if args.norm_type == 'dsbn':
290 | config["network"] = {"name":resnetdsbn.resnet50dsbn, \
291 | "params":{ "use_bottleneck":True, "bottleneck_dim":args.bottle_dim, "new_cls":True} }
292 | else:
293 | config["network"] = {"name":network.ResNetFc, \
294 | "params":{"resnet_name":args.net, "use_bottleneck":True, "bottleneck_dim":args.bottle_dim, "new_cls":True} }
295 | elif "VGG" in args.net:
296 | config["network"] = {"name":network.VGGFc, \
297 | "params":{"vgg_name":args.net, "use_bottleneck":True, "bottleneck_dim":256, "new_cls":True} }
298 |
299 | config["loss"]["random"] = args.random
300 | config["loss"]["random_dim"] = 1024
301 |
302 | config["optimizer"] = {"type":optim.SGD, "optim_params":{'lr':args.lr, "momentum":0.9, \
303 | "weight_decay":0.0005, "nesterov":True}, "lr_type":"inv", \
304 | "lr_param":{"lr":args.lr, "gamma":0.001, "power":0.75} }
305 | if args.dset == 'office-home':
306 | art_txt = "/home/saulsheng/workspace/project/NewAttention/data/office-home/Art.txt"
307 | clipart_txt = "/home/saulsheng/workspace/project/NewAttention/data/office-home/Clipart.txt"
308 | realworld_txt = "/home/saulsheng/workspace/project/NewAttention/data/office-home/Real_World.txt"
309 | product_txt = "/home/saulsheng/workspace/project/NewAttention/data/office-home/Product.txt"
310 | if args.source == 'R' : s_dset_path = realworld_txt
311 | elif args.source == 'C': s_dset_path = clipart_txt
312 | elif args.source == 'A': s_dset_path = art_txt
313 | elif args.source == 'P': s_dset_path = product_txt
314 | if args.target == 'R' : t_dset_path = realworld_txt
315 | elif args.target == 'C': t_dset_path = clipart_txt
316 | elif args.target == 'A': t_dset_path = art_txt
317 | elif args.target == 'P': t_dset_path = product_txt
318 | if args.multi_source:
319 | multi_tarA = r'/home/saulsheng/workspace/project/NewAttention/data/office-home/multi_tarA.txt'
320 | multi_tarC = r'/home/saulsheng/workspace/project/NewAttention/data/office-home/multi_tarC.txt'
321 | multi_tarP = r'/home/saulsheng/workspace/project/NewAttention/data/office-home/multi_tarP.txt'
322 | multi_tarR = r'/home/saulsheng/workspace/project/NewAttention/data/office-home/multi_tarR.txt'
323 | if args.target == 'A':
324 | s_dset_path = multi_tarA
325 | t_dset_path = art_txt
326 | elif args.target == 'C':
327 | s_dset_path = multi_tarC
328 | t_dset_path = clipart_txt
329 | elif args.target == 'P':
330 | s_dset_path = multi_tarP
331 | t_dset_path = product_txt
332 | elif args.target == 'R':
333 | s_dset_path = multi_tarR
334 | t_dset_path = realworld_txt
335 | elif args.dset == 'office31':
336 | amazon_txt = "/home/saulsheng/workspace/project/NewAttention/data/office/amazon_list.txt"
337 | dslr_txt = "/home/saulsheng/workspace/project/NewAttention/data/office/dslr_list.txt"
338 | webcam_txt = "/home/saulsheng/workspace/project/NewAttention/data/office/webcam_list.txt"
339 | if args.source == 'A': s_dset_path = amazon_txt
340 | elif args.source == 'D': s_dset_path = dslr_txt
341 | elif args.source == 'W': s_dset_path = webcam_txt
342 | if args.target == 'A': t_dset_path = amazon_txt
343 | elif args.target == 'D': t_dset_path = dslr_txt
344 | elif args.target == 'W': t_dset_path = webcam_txt
345 | elif args.dset == 'image-clef':
346 | p_txt = "/home/saulsheng/workspace/dataset/image-clef/ImageCLEF/pList.txt"
347 | c_txt = "/home/saulsheng/workspace/dataset/image-clef/ImageCLEF/cList.txt"
348 | i_txt = "/home/saulsheng/workspace/dataset/image-clef/ImageCLEF/iList.txt"
349 | if args.source == 'P': s_dset_path = p_txt
350 | elif args.source == 'C': s_dset_path = c_txt
351 | elif args.source == 'I': s_dset_path = i_txt
352 | if args.target == 'P': t_dset_path = p_txt
353 | elif args.target == 'C': t_dset_path = c_txt
354 | elif args.target == 'I': t_dset_path = i_txt
355 | elif args.dset == 'visda':
356 | s_dset_path = r'/home/saulsheng/workspace/project/NewAttention/data/visda-2017/train_list.txt'
357 | t_dset_path = r'/home/saulsheng/workspace/project/NewAttention/data/visda-2017/validation_list.txt'
358 | else:
359 | s_dset_path = args.s_dset_path
360 |         t_dset_path = args.t_dset_path
361 | config["dataset"] = args.dset
362 | config["data"] = {"source":{"list_path":s_dset_path, "batch_size":args.batch_size}, \
363 | "target":{"list_path":t_dset_path, "batch_size":args.batch_size}, \
364 | "test":{"list_path":t_dset_path, "batch_size":args.batch_size}}
365 |
366 | if config["dataset"] == "office31":
367 | if ("webcam" in args.s_dset_path and "dslr" in args.t_dset_path) or \
368 | ("webcam" in args.s_dset_path and "amazon" in args.t_dset_path) or \
369 | ("dslr" in args.s_dset_path and "amazon" in args.t_dset_path):
370 | config["optimizer"]["lr_param"]["lr"] = 0.001 # optimal parameters
371 | elif ("amazon" in args.s_dset_path and "dslr" in args.t_dset_path) or \
372 | ("amazon" in args.s_dset_path and "webcam" in args.t_dset_path) or \
373 | ("dslr" in args.s_dset_path and "webcam" in args.t_dset_path):
374 | config["optimizer"]["lr_param"]["lr"] = 0.0003 # optimal parameters
375 | config["network"]["params"]["class_num"] = 31
376 | elif config["dataset"] == "image-clef":
377 | config["optimizer"]["lr_param"]["lr"] = 0.001 # optimal parameters
378 | config["network"]["params"]["class_num"] = 12
379 | elif config["dataset"] == "visda":
380 | config["optimizer"]["lr_param"]["lr"] = 0.0003 # optimal parameters
381 | config["network"]["params"]["class_num"] = 12
382 | config['loss']["trade_off"] = 1.0
383 | elif config["dataset"] == "office-home":
384 | config["optimizer"]["lr_param"]["lr"] = 0.001 # optimal parameters
385 | config["network"]["params"]["class_num"] = 65
386 | else:
387 | raise ValueError('Dataset cannot be recognized. Please define your own dataset here.')
388 |
389 | if args.seed is not None:
390 | seed = args.seed
391 | else:
392 | seed = random.randint(1,10000)
393 | print(seed)
394 | torch.manual_seed(seed)
395 | torch.cuda.manual_seed(seed)
396 | torch.cuda.manual_seed_all(seed)
397 | np.random.seed(seed)
398 | random.seed(seed)
399 | os.environ['PYTHONHASHSEED'] = str(seed)
400 |
401 |
402 | config["out_file"].write(str(config))
403 | config["out_file"].flush()
404 | train(config)
405 |
--------------------------------------------------------------------------------
/data/ImageCLEF/bList.txt:
--------------------------------------------------------------------------------
1 | b/252_0446.jpg 6
2 | b/252_0337.jpg 6
3 | b/252_0493.jpg 6
4 | b/252_0242.jpg 6
5 | b/252_0057.jpg 6
6 | b/252_0273.jpg 6
7 | b/252_0505.jpg 6
8 | b/252_0345.jpg 6
9 | b/252_0463.jpg 6
10 | b/252_0408.jpg 6
11 | b/252_0173.jpg 6
12 | b/252_0300.jpg 6
13 | b/252_0142.jpg 6
14 | b/252_0030.jpg 6
15 | b/252_0036.jpg 6
16 | b/252_0487.jpg 6
17 | b/252_0033.jpg 6
18 | b/252_0481.jpg 6
19 | b/252_0297.jpg 6
20 | b/252_0397.jpg 6
21 | b/252_0280.jpg 6
22 | b/252_0424.jpg 6
23 | b/252_0278.jpg 6
24 | b/252_0445.jpg 6
25 | b/252_0267.jpg 6
26 | b/252_0331.jpg 6
27 | b/252_0092.jpg 6
28 | b/252_0494.jpg 6
29 | b/252_0211.jpg 6
30 | b/252_0014.jpg 6
31 | b/252_0438.jpg 6
32 | b/252_0072.jpg 6
33 | b/252_0327.jpg 6
34 | b/252_0025.jpg 6
35 | b/252_0196.jpg 6
36 | b/252_0285.jpg 6
37 | b/252_0400.jpg 6
38 | b/252_0467.jpg 6
39 | b/252_0232.jpg 6
40 | b/252_0279.jpg 6
41 | b/252_0189.jpg 6
42 | b/252_0187.jpg 6
43 | b/252_0229.jpg 6
44 | b/252_0066.jpg 6
45 | b/252_0499.jpg 6
46 | b/252_0016.jpg 6
47 | b/252_0175.jpg 6
48 | b/252_0512.jpg 6
49 | b/252_0296.jpg 6
50 | b/252_0144.jpg 6
51 | b/99B047454CD9B6D811D0288BE087715E.jpg 9
52 | b/4368FC8E03F2A288ADF2DCDBD712E750.jpg 9
53 | b/954071A67971CBA89764EDB20428D8F3.jpg 9
54 | b/8EB65F199E94D875FFD6768F35710276.jpg 9
55 | b/A04FC61BCC99C49B353D39F7D471D554.jpg 9
56 | b/A4088A337FAB3ECA4D7520729FC56AEF.jpg 9
57 | b/A692A3E48919A104D3A706C94844A4C2.jpg 9
58 | b/7FAC9E56C4F8DBF4C968445000B7DFD2.jpg 9
59 | b/BCA5859E1E87F738CDE4E9FCC43616AD.jpg 9
60 | b/16128B2C344486D5B22CCB544A498D26.jpg 9
61 | b/68D2FB34573152F7E5E8569257C177B4.jpg 9
62 | b/934CDA1D8798ACC1F31B0C0994F1A254.jpg 9
63 | b/07B9E5E73C8EF79063FA07B3F52F2735.jpg 9
64 | b/6879DEF4F6D2339688FDD82DA2C85754.jpg 9
65 | b/10092B6598A5EAB2C52212C76B66636F.jpg 9
66 | b/30E1B08A10F380EF9AF1F2C54BC37F99.jpg 9
67 | b/5C3E621FBCACCF9DD0783EC9C48086AB.jpg 9
68 | b/996A4E7860E0E124D0E28B4624EF7A3E.jpg 9
69 | b/7A614107DF056752DCEB1884EB48F52E.jpg 9
70 | b/167F8C023E0564EA731D5AD4443DCC78.jpg 9
71 | b/5D7D2E837FC15A257C810B6F99E8E2C3.jpg 9
72 | b/EAADBBFFC051985B11BF51FEA90738DD.jpg 9
73 | b/FF6854B0B1F13AF2FD62A3744D95F890.jpg 9
74 | b/01624A6E5002E7A3DF9AC102D1EE025E.jpg 9
75 | b/CFA34381EA85F93404A0CB2BC291D659.jpg 9
76 | b/E068BF4E458356B9A18A106DE11DEB62.jpg 9
77 | b/EF1993AEEB6D084ED31B0F29B4D01119.jpg 9
78 | b/040AADC700F811DA31F02DB6C3B01403.jpg 9
79 | b/9120B69B673D10FFF15DE9306FB63E26.jpg 9
80 | b/5B7D32115CEF942A9E5C67D9CE59E5BA.jpg 9
81 | b/4CDACA16FEB95BFA24FCC83D7AADEB51.jpg 9
82 | b/111002CDE28DE7FC6EC8105C0F6F1913.jpg 9
83 | b/7812BB4418847E19B6863BE7D93FC516.jpg 9
84 | b/2547D105CAEB73242F8342EE9AA0EB10.jpg 9
85 | b/1519B34721FF9BF0E289C4B92FA6DDE8.jpg 9
86 | b/C8579112A830E3894B23DD259CA6F651.jpg 9
87 | b/371105E72FC205275879A002EC3CBA91.jpg 9
88 | b/446C104E4DE6AD474C82AFFC25D97CC7.jpg 9
89 | b/4335FEB13FB8E95B80825CBC367B74A6.jpg 9
90 | b/4C543ADD81590F54708E9EE604718661.jpg 9
91 | b/A63E71DFFB82DDA6134F95547978D5A6.jpg 9
92 | b/0BD2328961DD8A5777BC756044CC41CE.jpg 9
93 | b/CE7003F44D239819AC24BB18D57EAF36.jpg 9
94 | b/C946D7BC36A1D1A6BA5B362C4B317B0B.jpg 9
95 | b/E3BE7EAE643951A8949B71552B12DD3C.jpg 9
96 | b/765653A4C9F07D2258700ED49854152E.jpg 9
97 | b/F2F3C096D8D349605A76EBE3B189507C.jpg 9
98 | b/2939ED3DAB0021BFACF058D4EB72739E.jpg 9
99 | b/78B37D75172C8C1F40C9B8209D9E57D2.jpg 9
100 | b/2BF59300B51BFF87A9F920E7D1EF2363.jpg 9
101 | b/E44C9A35C92B27D3F812608DE6CA910F.jpg 7
102 | b/9B64A10D9D3F0D8DDF0A4FD6B620F793.jpg 7
103 | b/016E08E303ACBDF596B0C071830D9D0E.jpg 7
104 | b/019B54A87F8663382B80E9EB5F11ECE6.jpg 7
105 | b/14706E0F6C0A886F93BDC4564D9FCF1C.jpg 7
106 | b/2993174C7CD5AFF712DDDCB1DD1F5F05.jpg 7
107 | b/1D284A29CD3A93D678C3F43A7C1240D0.jpg 7
108 | b/2EBBBE60D95063CBC9500076F91B1414.jpg 7
109 | b/A099C89E07064CB065A200ED5A56AA3D.jpg 7
110 | b/3574F45BD92A71A04A51302BBA3076C6.jpg 7
111 | b/6FD717560C67FCD1A8D69F246CDC1DBF.jpg 7
112 | b/5DA4CB45E851C166D7405927602BC592.jpg 7
113 | b/51147F65309C1B9329152EA4DFD1E7EF.jpg 7
114 | b/4EAD0040169FB171151B987CF678C89C.jpg 7
115 | b/BDDCDA3C43D1FBE339830675EE790BC4.jpg 7
116 | b/D93244D2E8D5FE0B0203C7E3CF032D8C.jpg 7
117 | b/665B2EA7D97C3ECD5961CD787B3E4B10.jpg 7
118 | b/44AF4192DB14121B9607E40F45852D3F.jpg 7
119 | b/40C049DCD2D6E864164A0FEF468DD27E.jpg 7
120 | b/1E8B1161C3248FBF8C8B4E2C193DBEA5.jpg 7
121 | b/046AE64BF285C34A173C93896F9201B7.jpg 7
122 | b/BF8881854887B688AE2552853B06634D.jpg 7
123 | b/D347B5C3B30BE0BB5DF85C17CEC9026A.jpg 7
124 | b/2E3EF19FEB17FB00C6EE6A6FFE9DA084.jpg 7
125 | b/C5DAC51DD6730DD1D8B65CA3F4260F17.jpg 7
126 | b/5CAC03956E4F77658D31A427918AB441.jpg 7
127 | b/0C64DF97C1F7D93798EE7AC39C1C5DB6.jpg 7
128 | b/FD03B59B87DA6912FAEE634B6745AAB0.jpg 7
129 | b/FDD66506D81871A8536C27563483C4A9.jpg 7
130 | b/D208B7A14FB2CC965D2D27D72AB0F7B6.jpg 7
131 | b/8E5E8683D65BB32C27556F1A92484C06.jpg 7
132 | b/2E182D5D0084A990BB181D029FECB134.jpg 7
133 | b/5D68FA5DA23492AD102E4C51D751E0C5.jpg 7
134 | b/757B031495A588F1288D1484103ABEF7.jpg 7
135 | b/A2953E5DBEA8844396FA196A136A443A.jpg 7
136 | b/187585E2FA471A9A3110E2E49DC23D89.jpg 7
137 | b/06176B915EDED374DD9538A26C9355AF.jpg 7
138 | b/C46892BD2F5F640D094D003966E47198.jpg 7
139 | b/890AB1D52F11EA1F7565D6EB22E03160.jpg 7
140 | b/4E5A38B53C9ECDB70994B82D0EE24324.jpg 7
141 | b/5CBCD72CAF98C2F10EE428C5A41095F3.jpg 7
142 | b/5E1D3F56E8620456A323EB3DFA28FA98.jpg 7
143 | b/79A4DB5AD3C4319B030C41041C1CC772.jpg 7
144 | b/399A73127E633BE4CF615585D1100EDB.jpg 7
145 | b/93A4AE80A27C7C57F09B81C82144A5AB.jpg 7
146 | b/C9C938D6C83343D26920F4F19F194390.jpg 7
147 | b/E59FF601F1D61841567C57D146D3630B.jpg 7
148 | b/EBEC99772324C0FCE5D5313F7109D6BA.jpg 7
149 | b/D80939E4CFDB90FBBD5095F6162FA29B.jpg 7
150 | b/0F1910541E4594BF6AE30CB4104FFB00.jpg 7
151 | b/303A184971074E3B4E2A8F049A5E4FE9.jpg 8
152 | b/897D3867A3683B2C299F1128AC4DA9EC.jpg 8
153 | b/9032A8B30A6BF5C274E3EBBF49BDA72D.jpg 8
154 | b/174BBA9C9AEEC116C8CD8263692FFB12.jpg 8
155 | b/EF10E0D4A505AF42D51C62301C811B1D.jpg 8
156 | b/DFF071F32258BE98D31E0293E0B42F52.jpg 8
157 | b/DF36EC86BBDBF157ECC9F71DA41F492F.jpg 8
158 | b/F7722CEC2C7F49DCF0300E9089FBE550.jpg 8
159 | b/9CB03D84405DA70A405FCD8555645736.jpg 8
160 | b/E4475F5F4123E77D2E64F11187269E0B.jpg 8
161 | b/00EFC6FC24740AD02D1322AB2847424B.jpg 8
162 | b/FADB6C2AB826CBFB2464734E9B28E0C6.jpg 8
163 | b/E865F0CCB33DC97EB24BC9FA9CC66F3C.jpg 8
164 | b/37FFF33C040CD9B023F2B0C739346584.jpg 8
165 | b/B0BE1146B7B543C5CC8BF636EBE73ABD.jpg 8
166 | b/CF2C04C18ACA69DEE41E4AEB3AC40F66.jpg 8
167 | b/2B70CCF18FF865B58AA4979943E73A4A.jpg 8
168 | b/AEEBA1619227AD5E712D167CB3A76560.jpg 8
169 | b/C69B2009A9C540609C3DFB527D433D37.jpg 8
170 | b/0227FDEC0640B5B4B8A2622807186F9D.jpg 8
171 | b/03F7E9713302C8B0E06130D062325B91.jpg 8
172 | b/D2EBBD22BEC2B5191E7C9EDC408F41F7.jpg 8
173 | b/596D7BF182188DED40B7CD7884BACF52.jpg 8
174 | b/9B5359845473A61E01EB975FBD724B22.jpg 8
175 | b/BF9E2B9D99F713CE4C09938D85182D45.jpg 8
176 | b/6E3075911EDF41B0F5E7B77353575DE1.jpg 8
177 | b/A236F01F2E14CE72392A541B83ED6300.jpg 8
178 | b/0C6F1DD934D222BD7F61080CB8D54476.jpg 8
179 | b/C73821398958DC39961B67106D563AD1.jpg 8
180 | b/8B5500DAF0401C20CAF76BCA013CA965.jpg 8
181 | b/AF5697859340324FEFB037BA79E791DE.jpg 8
182 | b/942C4ECC3AA6CAB343D2B3D6D6587536.jpg 8
183 | b/C326C13E0E4848D3F9A0398706951D18.jpg 8
184 | b/910DBD0C045BFE8611CF317C0C60DC43.jpg 8
185 | b/0843F6ECD367B7817B4700ED4B1095FF.jpg 8
186 | b/CB3495A971A1AC9B6326C24A205A1FB9.jpg 8
187 | b/3633025B7252C562E88C3AAAEF390367.jpg 8
188 | b/B5B0D85CCCFBEA76A3CB33C2396B183E.jpg 8
189 | b/0755FF97B7389BCB05CA783DED6CE38F.jpg 8
190 | b/6202D0BBD5CCF3A45F72983790E5FDCD.jpg 8
191 | b/8D5E4FADA5D6496B225965569C3A6A6E.jpg 8
192 | b/936A95DD2C9500B38DCBF4664E4B999C.jpg 8
193 | b/3F1C070BABD316E8E2CAC4CF5E5A3162.jpg 8
194 | b/BCDD2121DDFB641DCF219A42FCD226EF.jpg 8
195 | b/062A86011D7D4462D7A1D910DE5406CC.jpg 8
196 | b/28909B95CF0342F0A6146E69779B4C79.jpg 8
197 | b/0833521B6398D33469B12C52131E1FF9.jpg 8
198 | b/0F19F4F4B449AA3DC30EC5C5080931FE.jpg 8
199 | b/A10F8941898D7FDAF1250AD86517D160.jpg 8
200 | b/D564E45383C6AE04BA53E5867DA81BCE.jpg 8
201 | b/1DA9F95F105A84908AA2E6138036EADB.jpg 2
202 | b/C3C49A4123224889DAEACDB04036AFF9.jpg 2
203 | b/3198FE1A6CE6ACD0E9F05765CDD888AC.jpg 2
204 | b/95BD5D17B8EE9A5B0B7140AF3D3EE155.jpg 2
205 | b/CC33B67CEAB260C70B484FD728D77A11.jpg 2
206 | b/95F124A5CFD54F6C6156257869B902B3.jpg 2
207 | b/461CBE2E4490B1126D75BF5774BF7E2B.jpg 2
208 | b/26DEE9250A7B866F49301B50BCE4A8D9.jpg 2
209 | b/5F7090EE7DB242C7808CFAE3C158E2CA.jpg 2
210 | b/2C914AD42D56193E456E8C5C4F606AA8.jpg 2
211 | b/0D4C4128FB03C80B4A70762B9F730BE0.jpg 2
212 | b/943EAF8EE6F3DAD93C350D7334E5D43E.jpg 2
213 | b/8E71498075A52224B9DB53DC5AB9E03C.jpg 2
214 | b/C3F566E1F64CDC2E5E39D8DB53A9A92F.jpg 2
215 | b/5513CAD5A526CAA15FF2D7858088EC7A.jpg 2
216 | b/8DE0CA1C66B13C3422D2510F5741A7CF.jpg 2
217 | b/2D2AF15FD593F6831AC50A9F9D2EC9EF.jpg 2
218 | b/AC866E2E87C79A4C6DDBBF2957B60641.jpg 2
219 | b/F8454BD5C7267E9348BE34B46DAF9C3E.jpg 2
220 | b/A806021E59957A157359258F1B0FCCFD.jpg 2
221 | b/570734E61A79E0DD7DAA759AA16E84CB.jpg 2
222 | b/37E972099EBA6B295EA443DA17288B27.jpg 2
223 | b/76F78EAC76E1E476ABECBE4E6A55A321.jpg 2
224 | b/5453A1F47A601A8D0BF3F4741BDF3DFA.jpg 2
225 | b/161E937F0769F6018D4A2DE89A94ED72.jpg 2
226 | b/C256C06CD8FE55EA7D06DEAA60A8F325.jpg 2
227 | b/535C060A2B6BF625B3F63FEDAB9FC4ED.jpg 2
228 | b/EA0256F7A672A1253A5720B0F480B1D7.jpg 2
229 | b/88816EF105060A2840E8D0B9818BE02D.jpg 2
230 | b/2F812EE05204B95D035679AE36260953.jpg 2
231 | b/5D2DD16038474585420053AF925FDC61.jpg 2
232 | b/95A176181338C3F6609D508147F05CC0.jpg 2
233 | b/A05BB25301885E2CBBFB48B684EFF4C3.jpg 2
234 | b/0E3763A7051609ED45055CE2AA187B56.jpg 2
235 | b/AFEA94305B7BAE3D6FDC3AEEAF4A5B8D.jpg 2
236 | b/0367235F822FB39337329ADB8431D7A7.jpg 2
237 | b/C3896EDD27F601950AEB5CF67F4E7CD4.jpg 2
238 | b/8D57C4BB57E86CC5E547A82D88433255.jpg 2
239 | b/E92D3FAEC1DB5564E9B8B37674482A4D.jpg 2
240 | b/F9AB469CCB792CBFB072C6AAF4819943.jpg 2
241 | b/16FC961DBAE75E0EE096F6FAF7832DA3.jpg 2
242 | b/4A583BE465941467859B35D5E3D99BEC.jpg 2
243 | b/0130814315084D9CD4F6093CE25041FB.jpg 2
244 | b/8873695BFD8E3D2393439161466588B0.jpg 2
245 | b/5E5FED6EDA0768E31CD136A73210D9AA.jpg 2
246 | b/06343E0AE2FF16BAC012CA8298EB7C37.jpg 2
247 | b/02971601D6B855172D5D81615234BFFF.jpg 2
248 | b/9BCD41863FCC0699EA960611ECB3D9B1.jpg 2
249 | b/1B88B667A17F9BE3041276D4F1982296.jpg 2
250 | b/FBDA9B9254483E1060FD2CE0E819DEAA.jpg 2
251 | b/ADC96D0263E181615FE9D491CA4BD721.jpg 10
252 | b/596FFF97CCADE4C6D56A7F86B7ADC4AA.jpg 10
253 | b/AF59B7AF2928BDAC7DA78EA4DE7E789F.jpg 10
254 | b/B2387134FDBA1728EE31D8DBA5F929A6.jpg 10
255 | b/0A36A7B9A724B01A5E98E90210E224E0.jpg 10
256 | b/D4226310F8543F66BE53820C40697F2D.jpg 10
257 | b/A19462EE3C53688CEA99DEEE65B2CE34.jpg 10
258 | b/6144F29AFFEC5C70D42F4E85A7C145E8.jpg 10
259 | b/BEDF5F6174AE0035B91BC442AB4C5442.jpg 10
260 | b/BE202CAF702644FBB2958ACF4E35A8BE.jpg 10
261 | b/04FACB943377EDE55F37678DBF85B6FD.jpg 10
262 | b/086464B2C7F1CA924457B741F7B010C4.jpg 10
263 | b/D21F9071E3A393C47E4CA26783B81F15.jpg 10
264 | b/4407233DC2E2EE980AC3A42B729EADA4.jpg 10
265 | b/568BD2B97C5A8F14F683FABAE41FBF66.jpg 10
266 | b/009D596B8E08DFCF4884A9E266CBE6F3.jpg 10
267 | b/11BBBF5DB04DE5F6C398EE8EA358357F.jpg 10
268 | b/DFB46188BA5754B86A5DC9A0C40D4888.jpg 10
269 | b/683DC3083CB96BD64836EA29F9BB344A.jpg 10
270 | b/7F5595160FCDE0286A91D43F1B84D851.jpg 10
271 | b/1DC986B87E0FB98BC070E5700289DABF.jpg 10
272 | b/2068B6BCF55638C892A00A96E781A1ED.jpg 10
273 | b/53EE4E1C64548B33EF93C44EC14411A7.jpg 10
274 | b/E5DD48CF34AD63DAEDD82BF0802771AD.jpg 10
275 | b/B6C7DD34EC67078C780ED311B2F69F2A.jpg 10
276 | b/1F1FEB24B5B015089952F97940996505.jpg 10
277 | b/9443B8E8F682CB83C6775904E6347E7F.jpg 10
278 | b/4BD66FAAC250C47F90216B31D38FDAFD.jpg 10
279 | b/A1C06B307C726981112446BBC2440A88.jpg 10
280 | b/5E9EE691E2073B77B302D9B88633FD24.jpg 10
281 | b/C15001D8FA6E8134415BF4C6F9CCD686.jpg 10
282 | b/C7764F2DE2E1C19855C41F9D34EE9F97.jpg 10
283 | b/E651FFD6C847D1DAD806AA7AAC10D5EE.jpg 10
284 | b/28F885B7E338D2EC76E4223D65E5600C.jpg 10
285 | b/B39D78380CFFCE66F619F886CDF35B00.jpg 10
286 | b/64709DAC7F8F19B5A159D6688D9C1C4C.jpg 10
287 | b/9D9061A2750C82A87F05429E0FDDB982.jpg 10
288 | b/3DEF7317699E60DEAE12069826FCBAC5.jpg 10
289 | b/598AEB6C824EA77ED116588E4A66718D.jpg 10
290 | b/C7E185C1A43618E9D4555106C4CDAEA2.jpg 10
291 | b/DE32BB1C556018C204BCF88792FF26D3.jpg 10
292 | b/36105C43E7BF401AF4C83AD609BC88F4.jpg 10
293 | b/3C003BB2F384B6E7CB247B15C029C18B.jpg 10
294 | b/2376DEF9BC3D8361D1C2D945FAB837A0.jpg 10
295 | b/085902158622DB7E8F473263FF3DF9E7.jpg 10
296 | b/68B1481A01ABDDA278E17FF02C124FCC.jpg 10
297 | b/3ADDD2423A59963FE4A61F5904DE8557.jpg 10
298 | b/786218FB02044550259D737210398787.jpg 10
299 | b/E2F3BA7D4BD5FBF8E58CF3E5DB4338E6.jpg 10
300 | b/1DAE745F1341E3005CA5E575AA688EEC.jpg 10
301 | b/9453C5315949BFF5E0EE9C9A0592A740.jpg 1
302 | b/6B14D3B3308C0ABED6C0EECD1A8204D6.jpg 1
303 | b/20D3C7E0E7D94B1B499CEA7EB0301491.jpg 1
304 | b/D1C9313072151C95F0AA44C0E4BA9736.jpg 1
305 | b/22B33A4BB6D5A58B0539C9AB825D8A8E.jpg 1
306 | b/5DE6ADD369366694C47B4C3E8899FF18.jpg 1
307 | b/A60F1AC1F67D99F8A98DD9024AA3F152.jpg 1
308 | b/F1AB4EF686776E64E172B26D2CEF767D.jpg 1
309 | b/AFA4A5B756A09DCC41D1504A0909088E.jpg 1
310 | b/98BB12769E4C068CFAAB64E918D4F225.jpg 1
311 | b/907AFED2066568F94F93E6B0D56D6FD0.jpg 1
312 | b/C747E2B2C1EC047F1C8046F1E1AE91BC.jpg 1
313 | b/F424A7D73A0BAC6C6AA1B9B8F950B5ED.jpg 1
314 | b/D04DAD8F9D392EA5630A47AF1B2D1779.jpg 1
315 | b/7FAB94B2111213A776CAA0FCA99AF1F6.jpg 1
316 | b/4A58CC87C8DD2AAE688E155CE8755452.jpg 1
317 | b/566659F4593FD6F9AB411F740EEFD905.jpg 1
318 | b/76AAF2650B393E7A3B08F2988211054C.jpg 1
319 | b/45FA1628256DF18227854492FB02AAA2.jpg 1
320 | b/CA96AE286CDC5A937D9493E004FBD9AB.jpg 1
321 | b/0D33206B4C64086D4E957E172C9D4FE6.jpg 1
322 | b/57429E195F95DD9541E3E657C77146B4.jpg 1
323 | b/13A3E38BA7D48C0CB1D8C33E3CF3F894.jpg 1
324 | b/036CCB4FC76BAEF206B5449F5656466A.jpg 1
325 | b/35C01806318A3DF97A3CBC919A2B61E1.jpg 1
326 | b/9BC12726749D602634F7949CE6001EE9.jpg 1
327 | b/431C61C7FBD08E5E8F8164355467C746.jpg 1
328 | b/EB6C6B1B61CC73355284A942C79A5FB3.jpg 1
329 | b/D168AEFBDEF1E7B96297AAB4F213A991.jpg 1
330 | b/6CDF9D50D0F1168FED0788A2A955D754.jpg 1
331 | b/0EE0504FFB3B686D2E9B4CBFBC4E726D.jpg 1
332 | b/365FD760A1CB66D7A7DC7D8630B4BF76.jpg 1
333 | b/A566F2968A534836141295685294BAC5.jpg 1
334 | b/3317C84412D8247C7ACFA6ECCA1F4732.jpg 1
335 | b/A58B2BEF5B2EFFBAC16DD6BAA0B9D133.jpg 1
336 | b/5A4CE68844E38D3F53CC2D921585CF21.jpg 1
337 | b/5280C73955AA328043F283D7E4AED17E.jpg 1
338 | b/11C71EA217F8AB294345CC7D070CA6A9.jpg 1
339 | b/03641633A7AE916AC692F4F86E9F6358.jpg 1
340 | b/D2C0AADC00688164B9D2084425BA6429.jpg 1
341 | b/5E674690B23F473EBCD64C430307E85F.jpg 1
342 | b/A3CDC8101E8164F7E5C3D8608F97760A.jpg 1
343 | b/18906D40FE8F1F0D6E9E90397A77E681.jpg 1
344 | b/A5951C7CE55D44D1D7983675C8E66076.jpg 1
345 | b/9A7E2D2853F2CBBF65FDA0AD0EA186BF.jpg 1
346 | b/8BFED687E998CC76FB15ABC222B549E0.jpg 1
347 | b/9B62E39749832D4D081558F1DBEA1A54.jpg 1
348 | b/06A0E6EB6E2A6CEDE167D56124F42D3B.jpg 1
349 | b/E2FBA63A04C5C623EC946420D69FC712.jpg 1
350 | b/18575192544A1997E2771E096B3B59AF.jpg 1
351 | b/4BBC9E1799C77EBE97CDB3A9C2A6A0F6.jpg 11
352 | b/29E875A1614ACAB4FEA4FC35D4F53874.jpg 11
353 | b/EDA3CCF6C1650A0033E65F7F85C3E320.jpg 11
354 | b/AEEC33BD87BCF638A90024C57ECFF976.jpg 11
355 | b/B874B113808639835BB69389B473C25F.jpg 11
356 | b/BD67EAB839C733091BF4D5EFA00BAF53.jpg 11
357 | b/347A65B54C8634ECA5DB93646B9BD593.jpg 11
358 | b/C845EA4BF5699C6FC9861C3239ED3521.jpg 11
359 | b/5EF40468FF1F809ABAA073FAF202DECE.jpg 11
360 | b/830B37B1BF68AC6EBE88172583EC051C.jpg 11
361 | b/02D06D9DB1DF35626E74CFFFFDC1D924.jpg 11
362 | b/6E8738180B3E92989069E424A0F2E821.jpg 11
363 | b/60EFE50F000FE800C88C49C6B7BA78AE.jpg 11
364 | b/BE59980E6EA01751AC9D2528F6B4E3CE.jpg 11
365 | b/4AE44E88B0D3EB56168844D35E9512E1.jpg 11
366 | b/43D91B6329EB66B10A48DEB2D2ECE5DB.jpg 11
367 | b/F7D7EDC248D40404B3779BD8B1F528C2.jpg 11
368 | b/EA091E5439B0432B2784BDF35CEE4355.jpg 11
369 | b/1DFFAAA472A5EB0DF80D6B9E39D75D11.jpg 11
370 | b/D59FFC521FEC8E220A741312ACFE07F9.jpg 11
371 | b/3636B1978F58F9610EABCEB1932558D1.jpg 11
372 | b/B3670F67E80DACFDE64B3D90435389F6.jpg 11
373 | b/47963CB2B8B734EA2106C8D5AD079B30.jpg 11
374 | b/4A105B56BF1CDF4C63361E8A28709004.jpg 11
375 | b/24931A873672F774CC3D9C042DED63C7.jpg 11
376 | b/C868CCB253B127B4734D8501AD9F47C6.jpg 11
377 | b/78B752D6A3A8FA6BD555049BEEDC329F.jpg 11
378 | b/D5714A245ABC39F3D2F32DFACD5062CA.jpg 11
379 | b/B08049B07C47E849CAE17BA138A377F9.jpg 11
380 | b/EA475C1E5432E4DEA3E76C2EFAF99E37.jpg 11
381 | b/184A89BA4F190FA4AF48A29A0542A415.jpg 11
382 | b/53FC876F8EE34EAFAF11D73FBFA9EC1A.jpg 11
383 | b/39B8649F0687BB45ABFAD9A8081EA7C0.jpg 11
384 | b/A052ECA2821412200310D413D537082C.jpg 11
385 | b/3A020B73ED69802A175A6C83153A552B.jpg 11
386 | b/A17B141138A1459E0A8160B7658DA72F.jpg 11
387 | b/2EF970002819A676E1F31BCAA0DFB798.jpg 11
388 | b/54597E7B450CF9F28EF72B6AF595E8F3.jpg 11
389 | b/7003CC063E5B011E284CC4654119738A.jpg 11
390 | b/A9D710641FBA1E9CCEADD93DD5EAF80C.jpg 11
391 | b/2791E6092748248C63C70FBC5FB9F8DE.jpg 11
392 | b/69A926F25C78E8DB02CCA5BB6483A63F.jpg 11
393 | b/CCB24C9F1F9475243D20F69406B36ADC.jpg 11
394 | b/2D55EA21C405EF4E9B1B38D84EEFE1F6.jpg 11
395 | b/77517FC7561F32D5120FD1CFA4BA5A51.jpg 11
396 | b/A24468CDF86147584D240FF17517F4C4.jpg 11
397 | b/E62C4F3C9887B98E398F2F54CE4379C1.jpg 11
398 | b/16AAE9074F5C00F9615C2C6A705E8513.jpg 11
399 | b/AF195E97A0B7A1CB3C144D723FA5A16F.jpg 11
400 | b/409B9D974570529C2F089BF27850AA99.jpg 11
401 | b/74930C08A2D258C3AC15FEE7FB1F624B.jpg 5
402 | b/24547E5BE40D231A049CFC56DE2ABF5F.jpg 5
403 | b/683FB74ADD52CFABEA685F6DC74EE7C9.jpg 5
404 | b/06DCB8E35A75236B1468ACF669D9FEC9.jpg 5
405 | b/C897A37C845E34312B5D71CC821D0B68.jpg 5
406 | b/21A81279C43ABAA217DB5FC4FFC752BF.jpg 5
407 | b/7548A8A889828B4C5C3268891AB60CAF.jpg 5
408 | b/2676F53BB1434E7F9707FA6E2624FD4B.jpg 5
409 | b/A0000F9AD52E520510F524676E6660D6.jpg 5
410 | b/A12E1F7F294F3C2BDFD63AC2271FE73F.jpg 5
411 | b/48A1252DBF7C94311ADC01F7DBF2AFAC.jpg 5
412 | b/DC6E7F8458FDB4DCB18FD008166DB0D2.jpg 5
413 | b/2037016AA33095757AE76833449D6C7B.jpg 5
414 | b/B69C8086ED89805885ECD2413B8BD67C.jpg 5
415 | b/559056179A304125379DC520E3B46CB4.jpg 5
416 | b/822E14FB3EC8BCB4280E90D10A5D1D96.jpg 5
417 | b/AFE2A7F8CDAD296A3BCF06CF2448645E.jpg 5
418 | b/D34E5483FF07167BAA9FD039B57A0429.jpg 5
419 | b/040050435E808D9336DB106741F5AC4E.jpg 5
420 | b/1D0BEC676E3B784D975D5A13E557FCF7.jpg 5
421 | b/206A8B6699A93818F0E87BC14FDDD781.jpg 5
422 | b/1E25189101FB8A7CB582396126180F79.jpg 5
423 | b/22D94AB94209F9657A9BE5083C53B611.jpg 5
424 | b/8D975391F91BC8CB3360467DC09EFE75.jpg 5
425 | b/ED50CB1365FA1640C6C8171F8B6BEF6E.jpg 5
426 | b/FFA7E4965BE7A26BA9F12AA353691B84.jpg 5
427 | b/A81A589E0D5C484D0F81B7B05C123322.jpg 5
428 | b/5DB33AB5D8F811249D2DE8808708D4E7.jpg 5
429 | b/FC54B028496416FC58F910E6EFBDA84B.jpg 5
430 | b/13CDE870001E9405EAF4323C599E2FB6.jpg 5
431 | b/D0C517CD4FA4986E432E23642FCC8ABC.jpg 5
432 | b/9C2F957F2FE350314AC15A399770C415.jpg 5
433 | b/DB3E24C36C4FA21940A15DA5B7351695.jpg 5
434 | b/3ACD3D6FB09F5CECCB4D1D10A3567D15.jpg 5
435 | b/58988BE83794045DF9F87E1D9111B164.jpg 5
436 | b/A38061C092DDADDBC12026B2FD8BBD9C.jpg 5
437 | b/2F654121C020F97F9F5B7D2E4EEE4809.jpg 5
438 | b/69D957D426E8BBB87F2BB27C3DAE0F5D.jpg 5
439 | b/0815025A981C055CBF1528B2DA2C6FA3.jpg 5
440 | b/154A793A0B1DDF8A49D5257395197051.jpg 5
441 | b/BF604F291613A1A5AFCB5756C26D5A42.jpg 5
442 | b/7553530C20E3EF5F72748CAF3FBDD319.jpg 5
443 | b/A4F5C2A63B0E9FD648890802B0D3794A.jpg 5
444 | b/9E1794FDA934CB0932EF635C86CE5529.jpg 5
445 | b/41DF079F00FC1CDF7A34F5148B98BB64.jpg 5
446 | b/9CFFC763C8A64604881052DA33C7462C.jpg 5
447 | b/56D45AD69679E30A5CB5CAAD444E58AA.jpg 5
448 | b/7D35D7E2A4727E36EC185ACE4E3BAD88.jpg 5
449 | b/0674D9FAE1C8F77FD76E2710EFE51E4D.jpg 5
450 | b/0AB55F775D31F53C97AD0FF64CD92569.jpg 5
451 | b/2F199132AE8C1BB012B9874803423633.jpg 3
452 | b/06E7546A599DCE0994635A0F17C13546.jpg 3
453 | b/4782FB6A9D47E59DF335EEB19F3BBC81.jpg 3
454 | b/228B6EBA3138877AB8A024E63E11E480.jpg 3
455 | b/A4D35F1F675A516BA5E104415DCD7239.jpg 3
456 | b/9D86C31949F3335A6492F2236F6BBAA9.jpg 3
457 | b/FD02BE5DED1CA899B4442876F6FD5831.jpg 3
458 | b/0CD8F80A3C9803F22FDCFEDA5D3074A9.jpg 3
459 | b/E13547193B8C726914E729CDA16ACE01.jpg 3
460 | b/D953D8AD757D5D212EDD957D319514FC.jpg 3
461 | b/64A8A592A04C5BFA8D4B7162781D3CA3.jpg 3
462 | b/97528AD153732B5068745E6E2D44B8B7.jpg 3
463 | b/30DB9418D3FB20C0FC1B3B737034721A.jpg 3
464 | b/CB0A27D55803905C32261563CA27710B.jpg 3
465 | b/4FB9D549000737F4950B57BAE0A6C020.jpg 3
466 | b/0C1717639BC4DB6AB6432D16D5BE4A36.jpg 3
467 | b/7EEAD5FCA7472F45D3657A49B44FA630.jpg 3
468 | b/4EB35DD3E37CB2DEE0148DD62D846F2F.jpg 3
469 | b/0CFECC943A10E78B1F334E9890B3422F.jpg 3
470 | b/42617D7195453D01B4B111A25FEFC29C.jpg 3
471 | b/DA60919B42F494EDBCFFCD815E967072.jpg 3
472 | b/6CD65842EC674D9F12FCA544E61DDA28.jpg 3
473 | b/08031F2F2B4C021D60F48026732E00C1.jpg 3
474 | b/BFDEBE9806FC27CFD3690DBA41FDBE9C.jpg 3
475 | b/A012B46899B919BFE0D757D68C881071.jpg 3
476 | b/24572C6C1C529ECB960452FCE75563F6.jpg 3
477 | b/B930D323361F24D9EF58E8445C315233.jpg 3
478 | b/97E78A339A2756D4236FFB5F3E3C9B63.jpg 3
479 | b/984F6499EAE3914660AEA0E9DF7D56E1.jpg 3
480 | b/AFAF6AC01AD231E2B18AFCF43415544A.jpg 3
481 | b/CFAD48F3E12C0C8D438EC95C010BC068.jpg 3
482 | b/F6F06F76058694E41507028B090C0A8B.jpg 3
483 | b/88D0FE64FC6D6A50E6A96FAA7BAAC556.jpg 3
484 | b/5BC690DF7D75B389223E7DEF473D2CC6.jpg 3
485 | b/C094FDB6DCAECFF2F5D58D85DB320747.jpg 3
486 | b/750102ED0679A6D627322CD80030B2D9.jpg 3
487 | b/523DCBEFC4F30B105D7E301F97FFA5F3.jpg 3
488 | b/412B17B2E79287572F2637B919BB4DDF.jpg 3
489 | b/B216913893C6B401DE2C197B1D15EAA2.jpg 3
490 | b/57EC5444895376BC7CA3E9CD5CF897E7.jpg 3
491 | b/D374801C0DF141A571BE581F0809F577.jpg 3
492 | b/4D622E71A186AA212734EDDEF38B6609.jpg 3
493 | b/6FB55C18F30020039C7538E370FE0C98.jpg 3
494 | b/71FD3B82BAE375019956E511975F0805.jpg 3
495 | b/02326FC55FAD21E3EB032C2ADF4E9B9A.jpg 3
496 | b/F2FB4DFD2FDCD6959500F70B09D11A0D.jpg 3
497 | b/244DAC3142BEDCBC42D23B4D55B197A0.jpg 3
498 | b/0760F5D5CAF936B9B1015CBF0D51FC2F.jpg 3
499 | b/A81431784F0A65CBB5C70EFDA72621FA.jpg 3
500 | b/0D9C5CD65F9A0CC60311CF67F6B9BA90.jpg 3
501 | b/EFB685AA6FBC4FAD89001CFB4B0FD6C7.jpg 4
502 | b/FD105C1FB90A02D2255BE57D53325243.jpg 4
503 | b/533D29D9F1F6C2E19F5B5F972C44D99A.jpg 4
504 | b/77C978E318BCD9662EB96B4A5F19F35F.jpg 4
505 | b/CD1B98237D442FDE38B61D99020E7917.jpg 4
506 | b/2CD7FF443E26A0D31021009F46734EB2.jpg 4
507 | b/312445DB721563C7752B34D51DA10454.jpg 4
508 | b/E0D30B7B88E6B718A253C82D44C3F54A.jpg 4
509 | b/2C016F70D7C1E0342F39513BC0B6BC43.jpg 4
510 | b/D5849BCFAF8E1BA0EBA6E2787B8323B1.jpg 4
511 | b/3846B97DC94DA51BFCFCA3E95A287CC0.jpg 4
512 | b/AE872A16D1602FD9B09D75CB8C58F277.jpg 4
513 | b/430ECD392E4424EDA2F74F400F91881E.jpg 4
514 | b/F0C293D43F9A12BD84F9B9C61D317EFB.jpg 4
515 | b/E363DFA32CD85C5649C4F832566365F5.jpg 4
516 | b/5ACAA63B3F6B1F5CC64B5533A38CFC22.jpg 4
517 | b/5FC21CB04B95C90E1476A3CE27A3EA9E.jpg 4
518 | b/65B5ED01282AC4707E6613F75FE6AAD4.jpg 4
519 | b/33A09E862D6FFD5ED7D153C023A9F030.jpg 4
520 | b/B197C4415626DF2BBB8CCC656B891CFE.jpg 4
521 | b/9852BE61326A774F0B174D4F4A7A5D03.jpg 4
522 | b/5EF88A8D0EA57BB9D08A30B303834C29.jpg 4
523 | b/88A72A70A267147B21292A19402F3778.jpg 4
524 | b/BFEA935ED5E0B1544C791AF6C068440D.jpg 4
525 | b/519E025BE461AF37D3420D645E243394.jpg 4
526 | b/444EFD0A5C55381E8A8F79BCF443B7E4.jpg 4
527 | b/639C13BF14259D98FC6D735CE24C420E.jpg 4
528 | b/C6EF53B2FCB9EC242AC3FE334D2130F5.jpg 4
529 | b/B63C63EF218EEF64529B532078BC89B4.jpg 4
530 | b/B63E13EFAB4A3D798C08D55FF091EAB5.jpg 4
531 | b/BC81191105B77516130EAF663AE4D8BC.jpg 4
532 | b/F757946EC23E096CD6FB913DB56882B9.jpg 4
533 | b/BDE75A335C9B90AC5F2EA1625CE96475.jpg 4
534 | b/58283D505FF4FF899D119DCA4EFFFE58.jpg 4
535 | b/5F402D72C71B0517C0EF4D98E1C12905.jpg 4
536 | b/FCBABF0F60B8816680F4731B63B7246A.jpg 4
537 | b/39ACDCCE22EEC3B54879E72969A50FC9.jpg 4
538 | b/8CBE9DE02660DF5AB5DB2C58F44E520F.jpg 4
539 | b/4250D9B8CF2C72C833C4AE52858AE5AF.jpg 4
540 | b/F5D0B77FD3F671FEDB4611DA99605419.jpg 4
541 | b/6E6DD6C0B6C8F6D907B3816D57E6967B.jpg 4
542 | b/2EB3068FDBFCF4EF9F0BF93B6854EE53.jpg 4
543 | b/0E55BCE89CFCFC250051C1E18E4CE483.jpg 4
544 | b/B23FCF5AFA32C175D0D46D68724D7A0A.jpg 4
545 | b/7DC857179CF8863E60CB16A2F99EE572.jpg 4
546 | b/43C8E3F1DB3E2308349366426E2C2B5B.jpg 4
547 | b/C9A2153FEDD0552FEDA60EA661CE5D2D.jpg 4
548 | b/33F04496C0C38E835AE314ED95BE955B.jpg 4
549 | b/70C738ED5F27D6D5A17686090A3A4CD4.jpg 4
550 | b/B11AA83F5D9E776A69FFE740EDB0F6D4.jpg 4
551 | b/E3FF9BF44ABD7A590386FD6DC04441FE.jpg 0
552 | b/1F3E4785017945B9A068DF759CC94DF2.jpg 0
553 | b/CA83C6D5DED56226F8A64C45C2167407.jpg 0
554 | b/CB72DC4FCF9ABCFEF06936C5684581E8.jpg 0
555 | b/E9430381F2493159A32F5F236B4FDF8A.jpg 0
556 | b/620031A94BE8545E599268C596ACA55D.jpg 0
557 | b/18A92EDCCC167E5057B779E3CF142F89.jpg 0
558 | b/2E862EC1688653B70B53A2C1E6A87A11.jpg 0
559 | b/1BF53EEAD6CEF9203DF40C5A1A67744B.jpg 0
560 | b/BD5902152B26285D91A08A86168FF8D1.jpg 0
561 | b/6436B37ADAAAB84B9099841E0EA633F5.jpg 0
562 | b/02B33FC50DBA873198FAED4689BBFBFF.jpg 0
563 | b/78DD91697DE5840D45AF40C888EFB69D.jpg 0
564 | b/7E9F04E7B8523AAB6AE77776C1103883.jpg 0
565 | b/B0113FE6B343D901B163493258BBF919.jpg 0
566 | b/17C58ED7CD8A3CDBB79AD8DD64AB9D0A.jpg 0
567 | b/4C34DA2860E01687A2172F220D39368B.jpg 0
568 | b/C419985418C4740BB85228D8B3F157C4.jpg 0
569 | b/6E3D27967CF815A6DD3BA7260A81ACC8.jpg 0
570 | b/27F8578B1C3157CD25387AE3580E604E.jpg 0
571 | b/AA2F131C6EADCAC7B05A1FAB059DBBFF.jpg 0
572 | b/6E74BE7740A6A0EF30BFEFCC1A7FE2DC.jpg 0
573 | b/92C723F600E72B846A35236CBB4F7A4B.jpg 0
574 | b/C56E1372BFB223FAE049EBC4917D059C.jpg 0
575 | b/A6F9044F6A1275795531794DE44D7D88.jpg 0
576 | b/211C525E9ADF1E92923FBF9FDDAD317E.jpg 0
577 | b/3D07CCD1290EBDFDFC26F2469B8F94A6.jpg 0
578 | b/0F6DD993FA6C0086C3B2D476F3128EEC.jpg 0
579 | b/70214AD47A30556F16A65C256AADD066.jpg 0
580 | b/11AAB1206CBA2BB2F966A53D9D1EE087.jpg 0
581 | b/F9016E99A54A7EE51FBBE6E13FF5AE0F.jpg 0
582 | b/D4821FE0B02A69FCE94ADEA98828C687.jpg 0
583 | b/0CBDC7E01B8280AC537A9FA32D897D62.jpg 0
584 | b/8928965F493DB82C700227DD207C4B6C.jpg 0
585 | b/10EC208950F689A3377AC2F9566BE86C.jpg 0
586 | b/5764B3EB17C3D76AF1826F7EA8B226AF.jpg 0
587 | b/09EC5D53CE789A34F3E05DB048B55849.jpg 0
588 | b/007859D83594D527355CA287E096218B.jpg 0
589 | b/9CEBB04C1B69A165740138ABE077645E.jpg 0
590 | b/060FBA0F642012BCC2F4E427E7444248.jpg 0
591 | b/FDDE05B3132C71CE3E07C4182199B9CB.jpg 0
592 | b/652823C3559FF2C5C15A25ECA120A626.jpg 0
593 | b/3504460129C1001C390F787870CB1DD4.jpg 0
594 | b/8D1A5ADB91499E383ED331ED912B09EC.jpg 0
595 | b/27205A7335FA260E8446F9371AD9A3C4.jpg 0
596 | b/9228DF8B79C57CDEB6A23D94A34232E3.jpg 0
597 | b/5C915F5961F37AD219CA5DCDFF2B78BA.jpg 0
598 | b/95498E27EC444C2BBC56FC86CF245165.jpg 0
599 | b/33D891118F9D9D6090B033394B7D2B7E.jpg 0
600 | b/B08A893A4CFF2B0F3B39CF1CBF2385D2.jpg 0
601 |
--------------------------------------------------------------------------------
/reciprocal_norm.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch.nn.functional as F
3 | from torch.nn.modules.module import Module
4 | from torch.nn.parameter import Parameter
5 | from torch.autograd import Variable
6 | import torch
7 | import itertools
8 | from option import args
9 |
10 | gpus = args.gpu_id.split(',')
11 | def last_zero_init(m):
12 | if isinstance(m, nn.Sequential):
13 | nn.init.constant_(m[-1].weight, val=0)
14 | nn.init.constant_(m[-1].bias, val=0)
15 | else:
16 | nn.init.constant_(m, val=0)
17 |
18 | class _ReciprocalNorm(Module):
19 |
20 | def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True, is_1d=False):
21 | super(_ReciprocalNorm, self).__init__()
22 | self.num_features = num_features
23 | self.eps = eps
24 | self.momentum = momentum
25 | self.affine = affine
26 | self.track_running_stats = track_running_stats
27 | self.group = 1 if num_features < 512 else num_features // 512
28 | self.my_mean_w_s = nn.Parameter(torch.ones(num_features,1))
29 | self.my_var_w_s = nn.Parameter(torch.ones(num_features,1))
30 | self.my_mean_w_t = nn.Parameter(torch.ones(num_features,1))
31 | self.my_var_w_t = nn.Parameter(torch.ones(num_features,1))
32 |
33 | if self.affine:
34 | self.weight = Parameter(torch.Tensor(num_features))
35 | self.bias = Parameter(torch.Tensor(num_features))
36 | else:
37 | self.register_parameter('weight', None)
38 | self.register_parameter('bias', None)
39 | self.register_parameter('lamda', None)
40 |
41 | if self.track_running_stats:
42 | self.register_buffer('running_mean_source', torch.zeros(num_features))
43 | self.register_buffer('running_mean_target', torch.zeros(num_features))
44 | self.register_buffer('running_var_source', torch.ones(num_features))
45 | self.register_buffer('running_var_target', torch.ones(num_features))
46 | self.register_buffer('num_batches_tracked', torch.tensor(0, dtype=torch.long))
47 | else:
48 | self.register_parameter('running_mean_source', None)
49 | self.register_parameter('running_mean_target', None)
50 | self.register_parameter('running_var_source', None)
51 | self.register_parameter('running_var_target', None)
52 | self.register_parameter('num_batches_tracked', None)
53 | self.reset_parameters()
54 |
55 | def reset_parameters(self):
56 | if self.track_running_stats:
57 | self.running_mean_source.zero_()
58 | self.running_mean_target.zero_()
59 | self.running_var_source.fill_(1)
60 | self.running_var_target.fill_(1)
61 | self.num_batches_tracked.zero_()
62 | if self.affine:
63 | self.weight.data.uniform_()
64 | self.bias.data.zero_()
65 |
66 | def _check_input_dim(self, input):
67 |         raise NotImplementedError
68 |
69 | def _load_from_state_dict_from_pretrained_model(self, state_dict, prefix, metadata, strict, missing_keys,
70 | unexpected_keys, error_msgs):
71 | r"""Copies parameters and buffers from :attr:`state_dict` into only
72 | this module, but not its descendants. This is called on every submodule
73 | in :meth:`~torch.nn.Module.load_state_dict`. Metadata saved for this
74 |         module in input :attr:`state_dict` is provided as :attr:`metadata`.
75 |         For state dicts without metadata, :attr:`metadata` is empty.
76 | Subclasses can achieve class-specific backward compatible loading using
77 | the version number at `metadata.get("version", None)`.
78 |
79 | .. note::
80 | :attr:`state_dict` is not the same object as the input
81 | :attr:`state_dict` to :meth:`~torch.nn.Module.load_state_dict`. So
82 | it can be modified.
83 |
84 | Arguments:
85 | state_dict (dict): a dict containing parameters and
86 | persistent buffers.
87 | prefix (str): the prefix for parameters and buffers used in this
88 | module
89 |             metadata (dict): a dict containing the metadata for this module,
90 |                 e.g. the serialization version read via metadata.get("version")
91 | strict (bool): whether to strictly enforce that the keys in
92 | :attr:`state_dict` with :attr:`prefix` match the names of
93 | parameters and buffers in this module
94 | missing_keys (list of str): if ``strict=False``, add missing keys to
95 | this list
96 | unexpected_keys (list of str): if ``strict=False``, add unexpected
97 | keys to this list
98 | error_msgs (list of str): error messages should be added to this
99 | list, and will be reported together in
100 | :meth:`~torch.nn.Module.load_state_dict`
101 | """
102 | local_name_params = itertools.chain(self._parameters.items(), self._buffers.items())
103 | local_state = {k: v.data for k, v in local_name_params if v is not None}
104 |
105 | for name, param in local_state.items():
106 | key = prefix + name
107 | if 'source' in key or 'target' in key:
108 |                 key = key[:-7]  # drop the '_source'/'_target' suffix so keys match the pretrained single-domain BN names
109 | # print(key)
110 | if key in state_dict:
111 | input_param = state_dict[key]
112 | if input_param.shape != param.shape:
113 | # local shape should match the one in checkpoint
114 | error_msgs.append('size mismatch for {}: copying a param of {} from checkpoint, '
115 | 'where the shape is {} in current model.'
116 | .format(key, param.shape, input_param.shape))
117 | continue
118 | if isinstance(input_param, Parameter):
119 | # backwards compatibility for serialized parameters
120 | input_param = input_param.data
121 | try:
122 | param.copy_(input_param)
123 | except Exception:
124 | error_msgs.append('While copying the parameter named "{}", '
125 | 'whose dimensions in the model are {} and '
126 | 'whose dimensions in the checkpoint are {}.'
127 | .format(key, param.size(), input_param.size()))
128 | elif strict:
129 | missing_keys.append(key)
130 |
131 | def forward(self, input, option='residual', running_flag=False, kernel='Student'):
132 | self._check_input_dim(input)
133 |
134 | if self.training: ## train
135 |
136 | ## 1. Domain Specific Mean and Variance.
137 | if input.dim() == 4:
138 | b,c,h,w = input.size()
139 | batch_size = b // 2
140 | else:
141 | b,c = input.size()
142 | batch_size = b // 2
143 | input_source = input[:batch_size]
144 | input_target = input[batch_size:]
145 | source_for_norm = input_source.clone()
146 | target_for_norm = input_target.clone()
147 | # print(input.dim())
148 | if input.dim() == 4: ## TransNorm2d
149 | input_source = input_source.permute(1, 0, 2, 3).contiguous().view(self.num_features, -1) # [c, bhw]
150 | input_target = input_target.permute(1, 0, 2, 3).contiguous().view(self.num_features, -1)
151 | else:
152 | input_source = input_source.permute(1, 0).contiguous().view(self.num_features, -1) # [c, bhw]
153 | input_target = input_target.permute(1, 0).contiguous().view(self.num_features, -1)
154 |
155 | cur_mean_source = torch.mean(input_source, dim=1).view(-1, 1)
156 | cur_var_source = torch.var(input_source, dim=1).view(-1, 1)
157 | cur_mean_target = torch.mean(input_target, dim=1).view(-1, 1)
158 | cur_var_target = torch.var(input_target, dim=1).view(-1, 1)
159 | if self.group > 1:
160 | cur_mean_source = cur_mean_source.view(c // self.group, self.group)
161 | cur_var_source = cur_var_source.view(c // self.group, self.group)
162 | cur_mean_target = cur_mean_target.view(c // self.group, self.group)
163 | cur_var_target = cur_var_target.view(c // self.group, self.group)
164 |
165 |
166 | if args.dist == 'l1':
167 | mean_dis_st = -1 * torch.abs(cur_mean_source - cur_mean_target.permute(1, 0).contiguous())
168 | mean_dis_ts = -1 * torch.abs(cur_mean_target - cur_mean_source.permute(1, 0).contiguous())
169 | var_dis_st = -1 * torch.abs(cur_var_source - cur_var_target.permute(1, 0).contiguous())
170 | var_dis_ts = -1 * torch.abs(cur_var_target - cur_var_source.permute(1, 0).contiguous())
171 |
172 |             elif args.dist == 'l2':  # negative squared L2 distance as affinity logits
173 |                 # (with group > 1, each group's statistics are first averaged to a scalar)
174 | if self.group > 1:
175 | mean_s_l2, mean_t_l2, var_s_l2, var_t_l2 = cur_mean_source.mean(1, keepdim=True), cur_mean_target.mean(1, keepdim=True), cur_var_source.mean(1,keepdim=True), cur_var_target.mean(1,keepdim=True)
176 | else:
177 | mean_s_l2, mean_t_l2, var_s_l2, var_t_l2 = cur_mean_source, cur_mean_target, cur_var_source, cur_var_target
178 |
179 | mean_dis_st = -1 * torch.pow(mean_s_l2 - mean_t_l2.permute(1, 0).contiguous(), 2) #[c,1] - [1,c]
180 | mean_dis_ts = -1 * torch.pow(mean_t_l2 - mean_s_l2.permute(1, 0).contiguous(), 2) # [c,c]
181 | var_dis_st = -1 * torch.pow(var_s_l2 - var_t_l2.permute(1, 0).contiguous(), 2)
182 | var_dis_ts = -1 * torch.pow(var_t_l2 - var_s_l2.permute(1, 0).contiguous(), 2)
183 |
184 |             elif args.dist == 'cosine':
185 |                 # dot-product affinity (statistics are not normalized, so not a true cosine)
186 | mean_dis_st = torch.matmul(cur_mean_source, cur_mean_target.t())
187 | mean_dis_ts = torch.matmul(cur_mean_target, cur_mean_source.t())
188 | var_dis_st = torch.matmul(cur_var_source, cur_var_target.t())
189 | var_dis_ts = torch.matmul(cur_var_target, cur_var_source.t())
190 |
191 |             mean_pro_st = F.softmax(mean_dis_st, dim=1)  # row-wise attention weights
192 |             mean_pro_ts = F.softmax(mean_dis_ts, dim=1)
193 |             var_pro_st = F.softmax(var_dis_st, dim=1)
194 |             var_pro_ts = F.softmax(var_dis_ts, dim=1)
195 |
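# Note (added): each row of the *_pro_st matrices is a softmax distribution
# over target channels (rows sum to 1), and symmetrically for *_pro_ts. The
# matmuls below therefore gather, for every source channel, an
# attention-weighted mixture of the target statistics, and vice versa.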
196 | mean_s_in_t = torch.matmul(mean_pro_st, cur_mean_target) # [c//g,g]
197 | mean_t_in_s = torch.matmul(mean_pro_ts, cur_mean_source)
198 | var_s_in_t = torch.matmul(var_pro_st, cur_var_target)
199 | var_t_in_s = torch.matmul(var_pro_ts, cur_var_source)
200 |
201 | if self.group > 1:
202 | mean_s_in_t = mean_s_in_t.view(c,1)
203 | mean_t_in_s = mean_t_in_s.view(c,1)
204 | var_s_in_t = var_s_in_t.view(c, 1)
205 | var_t_in_s = var_t_in_s.view(c, 1)
206 | cur_mean_source = cur_mean_source.view(c, 1)
207 | cur_mean_target = cur_mean_target.view(c, 1)
208 | cur_var_source = cur_var_source.view(c, 1)
209 | cur_var_target = cur_var_target.view(c, 1)
210 |
211 | mean_source = self.my_mean_w_s * cur_mean_source + (1-self.my_mean_w_s) * mean_s_in_t # [c,1]
212 | mean_target = self.my_mean_w_t * cur_mean_target + (1-self.my_mean_w_t) * mean_t_in_s
213 | var_source = self.my_var_w_s * cur_var_source + (1-self.my_var_w_s) * var_s_in_t
214 | var_target = self.my_var_w_t * cur_var_target + (1-self.my_var_w_t) * var_t_in_s
215 |
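# In symbols (added): mean_source = w_s * mu_s + (1 - w_s) * P_st @ mu_t,
# and likewise for the variances and for the target domain, i.e. each
# domain's statistics are compensated with the attention-gathered
# statistics of the other domain before normalization.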
216 | with torch.no_grad():
217 | self.running_mean_source = (1-self.momentum) * self.running_mean_source + self.momentum * mean_source.squeeze(1)
218 | self.running_mean_target = (1-self.momentum) * self.running_mean_target + self.momentum * mean_target.squeeze(1)
219 | self.running_var_source = (1-self.momentum) * self.running_var_source + self.momentum * var_source.squeeze(1)
220 | self.running_var_target = (1-self.momentum) * self.running_var_target + self.momentum * var_target.squeeze(1)
221 |
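# Note (added): the running estimates are updated from the *compensated*
# statistics, so the eval-time branch below already benefits from the
# cross-domain compensation accumulated during training.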
222 | z_source = (input_source - mean_source) / (var_source + self.eps).sqrt()
223 | z_target = (input_target - mean_target) / (var_target + self.eps).sqrt()
224 | if input.dim() == 4:
225 | gamma = self.weight.view(1,self.num_features,1,1)
226 | beta = self.bias.view(1,self.num_features,1,1)
227 | z_source, z_target = z_source.view(c, batch_size, h, w).permute(1,0,2,3).contiguous(), z_target.view(c, batch_size, h, w).permute(1,0,2,3).contiguous()
228 | else:
229 | gamma = self.weight.view(1, self.num_features)
230 | beta = self.bias.view(1, self.num_features)
231 | z_source, z_target = z_source.view(c, batch_size).permute(1,0).contiguous(), z_target.view(c, batch_size).permute(1,0).contiguous()
232 | z_source = gamma * z_source + beta
233 | z_target = gamma * z_target + beta
234 |
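# Note (added): a single affine pair (self.weight, self.bias) is shared by
# both domains; only the normalization statistics are domain-specific.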
235 | z = torch.cat((z_source, z_target), dim=0)
236 | return z
237 |
238 |         else:  # eval mode: normalize with the target-domain running statistics
239 | z = F.batch_norm(
240 | input, self.running_mean_target, self.running_var_target, self.weight, self.bias,
241 | self.training or not self.track_running_stats, self.momentum, self.eps)
242 | return z
243 |
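# --- usage sketch (added; shapes and batch sizes are illustrative) -----------
# In training mode the module expects source and target features concatenated
# along the batch dimension:
#
#   rn = RN2d(64).train()                        # BatchNorm-like constructor
#   x = torch.cat([x_source, x_target], dim=0)   # [2B, 64, H, W]
#   y = rn(x)                                    # y[:B] source, y[B:] target
#
# In eval mode a plain (single-domain) batch is normalized with the
# target-domain running statistics.
# ------------------------------------------------------------------------------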
244 | def extra_repr(self):
245 | return '{num_features}, eps={eps}, momentum={momentum}, affine={affine}, ' \
246 | 'track_running_stats={track_running_stats}'.format(**self.__dict__)
247 |
248 | def _load_from_state_dict(self, state_dict, prefix, metadata, strict,
249 | missing_keys, unexpected_keys, error_msgs):
250 | version = metadata.get('version', None)
251 | if (version is None or version < 2) and self.track_running_stats:
252 | # at version 2: added num_batches_tracked buffer
253 | # this should have a default value of 0
254 | num_batches_tracked_key = prefix + 'num_batches_tracked'
255 |             if num_batches_tracked_key not in state_dict:
256 |                 state_dict[num_batches_tracked_key] = torch.tensor(0, dtype=torch.long)
263 |
264 | self._load_from_state_dict_from_pretrained_model(
265 | state_dict, prefix, metadata, strict,
266 | missing_keys, unexpected_keys, error_msgs)
267 |
268 |
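# --- loading sketch (added; model and file names are hypothetical) -----------
# With the hooks above, a network whose BN layers were replaced by RN layers
# can still be initialized from a standard single-domain checkpoint:
#
#   state = torch.load('resnet50_imagenet.pth')
#   model.load_state_dict(state, strict=False)   # RN buffers remapped per key
# ------------------------------------------------------------------------------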
269 | class RN1d(_ReciprocalNorm):
270 | r"""Applies Batch Normalization over a 2D or 3D input (a mini-batch of 1D
271 | inputs with optional additional channel dimension) as described in the paper
272 | `Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift`_ .
273 |
274 | .. math::
275 |
276 | y = \frac{x - \mathrm{E}[x]}{\sqrt{\mathrm{Var}[x] + \epsilon}} * \gamma + \beta
277 |
278 | The mean and standard-deviation are calculated per-dimension over
279 | the mini-batches and :math:`\gamma` and :math:`\beta` are learnable parameter vectors
280 | of size `C` (where `C` is the input size).
281 |
282 | By default, during training this layer keeps running estimates of its
283 | computed mean and variance, which are then used for normalization during
284 | evaluation. The running estimates are kept with a default :attr:`momentum`
285 | of 0.1.
286 |
287 | If :attr:`track_running_stats` is set to ``False``, this layer then does not
288 | keep running estimates, and batch statistics are instead used during
289 | evaluation time as well.
290 |
291 | .. note::
292 | This :attr:`momentum` argument is different from one used in optimizer
293 | classes and the conventional notion of momentum. Mathematically, the
294 | update rule for running statistics here is
295 |         :math:`\hat{x}_\text{new} = (1 - \text{momentum}) \times \hat{x} + \text{momentum} \times x_t`,
296 | where :math:`\hat{x}` is the estimated statistic and :math:`x_t` is the
297 | new observed value.
298 |
299 | Because the Batch Normalization is done over the `C` dimension, computing statistics
300 | on `(N, L)` slices, it's common terminology to call this Temporal Batch Normalization.
301 |
302 | Args:
303 | num_features: :math:`C` from an expected input of size
304 | :math:`(N, C, L)` or :math:`L` from input of size :math:`(N, L)`
305 | eps: a value added to the denominator for numerical stability.
306 | Default: 1e-5
307 | momentum: the value used for the running_mean and running_var
308 | computation. Can be set to ``None`` for cumulative moving average
309 | (i.e. simple average). Default: 0.1
310 | affine: a boolean value that when set to ``True``, this module has
311 | learnable affine parameters. Default: ``True``
312 | track_running_stats: a boolean value that when set to ``True``, this
313 | module tracks the running mean and variance, and when set to ``False``,
314 | this module does not track such statistics and always uses batch
315 | statistics in both training and eval modes. Default: ``True``
316 |
317 | Shape:
318 | - Input: :math:`(N, C)` or :math:`(N, C, L)`
319 | - Output: :math:`(N, C)` or :math:`(N, C, L)` (same shape as input)
320 |
321 | Examples::
322 |
323 |         >>> # With learnable parameters
324 |         >>> m = RN1d(100)
325 |         >>> # Without learnable parameters
326 |         >>> m = RN1d(100, affine=False)
327 | >>> input = torch.randn(20, 100)
328 | >>> output = m(input)
329 |
330 | .. _`Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift`:
331 | https://arxiv.org/abs/1502.03167
332 | """
333 |
334 | def _check_input_dim(self, input):
335 | if input.dim() != 2 and input.dim() != 3:
336 | raise ValueError('expected 2D or 3D input (got {}D input)'
337 | .format(input.dim()))
338 |
339 |
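# Shape-contract example (added; assumes a BatchNorm-like constructor):
#   RN1d(16)(torch.randn(4, 16, 8, 8))  # ValueError: expected 2D or 3D input (got 4D input)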
340 | class RN2d(_ReciprocalNorm):
341 | r"""Applies Batch Normalization over a 4D input (a mini-batch of 2D inputs
342 | with additional channel dimension) as described in the paper
343 | `Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift`_ .
344 |
345 | .. math::
346 |
347 | y = \frac{x - \mathrm{E}[x]}{ \sqrt{\mathrm{Var}[x] + \epsilon}} * \gamma + \beta
348 |
349 | The mean and standard-deviation are calculated per-dimension over
350 | the mini-batches and :math:`\gamma` and :math:`\beta` are learnable parameter vectors
351 | of size `C` (where `C` is the input size).
352 |
353 | By default, during training this layer keeps running estimates of its
354 | computed mean and variance, which are then used for normalization during
355 | evaluation. The running estimates are kept with a default :attr:`momentum`
356 | of 0.1.
357 |
358 | If :attr:`track_running_stats` is set to ``False``, this layer then does not
359 | keep running estimates, and batch statistics are instead used during
360 | evaluation time as well.
361 |
362 | .. note::
363 | This :attr:`momentum` argument is different from one used in optimizer
364 | classes and the conventional notion of momentum. Mathematically, the
365 | update rule for running statistics here is
366 |         :math:`\hat{x}_\text{new} = (1 - \text{momentum}) \times \hat{x} + \text{momentum} \times x_t`,
367 | where :math:`\hat{x}` is the estimated statistic and :math:`x_t` is the
368 | new observed value.
369 |
370 | Because the Batch Normalization is done over the `C` dimension, computing statistics
371 | on `(N, H, W)` slices, it's common terminology to call this Spatial Batch Normalization.
372 |
373 | Args:
374 | num_features: :math:`C` from an expected input of size
375 | :math:`(N, C, H, W)`
376 | eps: a value added to the denominator for numerical stability.
377 | Default: 1e-5
378 | momentum: the value used for the running_mean and running_var
379 | computation. Can be set to ``None`` for cumulative moving average
380 | (i.e. simple average). Default: 0.1
381 | affine: a boolean value that when set to ``True``, this module has
382 | learnable affine parameters. Default: ``True``
383 | track_running_stats: a boolean value that when set to ``True``, this
384 | module tracks the running mean and variance, and when set to ``False``,
385 | this module does not track such statistics and always uses batch
386 | statistics in both training and eval modes. Default: ``True``
387 |
388 | Shape:
389 | - Input: :math:`(N, C, H, W)`
390 | - Output: :math:`(N, C, H, W)` (same shape as input)
391 |
392 | Examples::
393 |
394 |         >>> # With learnable parameters
395 |         >>> m = RN2d(100)
396 |         >>> # Without learnable parameters
397 |         >>> m = RN2d(100, affine=False)
398 | >>> input = torch.randn(20, 100, 35, 45)
399 | >>> output = m(input)
400 |
401 | .. _`Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift`:
402 | https://arxiv.org/abs/1502.03167
403 | """
404 |
405 | def _check_input_dim(self, input):
406 | if input.dim() != 4:
407 | raise ValueError('expected 4D input (got {}D input)'
408 | .format(input.dim()))
409 |
410 |
411 | class RN3d(_ReciprocalNorm):
412 | r"""Applies Batch Normalization over a 5D input (a mini-batch of 3D inputs
413 | with additional channel dimension) as described in the paper
414 | `Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift`_ .
415 |
416 | .. math::
417 |
418 | y = \frac{x - \mathrm{E}[x]}{ \sqrt{\mathrm{Var}[x] + \epsilon}} * \gamma + \beta
419 |
420 | The mean and standard-deviation are calculated per-dimension over
421 | the mini-batches and :math:`\gamma` and :math:`\beta` are learnable parameter vectors
422 | of size `C` (where `C` is the input size).
423 |
424 | By default, during training this layer keeps running estimates of its
425 | computed mean and variance, which are then used for normalization during
426 | evaluation. The running estimates are kept with a default :attr:`momentum`
427 | of 0.1.
428 |
429 | If :attr:`track_running_stats` is set to ``False``, this layer then does not
430 | keep running estimates, and batch statistics are instead used during
431 | evaluation time as well.
432 |
433 | .. note::
434 | This :attr:`momentum` argument is different from one used in optimizer
435 | classes and the conventional notion of momentum. Mathematically, the
436 | update rule for running statistics here is
437 |         :math:`\hat{x}_\text{new} = (1 - \text{momentum}) \times \hat{x} + \text{momentum} \times x_t`,
438 | where :math:`\hat{x}` is the estimated statistic and :math:`x_t` is the
439 | new observed value.
440 |
441 | Because the Batch Normalization is done over the `C` dimension, computing statistics
442 | on `(N, D, H, W)` slices, it's common terminology to call this Volumetric Batch Normalization
443 | or Spatio-temporal Batch Normalization.
444 |
445 | Args:
446 | num_features: :math:`C` from an expected input of size
447 | :math:`(N, C, D, H, W)`
448 | eps: a value added to the denominator for numerical stability.
449 | Default: 1e-5
450 | momentum: the value used for the running_mean and running_var
451 | computation. Can be set to ``None`` for cumulative moving average
452 | (i.e. simple average). Default: 0.1
453 | affine: a boolean value that when set to ``True``, this module has
454 | learnable affine parameters. Default: ``True``
455 | track_running_stats: a boolean value that when set to ``True``, this
456 | module tracks the running mean and variance, and when set to ``False``,
457 | this module does not track such statistics and always uses batch
458 | statistics in both training and eval modes. Default: ``True``
459 |
460 | Shape:
461 | - Input: :math:`(N, C, D, H, W)`
462 | - Output: :math:`(N, C, D, H, W)` (same shape as input)
463 |
464 | Examples::
465 |
466 |         >>> # With learnable parameters
467 |         >>> m = RN3d(100)
468 |         >>> # Without learnable parameters
469 |         >>> m = RN3d(100, affine=False)
470 | >>> input = torch.randn(20, 100, 35, 45, 10)
471 | >>> output = m(input)
472 |
473 | .. _`Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift`:
474 | https://arxiv.org/abs/1502.03167
475 | """
476 |
477 | def _check_input_dim(self, input):
478 | if input.dim() != 5:
479 | raise ValueError('expected 5D input (got {}D input)'
480 | .format(input.dim()))
481 |
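if __name__ == '__main__':
    # Smoke test (added sketch): assumes a BatchNorm-like constructor and that
    # the module-level `args` used in forward() provides args.dist in
    # {'l1', 'l2', 'cosine'}.
    rn = RN2d(4).train()
    x = torch.randn(6, 4, 5, 5)  # first 3 samples: source, last 3: target
    assert rn(x).shape == x.shape
    rn.eval()
    assert rn(torch.randn(2, 4, 5, 5)).shape == (2, 4, 5, 5)
    print('RN2d forward OK in train and eval mode')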
--------------------------------------------------------------------------------