├── .gitignore ├── README.md ├── create_data_lists.py ├── datasets.py ├── detect.py ├── eval.py ├── json ├── clipart │ ├── TEST_images.json │ ├── TEST_objects.json │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── clipart_dt │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── clipart_dt_pl │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── comic │ ├── TEST_images.json │ ├── TEST_objects.json │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── comic_dt │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── comic_dt_pl │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── p_clipart_dt │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── p_clipart_dt_pl │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── p_comic_dt │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── p_comic_dt_pl │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── p_watercolor_dt │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── p_watercolor_dt_pl │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── watercolor │ ├── TEST_images.json │ ├── TEST_objects.json │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── watercolor_dt │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json └── watercolor_dt_pl │ ├── TRAIN_images.json │ ├── TRAIN_objects.json │ └── label_map.json ├── lib ├── label_file.py ├── test.xml └── voc_io.py ├── model.py ├── pseudo_label.py ├── train.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.tar 2 | .vscode 3 | *__pycache__ 4 | *.ipynb* 5 | *.zip 6 | /dataset 7 | test* 8 | result/ 9 | output/ -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ```bash 2 | python3 pseudo_label.py --root ./dataset/clipart --data_folder ./json/clipart/ --result ./dataset/clipart_dt_pl/ --gpu 7 --checkpoint result/clipart_dt.pth.tar 3 | 4 | python3 eval.py --data_folder ./json/clipart/ --checkpoint result/clipart.tar --gpu 7 5 | 6 | python3 train.py --data_folder ./json/clipart_dt_pl/ --checkpoint result/clipart_dt.pth.tar --gpu 5 --iteration 20000 --result result/clipart_dt_pl 7 | ``` 8 | 9 | | | clipart | watercolor | comic | 10 | | :------: | :-----: | :--------: | :---: | 11 | | baseline | 28.5 | 50.1 | 23.1 | 12 | | ideal case | 53.3 | 59.1 | 44.7 | 13 | | DT | 35.8 | 46.3 | 29.3 | 14 | | DT + PL | 45.1 | 56.9 | 39.7 | 15 | | P_DT | 37.1 | 49.4 | 29.9 | 16 | | P_DT + PL | 45.7 | 57.6 | 39.9 | 17 | 18 | P_DT implies joint training with PASCAL VOC dataset 19 | -------------------------------------------------------------------------------- /create_data_lists.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from utils import create_data_lists 3 | 4 | 5 | if __name__ == '__main__': 6 | parser = argparse.ArgumentParser() 7 | parser.add_argument('--data_type', required=True, choices=('clipart', 'watercolor', 'comic')) 8 | parser.add_argument('--train_type', required=True, choices=('ideal', 'dt', 'dt_pl', 'p_dt', 'p_dt_pl')) 9 | args = parser.parse_args() 10 | 11 | if args.train_type == 'ideal': 12 | voc07_path1 = f'./dataset/{args.data_type}' 13 | voc07_path2 = voc12_path1 = voc12_path2 = None 14 | output_folder = f'./json/{args.data_type}' 15 | elif args.train_type in ('dt', 'p_dt'): 16 | voc07_path1 = f'./dataset/{args.data_type}_dt/VOC2007' 17 | voc12_path1 = f'./dataset/{args.data_type}_dt/VOC2012' 18 | voc07_path2 = voc12_path2 = None 19 | output_folder = f'./json/{args.data_type}_dt' 20 | elif args.train_type in ('dt_pl', 'p_dt_pl'): 21 | voc07_path1 = f'./dataset/{args.data_type}_dt_pl' 22 | voc07_path2 = voc12_path1 = voc12_path2 = None 23 | output_folder = f'./json/{args.data_type}_dt_pl' 24 | if 'p' == args.train_type[0]: 25 | voc07_path2 = './dataset/pascal_voc/VOC2007' 26 | voc12_path2 = './dataset/pascal_voc/VOC2012' 27 | output_folder = output_folder[:7] + 'p_' + output_folder[7:] 28 | 29 | # print(voc07_path1) 30 | # print(voc07_path2) 31 | # print(voc12_path1) 32 | # print(voc12_path2) 33 | # print(output_folder) 34 | 35 | create_data_lists(voc07_path1=voc07_path1, 36 | voc07_path2=voc07_path2, 37 | voc12_path1=voc12_path1, 38 | voc12_path2=voc12_path2, 39 | output_folder=output_folder, 40 | type=args.train_type) 41 | -------------------------------------------------------------------------------- /datasets.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.utils.data import Dataset 3 | import json 4 | import os 5 | from PIL import Image 6 | from utils import transform 7 | 8 | 9 | class PascalVOCDataset(Dataset): 10 | """ 11 | A PyTorch Dataset class to be used in a PyTorch DataLoader to create batches. 12 | """ 13 | 14 | def __init__(self, data_folder, split, keep_difficult=False): 15 | """ 16 | :param data_folder: folder where data files are stored 17 | :param split: split, one of 'TRAIN' or 'TEST' 18 | :param keep_difficult: keep or discard objects that are considered difficult to detect? 19 | """ 20 | self.split = split.upper() 21 | 22 | assert self.split in {'TRAIN', 'TEST'} 23 | 24 | self.data_folder = data_folder 25 | self.keep_difficult = keep_difficult 26 | 27 | # Read data files 28 | with open(os.path.join(data_folder, self.split + '_images.json'), 'r') as j: 29 | self.images = json.load(j) 30 | with open(os.path.join(data_folder, self.split + '_objects.json'), 'r') as j: 31 | self.objects = json.load(j) 32 | 33 | assert len(self.images) == len(self.objects) 34 | 35 | def __getitem__(self, i): 36 | # Read image 37 | image = Image.open(self.images[i], mode='r') 38 | image = image.convert('RGB') 39 | 40 | # Read objects in this image (bounding boxes, labels, difficulties) 41 | objects = self.objects[i] 42 | boxes = torch.FloatTensor(objects['boxes']) # (n_objects, 4) 43 | labels = torch.LongTensor(objects['labels']) # (n_objects) 44 | difficulties = torch.ByteTensor(objects['difficulties']) # (n_objects) 45 | 46 | # Discard difficult objects, if desired 47 | if not self.keep_difficult: 48 | boxes = boxes[1 - difficulties] 49 | labels = labels[1 - difficulties] 50 | difficulties = difficulties[1 - difficulties] 51 | 52 | # Apply transformations 53 | image, boxes, labels, difficulties = transform(image, boxes, labels, difficulties, split=self.split) 54 | 55 | return image, boxes, labels, difficulties 56 | 57 | def __len__(self): 58 | return len(self.images) 59 | 60 | def collate_fn(self, batch): 61 | """ 62 | Since each image may have a different number of objects, we need a collate function (to be passed to the DataLoader). 63 | 64 | This describes how to combine these tensors of different sizes. We use lists. 65 | 66 | Note: this need not be defined in this Class, can be standalone. 67 | 68 | :param batch: an iterable of N sets from __getitem__() 69 | :return: a tensor of images, lists of varying-size tensors of bounding boxes, labels, and difficulties 70 | """ 71 | 72 | images = list() 73 | boxes = list() 74 | labels = list() 75 | difficulties = list() 76 | 77 | for b in batch: 78 | images.append(b[0]) 79 | boxes.append(b[1]) 80 | labels.append(b[2]) 81 | difficulties.append(b[3]) 82 | 83 | images = torch.stack(images, dim=0) 84 | 85 | return images, boxes, labels, difficulties # tensor (N, 3, 300, 300), 3 lists of N tensors each 86 | -------------------------------------------------------------------------------- /detect.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import argparse 3 | import warnings 4 | import os 5 | 6 | from torchvision import transforms 7 | from utils import rev_label_map, label_color_map, label_map 8 | from PIL import Image, ImageDraw, ImageFont 9 | from matplotlib import pyplot as plt 10 | from model import SSD300 11 | from tqdm import tqdm 12 | 13 | 14 | def detect(original_image, min_score, max_overlap, top_k, device, suppress=None): 15 | """ 16 | Detect objects in an image with a trained SSD300, and visualize the results. 17 | 18 | :param original_image: image, a PIL Image 19 | :param min_score: minimum threshold for a detected box to be considered a match for a certain class 20 | :param max_overlap: maximum overlap two boxes can have so that the one with the lower score is not suppressed via Non-Maximum Suppression (NMS) 21 | :param top_k: if there are a lot of resulting detection across all classes, keep only the top 'k' 22 | :param suppress: classes that you know for sure cannot be in the image or you do not want in the image, a list 23 | :return: annotated image, a PIL Image 24 | """ 25 | 26 | # Transform 27 | image = normalize(to_tensor(resize(original_image))) 28 | 29 | # Move to default device 30 | image = image.to(device) 31 | 32 | # Forward prop. 33 | predicted_locs, predicted_scores = model(image.unsqueeze(0)) 34 | 35 | # Detect objects in SSD output 36 | det_boxes, det_labels, det_scores = model.detect_objects(predicted_locs, predicted_scores, min_score=min_score, 37 | max_overlap=max_overlap, top_k=top_k, device=device) 38 | 39 | # Move detections to the CPU 40 | det_boxes = det_boxes[0].to('cpu') 41 | 42 | # Transform to original image dimensions 43 | original_dims = torch.FloatTensor( 44 | [original_image.width, original_image.height, original_image.width, original_image.height]).unsqueeze(0) 45 | det_boxes = det_boxes * original_dims 46 | 47 | # Decode class integer labels 48 | det_labels = [rev_label_map[l] for l in det_labels[0].to('cpu').tolist()] 49 | 50 | # If no objects found, the detected labels will be set to ['0.'], i.e. ['background'] in SSD300.detect_objects() in model.py 51 | if det_labels == ['background']: 52 | # Just return original image 53 | return original_image 54 | 55 | # Annotate 56 | annotated_image = original_image 57 | draw = ImageDraw.Draw(annotated_image) 58 | font = ImageFont.truetype("LiberationSansNarrow-Bold.ttf", 15) 59 | 60 | # Suppress specific classes, if needed 61 | for i in range(det_boxes.size(0)): 62 | if suppress is not None: 63 | if det_labels[i] in suppress: 64 | continue 65 | 66 | # Boxes 67 | box_location = det_boxes[i].tolist() 68 | draw.rectangle(xy=box_location, outline=label_color_map[det_labels[i]]) 69 | draw.rectangle(xy=[l + 1. for l in box_location], outline=label_color_map[ 70 | det_labels[i]]) # a second rectangle at an offset of 1 pixel to increase line thickness 71 | # draw.rectangle(xy=[l + 2. for l in box_location], outline=label_color_map[ 72 | # det_labels[i]]) # a third rectangle at an offset of 1 pixel to increase line thickness 73 | # draw.rectangle(xy=[l + 3. for l in box_location], outline=label_color_map[ 74 | # det_labels[i]]) # a fourth rectangle at an offset of 1 pixel to increase line thickness 75 | 76 | # Text 77 | text_size = font.getsize(det_labels[i].upper()) 78 | text_location = [box_location[0] + 2., box_location[1]] 79 | textbox_location = [box_location[0], box_location[1] + text_size[1], box_location[0] + text_size[0] + 4., 80 | box_location[1]] 81 | draw.rectangle(xy=textbox_location, fill=label_color_map[det_labels[i]]) 82 | draw.text(xy=text_location, text=det_labels[i].upper(), fill='white', 83 | font=font) 84 | del draw 85 | 86 | return annotated_image 87 | 88 | 89 | if __name__ == '__main__': 90 | warnings.filterwarnings('ignore') 91 | parser = argparse.ArgumentParser() 92 | parser.add_argument('--checkpoint', help='path of the pretrained model', required=True) 93 | parser.add_argument('--gpu', type=int, default=0) 94 | args = parser.parse_args() 95 | 96 | os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu) # 防止预训练模型被加载到gpu0上 97 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 98 | 99 | if args.checkpoint == 'pretrained_ssd300.pth.tar': 100 | checkpoint = torch.load(args.checkpoint, map_location=device) 101 | model = checkpoint['model'] 102 | for m in model.modules(): 103 | if 'Conv' in str(type(m)): 104 | setattr(m, 'padding_mode', 'zeros') 105 | else: 106 | model = SSD300(n_classes=len(label_map), device=device) 107 | if args.checkpoint: 108 | checkpoint = torch.load(args.checkpoint, map_location=device) 109 | model.load_state_dict(checkpoint['model']) 110 | 111 | model = model.to(device) 112 | model.eval() 113 | 114 | # Transforms 115 | resize = transforms.Resize((300, 300)) 116 | to_tensor = transforms.ToTensor() 117 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], 118 | std=[0.229, 0.224, 0.225]) 119 | 120 | img_path = './dataset/clipart/JPEGImages/10776780.jpg' 121 | original_image = Image.open(img_path, mode='r') 122 | original_image = original_image.convert('RGB') 123 | img = detect(original_image, min_score=0.2, max_overlap=0.5, top_k=200, device=device) 124 | img.save('output.jpg') 125 | img.close() 126 | -------------------------------------------------------------------------------- /eval.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import argparse 3 | import warnings 4 | import os 5 | 6 | from utils import calculate_mAP 7 | from datasets import PascalVOCDataset 8 | from pprint import PrettyPrinter 9 | from model import SSD300 10 | from utils import label_map 11 | from tqdm import tqdm 12 | 13 | 14 | def evaluate(test_loader, model, device): 15 | """ 16 | Evaluate. 17 | 18 | :param test_loader: DataLoader for test data 19 | :param model: model 20 | """ 21 | 22 | # Make sure it's in eval mode 23 | model.eval() 24 | 25 | # Lists to store detected and true boxes, labels, scores 26 | det_boxes = list() 27 | det_labels = list() 28 | det_scores = list() 29 | true_boxes = list() 30 | true_labels = list() 31 | true_difficulties = list() # it is necessary to know which objects are 'difficult', see 'calculate_mAP' in utils.py 32 | 33 | with torch.no_grad(): 34 | # Batches 35 | for i, (images, boxes, labels, difficulties) in enumerate(tqdm(test_loader, desc='evaluating model')): 36 | images = images.to(device) # (N, 3, 300, 300) 37 | 38 | # Forward prop. 39 | predicted_locs, predicted_scores = model(images) 40 | 41 | # Detect objects in SSD output 42 | det_boxes_batch, det_labels_batch, det_scores_batch = model.detect_objects(predicted_locs, predicted_scores, 43 | min_score=0.01, max_overlap=0.45, 44 | top_k=200, device=device) 45 | # Evaluation MUST be at min_score=0.01, max_overlap=0.45, top_k=200 for fair comparision with the paper's results and other repos 46 | 47 | # Store this batch's results for mAP calculation 48 | boxes = [b.to(device) for b in boxes] 49 | labels = [l.to(device) for l in labels] 50 | difficulties = [d.to(device) for d in difficulties] 51 | 52 | det_boxes.extend(det_boxes_batch) 53 | det_labels.extend(det_labels_batch) 54 | det_scores.extend(det_scores_batch) 55 | true_boxes.extend(boxes) 56 | true_labels.extend(labels) 57 | true_difficulties.extend(difficulties) 58 | 59 | # Calculate mAP 60 | print('calculating mAP...') 61 | APs, mAP = calculate_mAP(det_boxes, det_labels, det_scores, true_boxes, true_labels, true_difficulties, device) 62 | 63 | return APs, mAP 64 | 65 | 66 | if __name__ == '__main__': 67 | warnings.filterwarnings('ignore') 68 | 69 | parser = argparse.ArgumentParser() 70 | parser.add_argument('--data_folder', required=True) 71 | parser.add_argument('--checkpoint', help='path of the pretrained model', required=True) 72 | parser.add_argument('--batch_size', type=int, default=32) 73 | parser.add_argument('--gpu', type=int, default=0) 74 | args = parser.parse_args() 75 | 76 | # Good formatting when printing the APs for each class and mAP 77 | pp = PrettyPrinter() 78 | 79 | # Parameters 80 | keep_difficult = True # difficult ground truth objects must always be considered in mAP calculation, because these objects DO exist! 81 | workers = 4 82 | os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu) # 防止预训练模型被加载到gpu0上 83 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 84 | 85 | # Load model checkpoint that is to be evaluated 86 | if args.checkpoint == 'pretrained_ssd300.pth.tar': 87 | checkpoint = torch.load(args.checkpoint, map_location=device) 88 | model = checkpoint['model'] 89 | for m in model.modules(): 90 | if 'Conv' in str(type(m)): 91 | setattr(m, 'padding_mode', 'zeros') 92 | else: 93 | checkpoint = torch.load(args.checkpoint, map_location=device) 94 | model = SSD300(n_classes=len(label_map), device=device) 95 | model.load_state_dict(checkpoint['model']) 96 | 97 | model = model.to(device) 98 | # Switch to eval mode 99 | model.eval() 100 | 101 | # Load test data 102 | test_dataset = PascalVOCDataset(args.data_folder, 103 | split='test', 104 | keep_difficult=keep_difficult) 105 | test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False, 106 | collate_fn=test_dataset.collate_fn, num_workers=workers, pin_memory=True) 107 | 108 | APs, mAP = evaluate(test_loader, model, device) 109 | 110 | # Print AP for each class 111 | pp.pprint(APs) 112 | print('\nMean Average Precision (mAP): %.3f' % mAP) 113 | -------------------------------------------------------------------------------- /json/clipart/TEST_images.json: -------------------------------------------------------------------------------- 1 | ["/data/chenfan/CDD/dataset/clipart/JPEGImages/984061861.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/250444150.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/977714986.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/900653777.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/392565807.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/717994158.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/288801295.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/461801337.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/642420100.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/785162636.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/853021914.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/591441588.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/659278177.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/800896301.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/389386258.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/308640976.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/648549411.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/808118710.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/80239027.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/985928764.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/455923173.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/531541538.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/922151610.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/528070616.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/125196156.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/250458926.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/530444472.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/878728577.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/882054078.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/484437126.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/49637470.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/906200169.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/474044400.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/516322275.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/904289995.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/161625329.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/352436190.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/755244283.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/905970045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/627175756.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/839803721.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/635047070.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/378298317.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/58982565.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/101434689.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/367760602.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/574879210.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/137590443.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/689312946.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/951877763.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/333750753.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/736989021.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/278269401.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/532966446.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/909530758.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/766554308.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/244987939.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/802759644.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/679824075.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/719785473.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/711639827.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/82183397.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/130541569.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/969659092.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/363846781.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/157913704.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/238611995.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/32062364.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/588512642.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/761536280.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/180162001.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/827745318.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/725972116.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/365289927.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/626518687.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/449566508.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/528597177.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/14783686.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/944189628.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/673710174.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/567040628.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/477649768.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/910469356.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/857048916.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/280956806.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/628871545.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/80518337.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/142670304.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/958394158.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/366268721.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/428706709.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/607459295.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/44452569.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/118378507.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/986476160.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/567179813.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/117799313.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/592301017.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/288423470.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/265247670.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/586462748.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/318010106.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/397708780.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/575262860.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/998789393.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/655138674.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/549301096.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/149212779.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/775258139.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/738773822.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/616003168.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/881278985.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/529410954.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/59647922.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/561817613.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/758742031.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/633684592.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/176615987.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/802786056.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/901864925.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/83363463.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/387850319.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/574934199.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/306108257.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/363764070.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/831199240.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/994627511.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/886520972.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/88418728.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/442914135.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/457072286.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/307184534.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/258384245.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/232555401.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/231410085.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/201800020.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/985879950.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/714476558.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/84603130.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/603308039.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/853938634.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/281384169.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/849031494.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/725285535.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/63950905.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/962402592.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/424326572.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/423801757.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/249221002.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/68894053.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/370935691.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/914155930.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/251963504.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/951692035.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/96946298.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/800210279.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/722214553.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/646143279.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/860720941.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/996186070.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/43147676.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/450070519.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/842708753.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/865313168.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/840162628.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/87584798.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/280990598.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/664528263.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/757193552.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/170978779.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/313218447.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/538490100.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/18177672.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/916700383.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/443723348.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/149299948.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/39908473.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/774525357.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/763499528.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/233291860.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/917436676.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/885113305.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/435881913.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/880812906.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/606820152.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/147310995.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/99906268.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/558695503.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/132858229.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/683510377.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/751286420.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/881941707.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/16170368.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/957318031.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/353826451.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/418109653.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/691195026.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/243903597.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/349554188.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/487622748.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/95896680.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/748093750.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/558859187.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/717318456.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/355792201.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/331776560.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/290865447.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/897161600.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/194476053.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/568176390.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/889116279.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/350816999.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/3417179.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/530220689.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/343068655.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/427052664.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/950099351.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/237579483.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/16297344.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/474106058.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/11642343.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/651792991.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/830531377.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/846903424.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/316111064.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/878925719.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/20590882.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/316414995.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/257458588.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/299642795.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/755014547.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/53141183.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/519667263.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/40778804.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/503134531.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/781865089.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/898416590.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/884627658.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/973313274.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/146551239.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/124334477.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/733149230.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/281525952.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/887495043.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/597222494.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/506464566.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/726187251.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/986312817.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/34627046.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/546012296.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/132480784.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/987485457.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/145322427.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/152802436.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/780138737.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/908162406.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/655037693.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/47537035.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/152581000.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/447417311.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/590221841.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/145312282.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/954040015.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/94298870.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/697177762.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/528266282.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/996916252.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/451074320.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/503520407.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/902914507.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/73179515.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/564247117.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/62141604.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/640292549.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/744267142.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/396551308.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/125088898.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/785922213.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/914001303.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/950653536.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/780081623.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/842263487.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/184193092.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/566419832.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/534269432.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/370940208.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/264566369.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/251345590.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/515992498.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/755259615.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/5515950.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/412422765.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/187302388.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/375390294.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/282178954.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/552983721.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/559369083.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/458937561.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/17428418.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/258816198.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/897401114.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/611499981.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/822939167.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/527957724.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/841841083.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/252947588.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/879089.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/287966608.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/526797081.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/515732431.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/993089425.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/372051115.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/833474981.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/109577302.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/541389661.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/811007480.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/283156164.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/204567849.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/381069291.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/37419737.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/270780357.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/540645591.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/635622880.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/220601462.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/801121944.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/189843928.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/30461107.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/774535323.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/121122122.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/181470768.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/598478630.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/630137876.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/401642763.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/229605446.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/793592690.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/336528301.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/802085264.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/749221510.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/107068280.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/551851242.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/466121466.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/226941573.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/732571108.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/543852200.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/70085576.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/659763746.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/786434022.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/682486682.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/100480023.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/91462156.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/613734243.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/30164736.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/627380763.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/666947309.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/845964705.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/608763196.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/107747623.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/701104872.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/933370524.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/362478335.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/655169211.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/876997011.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/119908030.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/106114666.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/810321206.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/94971290.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/812356940.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/997079193.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/31169964.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/204208163.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/410444836.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/364231129.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/376653920.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/689902720.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/932969927.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/160866192.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/613312700.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/721344849.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/602617862.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/438642599.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/490527627.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/61673561.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/990342473.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/914117217.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/664923357.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/572442795.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/352047631.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/497177309.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/800001668.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/95584260.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/832402132.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/729196514.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/877523400.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/498679800.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/282304657.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/728863400.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/64823022.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/192768689.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/440292816.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/419223489.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/959087837.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/664826391.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/559216943.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/161539916.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/582596532.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/527926627.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/450327547.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/293783977.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/452391415.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/377456470.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/377544345.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/35349058.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/207229248.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/710476598.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/70413879.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/642305540.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/133738916.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/680925375.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/709785013.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/340641079.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/519227348.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/902667863.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/562351184.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/895827994.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/782455776.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/70680011.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/668615918.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/247717500.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/936364966.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/815401986.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/35127301.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/938637468.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/114299883.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/895448699.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/182278542.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/762803045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/307791003.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/483053567.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/629922764.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/863882175.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/422740693.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/89495702.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/93131755.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/902364152.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/240510420.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/238879654.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/902251112.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/237877045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/761936588.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/83638947.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/993091279.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/815481856.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/231106003.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/373132681.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/261018216.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/473421665.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/326656150.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/779814830.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/917696762.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/259395368.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/689426358.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/385009781.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/61604641.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/523393827.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/13750464.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/644956214.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/870287761.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/466387451.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/233442215.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/855603393.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/946330535.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/931052585.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/21331180.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/225715251.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/830069042.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/265602897.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/453479298.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/347444347.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/124781945.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/679655274.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/144516855.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/507930711.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/229996878.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/951626775.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/877663528.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/711449796.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/96249866.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/483830272.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/669702932.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/598436770.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/786896956.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/922997283.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/692312065.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/807344156.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/738914025.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/97694036.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/845566474.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/470666228.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/812891805.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/991391645.jpg"] -------------------------------------------------------------------------------- /json/clipart/TRAIN_images.json: -------------------------------------------------------------------------------- 1 | ["/data/chenfan/CDD/dataset/clipart/JPEGImages/322583407.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/109865693.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/215078434.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/907960721.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/948932178.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/739578249.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/540160185.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/421478533.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/991075107.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/355558482.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/723607462.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/573846079.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/689237352.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/31130150.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/618155022.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/648679986.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/588503196.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/889085632.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/634963381.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/943535520.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/424271143.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/338694270.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/255113979.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/863510537.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/450628343.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/65898699.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/376202444.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/469282307.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/660323710.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/309678899.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/997522752.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/88851689.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/95413917.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/965597745.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/442756342.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/964622093.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/625426787.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/949261371.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/944986721.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/779228478.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/795048308.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/61545482.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/67317686.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/589254973.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/628079218.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/128656245.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/542030607.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/264457192.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/458165835.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/862236710.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/530352789.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/454030675.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/303081599.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/882123981.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/65937167.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/914219333.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/945172213.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/556518808.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/562953923.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/366647435.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/758092926.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/245724091.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/508821889.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/763226828.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/206081749.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/6348244.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/110547945.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/366549266.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/405144309.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/277425310.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/934936308.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/953730990.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/828395946.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/235728003.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/536291023.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/919939006.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/732955060.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/39978136.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/444585557.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/658116312.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/756056758.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/624301397.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/353042758.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/415187365.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/114059165.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/43468109.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/862616176.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/967743350.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/423895287.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/659935739.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/807916835.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/944214959.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/778558351.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/622428551.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/421209407.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/439118472.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/263643954.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/887409463.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/554665762.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/683716161.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/898753613.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/685358983.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/380963330.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/474642911.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/178952357.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/376933769.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/23197298.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/997040607.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/238070737.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/245839430.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/897568536.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/479165233.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/212587086.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/634473336.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/204471182.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/512795671.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/388486768.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/51587825.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/674836320.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/929590049.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/347189074.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/341721150.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/438325748.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/611583011.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/264890967.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/837350864.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/993318133.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/299367783.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/318862223.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/430400955.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/537837683.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/966973145.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/381551428.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/520593933.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/50627425.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/307331102.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/184425055.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/599829011.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/783743139.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/219524480.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/622144286.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/897119509.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/397479763.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/385294651.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/34081042.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/385489895.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/169416385.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/308591053.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/836001696.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/703254263.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/966517027.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/252705387.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/408115685.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/904194626.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/747509053.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/933297360.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/67983918.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/678271157.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/570680729.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/366198578.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/217113880.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/180961797.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/456836465.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/932401008.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/673304248.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/431777465.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/265871898.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/899348307.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/739799038.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/20176324.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/182665743.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/238790370.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/901210977.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/571187999.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/428037539.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/174170157.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/427135242.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/980777200.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/276964385.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/404261942.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/51660068.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/740064864.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/793297748.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/75474041.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/612455869.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/185613574.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/355714681.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/903774860.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/862523623.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/948367627.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/751595045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/672741831.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/26795375.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/720127306.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/752914582.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/318034392.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/97473702.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/963910471.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/762789932.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/468663805.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/799867053.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/925011904.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/150911612.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/196451231.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/633487790.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/917158091.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/690466665.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/455953665.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/930431962.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/788430780.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/578714344.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/695799532.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/81743977.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/438692624.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/246481045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/241813558.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/818340649.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/212536775.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/914794025.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/756409701.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/429467464.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/249477155.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/269287610.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/75025618.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/854099487.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/579645834.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/652164336.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/579503167.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/68395696.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/184157131.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/883867750.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/484544378.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/10922720.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/969743078.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/650070399.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/643205604.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/791357468.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/980585093.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/783686769.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/298525165.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/271907284.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/475521435.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/163095367.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/275585186.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/311183045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/445694656.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/563088282.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/817610170.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/938074390.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/886607519.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/134807737.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/681633840.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/387489575.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/583588632.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/517323143.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/792417684.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/178338594.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/825841692.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/929544067.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/361887977.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/868938040.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/476110110.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/597826578.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/452390379.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/939804523.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/687262012.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/638895507.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/623629360.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/62132777.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/448216960.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/461654235.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/884933593.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/831099990.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/78125327.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/289563669.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/659518634.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/612423595.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/921922336.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/515164755.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/170272424.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/204225442.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/456137389.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/344840185.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/731946884.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/653094319.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/708473745.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/321632189.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/504559662.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/449711944.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/322988147.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/396789756.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/972736690.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/742967958.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/157371067.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/561780320.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/53811429.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/959437541.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/384035065.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/956391442.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/727859977.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/497841530.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/798383686.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/974368218.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/355651382.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/742516934.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/127481031.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/802641395.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/857955364.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/200520871.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/4106294.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/595053713.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/998730868.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/98358458.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/105805337.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/359198110.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/147520936.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/375778174.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/392955612.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/537392142.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/699449269.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/469655607.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/958622627.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/365317539.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/686054915.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/707993500.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/812909465.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/373278655.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/197940411.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/317796761.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/847697853.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/946714712.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/936719640.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/949623571.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/373129633.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/355042105.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/411493688.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/807496373.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/824239946.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/839100887.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/987223427.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/952301408.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/28305920.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/422910020.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/864080407.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/500468739.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/321976676.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/353634347.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/720700952.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/253851904.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/569113493.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/290626684.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/51448674.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/493357007.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/236527913.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/300123897.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/175572902.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/415550179.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/676710796.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/305679588.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/958455952.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/867689267.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/70295965.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/975325068.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/911034431.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/262950620.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/10776780.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/302859877.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/41307477.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/855806753.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/828693016.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/509949459.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/157141718.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/667813451.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/693330351.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/881998197.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/513310047.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/936245061.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/750963535.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/108952811.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/309860915.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/588839056.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/770475287.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/958320288.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/275184687.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/617238677.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/358846629.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/618735045.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/524162455.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/638923993.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/183462376.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/157738797.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/729100485.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/762604432.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/116421295.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/178264927.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/666275902.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/142939031.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/921026232.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/964211956.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/910626282.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/251085745.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/388348338.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/29866543.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/223517933.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/298868943.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/30949285.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/247559531.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/520764090.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/363251547.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/928501505.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/456338315.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/752313794.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/161523933.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/455171081.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/740166127.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/675104106.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/497192843.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/581235144.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/86747589.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/230797331.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/823981652.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/865318734.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/82022323.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/366484191.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/238735901.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/339821464.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/593960838.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/952898723.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/475572794.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/475141895.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/55499101.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/173580853.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/869094305.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/741034212.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/7837071.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/385932076.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/451695258.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/885273733.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/650570429.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/933800141.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/977082581.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/687010315.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/439826968.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/295396706.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/25494016.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/732270771.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/90182683.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/355832123.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/487442246.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/417176176.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/26184499.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/637649869.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/270468597.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/823421389.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/783263504.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/233816486.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/809302708.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/883741602.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/818938846.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/431430299.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/374348980.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/728271457.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/177315313.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/223513402.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/424413044.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/720653787.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/360756727.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/716746805.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/775850948.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/500559596.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/594525768.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/962247381.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/990612772.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/560537310.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/802967496.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/246775504.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/602790155.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/428959113.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/379872252.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/49494746.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/48044400.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/37298846.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/127873079.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/137880835.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/137029162.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/686304189.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/557215199.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/891240248.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/598819873.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/627845159.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/522518206.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/162090519.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/589225556.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/773036492.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/153248650.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/656824325.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/377242118.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/890989899.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/728008803.jpg", "/data/chenfan/CDD/dataset/clipart/JPEGImages/147057799.jpg"] -------------------------------------------------------------------------------- /json/clipart/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/clipart_dt/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/clipart_dt_pl/TRAIN_images.json: -------------------------------------------------------------------------------- 1 | ["/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/322583407.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/109865693.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/215078434.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/907960721.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/948932178.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/739578249.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/540160185.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/421478533.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/991075107.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/355558482.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/723607462.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/573846079.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/689237352.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/31130150.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/618155022.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/648679986.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/588503196.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/889085632.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/634963381.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/943535520.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/424271143.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/338694270.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/255113979.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/863510537.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/450628343.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/65898699.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/376202444.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/469282307.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/660323710.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/309678899.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/997522752.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/88851689.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/95413917.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/965597745.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/442756342.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/964622093.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/625426787.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/949261371.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/944986721.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/779228478.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/795048308.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/61545482.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/67317686.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/589254973.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/628079218.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/128656245.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/542030607.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/264457192.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/458165835.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/862236710.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/530352789.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/454030675.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/303081599.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/882123981.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/65937167.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/914219333.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/945172213.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/556518808.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/562953923.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/366647435.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/758092926.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/245724091.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/508821889.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/763226828.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/206081749.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/6348244.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/110547945.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/366549266.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/405144309.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/277425310.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/934936308.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/953730990.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/828395946.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/235728003.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/536291023.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/919939006.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/732955060.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/39978136.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/444585557.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/658116312.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/756056758.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/624301397.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/353042758.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/415187365.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/114059165.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/43468109.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/862616176.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/967743350.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/423895287.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/659935739.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/807916835.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/944214959.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/778558351.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/622428551.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/421209407.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/439118472.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/263643954.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/887409463.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/554665762.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/683716161.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/898753613.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/685358983.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/380963330.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/474642911.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/178952357.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/376933769.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/23197298.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/997040607.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/238070737.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/245839430.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/897568536.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/479165233.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/212587086.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/634473336.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/204471182.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/512795671.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/388486768.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/51587825.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/674836320.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/929590049.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/347189074.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/341721150.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/438325748.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/611583011.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/264890967.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/837350864.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/993318133.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/299367783.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/318862223.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/430400955.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/537837683.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/966973145.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/381551428.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/520593933.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/50627425.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/307331102.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/184425055.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/599829011.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/783743139.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/219524480.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/622144286.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/897119509.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/397479763.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/385294651.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/34081042.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/385489895.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/169416385.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/308591053.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/836001696.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/703254263.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/966517027.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/252705387.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/408115685.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/904194626.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/747509053.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/933297360.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/67983918.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/678271157.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/570680729.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/366198578.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/217113880.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/180961797.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/456836465.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/932401008.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/673304248.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/431777465.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/265871898.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/899348307.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/739799038.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/20176324.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/182665743.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/238790370.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/901210977.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/571187999.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/428037539.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/174170157.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/427135242.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/980777200.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/276964385.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/404261942.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/51660068.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/740064864.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/793297748.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/75474041.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/612455869.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/185613574.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/355714681.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/903774860.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/862523623.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/948367627.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/751595045.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/672741831.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/26795375.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/720127306.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/752914582.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/318034392.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/97473702.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/963910471.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/762789932.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/468663805.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/799867053.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/925011904.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/150911612.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/196451231.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/633487790.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/917158091.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/690466665.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/455953665.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/930431962.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/788430780.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/578714344.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/695799532.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/81743977.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/438692624.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/246481045.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/241813558.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/818340649.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/914794025.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/756409701.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/429467464.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/249477155.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/269287610.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/75025618.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/854099487.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/579645834.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/652164336.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/579503167.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/68395696.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/184157131.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/883867750.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/484544378.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/10922720.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/969743078.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/650070399.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/643205604.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/791357468.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/980585093.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/783686769.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/298525165.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/271907284.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/475521435.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/163095367.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/275585186.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/311183045.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/445694656.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/563088282.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/817610170.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/938074390.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/886607519.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/134807737.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/681633840.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/387489575.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/583588632.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/517323143.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/792417684.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/178338594.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/825841692.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/929544067.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/361887977.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/868938040.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/476110110.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/597826578.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/452390379.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/939804523.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/638895507.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/623629360.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/62132777.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/448216960.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/461654235.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/884933593.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/831099990.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/78125327.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/289563669.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/659518634.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/612423595.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/921922336.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/515164755.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/170272424.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/204225442.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/456137389.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/344840185.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/731946884.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/653094319.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/708473745.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/321632189.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/504559662.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/449711944.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/322988147.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/396789756.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/972736690.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/742967958.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/157371067.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/561780320.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/53811429.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/959437541.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/384035065.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/727859977.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/497841530.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/798383686.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/974368218.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/355651382.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/742516934.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/127481031.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/802641395.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/857955364.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/200520871.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/4106294.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/595053713.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/998730868.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/98358458.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/105805337.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/359198110.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/147520936.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/392955612.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/537392142.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/699449269.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/469655607.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/958622627.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/365317539.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/686054915.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/707993500.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/812909465.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/373278655.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/197940411.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/317796761.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/847697853.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/946714712.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/936719640.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/949623571.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/373129633.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/355042105.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/411493688.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/807496373.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/824239946.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/839100887.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/987223427.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/952301408.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/28305920.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/422910020.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/864080407.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/500468739.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/321976676.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/353634347.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/720700952.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/253851904.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/569113493.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/290626684.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/51448674.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/493357007.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/236527913.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/300123897.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/175572902.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/415550179.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/676710796.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/305679588.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/958455952.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/867689267.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/70295965.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/975325068.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/911034431.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/262950620.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/10776780.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/302859877.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/41307477.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/855806753.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/828693016.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/509949459.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/157141718.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/667813451.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/693330351.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/881998197.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/513310047.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/936245061.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/750963535.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/108952811.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/309860915.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/588839056.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/770475287.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/958320288.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/275184687.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/617238677.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/358846629.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/618735045.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/524162455.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/638923993.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/183462376.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/157738797.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/729100485.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/762604432.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/116421295.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/178264927.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/666275902.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/142939031.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/921026232.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/964211956.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/910626282.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/251085745.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/388348338.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/29866543.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/223517933.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/298868943.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/30949285.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/247559531.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/520764090.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/363251547.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/928501505.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/456338315.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/752313794.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/161523933.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/455171081.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/740166127.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/675104106.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/497192843.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/581235144.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/86747589.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/230797331.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/823981652.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/865318734.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/82022323.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/366484191.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/238735901.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/339821464.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/593960838.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/952898723.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/475572794.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/475141895.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/55499101.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/173580853.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/869094305.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/741034212.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/7837071.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/385932076.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/451695258.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/885273733.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/650570429.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/933800141.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/977082581.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/687010315.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/439826968.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/295396706.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/25494016.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/732270771.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/90182683.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/355832123.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/487442246.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/417176176.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/26184499.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/637649869.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/270468597.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/823421389.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/783263504.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/233816486.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/809302708.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/883741602.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/818938846.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/431430299.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/374348980.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/728271457.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/177315313.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/223513402.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/424413044.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/720653787.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/360756727.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/716746805.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/775850948.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/500559596.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/594525768.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/962247381.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/990612772.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/560537310.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/802967496.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/246775504.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/602790155.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/428959113.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/379872252.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/49494746.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/48044400.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/37298846.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/127873079.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/137880835.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/137029162.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/686304189.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/557215199.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/891240248.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/598819873.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/627845159.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/522518206.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/162090519.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/589225556.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/773036492.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/153248650.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/656824325.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/377242118.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/890989899.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/728008803.jpg", "/data/chenfan/CDD/dataset/clipart_dt_pl/JPEGImages/147057799.jpg"] -------------------------------------------------------------------------------- /json/clipart_dt_pl/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/comic/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/comic_dt/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/comic_dt_pl/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/p_clipart_dt/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/p_clipart_dt_pl/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/p_comic_dt/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/p_comic_dt_pl/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/p_watercolor_dt/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/p_watercolor_dt_pl/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/watercolor/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/watercolor_dt/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /json/watercolor_dt_pl/label_map.json: -------------------------------------------------------------------------------- 1 | {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4, "bottle": 5, "bus": 6, "car": 7, "cat": 8, "chair": 9, "cow": 10, "diningtable": 11, "dog": 12, "horse": 13, "motorbike": 14, "person": 15, "pottedplant": 16, "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20, "background": 0} -------------------------------------------------------------------------------- /lib/label_file.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2016 Tzutalin 2 | # Create by TzuTaLin 3 | 4 | import os 5 | 6 | import cv2 7 | 8 | from lib.voc_io import PascalVocWriter 9 | 10 | 11 | class LabelFileError(Exception): 12 | pass 13 | 14 | 15 | class LabelFile(object): 16 | # It might be changed as window creates 17 | suffix = '.lif' 18 | 19 | def __init__(self, filename, imagePath, classes): 20 | assert(os.path.exists(imagePath)) 21 | self.shapes = () 22 | self.classes = classes 23 | self.imagePath = imagePath 24 | self.filename = filename 25 | image = cv2.imread(imagePath, cv2.IMREAD_COLOR) 26 | self.imageShape = image.shape 27 | 28 | def savePascalVocFormat(self, dets): 29 | imgFolderPath = os.path.dirname(self.imagePath) 30 | imgFolderName = os.path.split(imgFolderPath)[-1] 31 | imgFileName = os.path.basename(self.imagePath) 32 | imgFileNameWithoutExt = os.path.splitext(imgFileName)[0] 33 | # Read from file path because self.imageData might be empty if saving to 34 | # Pascal format 35 | writer = PascalVocWriter(imgFolderName, imgFileNameWithoutExt, 36 | self.imageShape, localImgPath=self.imagePath) 37 | 38 | for cls in self.classes: 39 | for bbox in dets[cls]: 40 | bbox = self.prettifyBndBox(bbox) 41 | writer.addBndBox(bbox[0], bbox[1], bbox[2], bbox[3], cls) 42 | 43 | writer.save(targetFile=self.filename) 44 | return 45 | 46 | def toggleVerify(self): 47 | self.verified = not self.verified 48 | 49 | def prettifyBndBox(self, bbox): 50 | xmin, ymin, xmax, ymax = bbox 51 | height, width, _ = self.imageShape 52 | if xmin < 1: 53 | xmin = 1 54 | 55 | if ymin < 1: 56 | ymin = 1 57 | 58 | if xmax > width: 59 | xmax = width 60 | 61 | if ymax > height: 62 | ymax = height 63 | 64 | return (int(xmin), int(ymin), int(xmax), int(ymax)) 65 | 66 | @staticmethod 67 | def isLabelFile(filename): 68 | fileSuffix = os.path.splitext(filename)[1].lower() 69 | return fileSuffix == LabelFile.suffix 70 | 71 | 72 | if __name__ == '__main__': 73 | imgPath = "../output.jpg" 74 | 75 | filename = 'test.xml' 76 | dets = {'car': [(1, 1, 2, 3)], 'dog': [(5, 5, 5, 5)]} 77 | # print(labeler.convertPoints2BndBox([(-1, 6), (5, 3)])) 78 | classes = ('car', 'dog') 79 | labeler = LabelFile(filename, imgPath, classes) 80 | labeler.savePascalVocFormat(dets) 81 | -------------------------------------------------------------------------------- /lib/test.xml: -------------------------------------------------------------------------------- 1 | 2 | .. 3 | output 4 | ../output.jpg 5 | 6 | Unknown 7 | 8 | 9 | 640 10 | 480 11 | 3 12 | 13 | 0 14 | 15 | car 16 | Unspecified 17 | 0 18 | 0 19 | 20 | 1 21 | 1 22 | 2 23 | 3 24 | 25 | 26 | 27 | dog 28 | Unspecified 29 | 0 30 | 0 31 | 32 | 5 33 | 5 34 | 5 35 | 5 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /lib/voc_io.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | 3 | """ 4 | This is originally from labelImg 5 | https://github.com/tzutalin/labelImg/blob/23a9d2d9755d6c5f23ad3705d19914ddf2c83204/libs/pascal_voc_io.py 6 | """ 7 | 8 | from xml.etree import ElementTree 9 | from xml.etree.ElementTree import Element, SubElement 10 | from lxml import etree 11 | import codecs 12 | 13 | XML_EXT = '.xml' 14 | 15 | 16 | class PascalVocWriter: 17 | 18 | def __init__(self, foldername, filename, imgSize, databaseSrc='Unknown', localImgPath=None): 19 | self.foldername = foldername 20 | self.filename = filename 21 | self.databaseSrc = databaseSrc 22 | self.imgSize = imgSize 23 | self.boxlist = [] 24 | self.localImgPath = localImgPath 25 | self.verified = False 26 | 27 | def prettify(self, elem): 28 | """ 29 | Return a pretty-printed XML string for the Element. 30 | """ 31 | rough_string = ElementTree.tostring(elem, 'utf8') 32 | root = etree.fromstring(rough_string) 33 | return etree.tostring(root, pretty_print=True) 34 | # return etree.tostring(root, pretty_print=False) 35 | 36 | def genXML(self): 37 | """ 38 | Return XML root 39 | """ 40 | # Check conditions 41 | if self.filename is None or \ 42 | self.foldername is None or \ 43 | self.imgSize is None: 44 | return None 45 | 46 | top = Element('annotation') 47 | # top.set('verified', 'yes' if self.verified else 'no') 48 | 49 | folder = SubElement(top, 'folder') 50 | folder.text = self.foldername 51 | 52 | filename = SubElement(top, 'filename') 53 | filename.text = self.filename 54 | 55 | localImgPath = SubElement(top, 'path') 56 | localImgPath.text = self.localImgPath 57 | 58 | source = SubElement(top, 'source') 59 | database = SubElement(source, 'database') 60 | database.text = self.databaseSrc 61 | 62 | size_part = SubElement(top, 'size') 63 | width = SubElement(size_part, 'width') 64 | height = SubElement(size_part, 'height') 65 | depth = SubElement(size_part, 'depth') 66 | width.text = str(self.imgSize[1]) 67 | height.text = str(self.imgSize[0]) 68 | if len(self.imgSize) == 3: 69 | depth.text = str(self.imgSize[2]) 70 | else: 71 | depth.text = '1' 72 | 73 | segmented = SubElement(top, 'segmented') 74 | segmented.text = '0' 75 | return top 76 | 77 | def addBndBox(self, xmin, ymin, xmax, ymax, name): 78 | bndbox = {'xmin': xmin, 'ymin': ymin, 'xmax': xmax, 'ymax': ymax} 79 | bndbox['name'] = name 80 | self.boxlist.append(bndbox) 81 | 82 | def appendObjects(self, top): 83 | for each_object in self.boxlist: 84 | object_item = SubElement(top, 'object') 85 | name = SubElement(object_item, 'name') 86 | try: 87 | name.text = unicode(each_object['name']) 88 | except NameError: 89 | # Py3: NameError: name 'unicode' is not defined 90 | name.text = each_object['name'] 91 | pose = SubElement(object_item, 'pose') 92 | pose.text = "Unspecified" 93 | truncated = SubElement(object_item, 'truncated') 94 | truncated.text = "0" 95 | difficult = SubElement(object_item, 'difficult') 96 | difficult.text = "0" 97 | bndbox = SubElement(object_item, 'bndbox') 98 | xmin = SubElement(bndbox, 'xmin') 99 | xmin.text = str(each_object['xmin']) 100 | ymin = SubElement(bndbox, 'ymin') 101 | ymin.text = str(each_object['ymin']) 102 | xmax = SubElement(bndbox, 'xmax') 103 | xmax.text = str(each_object['xmax']) 104 | ymax = SubElement(bndbox, 'ymax') 105 | ymax.text = str(each_object['ymax']) 106 | 107 | def save(self, targetFile=None): 108 | root = self.genXML() 109 | self.appendObjects(root) 110 | out_file = None 111 | if targetFile is None: 112 | out_file = codecs.open( 113 | self.filename + XML_EXT, 'w', encoding='utf-8') 114 | else: 115 | out_file = codecs.open(targetFile, 'w', encoding='utf-8') 116 | 117 | prettifyResult = self.prettify(root) 118 | out_file.write(prettifyResult.decode('utf8')) 119 | out_file.close() 120 | 121 | 122 | class PascalVocReader: 123 | 124 | def __init__(self, filepath): 125 | # shapes type: 126 | # [labbel, [(x1,y1), (x2,y2), (x3,y3), (x4,y4)], color, color] 127 | self.shapes = [] 128 | self.filepath = filepath 129 | self.verified = False 130 | self.parseXML() 131 | 132 | def getShapes(self): 133 | return self.shapes 134 | 135 | def addShape(self, label, bndbox): 136 | xmin = int(bndbox.find('xmin').text) 137 | ymin = int(bndbox.find('ymin').text) 138 | xmax = int(bndbox.find('xmax').text) 139 | ymax = int(bndbox.find('ymax').text) 140 | # points = [(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)] 141 | self.shapes.append((label, (xmin, ymin, xmax, ymax))) 142 | 143 | def parseXML(self): 144 | assert self.filepath.endswith('.xml'), "Unsupport file format" 145 | parser = etree.XMLParser(encoding='utf-8') 146 | xmltree = ElementTree.parse(self.filepath, parser=parser).getroot() 147 | try: 148 | verified = xmltree.attrib['verified'] 149 | if verified == 'yes': 150 | self.verified = True 151 | except KeyError: 152 | self.verified = False 153 | 154 | for object_iter in xmltree.findall('object'): 155 | bndbox = object_iter.find("bndbox") 156 | label = object_iter.find('name').text 157 | self.addShape(label, bndbox) 158 | return True 159 | -------------------------------------------------------------------------------- /model.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | from torch import nn 4 | from utils import decimate, cxcy_to_xy, gcxgcy_to_cxcy, cxcy_to_gcxgcy, xy_to_cxcy, find_jaccard_overlap 5 | import torch.nn.functional as F 6 | from math import sqrt 7 | # from itertools import product as product 8 | import torchvision 9 | 10 | 11 | class VGGBase(nn.Module): 12 | """ 13 | VGG base convolutions to produce lower-level feature maps. 14 | """ 15 | 16 | def __init__(self): 17 | super(VGGBase, self).__init__() 18 | 19 | # Standard convolutional layers in VGG16 20 | self.conv1_1 = nn.Conv2d(3, 64, kernel_size=3, padding=1) # stride = 1, by default 21 | self.conv1_2 = nn.Conv2d(64, 64, kernel_size=3, padding=1) 22 | self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2) 23 | 24 | self.conv2_1 = nn.Conv2d(64, 128, kernel_size=3, padding=1) 25 | self.conv2_2 = nn.Conv2d(128, 128, kernel_size=3, padding=1) 26 | self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2) 27 | 28 | self.conv3_1 = nn.Conv2d(128, 256, kernel_size=3, padding=1) 29 | self.conv3_2 = nn.Conv2d(256, 256, kernel_size=3, padding=1) 30 | self.conv3_3 = nn.Conv2d(256, 256, kernel_size=3, padding=1) 31 | self.pool3 = nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True) # ceiling (not floor) here for even dims 32 | 33 | self.conv4_1 = nn.Conv2d(256, 512, kernel_size=3, padding=1) 34 | self.conv4_2 = nn.Conv2d(512, 512, kernel_size=3, padding=1) 35 | self.conv4_3 = nn.Conv2d(512, 512, kernel_size=3, padding=1) 36 | self.pool4 = nn.MaxPool2d(kernel_size=2, stride=2) 37 | 38 | self.conv5_1 = nn.Conv2d(512, 512, kernel_size=3, padding=1) 39 | self.conv5_2 = nn.Conv2d(512, 512, kernel_size=3, padding=1) 40 | self.conv5_3 = nn.Conv2d(512, 512, kernel_size=3, padding=1) 41 | self.pool5 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1) # retains size because stride is 1 (and padding) 42 | 43 | # Replacements for FC6 and FC7 in VGG16 44 | self.conv6 = nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6) # atrous convolution 45 | 46 | self.conv7 = nn.Conv2d(1024, 1024, kernel_size=1) 47 | 48 | # Load pretrained layers 49 | self.load_pretrained_layers() 50 | 51 | def forward(self, image): 52 | """ 53 | Forward propagation. 54 | 55 | :param image: images, a tensor of dimensions (N, 3, 300, 300) 56 | :return: lower-level feature maps conv4_3 and conv7 57 | """ 58 | out = F.relu(self.conv1_1(image)) # (N, 64, 300, 300) 59 | out = F.relu(self.conv1_2(out)) # (N, 64, 300, 300) 60 | out = self.pool1(out) # (N, 64, 150, 150) 61 | 62 | out = F.relu(self.conv2_1(out)) # (N, 128, 150, 150) 63 | out = F.relu(self.conv2_2(out)) # (N, 128, 150, 150) 64 | out = self.pool2(out) # (N, 128, 75, 75) 65 | 66 | out = F.relu(self.conv3_1(out)) # (N, 256, 75, 75) 67 | out = F.relu(self.conv3_2(out)) # (N, 256, 75, 75) 68 | out = F.relu(self.conv3_3(out)) # (N, 256, 75, 75) 69 | out = self.pool3(out) # (N, 256, 38, 38), it would have been 37 if not for ceil_mode = True 70 | 71 | out = F.relu(self.conv4_1(out)) # (N, 512, 38, 38) 72 | out = F.relu(self.conv4_2(out)) # (N, 512, 38, 38) 73 | out = F.relu(self.conv4_3(out)) # (N, 512, 38, 38) 74 | conv4_3_feats = out # (N, 512, 38, 38) 75 | out = self.pool4(out) # (N, 512, 19, 19) 76 | 77 | out = F.relu(self.conv5_1(out)) # (N, 512, 19, 19) 78 | out = F.relu(self.conv5_2(out)) # (N, 512, 19, 19) 79 | out = F.relu(self.conv5_3(out)) # (N, 512, 19, 19) 80 | out = self.pool5(out) # (N, 512, 19, 19), pool5 does not reduce dimensions 81 | 82 | out = F.relu(self.conv6(out)) # (N, 1024, 19, 19) 83 | 84 | conv7_feats = F.relu(self.conv7(out)) # (N, 1024, 19, 19) 85 | 86 | # Lower-level feature maps 87 | return conv4_3_feats, conv7_feats 88 | 89 | def load_pretrained_layers(self): 90 | """ 91 | As in the paper, we use a VGG-16 pretrained on the ImageNet task as the base network. 92 | There's one available in PyTorch, see https://pytorch.org/docs/stable/torchvision/models.html#torchvision.models.vgg16 93 | We copy these parameters into our network. It's straightforward for conv1 to conv5. 94 | However, the original VGG-16 does not contain the conv6 and con7 layers. 95 | Therefore, we convert fc6 and fc7 into convolutional layers, and subsample by decimation. See 'decimate' in utils.py. 96 | """ 97 | # Current state of base 98 | state_dict = self.state_dict() 99 | param_names = list(state_dict.keys()) 100 | 101 | # Pretrained VGG base 102 | pretrained_state_dict = torchvision.models.vgg16(pretrained=False).state_dict() 103 | pretrained_param_names = list(pretrained_state_dict.keys()) 104 | 105 | # Transfer conv. parameters from pretrained model to current model 106 | for i, param in enumerate(param_names[:-4]): # excluding conv6 and conv7 parameters 107 | state_dict[param] = pretrained_state_dict[pretrained_param_names[i]] 108 | 109 | # Convert fc6, fc7 to convolutional layers, and subsample (by decimation) to sizes of conv6 and conv7 110 | # fc6 111 | conv_fc6_weight = pretrained_state_dict['classifier.0.weight'].view(4096, 512, 7, 7) # (4096, 512, 7, 7) 112 | conv_fc6_bias = pretrained_state_dict['classifier.0.bias'] # (4096) 113 | state_dict['conv6.weight'] = decimate(conv_fc6_weight, m=[4, None, 3, 3]) # (1024, 512, 3, 3) 114 | state_dict['conv6.bias'] = decimate(conv_fc6_bias, m=[4]) # (1024) 115 | # fc7 116 | conv_fc7_weight = pretrained_state_dict['classifier.3.weight'].view(4096, 4096, 1, 1) # (4096, 4096, 1, 1) 117 | conv_fc7_bias = pretrained_state_dict['classifier.3.bias'] # (4096) 118 | state_dict['conv7.weight'] = decimate(conv_fc7_weight, m=[4, 4, None, None]) # (1024, 1024, 1, 1) 119 | state_dict['conv7.bias'] = decimate(conv_fc7_bias, m=[4]) # (1024) 120 | 121 | # Note: an FC layer of size (K) operating on a flattened version (C*H*W) of a 2D image of size (C, H, W)... 122 | # ...is equivalent to a convolutional layer with kernel size (H, W), input channels C, output channels K... 123 | # ...operating on the 2D image of size (C, H, W) without padding 124 | 125 | self.load_state_dict(state_dict) 126 | 127 | print("\nLoaded base model.\n") 128 | 129 | 130 | class AuxiliaryConvolutions(nn.Module): 131 | """ 132 | Additional convolutions to produce higher-level feature maps. 133 | """ 134 | 135 | def __init__(self): 136 | super(AuxiliaryConvolutions, self).__init__() 137 | 138 | # Auxiliary/additional convolutions on top of the VGG base 139 | self.conv8_1 = nn.Conv2d(1024, 256, kernel_size=1, padding=0) # stride = 1, by default 140 | self.conv8_2 = nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1) # dim. reduction because stride > 1 141 | 142 | self.conv9_1 = nn.Conv2d(512, 128, kernel_size=1, padding=0) 143 | self.conv9_2 = nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1) # dim. reduction because stride > 1 144 | 145 | self.conv10_1 = nn.Conv2d(256, 128, kernel_size=1, padding=0) 146 | self.conv10_2 = nn.Conv2d(128, 256, kernel_size=3, padding=0) # dim. reduction because padding = 0 147 | 148 | self.conv11_1 = nn.Conv2d(256, 128, kernel_size=1, padding=0) 149 | self.conv11_2 = nn.Conv2d(128, 256, kernel_size=3, padding=0) # dim. reduction because padding = 0 150 | 151 | # Initialize convolutions' parameters 152 | self.init_conv2d() 153 | 154 | def init_conv2d(self): 155 | """ 156 | Initialize convolution parameters. 157 | """ 158 | for c in self.children(): 159 | if isinstance(c, nn.Conv2d): 160 | nn.init.xavier_uniform_(c.weight) 161 | nn.init.constant_(c.bias, 0.) 162 | 163 | def forward(self, conv7_feats): 164 | """ 165 | Forward propagation. 166 | 167 | :param conv7_feats: lower-level conv7 feature map, a tensor of dimensions (N, 1024, 19, 19) 168 | :return: higher-level feature maps conv8_2, conv9_2, conv10_2, and conv11_2 169 | """ 170 | out = F.relu(self.conv8_1(conv7_feats)) # (N, 256, 19, 19) 171 | out = F.relu(self.conv8_2(out)) # (N, 512, 10, 10) 172 | conv8_2_feats = out # (N, 512, 10, 10) 173 | 174 | out = F.relu(self.conv9_1(out)) # (N, 128, 10, 10) 175 | out = F.relu(self.conv9_2(out)) # (N, 256, 5, 5) 176 | conv9_2_feats = out # (N, 256, 5, 5) 177 | 178 | out = F.relu(self.conv10_1(out)) # (N, 128, 5, 5) 179 | out = F.relu(self.conv10_2(out)) # (N, 256, 3, 3) 180 | conv10_2_feats = out # (N, 256, 3, 3) 181 | 182 | out = F.relu(self.conv11_1(out)) # (N, 128, 3, 3) 183 | conv11_2_feats = F.relu(self.conv11_2(out)) # (N, 256, 1, 1) 184 | 185 | # Higher-level feature maps 186 | return conv8_2_feats, conv9_2_feats, conv10_2_feats, conv11_2_feats 187 | 188 | 189 | class PredictionConvolutions(nn.Module): 190 | """ 191 | Convolutions to predict class scores and bounding boxes using lower and higher-level feature maps. 192 | 193 | The bounding boxes (locations) are predicted as encoded offsets w.r.t each of the 8732 prior (default) boxes. 194 | See 'cxcy_to_gcxgcy' in utils.py for the encoding definition. 195 | 196 | The class scores represent the scores of each object class in each of the 8732 bounding boxes located. 197 | A high score for 'background' = no object. 198 | """ 199 | 200 | def __init__(self, n_classes): 201 | """ 202 | :param n_classes: number of different types of objects 203 | """ 204 | super(PredictionConvolutions, self).__init__() 205 | 206 | self.n_classes = n_classes 207 | 208 | # Number of prior-boxes we are considering per position in each feature map 209 | n_boxes = {'conv4_3': 4, 210 | 'conv7': 6, 211 | 'conv8_2': 6, 212 | 'conv9_2': 6, 213 | 'conv10_2': 4, 214 | 'conv11_2': 4} 215 | # 4 prior-boxes implies we use 4 different aspect ratios, etc. 216 | 217 | # Localization prediction convolutions (predict offsets w.r.t prior-boxes) 218 | self.loc_conv4_3 = nn.Conv2d(512, n_boxes['conv4_3'] * 4, kernel_size=3, padding=1) 219 | self.loc_conv7 = nn.Conv2d(1024, n_boxes['conv7'] * 4, kernel_size=3, padding=1) 220 | self.loc_conv8_2 = nn.Conv2d(512, n_boxes['conv8_2'] * 4, kernel_size=3, padding=1) 221 | self.loc_conv9_2 = nn.Conv2d(256, n_boxes['conv9_2'] * 4, kernel_size=3, padding=1) 222 | self.loc_conv10_2 = nn.Conv2d(256, n_boxes['conv10_2'] * 4, kernel_size=3, padding=1) 223 | self.loc_conv11_2 = nn.Conv2d(256, n_boxes['conv11_2'] * 4, kernel_size=3, padding=1) 224 | 225 | # Class prediction convolutions (predict classes in localization boxes) 226 | self.cl_conv4_3 = nn.Conv2d(512, n_boxes['conv4_3'] * n_classes, kernel_size=3, padding=1) 227 | self.cl_conv7 = nn.Conv2d(1024, n_boxes['conv7'] * n_classes, kernel_size=3, padding=1) 228 | self.cl_conv8_2 = nn.Conv2d(512, n_boxes['conv8_2'] * n_classes, kernel_size=3, padding=1) 229 | self.cl_conv9_2 = nn.Conv2d(256, n_boxes['conv9_2'] * n_classes, kernel_size=3, padding=1) 230 | self.cl_conv10_2 = nn.Conv2d(256, n_boxes['conv10_2'] * n_classes, kernel_size=3, padding=1) 231 | self.cl_conv11_2 = nn.Conv2d(256, n_boxes['conv11_2'] * n_classes, kernel_size=3, padding=1) 232 | 233 | # Initialize convolutions' parameters 234 | self.init_conv2d() 235 | 236 | def init_conv2d(self): 237 | """ 238 | Initialize convolution parameters. 239 | """ 240 | for c in self.children(): 241 | if isinstance(c, nn.Conv2d): 242 | nn.init.xavier_uniform_(c.weight) 243 | nn.init.constant_(c.bias, 0.) 244 | 245 | def forward(self, conv4_3_feats, conv7_feats, conv8_2_feats, conv9_2_feats, conv10_2_feats, conv11_2_feats): 246 | """ 247 | Forward propagation. 248 | 249 | :param conv4_3_feats: conv4_3 feature map, a tensor of dimensions (N, 512, 38, 38) 250 | :param conv7_feats: conv7 feature map, a tensor of dimensions (N, 1024, 19, 19) 251 | :param conv8_2_feats: conv8_2 feature map, a tensor of dimensions (N, 512, 10, 10) 252 | :param conv9_2_feats: conv9_2 feature map, a tensor of dimensions (N, 256, 5, 5) 253 | :param conv10_2_feats: conv10_2 feature map, a tensor of dimensions (N, 256, 3, 3) 254 | :param conv11_2_feats: conv11_2 feature map, a tensor of dimensions (N, 256, 1, 1) 255 | :return: 8732 locations and class scores (i.e. w.r.t each prior box) for each image 256 | """ 257 | batch_size = conv4_3_feats.size(0) 258 | 259 | # Predict localization boxes' bounds (as offsets w.r.t prior-boxes) 260 | l_conv4_3 = self.loc_conv4_3(conv4_3_feats) # (N, 16, 38, 38) 261 | l_conv4_3 = l_conv4_3.permute(0, 2, 3, 262 | 1).contiguous() # (N, 38, 38, 16), to match prior-box order (after .view()) 263 | # (.contiguous() ensures it is stored in a contiguous chunk of memory, needed for .view() below) 264 | l_conv4_3 = l_conv4_3.view(batch_size, -1, 4) # (N, 5776, 4), there are a total 5776 boxes on this feature map 265 | 266 | l_conv7 = self.loc_conv7(conv7_feats) # (N, 24, 19, 19) 267 | l_conv7 = l_conv7.permute(0, 2, 3, 1).contiguous() # (N, 19, 19, 24) 268 | l_conv7 = l_conv7.view(batch_size, -1, 4) # (N, 2166, 4), there are a total 2116 boxes on this feature map 269 | 270 | l_conv8_2 = self.loc_conv8_2(conv8_2_feats) # (N, 24, 10, 10) 271 | l_conv8_2 = l_conv8_2.permute(0, 2, 3, 1).contiguous() # (N, 10, 10, 24) 272 | l_conv8_2 = l_conv8_2.view(batch_size, -1, 4) # (N, 600, 4) 273 | 274 | l_conv9_2 = self.loc_conv9_2(conv9_2_feats) # (N, 24, 5, 5) 275 | l_conv9_2 = l_conv9_2.permute(0, 2, 3, 1).contiguous() # (N, 5, 5, 24) 276 | l_conv9_2 = l_conv9_2.view(batch_size, -1, 4) # (N, 150, 4) 277 | 278 | l_conv10_2 = self.loc_conv10_2(conv10_2_feats) # (N, 16, 3, 3) 279 | l_conv10_2 = l_conv10_2.permute(0, 2, 3, 1).contiguous() # (N, 3, 3, 16) 280 | l_conv10_2 = l_conv10_2.view(batch_size, -1, 4) # (N, 36, 4) 281 | 282 | l_conv11_2 = self.loc_conv11_2(conv11_2_feats) # (N, 16, 1, 1) 283 | l_conv11_2 = l_conv11_2.permute(0, 2, 3, 1).contiguous() # (N, 1, 1, 16) 284 | l_conv11_2 = l_conv11_2.view(batch_size, -1, 4) # (N, 4, 4) 285 | 286 | # Predict classes in localization boxes 287 | c_conv4_3 = self.cl_conv4_3(conv4_3_feats) # (N, 4 * n_classes, 38, 38) 288 | c_conv4_3 = c_conv4_3.permute(0, 2, 3, 289 | 1).contiguous() # (N, 38, 38, 4 * n_classes), to match prior-box order (after .view()) 290 | c_conv4_3 = c_conv4_3.view(batch_size, -1, 291 | self.n_classes) # (N, 5776, n_classes), there are a total 5776 boxes on this feature map 292 | 293 | c_conv7 = self.cl_conv7(conv7_feats) # (N, 6 * n_classes, 19, 19) 294 | c_conv7 = c_conv7.permute(0, 2, 3, 1).contiguous() # (N, 19, 19, 6 * n_classes) 295 | c_conv7 = c_conv7.view(batch_size, -1, 296 | self.n_classes) # (N, 2166, n_classes), there are a total 2116 boxes on this feature map 297 | 298 | c_conv8_2 = self.cl_conv8_2(conv8_2_feats) # (N, 6 * n_classes, 10, 10) 299 | c_conv8_2 = c_conv8_2.permute(0, 2, 3, 1).contiguous() # (N, 10, 10, 6 * n_classes) 300 | c_conv8_2 = c_conv8_2.view(batch_size, -1, self.n_classes) # (N, 600, n_classes) 301 | 302 | c_conv9_2 = self.cl_conv9_2(conv9_2_feats) # (N, 6 * n_classes, 5, 5) 303 | c_conv9_2 = c_conv9_2.permute(0, 2, 3, 1).contiguous() # (N, 5, 5, 6 * n_classes) 304 | c_conv9_2 = c_conv9_2.view(batch_size, -1, self.n_classes) # (N, 150, n_classes) 305 | 306 | c_conv10_2 = self.cl_conv10_2(conv10_2_feats) # (N, 4 * n_classes, 3, 3) 307 | c_conv10_2 = c_conv10_2.permute(0, 2, 3, 1).contiguous() # (N, 3, 3, 4 * n_classes) 308 | c_conv10_2 = c_conv10_2.view(batch_size, -1, self.n_classes) # (N, 36, n_classes) 309 | 310 | c_conv11_2 = self.cl_conv11_2(conv11_2_feats) # (N, 4 * n_classes, 1, 1) 311 | c_conv11_2 = c_conv11_2.permute(0, 2, 3, 1).contiguous() # (N, 1, 1, 4 * n_classes) 312 | c_conv11_2 = c_conv11_2.view(batch_size, -1, self.n_classes) # (N, 4, n_classes) 313 | 314 | # A total of 8732 boxes 315 | # Concatenate in this specific order (i.e. must match the order of the prior-boxes) 316 | locs = torch.cat([l_conv4_3, l_conv7, l_conv8_2, l_conv9_2, l_conv10_2, l_conv11_2], dim=1) # (N, 8732, 4) 317 | classes_scores = torch.cat([c_conv4_3, c_conv7, c_conv8_2, c_conv9_2, c_conv10_2, c_conv11_2], 318 | dim=1) # (N, 8732, n_classes) 319 | 320 | return locs, classes_scores 321 | 322 | 323 | class SSD300(nn.Module): 324 | """ 325 | The SSD300 network - encapsulates the base VGG network, auxiliary, and prediction convolutions. 326 | """ 327 | 328 | def __init__(self, n_classes, device): 329 | super(SSD300, self).__init__() 330 | 331 | self.n_classes = n_classes 332 | 333 | self.base = VGGBase() 334 | self.aux_convs = AuxiliaryConvolutions() 335 | self.pred_convs = PredictionConvolutions(n_classes) 336 | 337 | # Since lower level features (conv4_3_feats) have considerably larger scales, we take the L2 norm and rescale 338 | # Rescale factor is initially set at 20, but is learned for each channel during back-prop 339 | self.rescale_factors = nn.Parameter(torch.FloatTensor(1, 512, 1, 1)) # there are 512 channels in conv4_3_feats 340 | nn.init.constant_(self.rescale_factors, 20) 341 | 342 | # Prior boxes 343 | self.priors_cxcy = self.create_prior_boxes(device) 344 | 345 | def forward(self, image): 346 | """ 347 | Forward propagation. 348 | 349 | :param image: images, a tensor of dimensions (N, 3, 300, 300) 350 | :return: 8732 locations and class scores (i.e. w.r.t each prior box) for each image 351 | """ 352 | # Run VGG base network convolutions (lower level feature map generators) 353 | conv4_3_feats, conv7_feats = self.base(image) # (N, 512, 38, 38), (N, 1024, 19, 19) 354 | 355 | # Rescale conv4_3 after L2 norm 356 | norm = conv4_3_feats.pow(2).sum(dim=1, keepdim=True).sqrt() # (N, 1, 38, 38) 357 | conv4_3_feats = conv4_3_feats / norm # (N, 512, 38, 38) 358 | conv4_3_feats = conv4_3_feats * self.rescale_factors # (N, 512, 38, 38) 359 | # (PyTorch autobroadcasts singleton dimensions during arithmetic) 360 | 361 | # Run auxiliary convolutions (higher level feature map generators) 362 | conv8_2_feats, conv9_2_feats, conv10_2_feats, conv11_2_feats = \ 363 | self.aux_convs(conv7_feats) # (N, 512, 10, 10), (N, 256, 5, 5), (N, 256, 3, 3), (N, 256, 1, 1) 364 | 365 | # Run prediction convolutions (predict offsets w.r.t prior-boxes and classes in each resulting localization box) 366 | locs, classes_scores = self.pred_convs(conv4_3_feats, conv7_feats, conv8_2_feats, conv9_2_feats, conv10_2_feats, 367 | conv11_2_feats) # (N, 8732, 4), (N, 8732, n_classes) 368 | 369 | return locs, classes_scores 370 | 371 | def create_prior_boxes(self, device): 372 | """ 373 | Create the 8732 prior (default) boxes for the SSD300, as defined in the paper. 374 | 375 | :return: prior boxes in center-size coordinates, a tensor of dimensions (8732, 4) 376 | """ 377 | fmap_dims = {'conv4_3': 38, 378 | 'conv7': 19, 379 | 'conv8_2': 10, 380 | 'conv9_2': 5, 381 | 'conv10_2': 3, 382 | 'conv11_2': 1} 383 | 384 | obj_scales = {'conv4_3': 0.1, 385 | 'conv7': 0.2, 386 | 'conv8_2': 0.375, 387 | 'conv9_2': 0.55, 388 | 'conv10_2': 0.725, 389 | 'conv11_2': 0.9} 390 | 391 | aspect_ratios = {'conv4_3': [1., 2., 0.5], 392 | 'conv7': [1., 2., 3., 0.5, .333], 393 | 'conv8_2': [1., 2., 3., 0.5, .333], 394 | 'conv9_2': [1., 2., 3., 0.5, .333], 395 | 'conv10_2': [1., 2., 0.5], 396 | 'conv11_2': [1., 2., 0.5]} 397 | 398 | fmaps = list(fmap_dims.keys()) 399 | 400 | prior_boxes = [] 401 | 402 | for k, fmap in enumerate(fmaps): 403 | for i in range(fmap_dims[fmap]): 404 | for j in range(fmap_dims[fmap]): 405 | cx = (j + 0.5) / fmap_dims[fmap] 406 | cy = (i + 0.5) / fmap_dims[fmap] 407 | 408 | for ratio in aspect_ratios[fmap]: 409 | prior_boxes.append([cx, cy, obj_scales[fmap] * sqrt(ratio), obj_scales[fmap] / sqrt(ratio)]) 410 | 411 | # For an aspect ratio of 1, use an additional prior whose scale is the geometric mean of the 412 | # scale of the current feature map and the scale of the next feature map 413 | if ratio == 1.: 414 | try: 415 | additional_scale = sqrt(obj_scales[fmap] * obj_scales[fmaps[k + 1]]) 416 | # For the last feature map, there is no "next" feature map 417 | except IndexError: 418 | additional_scale = 1. 419 | prior_boxes.append([cx, cy, additional_scale, additional_scale]) 420 | 421 | prior_boxes = torch.FloatTensor(prior_boxes).to(device) # (8732, 4) 422 | prior_boxes.clamp_(0, 1) # (8732, 4) 423 | 424 | return prior_boxes 425 | 426 | def detect_objects(self, predicted_locs, predicted_scores, min_score, max_overlap, top_k, device): 427 | """ 428 | Decipher the 8732 locations and class scores (output of ths SSD300) to detect objects. 429 | 430 | For each class, perform Non-Maximum Suppression (NMS) on boxes that are above a minimum threshold. 431 | 432 | :param predicted_locs: predicted locations/boxes w.r.t the 8732 prior boxes, a tensor of dimensions (N, 8732, 4) 433 | :param predicted_scores: class scores for each of the encoded locations/boxes, a tensor of dimensions (N, 8732, n_classes) 434 | :param min_score: minimum threshold for a box to be considered a match for a certain class 435 | :param max_overlap: maximum overlap two boxes can have so that the one with the lower score is not suppressed via NMS 436 | :param top_k: if there are a lot of resulting detection across all classes, keep only the top 'k' 437 | :return: detections (boxes, labels, and scores), lists of length batch_size 438 | """ 439 | batch_size = predicted_locs.size(0) 440 | n_priors = self.priors_cxcy.size(0) 441 | predicted_scores = F.softmax(predicted_scores, dim=2) # (N, 8732, n_classes) 442 | 443 | # Lists to store final predicted boxes, labels, and scores for all images 444 | all_images_boxes = list() 445 | all_images_labels = list() 446 | all_images_scores = list() 447 | 448 | assert n_priors == predicted_locs.size(1) == predicted_scores.size(1) 449 | 450 | for i in range(batch_size): 451 | # Decode object coordinates from the form we regressed predicted boxes to 452 | decoded_locs = cxcy_to_xy( 453 | gcxgcy_to_cxcy(predicted_locs[i], self.priors_cxcy)) # (8732, 4), these are fractional pt. coordinates 454 | 455 | # Lists to store boxes and scores for this image 456 | image_boxes = list() 457 | image_labels = list() 458 | image_scores = list() 459 | 460 | max_scores, best_label = predicted_scores[i].max(dim=1) # (8732) 461 | 462 | # Check for each class 463 | for c in range(1, self.n_classes): 464 | # Keep only predicted boxes and scores where scores for this class are above the minimum score 465 | class_scores = predicted_scores[i][:, c] # (8732) 466 | score_above_min_score = class_scores > min_score # torch.uint8 (byte) tensor, for indexing 467 | n_above_min_score = score_above_min_score.sum().item() 468 | if n_above_min_score == 0: 469 | continue 470 | class_scores = class_scores[score_above_min_score] # (n_qualified), n_min_score <= 8732 471 | class_decoded_locs = decoded_locs[score_above_min_score] # (n_qualified, 4) 472 | 473 | # Sort predicted boxes and scores by scores 474 | class_scores, sort_ind = class_scores.sort(dim=0, descending=True) # (n_qualified), (n_min_score) 475 | class_decoded_locs = class_decoded_locs[sort_ind] # (n_min_score, 4) 476 | 477 | # Find the overlap between predicted boxes 478 | overlap = find_jaccard_overlap(class_decoded_locs, class_decoded_locs) # (n_qualified, n_min_score) 479 | 480 | # Non-Maximum Suppression (NMS) 481 | 482 | # A torch.uint8 (byte) tensor to keep track of which predicted boxes to suppress 483 | # 1 implies suppress, 0 implies don't suppress 484 | suppress = torch.zeros((n_above_min_score), dtype=torch.uint8).to(device) # (n_qualified) 485 | 486 | # Consider each box in order of decreasing scores 487 | for box in range(class_decoded_locs.size(0)): 488 | # If this box is already marked for suppression 489 | if suppress[box] == 1: 490 | continue 491 | 492 | # Suppress boxes whose overlaps (with this box) are greater than maximum overlap 493 | # Find such boxes and update suppress indices 494 | suppress = torch.max(suppress, overlap[box] > max_overlap) 495 | # The max operation retains previously suppressed boxes, like an 'OR' operation 496 | 497 | # Don't suppress this box, even though it has an overlap of 1 with itself 498 | suppress[box] = 0 499 | 500 | # Store only unsuppressed boxes for this class 501 | image_boxes.append(class_decoded_locs[1 - suppress]) 502 | image_labels.append(torch.LongTensor((1 - suppress).sum().item() * [c]).to(device)) 503 | image_scores.append(class_scores[1 - suppress]) 504 | 505 | # If no object in any class is found, store a placeholder for 'background' 506 | if len(image_boxes) == 0: 507 | image_boxes.append(torch.FloatTensor([[0., 0., 1., 1.]]).to(device)) 508 | image_labels.append(torch.LongTensor([0]).to(device)) 509 | image_scores.append(torch.FloatTensor([0.]).to(device)) 510 | 511 | # Concatenate into single tensors 512 | image_boxes = torch.cat(image_boxes, dim=0) # (n_objects, 4) 513 | image_labels = torch.cat(image_labels, dim=0) # (n_objects) 514 | image_scores = torch.cat(image_scores, dim=0) # (n_objects) 515 | n_objects = image_scores.size(0) 516 | 517 | # Keep only the top k objects 518 | if n_objects > top_k: 519 | image_scores, sort_ind = image_scores.sort(dim=0, descending=True) 520 | image_scores = image_scores[:top_k] # (top_k) 521 | image_boxes = image_boxes[sort_ind][:top_k] # (top_k, 4) 522 | image_labels = image_labels[sort_ind][:top_k] # (top_k) 523 | 524 | # Append to lists that store predicted boxes and scores for all images 525 | all_images_boxes.append(image_boxes) 526 | all_images_labels.append(image_labels) 527 | all_images_scores.append(image_scores) 528 | 529 | return all_images_boxes, all_images_labels, all_images_scores # lists of length batch_size 530 | 531 | 532 | class MultiBoxLoss(nn.Module): 533 | """ 534 | The MultiBox loss, a loss function for object detection. 535 | 536 | This is a combination of: 537 | (1) a localization loss for the predicted locations of the boxes, and 538 | (2) a confidence loss for the predicted class scores. 539 | """ 540 | 541 | def __init__(self, priors_cxcy, threshold=0.5, neg_pos_ratio=3, alpha=1.): 542 | super(MultiBoxLoss, self).__init__() 543 | self.priors_cxcy = priors_cxcy 544 | self.priors_xy = cxcy_to_xy(priors_cxcy) 545 | self.threshold = threshold 546 | self.neg_pos_ratio = neg_pos_ratio 547 | self.alpha = alpha 548 | 549 | self.smooth_l1 = nn.L1Loss() 550 | self.cross_entropy = nn.CrossEntropyLoss(reduce=False) # reduce=False,不对结果取均值 551 | 552 | def forward(self, predicted_locs, predicted_scores, boxes, labels, device): 553 | """ 554 | Forward propagation. 555 | 556 | :param predicted_locs: predicted locations/boxes w.r.t the 8732 prior boxes, a tensor of dimensions (N, 8732, 4) 557 | :param predicted_scores: class scores for each of the encoded locations/boxes, a tensor of dimensions (N, 8732, n_classes) 558 | :param boxes: true object bounding boxes in boundary coordinates, a list of N tensors 559 | :param labels: true object labels, a list of N tensors 560 | :return: multibox loss, a scalar 561 | """ 562 | batch_size = predicted_locs.size(0) 563 | n_priors = self.priors_cxcy.size(0) 564 | n_classes = predicted_scores.size(2) 565 | 566 | assert n_priors == predicted_locs.size(1) == predicted_scores.size(1) 567 | 568 | true_locs = torch.zeros((batch_size, n_priors, 4), dtype=torch.float).to(device) # (N, 8732, 4) 569 | true_classes = torch.zeros((batch_size, n_priors), dtype=torch.long).to(device) # (N, 8732) 570 | 571 | # For each image 572 | for i in range(batch_size): 573 | n_objects = boxes[i].size(0) 574 | 575 | overlap = find_jaccard_overlap(boxes[i], 576 | self.priors_xy) # (n_objects, 8732) 577 | 578 | # For each prior, find the object that has the maximum overlap 579 | overlap_for_each_prior, object_for_each_prior = overlap.max(dim=0) # (8732) 580 | 581 | # We don't want a situation where an object is not represented in our positive (non-background) priors - 582 | # 1. An object might not be the best object for all priors, and is therefore not in object_for_each_prior. 583 | # 2. All priors with the object may be assigned as background based on the threshold (0.5). 584 | 585 | # To remedy this - 586 | # First, find the prior that has the maximum overlap for each object. 587 | _, prior_for_each_object = overlap.max(dim=1) # (N_o) 588 | 589 | # Then, assign each object to the corresponding maximum-overlap-prior. (This fixes 1.) 590 | object_for_each_prior[prior_for_each_object] = torch.LongTensor(range(n_objects)).to(device) 591 | 592 | # To ensure these priors qualify, artificially give them an overlap of greater than 0.5. (This fixes 2.) 593 | overlap_for_each_prior[prior_for_each_object] = 1. 594 | 595 | # Labels for each prior 596 | label_for_each_prior = labels[i][object_for_each_prior] # (8732) 597 | # Set priors whose overlaps with objects are less than the threshold to be background (no object) 598 | label_for_each_prior[overlap_for_each_prior < self.threshold] = 0 # (8732) 599 | 600 | # Store 601 | true_classes[i] = label_for_each_prior 602 | 603 | # Encode center-size object coordinates into the form we regressed predicted boxes to 604 | true_locs[i] = cxcy_to_gcxgcy(xy_to_cxcy(boxes[i][object_for_each_prior]), self.priors_cxcy) # (8732, 4) 605 | 606 | # Identify priors that are positive (object/non-background) 607 | positive_priors = true_classes != 0 # (N, 8732) torch.uint8 608 | 609 | # LOCALIZATION LOSS 610 | 611 | # Localization loss is computed only over positive (non-background) priors 612 | loc_loss = self.smooth_l1(predicted_locs[positive_priors], true_locs[positive_priors]) # (), scalar 613 | 614 | # Note: indexing with a torch.uint8 (byte) tensor flattens the tensor when indexing is across multiple dimensions (N & 8732) 615 | # So, if predicted_locs has the shape (N, 8732, 4), predicted_locs[positive_priors] will have (total positives, 4) 616 | 617 | # CONFIDENCE LOSS 618 | 619 | # Confidence loss is computed over positive priors and the most difficult (hardest) negative priors in each image 620 | # That is, FOR EACH IMAGE, 621 | # we will take the hardest (neg_pos_ratio * n_positives) negative priors, i.e where there is maximum loss 622 | # This is called Hard Negative Mining - it concentrates on hardest negatives in each image, and also minimizes pos/neg imbalance 623 | 624 | # Number of positive and hard-negative priors per image 625 | n_positives = positive_priors.sum(dim=1) # (N) 626 | n_hard_negatives = self.neg_pos_ratio * n_positives # (N) 627 | 628 | # First, find the loss for all priors 629 | conf_loss_all = self.cross_entropy(predicted_scores.view(-1, n_classes), true_classes.view(-1)) # (N * 8732) 630 | conf_loss_all = conf_loss_all.view(batch_size, n_priors) # (N, 8732) 631 | 632 | # We already know which priors are positive 633 | conf_loss_pos = conf_loss_all[positive_priors] # (sum(n_positives)) 634 | 635 | # Next, find which priors are hard-negative 636 | # To do this, sort ONLY negative priors in each image in order of decreasing loss and take top n_hard_negatives 637 | conf_loss_neg = conf_loss_all.clone() # (N, 8732) 638 | conf_loss_neg[positive_priors] = 0. # (N, 8732), positive priors are ignored (never in top n_hard_negatives) 639 | conf_loss_neg, _ = conf_loss_neg.sort(dim=1, descending=True) # (N, 8732), sorted by decreasing hardness 640 | hardness_ranks = torch.LongTensor(range(n_priors)).unsqueeze(0).expand_as(conf_loss_neg).to(device) # (N, 8732) 641 | hard_negatives = hardness_ranks < n_hard_negatives.unsqueeze(1) # (N, 8732) 642 | conf_loss_hard_neg = conf_loss_neg[hard_negatives] # (sum(n_hard_negatives)) 643 | 644 | # As in the paper, averaged over positive priors only, although computed over both positive and hard-negative priors 645 | conf_loss = (conf_loss_hard_neg.sum() + conf_loss_pos.sum()) / n_positives.sum().float() # (), scalar 646 | 647 | # Focal loss for confidence loss 648 | # preds = predicted_scores.view(-1, n_classes) 649 | # targets = true_classes.view(-1) 650 | # focal_loss = FocalLoss(n_classes) 651 | # conf_loss = focal_loss(preds, targets) 652 | 653 | # TOTAL LOSS 654 | 655 | return conf_loss + self.alpha * loc_loss 656 | 657 | 658 | class FocalLoss(nn.Module): 659 | def __init__(self, num_classes, alpha=0.25, gamma=2, size_average=True): 660 | """ 661 | :param alpha: 类别权重. 662 | :param gamma: 难易样本调节参数 663 | :param num_classes: 类别数量 664 | :param size_average: 损失计算方式,默认取均值 665 | """ 666 | 667 | super(FocalLoss, self).__init__() 668 | self.size_average = size_average 669 | self.alpha = torch.zeros(num_classes) 670 | self.alpha[0] += (1 - alpha) 671 | self.alpha[1:] += alpha 672 | self.gamma = gamma 673 | 674 | def forward(self, preds, targets): 675 | ''' 676 | :param preds: (N, C) 677 | :param labels: (N) 678 | ''' 679 | preds = preds.view(-1, preds.size(-1)) 680 | self.alpha = self.alpha.to(preds.device) 681 | preds_softmax = F.softmax(preds, dim=1) 682 | preds_logsoft = torch.log(preds_softmax) 683 | preds_softmax = preds_softmax.gather(1, targets.view(-1, 1)) # 这部分实现nll_loss ( crossempty = log_softmax + nll ) 684 | preds_logsoft = preds_logsoft.gather(1, targets.view(-1, 1)) # (N, 1) 685 | self.alpha = self.alpha.gather(0, targets.view(-1)) # (N) 686 | loss = -torch.mul(torch.pow((1 - preds_softmax), self.gamma), preds_logsoft) # (N, 1) 687 | loss = torch.mul(self.alpha, loss.t()) 688 | if self.size_average: 689 | loss = loss.mean() 690 | else: 691 | loss = loss.sum() 692 | return loss 693 | -------------------------------------------------------------------------------- /pseudo_label.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import numpy as np 3 | import os 4 | import torch 5 | import torch.backends.cudnn as cudnn 6 | import json 7 | 8 | from collections import defaultdict 9 | from lib.label_file import LabelFile 10 | from utils import voc_labels, label_map, bam_labels 11 | from model import SSD300 12 | from datasets import PascalVOCDataset 13 | from PIL import Image 14 | from tqdm import tqdm 15 | 16 | 17 | def path_to_id(img): 18 | index = img.find('JPEGImages/') 19 | img = img[index + len('JPEGImages/'):] 20 | index = img.find('.') 21 | return img[:index] 22 | 23 | 24 | def main(): 25 | 26 | parser = argparse.ArgumentParser() 27 | parser.add_argument('--root', required=True) 28 | parser.add_argument('--data_folder', required=True) 29 | parser.add_argument('--result', required=True) 30 | parser.add_argument('--checkpoint', help='path of the pretrained model', required=True) 31 | parser.add_argument('--gpu', type=int, default=0) 32 | parser.add_argument('--batch_size', type=int, default=32) 33 | parser.add_argument('--data_type', choices=('clipart, bam'), required=True) 34 | args = parser.parse_args() 35 | 36 | n_classes = len(label_map) # number of different types of objects 37 | device = torch.device(f"cuda:{args.gpu}" if torch.cuda.is_available() else "cpu") 38 | workers = 4 # number of workers for loading data in the DataLoader 39 | keep_difficult = True 40 | 41 | cudnn.benchmark = True 42 | 43 | if args.checkpoint == 'pretrained_ssd300.pth.tar': 44 | checkpoint = torch.load(args.checkpoint, map_location=device) 45 | model = checkpoint['model'] 46 | for m in model.modules(): 47 | if 'Conv' in str(type(m)): 48 | setattr(m, 'padding_mode', 'zeros') 49 | else: 50 | model = SSD300(n_classes=n_classes, device=device) 51 | checkpoint = torch.load(args.checkpoint, map_location=device) 52 | model.load_state_dict(checkpoint['model']) 53 | 54 | model = model.to(device) 55 | # Switch to eval mode 56 | model.eval() 57 | 58 | dataset = PascalVOCDataset(args.data_folder, split='train', keep_difficult=keep_difficult) 59 | dataloader = torch.utils.data.DataLoader(dataset, batch_size=args.batch_size, shuffle=False, 60 | collate_fn=dataset.collate_fn, num_workers=workers, pin_memory=True) 61 | 62 | det_boxes = list() 63 | det_labels = list() 64 | det_scores = list() 65 | true_labels = list() 66 | 67 | # only use labels of ground truth, not boxes 68 | with torch.no_grad(): 69 | for images, boxes, labels, difficulties in tqdm(dataloader, desc='pseudo labeling'): 70 | images = images.to(device) # (N, 3, 300, 300) 71 | 72 | # Forward prop. 73 | predicted_locs, predicted_scores = model(images) 74 | # Detect objects in SSD output 75 | det_boxes_batch, det_labels_batch, det_scores_batch = model.detect_objects(predicted_locs, predicted_scores, 76 | min_score=0.01, max_overlap=0.45, 77 | top_k=200, device=device) 78 | det_boxes.extend(det_boxes_batch) 79 | det_labels.extend(det_labels_batch) 80 | det_scores.extend(det_scores_batch) 81 | true_labels.extend(labels) 82 | 83 | ids = [] 84 | with open(os.path.join(args.data_folder, 'TRAIN_images.json'), 'r') as j: 85 | images = json.load(j) 86 | ids = [path_to_id(img) for img in images] 87 | 88 | new_ids = [] 89 | 90 | for i, (pred_b, pred_l, pred_s, gt_l) in enumerate( 91 | zip(det_boxes, det_labels, det_scores, true_labels)): 92 | pred_b = pred_b.cpu().numpy() 93 | pred_l = pred_l.cpu().numpy() 94 | pred_s = pred_s.cpu().numpy() 95 | gt_l = gt_l.cpu().numpy() 96 | 97 | labels = ('background',) + voc_labels 98 | proper_dets = defaultdict(list) # 这种dict默认提供了key 99 | name = ids[i] # img的id 100 | cnt = 0 101 | 102 | gt_l = set(gt_l) 103 | for l_ in gt_l: 104 | cnt += 1 105 | class_indices = np.where(pred_l == l_)[0] # 所有预测label正确的坐标 106 | if len(class_indices) == 0: 107 | continue 108 | scores = pred_s[class_indices] 109 | ind = class_indices[np.argsort(scores)[::-1][0]] # top1 110 | assert (l_ == pred_l[ind]) 111 | # Transform to original image dimensions 112 | img_path = os.path.join(args.root, 'JPEGImages', name + '.jpg') 113 | img = Image.open(img_path, mode='r') 114 | original_dims = np.array([img.width, img.height, img.width, img.height]) 115 | pred_b[ind] = pred_b[ind] * original_dims 116 | # 这个label预测正确的bbox+1 117 | proper_dets[labels[l_]].append(pred_b[ind]) 118 | 119 | # 删除这个预测 120 | # pred_b = np.concatenate((pred_b[:ind], pred_b[ind + 1:]), 0) 121 | # pred_l = np.concatenate((pred_l[:ind], pred_l[ind + 1:]), 0) 122 | # pred_s = np.concatenate((pred_s[:ind], pred_s[ind + 1:]), 0) 123 | 124 | if cnt == 0: 125 | continue # 没有ground turth label,直接跳过写入Annotation步骤 126 | 127 | new_ids.append(ids[i] + '\n') 128 | filename = os.path.join(args.result, 'Annotations', name + '.xml') 129 | img_path = os.path.join(args.root, 'JPEGImages', name + '.jpg') 130 | actual_labels = voc_labels if args.data_type == 'clipart' else bam_labels 131 | labeler = LabelFile(filename, img_path, actual_labels) 132 | labeler.savePascalVocFormat(proper_dets) 133 | 134 | txt = 'ImageSets/Main/trainval.txt' 135 | with open(os.path.join(args.result, txt), 'w') as f: 136 | f.writelines(new_ids) # 重写这个文件是为了保证每个图片都有对应的Annotation(上述步骤生成的) 137 | print('Saved to {:s}'.format(args.result)) 138 | 139 | 140 | if __name__ == '__main__': 141 | main() 142 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import time 2 | import argparse 3 | import torch.backends.cudnn as cudnn 4 | import torch.optim 5 | import torch.utils.data 6 | import warnings 7 | import os 8 | from model import SSD300, MultiBoxLoss 9 | from datasets import PascalVOCDataset 10 | from utils import label_map, AverageMeter, save_checkpoint, clip_gradient, adjust_learning_rate 11 | from matplotlib import pyplot as plt 12 | 13 | 14 | def train(train_loader, model, criterion, optimizer, epoch, device, print_freq): 15 | """ 16 | One epoch's training. 17 | :param train_loader: DataLoader for training data 18 | :param model: model 19 | :param criterion: MultiBox loss 20 | :param optimizer: optimizer 21 | :param epoch: epoch number 22 | """ 23 | model.train() # training mode enables dropout 24 | 25 | batch_time = AverageMeter() # forward prop. + back prop. time 26 | data_time = AverageMeter() # data loading time 27 | losses = AverageMeter() # loss 28 | 29 | start = time.time() 30 | 31 | # Batches 32 | for i, (images, boxes, labels, _) in enumerate(train_loader): 33 | data_time.update(time.time() - start) 34 | 35 | # Move to default device 36 | images = images.to(device) # (batch_size (N), 3, 300, 300) 37 | boxes = [b.to(device) for b in boxes] 38 | labels = [l.to(device) for l in labels] 39 | 40 | # Forward prop. 41 | predicted_locs, predicted_scores = model(images) # (N, 8732, 4), (N, 8732, n_classes) 42 | 43 | # Loss 44 | loss = criterion(predicted_locs, predicted_scores, boxes, labels, device) # scalar 45 | 46 | # Backward prop. 47 | optimizer.zero_grad() 48 | loss.backward() 49 | 50 | # Clip gradients, if necessary 51 | if grad_clip is not None: 52 | clip_gradient(optimizer, grad_clip) 53 | 54 | # Update model 55 | optimizer.step() 56 | 57 | losses.update(loss.item(), images.size(0)) 58 | batch_time.update(time.time() - start) 59 | 60 | start = time.time() 61 | 62 | # Print status 63 | if i % print_freq == 0: 64 | print('Epoch: [{0}][{1}/{2}]\t' 65 | 'Batch Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t' 66 | 'Data Time {data_time.val:.3f} ({data_time.avg:.3f})\t' 67 | 'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(epoch, i, len(train_loader), 68 | batch_time=batch_time, 69 | data_time=data_time, loss=losses)) 70 | del predicted_locs, predicted_scores, images, boxes, labels # free some memory since their histories may be stored 71 | 72 | return losses.avg 73 | 74 | 75 | if __name__ == '__main__': 76 | warnings.filterwarnings('ignore') 77 | 78 | parser = argparse.ArgumentParser() 79 | parser.add_argument('--data_folder', required=True) 80 | parser.add_argument('--checkpoint', help='path of the pretrained model') 81 | parser.add_argument('--batch_size', type=int, default=32) 82 | parser.add_argument('--gpu', type=int, default=0) 83 | parser.add_argument('--iteration', type=int, default=10000) 84 | parser.add_argument('--lr', type=float, default=1e-5) 85 | parser.add_argument('--result', required=True) 86 | args = parser.parse_args() 87 | # Data parameters 88 | keep_difficult = True # use objects considered difficult to detect? 89 | 90 | if not os.path.exists(args.result): 91 | os.makedirs(args.result) 92 | 93 | # Model parameters 94 | # Not too many here since the SSD300 has a very specific structure 95 | n_classes = len(label_map) # number of different types of objects 96 | os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu) # 防止预训练模型被加载到gpu0上 97 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 98 | # Learning parameters 99 | workers = 4 # number of workers for loading data in the DataLoader 100 | print_freq = 200 # print training status every __ batches 101 | decay_lr_to = 0.1 # decay learning rate to this fraction of the existing learning rate 102 | momentum = 0.9 # momentum 103 | weight_decay = 5e-4 # weight decay 104 | grad_clip = None # clip if gradients are exploding, which may happen at larger batch sizes (sometimes at 32) - you will recognize it by a sorting error in the MuliBox loss calculation 105 | 106 | cudnn.benchmark = True 107 | 108 | """ 109 | Training. 110 | """ 111 | 112 | # Initialize model or load checkpoint 113 | if args.checkpoint == 'pretrained_ssd300.pth.tar': 114 | checkpoint = torch.load(args.checkpoint, map_location=device) 115 | model = checkpoint['model'] 116 | for m in model.modules(): 117 | if 'Conv' in str(type(m)): 118 | setattr(m, 'padding_mode', 'zeros') 119 | else: 120 | model = SSD300(n_classes=n_classes, device=device) 121 | if args.checkpoint: 122 | checkpoint = torch.load(args.checkpoint, map_location=device) 123 | model.load_state_dict(checkpoint['model']) 124 | # Initialize the optimizer, with twice the default learning rate for biases, as in the original Caffe repo 125 | biases = list() 126 | not_biases = list() 127 | for param_name, param in model.named_parameters(): 128 | if param.requires_grad: 129 | if param_name.endswith('.bias'): 130 | biases.append(param) 131 | else: 132 | not_biases.append(param) 133 | optimizer = torch.optim.SGD(params=[{'params': biases, 'lr': 2 * args.lr}, {'params': not_biases}], 134 | lr=args.lr, momentum=momentum, weight_decay=weight_decay) 135 | 136 | # Move to default device 137 | model = model.to(device) 138 | criterion = MultiBoxLoss(priors_cxcy=model.priors_cxcy).to(device) 139 | 140 | # Custom dataloaders 141 | train_dataset = PascalVOCDataset(args.data_folder, 142 | split='train', 143 | keep_difficult=keep_difficult) 144 | train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, 145 | collate_fn=train_dataset.collate_fn, num_workers=workers, 146 | pin_memory=True) # note that we're passing the collate function here 147 | # Calculate total number of epochs to train 148 | # To convert iterations to epochs, divide iterations by the number of iterations per epoch 149 | epochs = args.iteration // (len(train_dataset) // args.batch_size) 150 | decay_lr_at = epochs // 2 151 | 152 | train_losses = [] 153 | 154 | # Epochs 155 | for epoch in range(epochs): 156 | # Decay learning rate at particular epochs 157 | if epoch == decay_lr_at: 158 | adjust_learning_rate(optimizer, decay_lr_to) 159 | # One epoch's training 160 | train_loss = train(train_loader=train_loader, 161 | model=model, 162 | criterion=criterion, 163 | optimizer=optimizer, 164 | epoch=epoch, 165 | device=device, 166 | print_freq=print_freq) 167 | train_losses.append(train_loss) 168 | # Save checkpoint 169 | save_checkpoint(model, os.path.join(args.result, 'model')) 170 | 171 | plt.title('train loss') 172 | plt.xlabel('epochs') 173 | plt.ylabel('loss') 174 | plt.plot(range(len(train_losses)), train_losses) 175 | plt.savefig(os.path.join(args.result, 'train_loss.png')) 176 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import torch 4 | import random 5 | import xml.etree.ElementTree as ET 6 | import torchvision.transforms.functional as FT 7 | 8 | # Label map 9 | voc_labels = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 10 | 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor') 11 | bam_labels = ('bicycle', 'bird', 'car', 'cat', 'dog', 'person') 12 | label_map = {k: v + 1 for v, k in enumerate(voc_labels)} 13 | label_map['background'] = 0 14 | rev_label_map = {v: k for k, v in label_map.items()} # Inverse mapping 15 | 16 | # Color map for bounding boxes of detected objects from https://sashat.me/2017/01/11/list-of-20-simple-distinct-colors/ 17 | distinct_colors = ['#e6194b', '#3cb44b', '#ffe119', '#0082c8', '#f58231', '#911eb4', '#46f0f0', '#f032e6', 18 | '#d2f53c', '#fabebe', '#008080', '#000080', '#aa6e28', '#fffac8', '#800000', '#aaffc3', '#808000', 19 | '#ffd8b1', '#e6beff', '#808080', '#FFFFFF'] 20 | label_color_map = {k: distinct_colors[i] for i, k in enumerate(label_map.keys())} 21 | 22 | 23 | def parse_annotation(annotation_path): 24 | tree = ET.parse(annotation_path) 25 | root = tree.getroot() 26 | 27 | boxes = list() 28 | labels = list() 29 | difficulties = list() 30 | for object in root.iter('object'): 31 | 32 | difficult = int(object.find('difficult').text == '1') 33 | 34 | label = object.find('name').text.lower().strip() 35 | if label not in label_map: 36 | continue 37 | 38 | bbox = object.find('bndbox') 39 | xmin = int(bbox.find('xmin').text) - 1 40 | ymin = int(bbox.find('ymin').text) - 1 41 | xmax = int(bbox.find('xmax').text) - 1 42 | ymax = int(bbox.find('ymax').text) - 1 43 | 44 | boxes.append([xmin, ymin, xmax, ymax]) 45 | labels.append(label_map[label]) 46 | difficulties.append(difficult) 47 | 48 | return {'boxes': boxes, 'labels': labels, 'difficulties': difficulties} 49 | 50 | 51 | def create_data_lists(voc07_path1, voc07_path2, voc12_path1, voc12_path2, output_folder, type): 52 | """ 53 | Create lists of images, the bounding boxes and labels of the objects in these images, and save these to file. 54 | 55 | :param voc07_path1: path to the 'VOC2007' folder 56 | :param voc12_path1: path to the 'VOC2012' folder 57 | :param voc07_path2: path to the 'VOC2007' folder of pascal 58 | :param voc12_path2: path to the 'VOC2012' folder of pascal 59 | :param output_folder: folder where the JSONs must be saved 60 | """ 61 | voc07_path1 = os.path.abspath(voc07_path1) if voc07_path1 is not None else None 62 | voc12_path1 = os.path.abspath(voc12_path1) if voc12_path1 is not None else None 63 | voc07_path2 = os.path.abspath(voc07_path2) if voc07_path2 is not None else None 64 | voc12_path2 = os.path.abspath(voc12_path2) if voc12_path2 is not None else None 65 | 66 | if not os.path.exists(output_folder): 67 | os.makedirs(output_folder) 68 | 69 | train_images = list() 70 | train_objects = list() 71 | n_objects = 0 72 | 73 | paths = [voc07_path1, voc12_path1, voc07_path2, voc12_path2] 74 | paths = [i for i in paths if i is not None] 75 | 76 | # Training data 77 | for path in paths: 78 | # Find IDs of images in training data 79 | with open(os.path.join(path, 'ImageSets/Main/trainval.txt')) as f: 80 | ids = f.read().splitlines() 81 | 82 | for id in ids: 83 | # Parse annotation's XML file 84 | objects = parse_annotation(os.path.join(path, 'Annotations', id + '.xml')) 85 | if len(objects['boxes']) == 0: 86 | continue 87 | n_objects += len(objects['boxes']) 88 | train_objects.append(objects) 89 | train_images.append(os.path.join(path, 'JPEGImages', id + '.jpg')) 90 | 91 | assert len(train_objects) == len(train_images) 92 | 93 | # Save to file 94 | with open(os.path.join(output_folder, 'TRAIN_images.json'), 'w') as j: 95 | json.dump(train_images, j) 96 | with open(os.path.join(output_folder, 'TRAIN_objects.json'), 'w') as j: 97 | json.dump(train_objects, j) 98 | with open(os.path.join(output_folder, 'label_map.json'), 'w') as j: 99 | json.dump(label_map, j) # save label map too 100 | 101 | print('\nThere are %d training images containing a total of %d objects. Files have been saved to %s.' % ( 102 | len(train_images), n_objects, os.path.abspath(output_folder))) 103 | 104 | if type == 'ideal': 105 | # Validation data 106 | test_images = list() 107 | test_objects = list() 108 | n_objects = 0 109 | 110 | # Find IDs of images in validation data 111 | with open(os.path.join(voc07_path1, 'ImageSets/Main/test.txt')) as f: 112 | ids = f.read().splitlines() 113 | 114 | for id in ids: 115 | # Parse annotation's XML file 116 | objects = parse_annotation(os.path.join(voc07_path1, 'Annotations', id + '.xml')) 117 | if len(objects['boxes']) == 0: 118 | continue 119 | test_objects.append(objects) 120 | n_objects += len(objects['boxes']) 121 | test_images.append(os.path.join(voc07_path1, 'JPEGImages', id + '.jpg')) 122 | 123 | assert len(test_objects) == len(test_images) 124 | 125 | # Save to file 126 | with open(os.path.join(output_folder, 'TEST_images.json'), 'w') as j: 127 | json.dump(test_images, j) 128 | with open(os.path.join(output_folder, 'TEST_objects.json'), 'w') as j: 129 | json.dump(test_objects, j) 130 | 131 | print('\nThere are %d validation images containing a total of %d objects. Files have been saved to %s.' % ( 132 | len(test_images), n_objects, os.path.abspath(output_folder))) 133 | 134 | 135 | def decimate(tensor, m): 136 | """ 137 | Decimate a tensor by a factor 'm', i.e. downsample by keeping every 'm'th value. 138 | 139 | This is used when we convert FC layers to equivalent Convolutional layers, BUT of a smaller size. 140 | 141 | :param tensor: tensor to be decimated 142 | :param m: list of decimation factors for each dimension of the tensor; None if not to be decimated along a dimension 143 | :return: decimated tensor 144 | """ 145 | assert tensor.dim() == len(m) 146 | for d in range(tensor.dim()): 147 | if m[d] is not None: 148 | tensor = tensor.index_select(dim=d, 149 | index=torch.arange(start=0, end=tensor.size(d), step=m[d]).long()) 150 | 151 | return tensor 152 | 153 | 154 | def calculate_mAP(det_boxes, det_labels, det_scores, true_boxes, true_labels, true_difficulties, device): 155 | """ 156 | Calculate the Mean Average Precision (mAP) of detected objects. 157 | 158 | See https://medium.com/@jonathan_hui/map-mean-average-precision-for-object-detection-45c121a31173 for an explanation 159 | 160 | :param det_boxes: list of tensors, one tensor for each image containing detected objects' bounding boxes 161 | :param det_labels: list of tensors, one tensor for each image containing detected objects' labels 162 | :param det_scores: list of tensors, one tensor for each image containing detected objects' labels' scores 163 | :param true_boxes: list of tensors, one tensor for each image containing actual objects' bounding boxes 164 | :param true_labels: list of tensors, one tensor for each image containing actual objects' labels 165 | :param true_difficulties: list of tensors, one tensor for each image containing actual objects' difficulty (0 or 1) 166 | :return: list of average precisions for all classes, mean average precision (mAP) 167 | """ 168 | assert len(det_boxes) == len(det_labels) == len(det_scores) == len(true_boxes) == len( 169 | true_labels) == len( 170 | true_difficulties) # these are all lists of tensors of the same length, i.e. number of images 171 | n_classes = len(label_map) 172 | 173 | # Store all (true) objects in a single continuous tensor while keeping track of the image it is from 174 | true_images = list() 175 | for i in range(len(true_labels)): 176 | true_images.extend([i] * true_labels[i].size(0)) 177 | true_images = torch.LongTensor(true_images).to( 178 | device) # (n_objects), n_objects is the total no. of objects across all images 179 | true_boxes = torch.cat(true_boxes, dim=0) # (n_objects, 4) 180 | true_labels = torch.cat(true_labels, dim=0) # (n_objects) 181 | true_difficulties = torch.cat(true_difficulties, dim=0) # (n_objects) 182 | 183 | assert true_images.size(0) == true_boxes.size(0) == true_labels.size(0) 184 | 185 | # Store all detections in a single continuous tensor while keeping track of the image it is from 186 | det_images = list() 187 | for i in range(len(det_labels)): 188 | det_images.extend([i] * det_labels[i].size(0)) 189 | det_images = torch.LongTensor(det_images).to(device) # (n_detections) 190 | det_boxes = torch.cat(det_boxes, dim=0) # (n_detections, 4) 191 | det_labels = torch.cat(det_labels, dim=0) # (n_detections) 192 | det_scores = torch.cat(det_scores, dim=0) # (n_detections) 193 | 194 | assert det_images.size(0) == det_boxes.size(0) == det_labels.size(0) == det_scores.size(0) 195 | 196 | # Calculate APs for each class (except background) 197 | average_precisions = torch.zeros((n_classes - 1), dtype=torch.float) # (n_classes - 1) 198 | for c in range(1, n_classes): 199 | # Extract only objects with this class 200 | true_class_images = true_images[true_labels == c] # (n_class_objects) 201 | true_class_boxes = true_boxes[true_labels == c] # (n_class_objects, 4) 202 | true_class_difficulties = true_difficulties[true_labels == c] # (n_class_objects) 203 | n_easy_class_objects = (1 - true_class_difficulties).sum().item() # ignore difficult objects 所有的正样本 204 | 205 | # Keep track of which true objects with this class have already been 'detected' 206 | # So far, none 207 | true_class_boxes_detected = torch.zeros((true_class_difficulties.size(0)), dtype=torch.uint8).to( 208 | device) # (n_class_objects) 209 | 210 | # Extract only detections with this class 211 | det_class_images = det_images[det_labels == c] # (n_class_detections) 212 | det_class_boxes = det_boxes[det_labels == c] # (n_class_detections, 4) 213 | det_class_scores = det_scores[det_labels == c] # (n_class_detections) 214 | n_class_detections = det_class_boxes.size(0) 215 | if n_class_detections == 0: 216 | continue 217 | 218 | # Sort detections in decreasing order of confidence/scores 219 | det_class_scores, sort_ind = torch.sort(det_class_scores, dim=0, descending=True) # (n_class_detections) 220 | det_class_images = det_class_images[sort_ind] # (n_class_detections) 221 | det_class_boxes = det_class_boxes[sort_ind] # (n_class_detections, 4) 222 | 223 | # In the order of decreasing scores, check if true or false positive 224 | true_positives = torch.zeros((n_class_detections), dtype=torch.float).to(device) # (n_class_detections) 225 | false_positives = torch.zeros((n_class_detections), dtype=torch.float).to(device) # (n_class_detections) 226 | for d in range(n_class_detections): 227 | this_detection_box = det_class_boxes[d].unsqueeze(0) # (1, 4) 228 | this_image = det_class_images[d] # (), scalar 229 | 230 | # Find objects in the same image with this class, their difficulties, and whether they have been detected before 231 | object_boxes = true_class_boxes[true_class_images == this_image] # (n_class_objects_in_img) 232 | object_difficulties = true_class_difficulties[true_class_images == this_image] # (n_class_objects_in_img) 233 | # If no such object in this image, then the detection is a false positive (该图片中没有该类别的目标) 234 | if object_boxes.size(0) == 0: 235 | false_positives[d] = 1 236 | continue 237 | 238 | # Find maximum overlap of this detection with objects in this image of this class 239 | overlaps = find_jaccard_overlap(this_detection_box, object_boxes) # (1, n_class_objects_in_img) 240 | max_overlap, ind = torch.max(overlaps.squeeze(0), dim=0) # (), () - scalars 241 | 242 | # 'ind' is the index of the object in these image-level tensors 'object_boxes', 'object_difficulties' 243 | # In the original class-level tensors 'true_class_boxes', etc., 'ind' corresponds to object with index... 244 | original_ind = torch.LongTensor(range(true_class_boxes.size(0)))[true_class_images == this_image][ind] 245 | # We need 'original_ind' to update 'true_class_boxes_detected' 246 | # 要注意区分 这个类别的所有objects和 这个图片中这个类别的所有objects 247 | 248 | # If the maximum overlap is greater than the threshold of 0.5, it's a match 249 | if max_overlap.item() > 0.5: 250 | # If the object it matched with is 'difficult', ignore it 251 | if object_difficulties[ind] == 0: 252 | # If this object has already not been detected, it's a true positive 253 | if true_class_boxes_detected[original_ind] == 0: 254 | true_positives[d] = 1 255 | true_class_boxes_detected[original_ind] = 1 # this object has now been detected/accounted for 256 | # Otherwise, it's a false positive (since this object is already accounted for) 257 | else: 258 | false_positives[d] = 1 259 | # Otherwise, the detection occurs in a different location than the actual object, and is a false positive 260 | else: 261 | false_positives[d] = 1 262 | 263 | # Compute cumulative precision and recall at each detection in the order of decreasing scores 264 | cumul_true_positives = torch.cumsum(true_positives, dim=0) # (n_class_detections) 265 | cumul_false_positives = torch.cumsum(false_positives, dim=0) # (n_class_detections) 266 | cumul_precision = cumul_true_positives / (cumul_true_positives + cumul_false_positives + 1e-10) # (n_class_detections) 267 | cumul_recall = cumul_true_positives / n_easy_class_objects # (n_class_detections) 268 | 269 | # Find the mean of the maximum of the precisions corresponding to recalls above the threshold 't' 270 | recall_thresholds = torch.arange(start=0, end=1.1, step=.1).tolist() # (11) 271 | precisions = torch.zeros((len(recall_thresholds)), dtype=torch.float).to(device) # (11) 272 | for i, t in enumerate(recall_thresholds): 273 | recalls_above_t = cumul_recall >= t 274 | if recalls_above_t.any(): 275 | precisions[i] = cumul_precision[recalls_above_t].max() 276 | else: 277 | precisions[i] = 0. 278 | average_precisions[c - 1] = precisions.mean() # c is in [1, n_classes - 1] 279 | 280 | # Calculate Mean Average Precision (mAP) 281 | mean_average_precision = average_precisions[average_precisions != 0].mean().item() 282 | 283 | # Keep class-wise average precisions in a dictionary 284 | average_precisions = {rev_label_map[c + 1]: v for c, v in enumerate(average_precisions.tolist()) if v > 0} 285 | 286 | return average_precisions, mean_average_precision 287 | 288 | 289 | def xy_to_cxcy(xy): 290 | """ 291 | Convert bounding boxes from boundary coordinates (x_min, y_min, x_max, y_max) to center-size coordinates (c_x, c_y, w, h). 292 | 293 | :param xy: bounding boxes in boundary coordinates, a tensor of size (n_boxes, 4) 294 | :return: bounding boxes in center-size coordinates, a tensor of size (n_boxes, 4) 295 | """ 296 | return torch.cat([(xy[:, 2:] + xy[:, :2]) / 2, # c_x, c_y 297 | xy[:, 2:] - xy[:, :2]], 1) # w, h 298 | 299 | 300 | def cxcy_to_xy(cxcy): 301 | """ 302 | Convert bounding boxes from center-size coordinates (c_x, c_y, w, h) to boundary coordinates (x_min, y_min, x_max, y_max). 303 | 304 | :param cxcy: bounding boxes in center-size coordinates, a tensor of size (n_boxes, 4) 305 | :return: bounding boxes in boundary coordinates, a tensor of size (n_boxes, 4) 306 | """ 307 | return torch.cat([cxcy[:, :2] - (cxcy[:, 2:] / 2), # x_min, y_min 308 | cxcy[:, :2] + (cxcy[:, 2:] / 2)], 1) # x_max, y_max 309 | 310 | 311 | def cxcy_to_gcxgcy(cxcy, priors_cxcy): 312 | """ 313 | Encode bounding boxes (that are in center-size form) w.r.t. the corresponding prior boxes (that are in center-size form). 314 | 315 | For the center coordinates, find the offset with respect to the prior box, and scale by the size of the prior box. 316 | For the size coordinates, scale by the size of the prior box, and convert to the log-space. 317 | 318 | In the model, we are predicting bounding box coordinates in this encoded form. 319 | 320 | :param cxcy: bounding boxes in center-size coordinates, a tensor of size (n_priors, 4) 321 | :param priors_cxcy: prior boxes with respect to which the encoding must be performed, a tensor of size (n_priors, 4) 322 | :return: encoded bounding boxes, a tensor of size (n_priors, 4) 323 | """ 324 | 325 | # The 10 and 5 below are referred to as 'variances' in the original Caffe repo, completely empirical 326 | # They are for some sort of numerical conditioning, for 'scaling the localization gradient' 327 | # See https://github.com/weiliu89/caffe/issues/155 328 | return torch.cat([(cxcy[:, :2] - priors_cxcy[:, :2]) / (priors_cxcy[:, 2:] / 10), # g_c_x, g_c_y 329 | torch.log(cxcy[:, 2:] / priors_cxcy[:, 2:]) * 5], 1) # g_w, g_h 330 | 331 | 332 | def gcxgcy_to_cxcy(gcxgcy, priors_cxcy): 333 | """ 334 | Decode bounding box coordinates predicted by the model, since they are encoded in the form mentioned above. 335 | 336 | They are decoded into center-size coordinates. 337 | 338 | This is the inverse of the function above. 339 | 340 | :param gcxgcy: encoded bounding boxes, i.e. output of the model, a tensor of size (n_priors, 4) 341 | :param priors_cxcy: prior boxes with respect to which the encoding is defined, a tensor of size (n_priors, 4) 342 | :return: decoded bounding boxes in center-size form, a tensor of size (n_priors, 4) 343 | """ 344 | 345 | return torch.cat([gcxgcy[:, :2] * priors_cxcy[:, 2:] / 10 + priors_cxcy[:, :2], # c_x, c_y 346 | torch.exp(gcxgcy[:, 2:] / 5) * priors_cxcy[:, 2:]], 1) # w, h 347 | 348 | 349 | def find_intersection(set_1, set_2): 350 | """ 351 | Find the intersection of every box combination between two sets of boxes that are in boundary coordinates. 352 | 353 | :param set_1: set 1, a tensor of dimensions (n1, 4) 354 | :param set_2: set 2, a tensor of dimensions (n2, 4) 355 | :return: intersection of each of the boxes in set 1 with respect to each of the boxes in set 2, a tensor of dimensions (n1, n2) 356 | """ 357 | 358 | # PyTorch auto-broadcasts singleton dimensions 359 | # 获取交界区域的左上角和右下角坐标 360 | lower_bounds = torch.max(set_1[:, :2].unsqueeze(1), set_2[:, :2].unsqueeze(0)) # (n1, n2, 2) 361 | upper_bounds = torch.min(set_1[:, 2:].unsqueeze(1), set_2[:, 2:].unsqueeze(0)) # (n1, n2, 2) 362 | # 获取交界区域的宽和高 363 | intersection_dims = torch.clamp(upper_bounds - lower_bounds, min=0) # (n1, n2, 2) 364 | return intersection_dims[:, :, 0] * intersection_dims[:, :, 1] # (n1, n2) 365 | 366 | 367 | def find_jaccard_overlap(set_1, set_2): 368 | """ 369 | Find the Jaccard Overlap (IoU) of every box combination between two sets of boxes that are in boundary coordinates. 370 | 371 | :param set_1: set 1, a tensor of dimensions (n1, 4) 372 | :param set_2: set 2, a tensor of dimensions (n2, 4) 373 | :return: Jaccard Overlap of each of the boxes in set 1 with respect to each of the boxes in set 2, a tensor of dimensions (n1, n2) 374 | """ 375 | 376 | # Find intersections 377 | intersection = find_intersection(set_1, set_2) # (n1, n2) 378 | 379 | # Find areas of each box in both sets 380 | areas_set_1 = (set_1[:, 2] - set_1[:, 0]) * (set_1[:, 3] - set_1[:, 1]) # (n1) 381 | areas_set_2 = (set_2[:, 2] - set_2[:, 0]) * (set_2[:, 3] - set_2[:, 1]) # (n2) 382 | 383 | # Find the union 384 | # PyTorch auto-broadcasts singleton dimensions 385 | union = areas_set_1.unsqueeze(1) + areas_set_2.unsqueeze(0) - intersection # (n1, n2) 386 | 387 | return intersection / union # (n1, n2) 388 | 389 | 390 | # Some augmentation functions below have been adapted from 391 | # From https://github.com/amdegroot/ssd.pytorch/blob/master/utils/augmentations.py 392 | 393 | def expand(image, boxes, filler): 394 | """ 395 | Perform a zooming out operation by placing the image in a larger canvas of filler material. 396 | 397 | Helps to learn to detect smaller objects. 398 | 399 | :param image: image, a tensor of dimensions (3, original_h, original_w) 400 | :param boxes: bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4) 401 | :param filler: RBG values of the filler material, a list like [R, G, B] 402 | :return: expanded image, updated bounding box coordinates 403 | """ 404 | # Calculate dimensions of proposed expanded (zoomed-out) image 405 | original_h = image.size(1) 406 | original_w = image.size(2) 407 | max_scale = 4 408 | scale = random.uniform(1, max_scale) 409 | new_h = int(scale * original_h) 410 | new_w = int(scale * original_w) 411 | 412 | # Create such an image with the filler 413 | filler = torch.FloatTensor(filler) # (3) 414 | new_image = torch.ones((3, new_h, new_w), dtype=torch.float) * filler.unsqueeze(1).unsqueeze(1) # (3, new_h, new_w) 415 | # Note - do not use expand() like new_image = filler.unsqueeze(1).unsqueeze(1).expand(3, new_h, new_w) 416 | # because all expanded values will share the same memory, so changing one pixel will change all 417 | 418 | # Place the original image at random coordinates in this new image (origin at top-left of image) 419 | left = random.randint(0, new_w - original_w) 420 | right = left + original_w 421 | top = random.randint(0, new_h - original_h) 422 | bottom = top + original_h 423 | new_image[:, top:bottom, left:right] = image 424 | 425 | # Adjust bounding boxes' coordinates accordingly 426 | new_boxes = boxes + torch.FloatTensor([left, top, left, top]).unsqueeze( 427 | 0) # (n_objects, 4), n_objects is the no. of objects in this image 428 | 429 | return new_image, new_boxes 430 | 431 | 432 | def random_crop(image, boxes, labels, difficulties): 433 | """ 434 | Performs a random crop in the manner stated in the paper. Helps to learn to detect larger and partial objects. 435 | 436 | Note that some objects may be cut out entirely. 437 | 438 | Adapted from https://github.com/amdegroot/ssd.pytorch/blob/master/utils/augmentations.py 439 | 440 | :param image: image, a tensor of dimensions (3, original_h, original_w) 441 | :param boxes: bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4) 442 | :param labels: labels of objects, a tensor of dimensions (n_objects) 443 | :param difficulties: difficulties of detection of these objects, a tensor of dimensions (n_objects) 444 | :return: cropped image, updated bounding box coordinates, updated labels, updated difficulties 445 | """ 446 | original_h = image.size(1) 447 | original_w = image.size(2) 448 | # Keep choosing a minimum overlap until a successful crop is made 449 | while True: 450 | # Randomly draw the value for minimum overlap 451 | min_overlap = random.choice([0., .1, .3, .5, .7, .9, None]) # 'None' refers to no cropping 452 | 453 | # If not cropping 454 | if min_overlap is None: 455 | return image, boxes, labels, difficulties 456 | 457 | # Try up to 50 times for this choice of minimum overlap 458 | # This isn't mentioned in the paper, of course, but 50 is chosen in paper authors' original Caffe repo 459 | max_trials = 50 460 | for _ in range(max_trials): 461 | # Crop dimensions must be in [0.3, 1] of original dimensions 462 | # Note - it's [0.1, 1] in the paper, but actually [0.3, 1] in the authors' repo 463 | min_scale = 0.3 464 | scale_h = random.uniform(min_scale, 1) 465 | scale_w = random.uniform(min_scale, 1) 466 | new_h = int(scale_h * original_h) 467 | new_w = int(scale_w * original_w) 468 | 469 | # Aspect ratio has to be in [0.5, 2] 470 | aspect_ratio = new_h / new_w 471 | if not 0.5 < aspect_ratio < 2: 472 | continue 473 | 474 | # Crop coordinates (origin at top-left of image) 475 | left = random.randint(0, original_w - new_w) 476 | right = left + new_w 477 | top = random.randint(0, original_h - new_h) 478 | bottom = top + new_h 479 | crop = torch.FloatTensor([left, top, right, bottom]) # (4) 480 | 481 | # Calculate Jaccard overlap between the crop and the bounding boxes 482 | overlap = find_jaccard_overlap(crop.unsqueeze(0), 483 | boxes) # (1, n_objects), n_objects is the no. of objects in this image 484 | overlap = overlap.squeeze(0) # (n_objects) 485 | 486 | # If not a single bounding box has a Jaccard overlap of greater than the minimum, try again 487 | if overlap.max().item() < min_overlap: 488 | continue 489 | 490 | # Crop image 491 | new_image = image[:, top:bottom, left:right] # (3, new_h, new_w) 492 | 493 | # Find centers of original bounding boxes 494 | bb_centers = (boxes[:, :2] + boxes[:, 2:]) / 2. # (n_objects, 2) 495 | 496 | # Find bounding boxes whose centers are in the crop 497 | centers_in_crop = (bb_centers[:, 0] > left) * (bb_centers[:, 0] < right) * (bb_centers[:, 1] > top) * (bb_centers[:, 1] < bottom) # (n_objects), a Torch uInt8/Byte tensor, can be used as a boolean index 498 | 499 | # If not a single bounding box has its center in the crop, try again 500 | if not centers_in_crop.any(): 501 | continue 502 | 503 | # Discard bounding boxes that don't meet this criterion 504 | new_boxes = boxes[centers_in_crop, :] 505 | new_labels = labels[centers_in_crop] 506 | new_difficulties = difficulties[centers_in_crop] 507 | 508 | # Calculate bounding boxes' new coordinates in the crop 509 | new_boxes[:, :2] = torch.max(new_boxes[:, :2], crop[:2]) # crop[:2] is [left, top] 510 | new_boxes[:, :2] -= crop[:2] 511 | new_boxes[:, 2:] = torch.min(new_boxes[:, 2:], crop[2:]) # crop[2:] is [right, bottom] 512 | new_boxes[:, 2:] -= crop[:2] 513 | 514 | return new_image, new_boxes, new_labels, new_difficulties 515 | 516 | 517 | def flip(image, boxes): 518 | """ 519 | Flip image horizontally. 520 | 521 | :param image: image, a PIL Image 522 | :param boxes: bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4) 523 | :return: flipped image, updated bounding box coordinates 524 | """ 525 | # Flip image 526 | new_image = FT.hflip(image) 527 | 528 | # Flip boxes 529 | new_boxes = boxes 530 | new_boxes[:, 0] = image.width - boxes[:, 0] - 1 531 | new_boxes[:, 2] = image.width - boxes[:, 2] - 1 532 | new_boxes = new_boxes[:, [2, 1, 0, 3]] 533 | 534 | return new_image, new_boxes 535 | 536 | 537 | def resize(image, boxes, dims=(300, 300), return_percent_coords=True): 538 | """ 539 | Resize image. For the SSD300, resize to (300, 300). 540 | 541 | Since percent/fractional coordinates are calculated for the bounding boxes (w.r.t image dimensions) in this process, 542 | you may choose to retain them. 543 | 544 | :param image: image, a PIL Image 545 | :param boxes: bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4) 546 | :return: resized image, updated bounding box coordinates (or fractional coordinates, in which case they remain the same) 547 | """ 548 | # Resize image 549 | new_image = FT.resize(image, dims) 550 | 551 | # Resize bounding boxes 552 | old_dims = torch.FloatTensor([image.width, image.height, image.width, image.height]).unsqueeze(0) 553 | new_boxes = boxes / old_dims # percent coordinates 554 | 555 | if not return_percent_coords: 556 | new_dims = torch.FloatTensor([dims[1], dims[0], dims[1], dims[0]]).unsqueeze(0) 557 | new_boxes = new_boxes * new_dims 558 | 559 | return new_image, new_boxes 560 | 561 | 562 | def photometric_distort(image): 563 | """ 564 | Distort brightness, contrast, saturation, and hue, each with a 50% chance, in random order. 565 | 566 | :param image: image, a PIL Image 567 | :return: distorted image 568 | """ 569 | new_image = image 570 | 571 | distortions = [FT.adjust_brightness, 572 | FT.adjust_contrast, 573 | FT.adjust_saturation, 574 | FT.adjust_hue] 575 | 576 | random.shuffle(distortions) 577 | 578 | for d in distortions: 579 | if random.random() < 0.5: 580 | if d.__name__ == 'adjust_hue': 581 | # Caffe repo uses a 'hue_delta' of 18 - we divide by 255 because PyTorch needs a normalized value 582 | adjust_factor = random.uniform(-18 / 255., 18 / 255.) 583 | else: 584 | # Caffe repo uses 'lower' and 'upper' values of 0.5 and 1.5 for brightness, contrast, and saturation 585 | adjust_factor = random.uniform(0.5, 1.5) 586 | 587 | # Apply this distortion 588 | new_image = d(new_image, adjust_factor) 589 | 590 | return new_image 591 | 592 | 593 | def transform(image, boxes, labels, difficulties, split): 594 | """ 595 | Apply the transformations above. 596 | 597 | :param image: image, a PIL Image 598 | :param boxes: bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4) 599 | :param labels: labels of objects, a tensor of dimensions (n_objects) 600 | :param difficulties: difficulties of detection of these objects, a tensor of dimensions (n_objects) 601 | :param split: one of 'TRAIN' or 'TEST', since different sets of transformations are applied 602 | :return: transformed image, transformed bounding box coordinates, transformed labels, transformed difficulties 603 | """ 604 | assert split in {'TRAIN', 'TEST'} 605 | 606 | # Mean and standard deviation of ImageNet data that our base VGG from torchvision was trained on 607 | # see: https://pytorch.org/docs/stable/torchvision/models.html 608 | mean = [0.485, 0.456, 0.406] 609 | std = [0.229, 0.224, 0.225] 610 | 611 | new_image = image 612 | new_boxes = boxes 613 | new_labels = labels 614 | new_difficulties = difficulties 615 | # Skip the following operations if validation/evaluation 616 | if split == 'TRAIN': 617 | # A series of photometric distortions in random order, each with 50% chance of occurrence, as in Caffe repo 618 | new_image = photometric_distort(new_image) 619 | 620 | # Convert PIL image to Torch tensor 621 | new_image = FT.to_tensor(new_image) 622 | 623 | # Expand image (zoom out) with a 50% chance - helpful for training detection of small objects 624 | # Fill surrounding space with the mean of ImageNet data that our base VGG was trained on 625 | if random.random() < 0.5: 626 | new_image, new_boxes = expand(new_image, boxes, filler=mean) 627 | 628 | # Randomly crop image (zoom in) 629 | new_image, new_boxes, new_labels, new_difficulties = random_crop(new_image, new_boxes, new_labels, 630 | new_difficulties) 631 | 632 | # Convert Torch tensor to PIL image 633 | new_image = FT.to_pil_image(new_image) 634 | 635 | # Flip image with a 50% chance 636 | if random.random() < 0.5: 637 | new_image, new_boxes = flip(new_image, new_boxes) 638 | 639 | # Resize image to (300, 300) - this also converts absolute boundary coordinates to their fractional form 640 | new_image, new_boxes = resize(new_image, new_boxes, dims=(300, 300)) 641 | 642 | # Convert PIL image to Torch tensor 643 | new_image = FT.to_tensor(new_image) 644 | 645 | # Normalize by mean and standard deviation of ImageNet data that our base VGG was trained on 646 | new_image = FT.normalize(new_image, mean=mean, std=std) 647 | 648 | return new_image, new_boxes, new_labels, new_difficulties 649 | 650 | 651 | def adjust_learning_rate(optimizer, scale): 652 | """ 653 | Scale learning rate by a specified factor. 654 | 655 | :param optimizer: optimizer whose learning rate must be shrunk. 656 | :param scale: factor to multiply learning rate with. 657 | """ 658 | for param_group in optimizer.param_groups: 659 | param_group['lr'] = param_group['lr'] * scale 660 | print("DECAYING learning rate.\n The new LR is %f\n" % (optimizer.param_groups[1]['lr'],)) 661 | 662 | 663 | def accuracy(scores, targets, k): 664 | """ 665 | Computes top-k accuracy, from predicted and true labels. 666 | 667 | :param scores: scores from the model 668 | :param targets: true labels 669 | :param k: k in top-k accuracy 670 | :return: top-k accuracy 671 | """ 672 | batch_size = targets.size(0) 673 | _, ind = scores.topk(k, 1, True, True) 674 | correct = ind.eq(targets.view(-1, 1).expand_as(ind)) 675 | correct_total = correct.view(-1).float().sum() # 0D tensor 676 | return correct_total.item() * (100.0 / batch_size) 677 | 678 | 679 | def save_checkpoint(model, filename): 680 | """ 681 | Save model checkpoint. 682 | :param epoch: epoch number 683 | :param model: model 684 | :param optimizer: optimizer 685 | """ 686 | state = {'model': model.state_dict()} 687 | torch.save(state, filename + '.pth.tar') 688 | 689 | 690 | class AverageMeter(object): 691 | """ 692 | Keeps track of most recent, average, sum, and count of a metric. 693 | """ 694 | 695 | def __init__(self): 696 | self.reset() 697 | 698 | def reset(self): 699 | self.val = 0 700 | self.avg = 0 701 | self.sum = 0 702 | self.count = 0 703 | 704 | def update(self, val, n=1): 705 | self.val = val 706 | self.sum += val * n 707 | self.count += n 708 | self.avg = self.sum / self.count 709 | 710 | 711 | def clip_gradient(optimizer, grad_clip): 712 | """ 713 | Clips gradients computed during backpropagation to avoid explosion of gradients. 714 | 715 | :param optimizer: optimizer with the gradients to be clipped 716 | :param grad_clip: clip value 717 | """ 718 | for group in optimizer.param_groups: 719 | for param in group['params']: 720 | if param.grad is not None: 721 | param.grad.data.clamp_(-grad_clip, grad_clip) 722 | --------------------------------------------------------------------------------