├── .gitignore ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── __pycache__ ├── convlstm.cpython-37.pyc ├── loss.cpython-36.pyc ├── loss.cpython-37.pyc ├── utility.cpython-36.pyc └── utility.cpython-37.pyc ├── dataset ├── __pycache__ │ ├── dataset.cpython-36.pyc │ └── pre_process.cpython-36.pyc ├── dataset.py ├── pre_process.py ├── train_names.txt └── valid_names.txt ├── loss.py ├── main.py ├── net ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-36.pyc │ ├── __init__.cpython-37.pyc │ ├── adamatting.cpython-36.pyc │ ├── adamatting.cpython-37.pyc │ ├── gcn.cpython-36.pyc │ ├── gcn.cpython-37.pyc │ ├── propunit.cpython-36.pyc │ ├── propunit.cpython-37.pyc │ ├── resblock.cpython-36.pyc │ └── resblock.cpython-37.pyc ├── adamatting.py ├── gcn.py ├── propunit.py └── resblock.py ├── train.sh └── utility.py /.gitignore: -------------------------------------------------------------------------------- 1 | log.txt 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.pythonPath": "/home/zhouruifeng/.conda/envs/pytorch/bin/python", 3 | "python.linting.pylintArgs": [ 4 | "--extension-pkg-whitelist=torch", 5 | "--extension-pkg-whitelist=cv2", 6 | "--ignored-modules=torch", 7 | "--ignored-classes=torch", 8 | "--disable=C,R,W", 9 | "--enable=unreachable", 10 | "--enable=duplicate-key", 11 | "--enable=unnecessary-semicolon", 12 | "--enable=global-variable-not-assigned", 13 | "--enable=unused-variable", 14 | "--enable=binary-op-exception", 15 | "--enable=bad-format-string", 16 | "--enable=anomalous-backslash-in-string", 17 | "--enable=bad-open-mode" 18 | ], 19 | "python.linting.pylintEnabled": true, 20 | "python.linting.enabled": true 21 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Ruifeng-Zhou24 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AdaMatting 2 | a pytorch implementation of ICCV 2019 paper "Disentangled Image Matting" 3 | this project is not finished 4 | -------------------------------------------------------------------------------- /__pycache__/convlstm.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/__pycache__/convlstm.cpython-37.pyc -------------------------------------------------------------------------------- /__pycache__/loss.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/__pycache__/loss.cpython-36.pyc -------------------------------------------------------------------------------- /__pycache__/loss.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/__pycache__/loss.cpython-37.pyc -------------------------------------------------------------------------------- /__pycache__/utility.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/__pycache__/utility.cpython-36.pyc -------------------------------------------------------------------------------- /__pycache__/utility.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/__pycache__/utility.cpython-37.pyc -------------------------------------------------------------------------------- /dataset/__pycache__/dataset.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/dataset/__pycache__/dataset.cpython-36.pyc -------------------------------------------------------------------------------- /dataset/__pycache__/pre_process.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/dataset/__pycache__/pre_process.cpython-36.pyc -------------------------------------------------------------------------------- /dataset/dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import math 3 | import random 4 | import cv2 as cv 5 | import numpy as np 6 | import torch 7 | from torch.utils.data import Dataset 8 | from torchvision import transforms 9 | 10 | 11 | data_transforms = { 12 | # values from ImageNet 13 | 'train': transforms.Compose([ 14 | transforms.ColorJitter(brightness=0.125, contrast=0.125, saturation=0.125), 15 | transforms.ToTensor(), 16 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), 17 | ]), 18 | 'valid': transforms.Compose([ 19 | transforms.ToTensor(), 20 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) 21 | ]), 22 | } 23 | 24 | 25 | class AdaMattingDataset(Dataset): 26 | 27 | def __init__(self, raw_data_path, mode): 28 | self.crop_size = 320 29 | self.unknown_code = 128 30 | self.mode = mode 31 | self.raw_data_path = raw_data_path 32 | 33 | self.fg_path = os.path.join(self.raw_data_path, "train/fg/") 34 | self.bg_path = os.path.join(self.raw_data_path, "train/bg/") 35 | self.a_path = os.path.join(self.raw_data_path, "train/mask/") 36 | 37 | self.transformer = data_transforms[self.mode] 38 | 39 | with open(os.path.join(self.raw_data_path, "Combined_Dataset/Training_set/training_fg_names.txt")) as f: 40 | self.fg_files = f.read().splitlines() 41 | with open(os.path.join(self.raw_data_path, "Combined_Dataset/Training_set/training_bg_names.txt")) as f: 42 | self.bg_files = f.read().splitlines() 43 | 44 | filename = "dataset/{}_names.txt".format(self.mode) 45 | with open(filename, 'r') as file: 46 | self.names = file.read().splitlines() 47 | 48 | 49 | def __len__(self): 50 | return len(self.names) 51 | 52 | 53 | def composite4(self, fg, bg, a, w, h): 54 | fg = np.array(fg, np.float32) 55 | bg_h, bg_w = bg.shape[:2] 56 | x = 0 57 | if bg_w > w: 58 | x = np.random.randint(0, bg_w - w) 59 | y = 0 60 | if bg_h > h: 61 | y = np.random.randint(0, bg_h - h) 62 | bg = np.array(bg[y:y + h, x:x + w], np.float32) 63 | alpha = np.zeros((h, w, 1), np.float32) 64 | alpha[:, :, 0] = a / 255. 65 | im = alpha * fg + (1 - alpha) * bg 66 | im = im.astype(np.uint8) 67 | return im, a, fg, bg 68 | 69 | 70 | def process(self, im_name, bg_name): 71 | im = cv.imread(self.fg_path + im_name) 72 | a = cv.imread(self.a_path + im_name, 0) 73 | h, w = im.shape[:2] 74 | bg = cv.imread(self.bg_path + bg_name) 75 | bh, bw = bg.shape[:2] 76 | wratio = w / bw 77 | hratio = h / bh 78 | ratio = wratio if wratio > hratio else hratio 79 | if ratio > 1: 80 | bg = cv.resize(src=bg, dsize=(math.ceil(bw * ratio), math.ceil(bh * ratio)), interpolation=cv.INTER_CUBIC) 81 | 82 | return self.composite4(im, bg, a, w, h) 83 | 84 | 85 | def __getitem__(self, index): 86 | name = self.names[index] 87 | fcount = int(name.split('.')[0].split('_')[0]) 88 | bcount = int(name.split('.')[0].split('_')[1]) 89 | im_name = self.fg_files[fcount] 90 | bg_name = self.bg_files[bcount] 91 | img, alpha, _, _ = self.process(im_name, bg_name) 92 | 93 | different_sizes = [(320, 320), (800, 800)] 94 | crop_size = random.choice(different_sizes) 95 | 96 | trimap = self.gen_trimap(alpha) 97 | x, y = self.random_choice(trimap, crop_size) 98 | img = self.safe_crop(img, x, y, crop_size) 99 | alpha = self.safe_crop(alpha, x, y, crop_size) 100 | 101 | trimap = self.gen_trimap(alpha) 102 | 103 | # Flip array left to right randomly (prob=1:1) 104 | if np.random.random_sample() > 0.5: 105 | img = np.fliplr(img) 106 | trimap = np.fliplr(trimap) 107 | alpha = np.fliplr(alpha) 108 | 109 | x = torch.zeros((4, self.crop_size, self.crop_size), dtype=torch.float) 110 | img = img[..., ::-1] # RGB 111 | img = transforms.ToPILImage()(img) 112 | img = self.transformer(img) 113 | x[0:3, :, :] = img 114 | x[3, :, :] = torch.from_numpy(trimap.copy() / 255.) 115 | 116 | y = np.empty((2, self.crop_size, self.crop_size), dtype=np.float32) 117 | y[0, :, :] = alpha / 255. 118 | # mask = np.equal(trimap, 128).astype(np.float32) 119 | """ 120 | pred_trimap_argmax 121 | 0: background 122 | 1: unknown 123 | 2: foreground 124 | """ 125 | mask = np.zeros(alpha.shape) 126 | mask.fill(1) 127 | mask[alpha <= 0] = 0 128 | mask[alpha >= 255] = 2 129 | y[1, :, :] = mask 130 | return x, y 131 | 132 | 133 | def gen_trimap(self, alpha): 134 | k_size = random.choice(range(1, 5)) 135 | iterations = np.random.randint(1, 20) 136 | kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (k_size, k_size)) 137 | dilated = cv.dilate(alpha, kernel, iterations) 138 | eroded = cv.erode(alpha, kernel, iterations) 139 | trimap = np.zeros(alpha.shape) 140 | trimap.fill(128) 141 | trimap[eroded >= 255] = 255 142 | trimap[dilated <= 0] = 0 143 | return trimap 144 | 145 | 146 | # Randomly crop (image, trimap) pairs centered on pixels in the unknown regions. 147 | def random_choice(self, trimap, crop_size=(320, 320)): 148 | crop_height, crop_width = crop_size 149 | y_indices, x_indices = np.where(trimap == self.unknown_code) 150 | num_unknowns = len(y_indices) 151 | x, y = 0, 0 152 | if num_unknowns > 0: 153 | ix = np.random.choice(range(num_unknowns)) 154 | center_x = x_indices[ix] 155 | center_y = y_indices[ix] 156 | x = max(0, center_x - int(crop_width / 2)) 157 | y = max(0, center_y - int(crop_height / 2)) 158 | return x, y 159 | 160 | 161 | def safe_crop(self, mat, x, y, crop_size): 162 | crop_height, crop_width = crop_size 163 | if len(mat.shape) == 2: 164 | ret = np.zeros((crop_height, crop_width), np.uint8) 165 | else: 166 | ret = np.zeros((crop_height, crop_width, 3), np.uint8) 167 | crop = mat[y:y + crop_height, x:x + crop_width] 168 | h, w = crop.shape[:2] 169 | ret[0:h, 0:w] = crop 170 | if crop_size != (self.crop_size, self.crop_size): 171 | ret = cv.resize(ret, dsize=(self.crop_size, self.crop_size), interpolation=cv.INTER_NEAREST) 172 | return ret -------------------------------------------------------------------------------- /dataset/pre_process.py: -------------------------------------------------------------------------------- 1 | import os 2 | import math 3 | import shutil 4 | import zipfile 5 | import tarfile 6 | import numpy as np 7 | import cv2 as cv 8 | from tqdm import tqdm 9 | from multiprocessing import Pool 10 | from PIL import Image 11 | import random 12 | 13 | 14 | def composite4(fg, bg, a, w, h): 15 | fg = np.array(fg, np.float32) 16 | bg = np.array(bg[0:h, 0:w], np.float32) 17 | alpha = np.zeros((h, w, 1), np.float32) 18 | alpha[:, :, 0] = a / 255. 19 | comp = alpha * fg + (1 - alpha) * bg 20 | comp = comp.astype(np.uint8) 21 | return comp 22 | 23 | 24 | def process(raw_data_path, im_name, bg_name, fcount, bcount, mode): 25 | fg_path = os.path.join(raw_data_path, '{}/fg/'.format(mode)) 26 | a_path = os.path.join(raw_data_path, '{}/mask/'.format(mode)) 27 | bg_path = os.path.join(raw_data_path, '{}/bg/'.format(mode)) 28 | out_path = os.path.join(raw_data_path, '{}/merged/'.format(mode)) 29 | 30 | im = cv.imread(fg_path + im_name) 31 | a = cv.imread(a_path + im_name, 0) 32 | h, w = im.shape[:2] 33 | bg = cv.imread(bg_path + bg_name) 34 | bh, bw = bg.shape[:2] 35 | wratio = w / bw 36 | hratio = h / bh 37 | ratio = wratio if wratio > hratio else hratio 38 | if ratio > 1: 39 | bg = cv.resize(src=bg, dsize=(math.ceil(bw * ratio), math.ceil(bh * ratio)), interpolation=cv.INTER_CUBIC) 40 | 41 | out = composite4(im, bg, a, w, h) 42 | if mode == "train": 43 | filename = out_path + str(fcount) + '_' + str(bcount) + '.png' 44 | elif mode == "test": 45 | filename = out_path + bg_name.split('.')[0]+'!'+im_name.split('.')[0]+'!'+ str(fcount) + '!' + str(bcount) + '.png' 46 | cv.imwrite(filename, out) 47 | 48 | 49 | def process_one_fg(params): 50 | fcount = params[0] 51 | raw_data_path = params[1] 52 | num_bgs = params[2] 53 | fg_files = params[3] 54 | mode = params[4] 55 | 56 | folder = 'Training_set' if mode == 'train' else 'Test_set' 57 | txt_name = 'training_bg_names' if mode == 'train' else 'test_bg_names' 58 | with open(os.path.join(raw_data_path, 'Combined_Dataset', folder, '{}.txt'.format(txt_name))) as f: 59 | bg_files = f.read().splitlines() 60 | 61 | im_name = fg_files[fcount] 62 | bcount = fcount * num_bgs 63 | 64 | for _ in range(num_bgs): 65 | bg_name = bg_files[bcount] 66 | process(raw_data_path, im_name, bg_name, fcount, bcount, mode) 67 | bcount += 1 68 | 69 | 70 | def do_composite(raw_data_path, num_bgs, mode): 71 | folder = 'Training_set' if mode == 'train' else 'Test_set' 72 | txt_name = 'training_fg_names' if mode == 'train' else 'test_fg_names' 73 | with open(os.path.join(raw_data_path, 'Combined_Dataset', folder, '{}.txt').format(txt_name)) as f: 74 | fg_files = f.read().splitlines() 75 | 76 | with Pool(processes=16) as p: 77 | max_ = len(fg_files) 78 | params = [] 79 | for i in range(max_): 80 | params.append([i, raw_data_path, num_bgs, fg_files, mode]) 81 | with tqdm(total=max_) as pbar: 82 | for _, _ in tqdm(enumerate(p.imap_unordered(process_one_fg, params))): 83 | pbar.update() 84 | 85 | 86 | def composite_dataset(raw_data_path, logger): 87 | # Path to provided foreground images 88 | fg_path = os.path.join(raw_data_path, 'train/fg/') 89 | # Path to provided alpha mattes 90 | a_path = os.path.join(raw_data_path, 'train/mask/') 91 | # Path to background images (MSCOCO) 92 | bg_path = os.path.join(raw_data_path, 'train/bg/') 93 | # Path to folder where you want the composited images to go 94 | out_path = os.path.join(raw_data_path, 'train/merged/') 95 | 96 | train_folder = os.path.join(raw_data_path, 'Combined_Dataset/Training_set/') 97 | 98 | # Extract Adobe dataset 99 | if not os.path.exists(os.path.join(raw_data_path, 'Combined_Dataset/')): 100 | zip_file = os.path.join(raw_data_path, 'Adobe_Deep_Matting_Dataset.zip') 101 | logger.info('Extracting Adobe_Deep_Matting_Dataset.zip') 102 | zip_ref = zipfile.ZipFile(zip_file, 'r') 103 | zip_ref.extractall(raw_data_path) 104 | zip_ref.close() 105 | 106 | # Extract train2014 107 | if not os.path.exists(os.path.join(raw_data_path, 'train2014/')): 108 | zip_file = os.path.join(raw_data_path, 'train2014.zip') 109 | logger.info('Extracting train2014.zip') 110 | zip_ref = zipfile.ZipFile(zip_file, 'r') 111 | zip_ref.extractall(raw_data_path) 112 | zip_ref.close() 113 | 114 | # Move training background images into designated folder 115 | if not os.path.exists(bg_path): 116 | logger.info('Moving training background images into designated folder') 117 | with open(os.path.join(train_folder, 'training_bg_names.txt')) as f: 118 | training_bg_names = f.read().splitlines() 119 | os.makedirs(bg_path) 120 | for bg_name in training_bg_names: 121 | src_path = os.path.join(raw_data_path, 'train2014', bg_name) 122 | dest_path = os.path.join(bg_path, bg_name) 123 | shutil.move(src_path, dest_path) 124 | 125 | # Move training foreground images into designated folder 126 | if not os.path.exists(fg_path): 127 | logger.info('Moving training foreground images into designated folder') 128 | os.makedirs(fg_path) 129 | for old_folder in [train_folder + 'Adobe-licensed images/fg', train_folder + 'Other/fg']: 130 | fg_files = os.listdir(old_folder) 131 | for fg_file in fg_files: 132 | src_path = os.path.join(old_folder, fg_file) 133 | dest_path = os.path.join(fg_path, fg_file) 134 | shutil.move(src_path, dest_path) 135 | 136 | # Move training alpha images into designated folder 137 | if not os.path.exists(a_path): 138 | logger.info('Moving training alpha images into designated folder') 139 | os.makedirs(a_path) 140 | for old_folder in [train_folder + 'Adobe-licensed images/alpha', train_folder + 'Other/alpha']: 141 | a_files = os.listdir(old_folder) 142 | for a_file in a_files: 143 | src_path = os.path.join(old_folder, a_file) 144 | dest_path = os.path.join(a_path, a_file) 145 | shutil.move(src_path, dest_path) 146 | 147 | # Make the folder for composited training images 148 | if not os.path.exists(out_path): 149 | os.makedirs(out_path) 150 | logger.info('Compositing training images') 151 | do_composite(raw_data_path, 100, "train") 152 | logger.info('Training images composited') 153 | 154 | # Path to provided foreground images 155 | fg_test_path = os.path.join(raw_data_path, 'test/fg/') 156 | # Path to provided alpha mattes 157 | a_test_path = os.path.join(raw_data_path, 'test/mask/') 158 | # Path to background images (PASCAL VOC) 159 | bg_test_path = os.path.join(raw_data_path, 'test/bg/') 160 | # Path to folder where you want the composited images to go 161 | out_test_path = os.path.join(raw_data_path, 'test/merged/') 162 | 163 | test_folder = os.path.join(raw_data_path, 'Combined_Dataset/Test_set/') 164 | 165 | if not os.path.exists(os.path.join(raw_data_path, 'VOCdevkit')): 166 | # Extract VOCtrainval 167 | tar_file = os.path.join(raw_data_path, 'VOCtrainval_14-Jul-2008.tar') 168 | logger.info('Extracting VOCtrainval_14-Jul-2008.tar') 169 | tar = tarfile.open(tar_file) 170 | tar.extractall(raw_data_path) 171 | tar.close() 172 | # Extract VOCtest 173 | tar_file = os.path.join(raw_data_path, 'VOC2008test.tar') 174 | logger.info('Extracting VOC2008test.tar') 175 | tar = tarfile.open(tar_file) 176 | tar.extractall(raw_data_path) 177 | tar.close() 178 | 179 | # Move testing background images into designaed folder 180 | if not os.path.exists(bg_test_path): 181 | logger.info('Moving testing background images into designated folder') 182 | os.makedirs(bg_test_path) 183 | with open(os.path.join(test_folder, 'test_bg_names.txt')) as f: 184 | test_bg_names = f.read().splitlines() 185 | 186 | for bg_name in test_bg_names: 187 | _ = bg_name.split('_') 188 | src_path = os.path.join(raw_data_path, 'VOCdevkit/VOC2008/JPEGImages', bg_name) 189 | dest_path = os.path.join(bg_test_path, bg_name) 190 | shutil.move(src_path, dest_path) 191 | 192 | # Move testing foreground images into designated folder 193 | if not os.path.exists(fg_test_path): 194 | logger.info('Moving testing foreground images into designated folder') 195 | os.makedirs(fg_test_path) 196 | for old_folder in [test_folder + 'Adobe-licensed images/fg']: 197 | fg_files = os.listdir(old_folder) 198 | for fg_file in fg_files: 199 | src_path = os.path.join(old_folder, fg_file) 200 | dest_path = os.path.join(fg_test_path, fg_file) 201 | shutil.move(src_path, dest_path) 202 | 203 | # Move testing alpha images into desigated folder 204 | if not os.path.exists(a_test_path): 205 | logger.info('Moving testing alpha images into designated folder') 206 | os.makedirs(a_test_path) 207 | for old_folder in [test_folder + 'Adobe-licensed images/alpha']: 208 | a_files = os.listdir(old_folder) 209 | for a_file in a_files: 210 | src_path = os.path.join(old_folder, a_file) 211 | dest_path = os.path.join(a_test_path, a_file) 212 | shutil.move(src_path, dest_path) 213 | 214 | # Make the folder for composited testing images 215 | if not os.path.exists(out_test_path): 216 | os.makedirs(out_test_path) 217 | logger.info('Compositing testing images') 218 | do_composite(raw_data_path, 20, "test") 219 | logger.info('Testing images composited') 220 | 221 | 222 | def gen_train_valid_names(valid_portion, logger): 223 | logger.info("Start generating train/valid name files") 224 | num_fgs = 431 225 | # num_bgs = 43100 226 | num_bgs_per_fg = 100 227 | num_valid = int(valid_portion / 100 * 43100) 228 | 229 | names = [] 230 | bcount = 0 231 | for fcount in range(num_fgs): 232 | for _ in range(num_bgs_per_fg): 233 | names.append(str(fcount) + '_' + str(bcount) + '.png') 234 | bcount += 1 235 | 236 | valid_names = random.sample(names, num_valid) 237 | train_names = [n for n in names if n not in valid_names] 238 | 239 | with open('dataset/valid_names.txt', 'w') as file: 240 | file.write('\n'.join(valid_names)) 241 | 242 | with open('dataset/train_names.txt', 'w') as file: 243 | file.write('\n'.join(train_names)) 244 | logger.info("Generated train/valid name files") -------------------------------------------------------------------------------- /dataset/valid_names.txt: -------------------------------------------------------------------------------- 1 | 140_14015.png 2 | 427_42739.png 3 | 1_130.png 4 | 32_3287.png 5 | 34_3457.png 6 | 44_4432.png 7 | 137_13790.png 8 | 8_850.png 9 | 236_23638.png 10 | 91_9171.png 11 | 13_1302.png 12 | 238_23884.png 13 | 133_13330.png 14 | 148_14885.png 15 | 49_4968.png 16 | 120_12071.png 17 | 247_24779.png 18 | 349_34951.png 19 | 268_26832.png 20 | 383_38343.png 21 | 46_4641.png 22 | 178_17808.png 23 | 206_20632.png 24 | 348_34882.png 25 | 100_10094.png 26 | 191_19154.png 27 | 384_38435.png 28 | 234_23458.png 29 | 169_16973.png 30 | 395_39544.png 31 | 379_37965.png 32 | 417_41746.png 33 | 194_19404.png 34 | 159_15901.png 35 | 321_32116.png 36 | 58_5865.png 37 | 392_39263.png 38 | 254_25418.png 39 | 169_16988.png 40 | 141_14155.png 41 | 353_35316.png 42 | 331_33129.png 43 | 312_31243.png 44 | 253_25303.png 45 | 271_27179.png 46 | 75_7570.png 47 | 405_40595.png 48 | 155_15587.png 49 | 394_39475.png 50 | 323_32355.png 51 | 10_1062.png 52 | 352_35275.png 53 | 229_22973.png 54 | 397_39777.png 55 | 112_11241.png 56 | 305_30582.png 57 | 9_926.png 58 | 106_10685.png 59 | 71_7133.png 60 | 378_37893.png 61 | 225_22510.png 62 | 34_3488.png 63 | 207_20710.png 64 | 190_19048.png 65 | 240_24024.png 66 | 42_4294.png 67 | 229_22945.png 68 | 98_9830.png 69 | 103_10385.png 70 | 269_26993.png 71 | 270_27048.png 72 | 409_40988.png 73 | 341_34107.png 74 | 13_1386.png 75 | 20_2086.png 76 | 228_22821.png 77 | 373_37362.png 78 | 129_12904.png 79 | 159_15913.png 80 | 411_41198.png 81 | 185_18563.png 82 | 301_30182.png 83 | 326_32631.png 84 | 167_16782.png 85 | 159_15959.png 86 | 267_26725.png 87 | 3_323.png 88 | 110_11083.png 89 | 8_806.png 90 | 428_42819.png 91 | 281_28193.png 92 | 184_18465.png 93 | 304_30472.png 94 | 27_2714.png 95 | 366_36698.png 96 | 212_21244.png 97 | 215_21593.png 98 | 100_10066.png 99 | 87_8721.png 100 | 188_18862.png 101 | 123_12328.png 102 | 220_22079.png 103 | 12_1298.png 104 | 235_23586.png 105 | 42_4222.png 106 | 228_22800.png 107 | 239_23985.png 108 | 201_20149.png 109 | 156_15605.png 110 | 41_4148.png 111 | 398_39870.png 112 | 427_42779.png 113 | 391_39119.png 114 | 423_42361.png 115 | 296_29642.png 116 | 321_32152.png 117 | 414_41496.png 118 | 257_25795.png 119 | 58_5876.png 120 | 409_40918.png 121 | 169_16916.png 122 | 266_26670.png 123 | 37_3729.png 124 | 236_23621.png 125 | 420_42046.png 126 | 230_23008.png 127 | 291_29124.png 128 | 164_16479.png 129 | 421_42105.png 130 | 238_23898.png 131 | 159_15952.png 132 | 28_2836.png 133 | 167_16797.png 134 | 362_36212.png 135 | 102_10216.png 136 | 107_10789.png 137 | 177_17720.png 138 | 108_10821.png 139 | 280_28042.png 140 | 247_24736.png 141 | 154_15425.png 142 | 290_29045.png 143 | 391_39148.png 144 | 141_14133.png 145 | 295_29567.png 146 | 418_41830.png 147 | 212_21241.png 148 | 298_29817.png 149 | 263_26357.png 150 | 156_15650.png 151 | 275_27574.png 152 | 5_588.png 153 | 373_37399.png 154 | 258_25837.png 155 | 35_3567.png 156 | 356_35672.png 157 | 186_18600.png 158 | 134_13483.png 159 | 113_11315.png 160 | 286_28624.png 161 | 189_18958.png 162 | 247_24724.png 163 | 125_12596.png 164 | 244_24421.png 165 | 133_13350.png 166 | 253_25364.png 167 | 110_11071.png 168 | 331_33134.png 169 | 409_40952.png 170 | 201_20139.png 171 | 178_17858.png 172 | 158_15892.png 173 | 414_41454.png 174 | 351_35125.png 175 | 8_845.png 176 | 332_33228.png 177 | 319_31958.png 178 | 132_13263.png 179 | 155_15534.png 180 | 204_20408.png 181 | 191_19143.png 182 | 242_24232.png 183 | 305_30536.png 184 | 75_7555.png 185 | 105_10526.png 186 | 299_29952.png 187 | 265_26531.png 188 | 171_17165.png 189 | 94_9419.png 190 | 384_38468.png 191 | 385_38520.png 192 | 186_18611.png 193 | 202_20210.png 194 | 268_26849.png 195 | 33_3370.png 196 | 303_30350.png 197 | 124_12499.png 198 | 145_14587.png 199 | 277_27796.png 200 | 85_8526.png 201 | 226_22683.png 202 | 145_14560.png 203 | 246_24602.png 204 | 345_34579.png 205 | 247_24726.png 206 | 411_41124.png 207 | 254_25461.png 208 | 394_39444.png 209 | 179_17984.png 210 | 343_34333.png 211 | 232_23250.png 212 | 11_1152.png 213 | 19_1931.png 214 | 403_40313.png 215 | 254_25412.png 216 | 67_6720.png 217 | 112_11254.png 218 | 367_36768.png 219 | 50_5065.png 220 | 110_11036.png 221 | 141_14108.png 222 | 84_8451.png 223 | 82_8233.png 224 | 368_36812.png 225 | 323_32329.png 226 | 88_8897.png 227 | 172_17251.png 228 | 218_21853.png 229 | 67_6785.png 230 | 36_3623.png 231 | 90_9042.png 232 | 401_40149.png 233 | 413_41357.png 234 | 236_23698.png 235 | 58_5875.png 236 | 112_11216.png 237 | 380_38009.png 238 | 418_41849.png 239 | 250_25047.png 240 | 386_38698.png 241 | 284_28451.png 242 | 8_814.png 243 | 361_36123.png 244 | 184_18403.png 245 | 220_22096.png 246 | 330_33035.png 247 | 109_10985.png 248 | 197_19719.png 249 | 161_16138.png 250 | 206_20646.png 251 | 139_13945.png 252 | 59_5959.png 253 | 57_5701.png 254 | 104_10430.png 255 | 269_26923.png 256 | 162_16213.png 257 | 361_36175.png 258 | 238_23838.png 259 | 115_11513.png 260 | 150_15025.png 261 | 335_33542.png 262 | 358_35846.png 263 | 39_3983.png 264 | 271_27158.png 265 | 372_37289.png 266 | 135_13587.png 267 | 81_8137.png 268 | 351_35141.png 269 | 152_15210.png 270 | 151_15165.png 271 | 407_40776.png 272 | 384_38401.png 273 | 314_31499.png 274 | 204_20439.png 275 | 50_5035.png 276 | 310_31046.png 277 | 124_12476.png 278 | 59_5980.png 279 | 174_17487.png 280 | 15_1578.png 281 | 134_13439.png 282 | 184_18493.png 283 | 427_42794.png 284 | 241_24137.png 285 | 272_27250.png 286 | 424_42467.png 287 | 427_42774.png 288 | 107_10711.png 289 | 35_3543.png 290 | 71_7182.png 291 | 368_36895.png 292 | 132_13245.png 293 | 104_10421.png 294 | 356_35624.png 295 | 71_7181.png 296 | 139_13998.png 297 | 366_36688.png 298 | 62_6254.png 299 | 50_5073.png 300 | 65_6546.png 301 | 386_38645.png 302 | 373_37371.png 303 | 97_9700.png 304 | 193_19348.png 305 | 263_26396.png 306 | 231_23152.png 307 | 136_13625.png 308 | 215_21558.png 309 | 253_25325.png 310 | 119_11949.png 311 | 171_17162.png 312 | 16_1610.png 313 | 280_28069.png 314 | 353_35395.png 315 | 156_15601.png 316 | 426_42612.png 317 | 152_15258.png 318 | 122_12292.png 319 | 68_6835.png 320 | 276_27605.png 321 | 302_30261.png 322 | 263_26370.png 323 | 130_13084.png 324 | 371_37176.png 325 | 75_7563.png 326 | 218_21866.png 327 | 360_36013.png 328 | 141_14119.png 329 | 273_27357.png 330 | 41_4131.png 331 | 52_5246.png 332 | 332_33283.png 333 | 85_8523.png 334 | 73_7333.png 335 | 377_37778.png 336 | 280_28077.png 337 | 298_29862.png 338 | 23_2347.png 339 | 258_25873.png 340 | 321_32189.png 341 | 96_9641.png 342 | 271_27143.png 343 | 404_40414.png 344 | 4_435.png 345 | 59_5900.png 346 | 208_20822.png 347 | 227_22712.png 348 | 38_3816.png 349 | 66_6647.png 350 | 378_37806.png 351 | 130_13002.png 352 | 126_12690.png 353 | 149_14942.png 354 | 334_33461.png 355 | 93_9356.png 356 | 362_36245.png 357 | 294_29447.png 358 | 52_5232.png 359 | 202_20282.png 360 | 195_19502.png 361 | 264_26413.png 362 | 25_2530.png 363 | 384_38499.png 364 | 411_41185.png 365 | 75_7596.png 366 | 30_3098.png 367 | 260_26027.png 368 | 364_36417.png 369 | 357_35711.png 370 | 335_33516.png 371 | 206_20609.png 372 | 229_22936.png 373 | 216_21608.png 374 | 381_38134.png 375 | 348_34890.png 376 | 159_15929.png 377 | 371_37101.png 378 | 74_7400.png 379 | 47_4714.png 380 | 70_7006.png 381 | 71_7160.png 382 | 131_13176.png 383 | 247_24716.png 384 | 237_23783.png 385 | 427_42775.png 386 | 83_8363.png 387 | 64_6445.png 388 | 112_11261.png 389 | 168_16898.png 390 | 190_19035.png 391 | 164_16469.png 392 | 281_28161.png 393 | 421_42121.png 394 | 267_26762.png 395 | 193_19358.png 396 | 367_36753.png 397 | 351_35107.png 398 | 12_1228.png 399 | 9_904.png 400 | 161_16158.png 401 | 117_11780.png 402 | 128_12830.png 403 | 267_26701.png 404 | 106_10602.png 405 | 5_583.png 406 | 305_30530.png 407 | 146_14647.png 408 | 127_12776.png 409 | 328_32829.png 410 | 48_4885.png 411 | 59_5986.png 412 | 66_6615.png 413 | 72_7220.png 414 | 425_42532.png 415 | 66_6610.png 416 | 336_33626.png 417 | 386_38695.png 418 | 107_10719.png 419 | 413_41315.png 420 | 328_32818.png 421 | 225_22527.png 422 | 385_38597.png 423 | 249_24906.png 424 | 41_4118.png 425 | 62_6253.png 426 | 257_25723.png 427 | 169_16938.png 428 | 203_20393.png 429 | 266_26698.png 430 | 339_33986.png 431 | 406_40697.png 432 | 243_24327.png 433 | 363_36341.png 434 | 83_8355.png 435 | 320_32016.png 436 | 430_43053.png 437 | 194_19444.png 438 | 304_30490.png 439 | 23_2361.png 440 | 185_18551.png 441 | 98_9850.png 442 | 381_38190.png 443 | 28_2860.png 444 | 352_35213.png 445 | 172_17241.png 446 | 49_4980.png 447 | 364_36439.png 448 | 111_11138.png 449 | 232_23209.png 450 | 360_36080.png 451 | 285_28591.png 452 | 282_28264.png 453 | 166_16685.png 454 | 9_972.png 455 | 410_41095.png 456 | 379_37974.png 457 | 35_3590.png 458 | 47_4700.png 459 | 424_42436.png 460 | 147_14704.png 461 | 234_23446.png 462 | 174_17401.png 463 | 342_34211.png 464 | 88_8874.png 465 | 361_36113.png 466 | 141_14184.png 467 | 190_19094.png 468 | 360_36064.png 469 | 327_32736.png 470 | 318_31863.png 471 | 388_38868.png 472 | 355_35542.png 473 | 171_17119.png 474 | 390_39044.png 475 | 169_16917.png 476 | 383_38302.png 477 | 237_23794.png 478 | 336_33661.png 479 | 349_34905.png 480 | 190_19096.png 481 | 112_11279.png 482 | 111_11143.png 483 | 42_4204.png 484 | 324_32412.png 485 | 242_24290.png 486 | 414_41475.png 487 | 122_12239.png 488 | 225_22522.png 489 | 46_4649.png 490 | 168_16855.png 491 | 44_4443.png 492 | 267_26700.png 493 | 10_1022.png 494 | 209_20989.png 495 | 130_13026.png 496 | 153_15369.png 497 | 289_28931.png 498 | 27_2748.png 499 | 69_6905.png 500 | 153_15388.png 501 | 153_15346.png 502 | 399_39972.png 503 | 121_12130.png 504 | 269_26976.png 505 | 174_17480.png 506 | 220_22037.png 507 | 94_9400.png 508 | 260_26037.png 509 | 345_34574.png 510 | 81_8129.png 511 | 428_42894.png 512 | 8_820.png 513 | 121_12108.png 514 | 308_30868.png 515 | 173_17394.png 516 | 105_10505.png 517 | 111_11179.png 518 | 299_29981.png 519 | 224_22450.png 520 | 396_39618.png 521 | 327_32709.png 522 | 414_41451.png 523 | 324_32475.png 524 | 57_5726.png 525 | 16_1692.png 526 | 215_21501.png 527 | 151_15195.png 528 | 366_36626.png 529 | 58_5880.png 530 | 20_2021.png 531 | 55_5506.png 532 | 187_18777.png 533 | 398_39894.png 534 | 373_37319.png 535 | 272_27270.png 536 | 85_8541.png 537 | 125_12543.png 538 | 339_33934.png 539 | 148_14820.png 540 | 255_25570.png 541 | 23_2385.png 542 | 219_21919.png 543 | 26_2684.png 544 | 7_768.png 545 | 57_5771.png 546 | 128_12887.png 547 | 301_30196.png 548 | 363_36397.png 549 | 217_21758.png 550 | 334_33493.png 551 | 192_19235.png 552 | 405_40556.png 553 | 103_10370.png 554 | 274_27416.png 555 | 313_31383.png 556 | 139_13913.png 557 | 386_38694.png 558 | 414_41470.png 559 | 424_42470.png 560 | 34_3489.png 561 | 45_4581.png 562 | 150_15073.png 563 | 363_36359.png 564 | 335_33595.png 565 | 121_12162.png 566 | 107_10735.png 567 | 408_40886.png 568 | 17_1795.png 569 | 336_33609.png 570 | 160_16082.png 571 | 110_11080.png 572 | 68_6814.png 573 | 104_10444.png 574 | 259_25988.png 575 | 422_42242.png 576 | 127_12769.png 577 | 133_13327.png 578 | 226_22647.png 579 | 191_19127.png 580 | 55_5502.png 581 | 90_9051.png 582 | 414_41441.png 583 | 398_39850.png 584 | 400_40099.png 585 | 418_41889.png 586 | 373_37316.png 587 | 201_20152.png 588 | 81_8131.png 589 | 385_38558.png 590 | 235_23562.png 591 | 89_8947.png 592 | 88_8832.png 593 | 139_13922.png 594 | 15_1563.png 595 | 186_18614.png 596 | 394_39408.png 597 | 265_26599.png 598 | 120_12057.png 599 | 188_18834.png 600 | 205_20515.png 601 | 46_4658.png 602 | 202_20229.png 603 | 250_25029.png 604 | 180_18073.png 605 | 285_28599.png 606 | 33_3300.png 607 | 217_21781.png 608 | 193_19315.png 609 | 42_4206.png 610 | 68_6820.png 611 | 198_19814.png 612 | 242_24200.png 613 | 144_14422.png 614 | 371_37124.png 615 | 167_16710.png 616 | 21_2143.png 617 | 40_4015.png 618 | 382_38293.png 619 | 106_10609.png 620 | 146_14617.png 621 | 231_23175.png 622 | 210_21000.png 623 | 14_1489.png 624 | 399_39964.png 625 | 295_29590.png 626 | 18_1807.png 627 | 56_5626.png 628 | 89_8998.png 629 | 149_14940.png 630 | 392_39248.png 631 | 420_42049.png 632 | 361_36122.png 633 | 243_24314.png 634 | 230_23083.png 635 | 347_34738.png 636 | 418_41851.png 637 | 320_32071.png 638 | 314_31446.png 639 | 424_42427.png 640 | 54_5478.png 641 | 43_4332.png 642 | 347_34777.png 643 | 94_9431.png 644 | 339_33947.png 645 | 413_41322.png 646 | 126_12626.png 647 | 116_11614.png 648 | 216_21692.png 649 | 136_13661.png 650 | 324_32436.png 651 | 109_10914.png 652 | 86_8653.png 653 | 159_15942.png 654 | 404_40456.png 655 | 150_15063.png 656 | 301_30180.png 657 | 397_39799.png 658 | 393_39369.png 659 | 45_4509.png 660 | 190_19021.png 661 | 363_36328.png 662 | 215_21569.png 663 | 243_24347.png 664 | 175_17544.png 665 | 168_16801.png 666 | 367_36754.png 667 | 97_9792.png 668 | 287_28768.png 669 | 384_38488.png 670 | 61_6130.png 671 | 16_1632.png 672 | 125_12587.png 673 | 189_18981.png 674 | 132_13283.png 675 | 182_18230.png 676 | 343_34344.png 677 | 395_39515.png 678 | 77_7773.png 679 | 84_8448.png 680 | 148_14800.png 681 | 392_39284.png 682 | 126_12679.png 683 | 384_38455.png 684 | 172_17217.png 685 | 181_18103.png 686 | 301_30102.png 687 | 73_7364.png 688 | 127_12753.png 689 | 426_42679.png 690 | 83_8327.png 691 | 199_19972.png 692 | 210_21070.png 693 | 396_39663.png 694 | 14_1490.png 695 | 195_19559.png 696 | 309_30968.png 697 | 34_3497.png 698 | 323_32371.png 699 | 317_31767.png 700 | 229_22906.png 701 | 81_8176.png 702 | 209_20922.png 703 | 304_30496.png 704 | 43_4326.png 705 | 169_16979.png 706 | 376_37667.png 707 | 170_17072.png 708 | 388_38806.png 709 | 76_7645.png 710 | 285_28596.png 711 | 253_25370.png 712 | 201_20184.png 713 | 132_13292.png 714 | 427_42733.png 715 | 41_4172.png 716 | 173_17323.png 717 | 180_18004.png 718 | 389_38968.png 719 | 223_22316.png 720 | 219_21947.png 721 | 36_3663.png 722 | 358_35874.png 723 | 346_34647.png 724 | 136_13699.png 725 | 251_25115.png 726 | 298_29895.png 727 | 155_15530.png 728 | 206_20682.png 729 | 56_5680.png 730 | 282_28251.png 731 | 315_31536.png 732 | 126_12665.png 733 | 222_22241.png 734 | 1_141.png 735 | 115_11516.png 736 | 273_27312.png 737 | 144_14416.png 738 | 277_27785.png 739 | 80_8025.png 740 | 154_15498.png 741 | 360_36045.png 742 | 382_38282.png 743 | 214_21423.png 744 | 355_35512.png 745 | 141_14122.png 746 | 67_6784.png 747 | 8_860.png 748 | 409_40940.png 749 | 242_24275.png 750 | 28_2862.png 751 | 54_5426.png 752 | 306_30637.png 753 | 429_42959.png 754 | 279_27934.png 755 | 258_25814.png 756 | 181_18181.png 757 | 212_21251.png 758 | 111_11137.png 759 | 280_28092.png 760 | 379_37982.png 761 | 287_28728.png 762 | 60_6078.png 763 | 206_20642.png 764 | 413_41369.png 765 | 228_22806.png 766 | 63_6362.png 767 | 57_5721.png 768 | 183_18337.png 769 | 321_32191.png 770 | 406_40643.png 771 | 60_6076.png 772 | 68_6897.png 773 | 346_34618.png 774 | 129_12919.png 775 | 273_27305.png 776 | 384_38438.png 777 | 372_37232.png 778 | 414_41428.png 779 | 356_35634.png 780 | 133_13315.png 781 | 152_15250.png 782 | 70_7029.png 783 | 104_10410.png 784 | 328_32893.png 785 | 17_1790.png 786 | 141_14138.png 787 | 56_5613.png 788 | 48_4852.png 789 | 298_29800.png 790 | 63_6386.png 791 | 58_5897.png 792 | 254_25447.png 793 | 321_32171.png 794 | 111_11165.png 795 | 308_30807.png 796 | 82_8214.png 797 | 18_1893.png 798 | 206_20659.png 799 | 397_39771.png 800 | 112_11253.png 801 | 287_28771.png 802 | 384_38417.png 803 | 129_12908.png 804 | 392_39221.png 805 | 395_39509.png 806 | 29_2940.png 807 | 211_21180.png 808 | 281_28112.png 809 | 334_33419.png 810 | 99_9954.png 811 | 191_19153.png 812 | 399_39916.png 813 | 13_1352.png 814 | 154_15429.png 815 | 184_18418.png 816 | 416_41619.png 817 | 368_36873.png 818 | 36_3683.png 819 | 340_34085.png 820 | 350_35064.png 821 | 401_40169.png 822 | 19_1932.png 823 | 201_20196.png 824 | 57_5735.png 825 | 28_2851.png 826 | 347_34704.png 827 | 189_18914.png 828 | 234_23485.png 829 | 120_12000.png 830 | 74_7470.png 831 | 80_8033.png 832 | 42_4201.png 833 | 377_37721.png 834 | 419_41936.png 835 | 249_24945.png 836 | 410_41028.png 837 | 107_10728.png 838 | 325_32590.png 839 | 61_6198.png 840 | 241_24179.png 841 | 415_41566.png 842 | 270_27055.png 843 | 99_9911.png 844 | 141_14150.png 845 | 33_3340.png 846 | 265_26598.png 847 | 56_5603.png 848 | 54_5446.png 849 | 125_12586.png 850 | 43_4343.png 851 | 41_4181.png 852 | 110_11004.png 853 | 89_8951.png 854 | 339_33916.png 855 | 188_18830.png 856 | 240_24076.png 857 | 42_4247.png 858 | 412_41214.png 859 | 416_41625.png 860 | 23_2335.png 861 | 141_14137.png 862 | 338_33829.png 863 | 296_29667.png 864 | 301_30169.png 865 | 260_26061.png 866 | 148_14812.png 867 | 58_5834.png 868 | 397_39762.png 869 | 27_2701.png 870 | 283_28353.png 871 | 23_2353.png 872 | 215_21511.png 873 | 336_33673.png 874 | 209_20958.png 875 | 175_17591.png 876 | 130_13039.png 877 | 129_12920.png 878 | 259_25921.png 879 | 406_40640.png 880 | 196_19642.png 881 | 196_19652.png 882 | 175_17587.png 883 | 348_34892.png 884 | 93_9325.png 885 | 273_27383.png 886 | 169_16937.png 887 | 178_17846.png 888 | 394_39421.png 889 | 106_10677.png 890 | 133_13319.png 891 | 39_3956.png 892 | 386_38677.png 893 | 29_2970.png 894 | 27_2735.png 895 | 29_2908.png 896 | 43_4330.png 897 | 136_13692.png 898 | 412_41220.png 899 | 393_39324.png 900 | 175_17538.png 901 | 14_1426.png 902 | 225_22517.png 903 | 379_37906.png 904 | 210_21056.png 905 | 319_31923.png 906 | 248_24815.png 907 | 255_25526.png 908 | 28_2895.png 909 | 221_22136.png 910 | 296_29648.png 911 | 288_28858.png 912 | 146_14672.png 913 | 90_9045.png 914 | 194_19458.png 915 | 189_18918.png 916 | 112_11244.png 917 | 177_17791.png 918 | 325_32575.png 919 | 64_6429.png 920 | 62_6201.png 921 | 35_3511.png 922 | 151_15118.png 923 | 290_29014.png 924 | 383_38363.png 925 | 94_9414.png 926 | 427_42734.png 927 | 271_27198.png 928 | 104_10455.png 929 | 222_22290.png 930 | 208_20880.png 931 | 212_21277.png 932 | 147_14784.png 933 | 99_9976.png 934 | 247_24786.png 935 | 140_14090.png 936 | 251_25169.png 937 | 51_5115.png 938 | 71_7190.png 939 | 235_23573.png 940 | 398_39825.png 941 | 228_22857.png 942 | 411_41176.png 943 | 286_28609.png 944 | 143_14385.png 945 | 191_19185.png 946 | 331_33167.png 947 | 160_16091.png 948 | 128_12862.png 949 | 417_41729.png 950 | 262_26228.png 951 | 281_28149.png 952 | 272_27206.png 953 | 122_12272.png 954 | 294_29458.png 955 | 380_38036.png 956 | 322_32296.png 957 | 24_2470.png 958 | 79_7970.png 959 | 85_8554.png 960 | 400_40036.png 961 | 242_24264.png 962 | 8_856.png 963 | 385_38567.png 964 | 91_9189.png 965 | 424_42400.png 966 | 224_22456.png 967 | 71_7193.png 968 | 38_3802.png 969 | 275_27533.png 970 | 5_553.png 971 | 418_41838.png 972 | 230_23017.png 973 | 185_18525.png 974 | 177_17793.png 975 | 316_31657.png 976 | 250_25020.png 977 | 18_1853.png 978 | 243_24322.png 979 | 38_3857.png 980 | 186_18661.png 981 | 410_41088.png 982 | 316_31602.png 983 | 165_16556.png 984 | 31_3123.png 985 | 306_30622.png 986 | 147_14720.png 987 | 241_24109.png 988 | 192_19220.png 989 | 186_18664.png 990 | 169_16901.png 991 | 114_11406.png 992 | 427_42738.png 993 | 162_16235.png 994 | 116_11672.png 995 | 310_31096.png 996 | 340_34022.png 997 | 141_14182.png 998 | 99_9942.png 999 | 319_31930.png 1000 | 90_9003.png 1001 | 154_15447.png 1002 | 99_9922.png 1003 | 286_28652.png 1004 | 247_24727.png 1005 | 329_32941.png 1006 | 316_31643.png 1007 | 224_22495.png 1008 | 41_4155.png 1009 | 69_6926.png 1010 | 135_13526.png 1011 | 240_24033.png 1012 | 68_6808.png 1013 | 155_15594.png 1014 | 38_3885.png 1015 | 2_297.png 1016 | 429_42944.png 1017 | 52_5226.png 1018 | 323_32393.png 1019 | 68_6822.png 1020 | 312_31289.png 1021 | 76_7626.png 1022 | 355_35571.png 1023 | 323_32347.png 1024 | 200_20067.png 1025 | 333_33306.png 1026 | 107_10779.png 1027 | 30_3038.png 1028 | 54_5467.png 1029 | 107_10738.png 1030 | 103_10302.png 1031 | 119_11989.png 1032 | 385_38544.png 1033 | 398_39802.png 1034 | 1_190.png 1035 | 402_40299.png 1036 | 113_11364.png 1037 | 365_36589.png 1038 | 2_225.png 1039 | 366_36679.png 1040 | 301_30140.png 1041 | 107_10733.png 1042 | 147_14722.png 1043 | 7_787.png 1044 | 421_42175.png 1045 | 68_6891.png 1046 | 249_24983.png 1047 | 44_4451.png 1048 | 55_5524.png 1049 | 284_28425.png 1050 | 59_5989.png 1051 | 277_27789.png 1052 | 288_28878.png 1053 | 343_34310.png 1054 | 298_29872.png 1055 | 355_35589.png 1056 | 426_42638.png 1057 | 103_10308.png 1058 | 419_41951.png 1059 | 187_18724.png 1060 | 294_29487.png 1061 | 414_41418.png 1062 | 66_6655.png 1063 | 308_30826.png 1064 | 99_9903.png 1065 | 102_10287.png 1066 | 297_29765.png 1067 | 221_22123.png 1068 | 73_7346.png 1069 | 421_42192.png 1070 | 12_1234.png 1071 | 43_4391.png 1072 | 357_35715.png 1073 | 385_38577.png 1074 | 47_4795.png 1075 | 128_12893.png 1076 | 110_11099.png 1077 | 270_27003.png 1078 | 428_42821.png 1079 | 87_8731.png 1080 | 381_38141.png 1081 | 233_23339.png 1082 | 35_3554.png 1083 | 304_30435.png 1084 | 204_20403.png 1085 | 361_36160.png 1086 | 402_40201.png 1087 | 329_32946.png 1088 | 368_36818.png 1089 | 140_14049.png 1090 | 239_23950.png 1091 | 16_1668.png 1092 | 172_17261.png 1093 | 102_10259.png 1094 | 406_40609.png 1095 | 182_18261.png 1096 | 207_20796.png 1097 | 141_14151.png 1098 | 424_42485.png 1099 | 365_36574.png 1100 | 153_15377.png 1101 | 259_25953.png 1102 | 99_9962.png 1103 | 40_4001.png 1104 | 19_1963.png 1105 | 46_4679.png 1106 | 400_40012.png 1107 | 195_19568.png 1108 | 111_11194.png 1109 | 174_17474.png 1110 | 68_6850.png 1111 | 227_22750.png 1112 | 213_21334.png 1113 | 29_2905.png 1114 | 193_19367.png 1115 | 209_20948.png 1116 | 419_41923.png 1117 | 129_12965.png 1118 | 30_3001.png 1119 | 376_37630.png 1120 | 20_2063.png 1121 | 387_38770.png 1122 | 208_20821.png 1123 | 5_516.png 1124 | 327_32730.png 1125 | 114_11419.png 1126 | 74_7471.png 1127 | 407_40786.png 1128 | 309_30998.png 1129 | 281_28125.png 1130 | 45_4547.png 1131 | 230_23030.png 1132 | 73_7331.png 1133 | 278_27827.png 1134 | 117_11796.png 1135 | 197_19786.png 1136 | 327_32726.png 1137 | 425_42539.png 1138 | 349_34949.png 1139 | 370_37078.png 1140 | 96_9634.png 1141 | 419_41920.png 1142 | 134_13426.png 1143 | 98_9885.png 1144 | 36_3660.png 1145 | 356_35617.png 1146 | 264_26440.png 1147 | 48_4877.png 1148 | 51_5180.png 1149 | 250_25063.png 1150 | 248_24824.png 1151 | 152_15279.png 1152 | 7_767.png 1153 | 214_21492.png 1154 | 72_7227.png 1155 | 24_2416.png 1156 | 427_42764.png 1157 | 20_2083.png 1158 | 88_8850.png 1159 | 268_26875.png 1160 | 105_10520.png 1161 | 285_28501.png 1162 | 405_40518.png 1163 | 426_42605.png 1164 | 399_39976.png 1165 | 6_643.png 1166 | 289_28949.png 1167 | 28_2893.png 1168 | 82_8284.png 1169 | 372_37212.png 1170 | 109_10913.png 1171 | 392_39255.png 1172 | 394_39478.png 1173 | 156_15600.png 1174 | 340_34078.png 1175 | 318_31890.png 1176 | 275_27523.png 1177 | 187_18759.png 1178 | 133_13313.png 1179 | 173_17374.png 1180 | 11_1147.png 1181 | 320_32031.png 1182 | 262_26289.png 1183 | 346_34609.png 1184 | 54_5410.png 1185 | 81_8143.png 1186 | 228_22867.png 1187 | 24_2423.png 1188 | 288_28853.png 1189 | 53_5393.png 1190 | 369_36947.png 1191 | 29_2966.png 1192 | 67_6734.png 1193 | 129_12957.png 1194 | 78_7850.png 1195 | 150_15022.png 1196 | 254_25405.png 1197 | 422_42236.png 1198 | 356_35650.png 1199 | 59_5963.png 1200 | 106_10635.png 1201 | 149_14980.png 1202 | 126_12616.png 1203 | 331_33170.png 1204 | 51_5164.png 1205 | 391_39177.png 1206 | 372_37218.png 1207 | 267_26788.png 1208 | 134_13441.png 1209 | 215_21542.png 1210 | 265_26595.png 1211 | 131_13110.png 1212 | 376_37686.png 1213 | 393_39363.png 1214 | 360_36071.png 1215 | 61_6131.png 1216 | 32_3207.png 1217 | 83_8390.png 1218 | 263_26332.png 1219 | 71_7195.png 1220 | 306_30615.png 1221 | 65_6568.png 1222 | 206_20604.png 1223 | 278_27885.png 1224 | 354_35415.png 1225 | 265_26548.png 1226 | 288_28836.png 1227 | 141_14168.png 1228 | 226_22687.png 1229 | 400_40043.png 1230 | 342_34260.png 1231 | 47_4780.png 1232 | 94_9487.png 1233 | 50_5069.png 1234 | 47_4787.png 1235 | 87_8766.png 1236 | 248_24836.png 1237 | 357_35774.png 1238 | 65_6551.png 1239 | 385_38570.png 1240 | 289_28942.png 1241 | 30_3079.png 1242 | 341_34196.png 1243 | 223_22381.png 1244 | 235_23584.png 1245 | 112_11264.png 1246 | 395_39535.png 1247 | 306_30602.png 1248 | 208_20828.png 1249 | 105_10597.png 1250 | 193_19360.png 1251 | 172_17279.png 1252 | 96_9693.png 1253 | 21_2179.png 1254 | 156_15658.png 1255 | 170_17071.png 1256 | 338_33856.png 1257 | 256_25678.png 1258 | 258_25861.png 1259 | 377_37751.png 1260 | 199_19918.png 1261 | 306_30606.png 1262 | 182_18227.png 1263 | 28_2846.png 1264 | 234_23418.png 1265 | 87_8752.png 1266 | 134_13411.png 1267 | 83_8374.png 1268 | 58_5859.png 1269 | 395_39570.png 1270 | 406_40631.png 1271 | 220_22066.png 1272 | 72_7246.png 1273 | 195_19500.png 1274 | 3_312.png 1275 | 105_10547.png 1276 | 368_36864.png 1277 | 86_8662.png 1278 | 422_42207.png 1279 | 126_12672.png 1280 | 429_42916.png 1281 | 39_3981.png 1282 | 37_3739.png 1283 | 153_15395.png 1284 | 309_30988.png 1285 | 212_21266.png 1286 | 188_18897.png 1287 | 96_9682.png 1288 | 235_23589.png 1289 | 194_19464.png 1290 | 105_10502.png 1291 | 133_13399.png 1292 | 133_13347.png 1293 | 362_36257.png 1294 | 308_30896.png 1295 | 362_36231.png 1296 | 341_34153.png 1297 | 143_14354.png 1298 | 321_32141.png 1299 | 115_11502.png 1300 | 26_2603.png 1301 | 32_3228.png 1302 | 292_29252.png 1303 | 359_35942.png 1304 | 261_26183.png 1305 | 147_14740.png 1306 | 398_39828.png 1307 | 292_29297.png 1308 | 101_10124.png 1309 | 186_18621.png 1310 | 207_20776.png 1311 | 265_26537.png 1312 | 256_25692.png 1313 | 104_10477.png 1314 | 74_7487.png 1315 | 61_6103.png 1316 | 144_14439.png 1317 | 330_33023.png 1318 | 266_26665.png 1319 | 295_29564.png 1320 | 424_42469.png 1321 | 406_40604.png 1322 | 218_21802.png 1323 | 58_5830.png 1324 | 156_15657.png 1325 | 34_3491.png 1326 | 139_13905.png 1327 | 202_20246.png 1328 | 136_13648.png 1329 | 410_41087.png 1330 | 315_31555.png 1331 | 252_25272.png 1332 | 57_5790.png 1333 | 63_6331.png 1334 | 210_21017.png 1335 | 292_29231.png 1336 | 419_41911.png 1337 | 336_33699.png 1338 | 213_21355.png 1339 | 22_2217.png 1340 | 105_10572.png 1341 | 140_14089.png 1342 | 157_15704.png 1343 | 402_40266.png 1344 | 206_20672.png 1345 | 167_16777.png 1346 | 100_10018.png 1347 | 11_1116.png 1348 | 265_26542.png 1349 | 331_33125.png 1350 | 267_26740.png 1351 | 264_26420.png 1352 | 78_7871.png 1353 | 391_39146.png 1354 | 324_32443.png 1355 | 107_10739.png 1356 | 180_18023.png 1357 | 337_33733.png 1358 | 402_40281.png 1359 | 390_39036.png 1360 | 350_35020.png 1361 | 397_39781.png 1362 | 362_36206.png 1363 | 173_17388.png 1364 | 308_30802.png 1365 | 261_26185.png 1366 | 132_13265.png 1367 | 188_18819.png 1368 | 56_5635.png 1369 | 425_42573.png 1370 | 389_38925.png 1371 | 108_10820.png 1372 | 186_18625.png 1373 | 154_15423.png 1374 | 53_5352.png 1375 | 406_40645.png 1376 | 185_18596.png 1377 | 337_33707.png 1378 | 418_41852.png 1379 | 279_27920.png 1380 | 358_35889.png 1381 | 107_10742.png 1382 | 389_38949.png 1383 | 391_39173.png 1384 | 360_36070.png 1385 | 185_18592.png 1386 | 165_16538.png 1387 | 340_34026.png 1388 | 215_21587.png 1389 | 363_36318.png 1390 | 187_18768.png 1391 | 304_30483.png 1392 | 33_3303.png 1393 | 380_38092.png 1394 | 341_34117.png 1395 | 190_19019.png 1396 | 367_36761.png 1397 | 100_10087.png 1398 | 159_15994.png 1399 | 54_5459.png 1400 | 15_1509.png 1401 | 115_11529.png 1402 | 163_16398.png 1403 | 98_9801.png 1404 | 403_40393.png 1405 | 10_1016.png 1406 | 378_37829.png 1407 | 222_22245.png 1408 | 347_34716.png 1409 | 92_9226.png 1410 | 200_20074.png 1411 | 148_14853.png 1412 | 409_40964.png 1413 | 101_10186.png 1414 | 278_27816.png 1415 | 97_9761.png 1416 | 373_37379.png 1417 | 310_31064.png 1418 | 196_19608.png 1419 | 251_25138.png 1420 | 57_5705.png 1421 | 380_38051.png 1422 | 103_10327.png 1423 | 24_2431.png 1424 | 158_15824.png 1425 | 343_34314.png 1426 | 266_26628.png 1427 | 211_21191.png 1428 | 387_38732.png 1429 | 377_37767.png 1430 | 141_14163.png 1431 | 216_21699.png 1432 | 293_29383.png 1433 | 256_25647.png 1434 | 244_24428.png 1435 | 225_22547.png 1436 | 318_31840.png 1437 | 251_25181.png 1438 | 339_33973.png 1439 | 186_18634.png 1440 | 25_2586.png 1441 | 72_7279.png 1442 | 126_12689.png 1443 | 261_26190.png 1444 | 196_19698.png 1445 | 13_1361.png 1446 | 197_19717.png 1447 | 69_6964.png 1448 | 8_891.png 1449 | 45_4528.png 1450 | 273_27385.png 1451 | 369_36942.png 1452 | 185_18594.png 1453 | 356_35661.png 1454 | 3_372.png 1455 | 231_23161.png 1456 | 191_19105.png 1457 | 48_4856.png 1458 | 8_889.png 1459 | 390_39055.png 1460 | 244_24424.png 1461 | 42_4224.png 1462 | 125_12571.png 1463 | 84_8469.png 1464 | 194_19405.png 1465 | 197_19711.png 1466 | 23_2302.png 1467 | 274_27446.png 1468 | 269_26926.png 1469 | 410_41022.png 1470 | 191_19146.png 1471 | 174_17443.png 1472 | 278_27847.png 1473 | 83_8341.png 1474 | 220_22070.png 1475 | 397_39792.png 1476 | 19_1993.png 1477 | 43_4340.png 1478 | 84_8494.png 1479 | 321_32158.png 1480 | 395_39562.png 1481 | 284_28482.png 1482 | 185_18568.png 1483 | 278_27825.png 1484 | 331_33189.png 1485 | 379_37951.png 1486 | 71_7128.png 1487 | 5_532.png 1488 | 322_32231.png 1489 | 353_35328.png 1490 | 223_22373.png 1491 | 39_3978.png 1492 | 231_23127.png 1493 | 352_35268.png 1494 | 236_23659.png 1495 | 259_25970.png 1496 | 350_35072.png 1497 | 132_13222.png 1498 | 359_35953.png 1499 | 147_14733.png 1500 | 87_8749.png 1501 | 381_38108.png 1502 | 231_23107.png 1503 | 263_26354.png 1504 | 287_28767.png 1505 | 95_9533.png 1506 | 349_34928.png 1507 | 366_36623.png 1508 | 253_25368.png 1509 | 181_18157.png 1510 | 262_26234.png 1511 | 389_38965.png 1512 | 28_2854.png 1513 | 79_7922.png 1514 | 196_19650.png 1515 | 56_5631.png 1516 | 81_8114.png 1517 | 342_34296.png 1518 | 34_3478.png 1519 | 344_34457.png 1520 | 165_16555.png 1521 | 125_12549.png 1522 | 320_32003.png 1523 | 351_35153.png 1524 | 251_25100.png 1525 | 319_31986.png 1526 | 124_12485.png 1527 | 241_24110.png 1528 | 323_32324.png 1529 | 100_10052.png 1530 | 15_1519.png 1531 | 426_42671.png 1532 | 294_29428.png 1533 | 315_31589.png 1534 | 128_12881.png 1535 | 356_35644.png 1536 | 47_4720.png 1537 | 421_42137.png 1538 | 18_1856.png 1539 | 133_13303.png 1540 | 20_2038.png 1541 | 378_37861.png 1542 | 14_1432.png 1543 | 148_14895.png 1544 | 329_32908.png 1545 | 358_35864.png 1546 | 415_41514.png 1547 | 321_32155.png 1548 | 328_32815.png 1549 | 419_41900.png 1550 | 406_40689.png 1551 | 405_40550.png 1552 | 370_37044.png 1553 | 70_7005.png 1554 | 362_36278.png 1555 | 117_11784.png 1556 | 233_23322.png 1557 | 9_975.png 1558 | 220_22042.png 1559 | 363_36364.png 1560 | 336_33672.png 1561 | 290_29067.png 1562 | 253_25338.png 1563 | 203_20335.png 1564 | 393_39354.png 1565 | 399_39943.png 1566 | 118_11823.png 1567 | 245_24584.png 1568 | 370_37069.png 1569 | 150_15074.png 1570 | 396_39612.png 1571 | 105_10559.png 1572 | 150_15076.png 1573 | 89_8926.png 1574 | 420_42074.png 1575 | 7_734.png 1576 | 81_8182.png 1577 | 55_5543.png 1578 | 394_39472.png 1579 | 395_39536.png 1580 | 343_34369.png 1581 | 372_37240.png 1582 | 138_13898.png 1583 | 350_35031.png 1584 | 383_38383.png 1585 | 182_18243.png 1586 | 205_20560.png 1587 | 304_30495.png 1588 | 187_18737.png 1589 | 29_2944.png 1590 | 191_19155.png 1591 | 317_31756.png 1592 | 335_33554.png 1593 | 24_2446.png 1594 | 99_9967.png 1595 | 200_20087.png 1596 | 253_25332.png 1597 | 267_26798.png 1598 | 406_40605.png 1599 | 53_5359.png 1600 | 112_11239.png 1601 | 415_41525.png 1602 | 8_866.png 1603 | 50_5072.png 1604 | 308_30853.png 1605 | 0_93.png 1606 | 281_28156.png 1607 | 297_29721.png 1608 | 85_8559.png 1609 | 166_16648.png 1610 | 120_12016.png 1611 | 43_4306.png 1612 | 0_89.png 1613 | 65_6516.png 1614 | 247_24723.png 1615 | 102_10240.png 1616 | 132_13202.png 1617 | 296_29627.png 1618 | 22_2272.png 1619 | 398_39874.png 1620 | 40_4069.png 1621 | 297_29786.png 1622 | 294_29407.png 1623 | 170_17055.png 1624 | 391_39154.png 1625 | 135_13500.png 1626 | 368_36855.png 1627 | 19_1977.png 1628 | 316_31648.png 1629 | 290_29061.png 1630 | 280_28027.png 1631 | 109_10983.png 1632 | 20_2016.png 1633 | 212_21253.png 1634 | 132_13279.png 1635 | 32_3239.png 1636 | 108_10831.png 1637 | 317_31718.png 1638 | 325_32538.png 1639 | 182_18218.png 1640 | 378_37870.png 1641 | 121_12188.png 1642 | 70_7042.png 1643 | 9_961.png 1644 | 399_39948.png 1645 | 279_27997.png 1646 | 147_14783.png 1647 | 9_960.png 1648 | 291_29122.png 1649 | 391_39131.png 1650 | 15_1536.png 1651 | 216_21677.png 1652 | 203_20365.png 1653 | 3_349.png 1654 | 399_39905.png 1655 | 8_887.png 1656 | 84_8419.png 1657 | 374_37437.png 1658 | 315_31537.png 1659 | 47_4763.png 1660 | 338_33850.png 1661 | 169_16975.png 1662 | 390_39094.png 1663 | 71_7189.png 1664 | 300_30079.png 1665 | 247_24710.png 1666 | 356_35694.png 1667 | 355_35527.png 1668 | 357_35716.png 1669 | 275_27508.png 1670 | 314_31458.png 1671 | 415_41501.png 1672 | 100_10017.png 1673 | 218_21867.png 1674 | 276_27625.png 1675 | 309_30926.png 1676 | 195_19534.png 1677 | 421_42161.png 1678 | 390_39057.png 1679 | 404_40420.png 1680 | 231_23182.png 1681 | 18_1852.png 1682 | 186_18647.png 1683 | 52_5202.png 1684 | 340_34008.png 1685 | 323_32360.png 1686 | 248_24811.png 1687 | 367_36723.png 1688 | 101_10177.png 1689 | 69_6977.png 1690 | 146_14692.png 1691 | 257_25714.png 1692 | 147_14749.png 1693 | 107_10787.png 1694 | 342_34281.png 1695 | 335_33537.png 1696 | 270_27014.png 1697 | 411_41193.png 1698 | 357_35784.png 1699 | 168_16851.png 1700 | 351_35199.png 1701 | 365_36553.png 1702 | 84_8413.png 1703 | 194_19419.png 1704 | 61_6102.png 1705 | 313_31351.png 1706 | 18_1874.png 1707 | 178_17867.png 1708 | 421_42172.png 1709 | 60_6015.png 1710 | 416_41699.png 1711 | 136_13614.png 1712 | 374_37412.png 1713 | 353_35331.png 1714 | 295_29558.png 1715 | 43_4366.png 1716 | 114_11465.png 1717 | 3_306.png 1718 | 89_8971.png 1719 | 410_41021.png 1720 | 142_14280.png 1721 | 327_32744.png 1722 | 228_22862.png 1723 | 144_14423.png 1724 | 132_13274.png 1725 | 378_37898.png 1726 | 382_38276.png 1727 | 86_8651.png 1728 | 197_19743.png 1729 | 255_25515.png 1730 | 416_41608.png 1731 | 147_14767.png 1732 | 151_15189.png 1733 | 127_12716.png 1734 | 424_42495.png 1735 | 9_907.png 1736 | 117_11781.png 1737 | 107_10747.png 1738 | 137_13773.png 1739 | 201_20102.png 1740 | 262_26258.png 1741 | 173_17304.png 1742 | 3_311.png 1743 | 341_34131.png 1744 | 323_32345.png 1745 | 214_21446.png 1746 | 245_24582.png 1747 | 377_37743.png 1748 | 62_6235.png 1749 | 347_34776.png 1750 | 213_21304.png 1751 | 294_29439.png 1752 | 202_20247.png 1753 | 62_6202.png 1754 | 26_2618.png 1755 | 391_39137.png 1756 | 295_29531.png 1757 | 297_29733.png 1758 | 168_16848.png 1759 | 271_27176.png 1760 | 103_10393.png 1761 | 279_27946.png 1762 | 351_35126.png 1763 | 253_25397.png 1764 | 63_6366.png 1765 | 109_10918.png 1766 | 111_11116.png 1767 | 336_33610.png 1768 | 177_17742.png 1769 | 30_3096.png 1770 | 87_8753.png 1771 | 171_17151.png 1772 | 316_31671.png 1773 | 200_20044.png 1774 | 175_17534.png 1775 | 40_4019.png 1776 | 154_15464.png 1777 | 8_844.png 1778 | 285_28584.png 1779 | 149_14981.png 1780 | 68_6829.png 1781 | 187_18798.png 1782 | 130_13018.png 1783 | 396_39675.png 1784 | 227_22736.png 1785 | 265_26544.png 1786 | 93_9345.png 1787 | 84_8417.png 1788 | 198_19823.png 1789 | 84_8440.png 1790 | 408_40844.png 1791 | 262_26275.png 1792 | 15_1540.png 1793 | 341_34183.png 1794 | 48_4832.png 1795 | 176_17683.png 1796 | 54_5485.png 1797 | 155_15538.png 1798 | 210_21072.png 1799 | 276_27653.png 1800 | 258_25880.png 1801 | 132_13260.png 1802 | 289_28974.png 1803 | 378_37819.png 1804 | 399_39983.png 1805 | 426_42690.png 1806 | 382_38228.png 1807 | 131_13169.png 1808 | 417_41708.png 1809 | 327_32753.png 1810 | 328_32878.png 1811 | 418_41805.png 1812 | 366_36687.png 1813 | 146_14642.png 1814 | 102_10227.png 1815 | 72_7224.png 1816 | 264_26473.png 1817 | 212_21223.png 1818 | 205_20570.png 1819 | 38_3844.png 1820 | 357_35733.png 1821 | 144_14427.png 1822 | 0_85.png 1823 | 177_17714.png 1824 | 111_11104.png 1825 | 280_28000.png 1826 | 114_11447.png 1827 | 317_31798.png 1828 | 418_41846.png 1829 | 263_26381.png 1830 | 24_2426.png 1831 | 366_36606.png 1832 | 85_8545.png 1833 | 239_23903.png 1834 | 255_25518.png 1835 | 387_38706.png 1836 | 153_15356.png 1837 | 327_32701.png 1838 | 61_6184.png 1839 | 31_3152.png 1840 | 212_21240.png 1841 | 228_22832.png 1842 | 150_15023.png 1843 | 285_28576.png 1844 | 115_11590.png 1845 | 397_39784.png 1846 | 80_8000.png 1847 | 24_2451.png 1848 | 233_23385.png 1849 | 250_25001.png 1850 | 237_23718.png 1851 | 232_23222.png 1852 | 53_5394.png 1853 | 93_9339.png 1854 | 14_1407.png 1855 | 229_22923.png 1856 | 286_28660.png 1857 | 108_10859.png 1858 | 190_19058.png 1859 | 59_5982.png 1860 | 374_37418.png 1861 | 59_5972.png 1862 | 297_29734.png 1863 | 71_7112.png 1864 | 344_34414.png 1865 | 118_11806.png 1866 | 104_10490.png 1867 | 78_7844.png 1868 | 98_9811.png 1869 | 13_1373.png 1870 | 95_9582.png 1871 | 288_28875.png 1872 | 307_30700.png 1873 | 227_22760.png 1874 | 354_35494.png 1875 | 404_40428.png 1876 | 15_1575.png 1877 | 286_28657.png 1878 | 132_13217.png 1879 | 29_2915.png 1880 | 252_25269.png 1881 | 94_9427.png 1882 | 95_9552.png 1883 | 407_40727.png 1884 | 368_36897.png 1885 | 288_28865.png 1886 | 112_11238.png 1887 | 372_37270.png 1888 | 24_2490.png 1889 | 100_10008.png 1890 | 390_39006.png 1891 | 30_3050.png 1892 | 105_10599.png 1893 | 294_29403.png 1894 | 42_4229.png 1895 | 426_42636.png 1896 | 414_41408.png 1897 | 168_16882.png 1898 | 323_32344.png 1899 | 332_33291.png 1900 | 270_27044.png 1901 | 395_39539.png 1902 | 123_12336.png 1903 | 323_32337.png 1904 | 40_4084.png 1905 | 416_41618.png 1906 | 254_25465.png 1907 | 352_35234.png 1908 | 168_16836.png 1909 | 360_36074.png 1910 | 331_33186.png 1911 | 118_11852.png 1912 | 325_32518.png 1913 | 221_22101.png 1914 | 404_40401.png 1915 | 93_9305.png 1916 | 283_28390.png 1917 | 162_16206.png 1918 | 261_26117.png 1919 | 236_23632.png 1920 | 419_41905.png 1921 | 217_21759.png 1922 | 344_34459.png 1923 | 342_34264.png 1924 | 122_12201.png 1925 | 39_3947.png 1926 | 299_29913.png 1927 | 182_18260.png 1928 | 137_13745.png 1929 | 403_40390.png 1930 | 371_37106.png 1931 | 195_19509.png 1932 | 368_36885.png 1933 | 142_14226.png 1934 | 185_18512.png 1935 | 189_18971.png 1936 | 268_26881.png 1937 | 90_9053.png 1938 | 207_20701.png 1939 | 116_11691.png 1940 | 180_18030.png 1941 | 37_3719.png 1942 | 190_19026.png 1943 | 188_18813.png 1944 | 177_17789.png 1945 | 303_30307.png 1946 | 38_3877.png 1947 | 400_40025.png 1948 | 34_3498.png 1949 | 146_14688.png 1950 | 158_15816.png 1951 | 22_2252.png 1952 | 65_6545.png 1953 | 183_18314.png 1954 | 274_27409.png 1955 | 269_26918.png 1956 | 290_29043.png 1957 | 278_27850.png 1958 | 428_42884.png 1959 | 291_29148.png 1960 | 337_33775.png 1961 | 380_38058.png 1962 | 359_35957.png 1963 | 57_5766.png 1964 | 311_31140.png 1965 | 167_16732.png 1966 | 185_18552.png 1967 | 92_9236.png 1968 | 410_41049.png 1969 | 143_14310.png 1970 | 76_7629.png 1971 | 20_2052.png 1972 | 107_10720.png 1973 | 198_19877.png 1974 | 274_27465.png 1975 | 174_17444.png 1976 | 65_6529.png 1977 | 202_20255.png 1978 | 390_39016.png 1979 | 17_1712.png 1980 | 29_2958.png 1981 | 421_42100.png 1982 | 409_40946.png 1983 | 107_10761.png 1984 | 334_33476.png 1985 | 167_16743.png 1986 | 184_18487.png 1987 | 251_25137.png 1988 | 253_25375.png 1989 | 412_41288.png 1990 | 135_13518.png 1991 | 290_29089.png 1992 | 405_40567.png 1993 | 195_19576.png 1994 | 80_8031.png 1995 | 83_8351.png 1996 | 52_5266.png 1997 | 305_30562.png 1998 | 153_15398.png 1999 | 246_24687.png 2000 | 83_8340.png 2001 | 178_17880.png 2002 | 174_17482.png 2003 | 177_17772.png 2004 | 342_34222.png 2005 | 187_18747.png 2006 | 187_18772.png 2007 | 183_18368.png 2008 | 64_6498.png 2009 | 223_22340.png 2010 | 380_38083.png 2011 | 336_33663.png 2012 | 54_5443.png 2013 | 241_24181.png 2014 | 227_22787.png 2015 | 153_15358.png 2016 | 41_4146.png 2017 | 396_39689.png 2018 | 323_32366.png 2019 | 185_18565.png 2020 | 369_36937.png 2021 | 380_38084.png 2022 | 27_2712.png 2023 | 153_15343.png 2024 | 37_3755.png 2025 | 373_37388.png 2026 | 109_10955.png 2027 | 5_569.png 2028 | 189_18912.png 2029 | 374_37460.png 2030 | 272_27261.png 2031 | 135_13570.png 2032 | 296_29685.png 2033 | 197_19708.png 2034 | 405_40562.png 2035 | 239_23933.png 2036 | 139_13901.png 2037 | 249_24917.png 2038 | 399_39997.png 2039 | 6_698.png 2040 | 354_35496.png 2041 | 23_2368.png 2042 | 57_5758.png 2043 | 91_9139.png 2044 | 147_14778.png 2045 | 148_14892.png 2046 | 130_13030.png 2047 | 27_2709.png 2048 | 393_39391.png 2049 | 295_29597.png 2050 | 52_5256.png 2051 | 34_3492.png 2052 | 23_2327.png 2053 | 50_5011.png 2054 | 67_6722.png 2055 | 136_13624.png 2056 | 419_41943.png 2057 | 264_26439.png 2058 | 83_8385.png 2059 | 315_31531.png 2060 | 136_13678.png 2061 | 18_1849.png 2062 | 34_3414.png 2063 | 358_35814.png 2064 | 283_28320.png 2065 | 326_32656.png 2066 | 139_13993.png 2067 | 175_17574.png 2068 | 171_17131.png 2069 | 402_40227.png 2070 | 419_41995.png 2071 | 4_430.png 2072 | 181_18113.png 2073 | 90_9061.png 2074 | 398_39849.png 2075 | 294_29453.png 2076 | 233_23394.png 2077 | 425_42528.png 2078 | 424_42404.png 2079 | 127_12771.png 2080 | 22_2249.png 2081 | 293_29393.png 2082 | 16_1671.png 2083 | 125_12579.png 2084 | 409_40935.png 2085 | 149_14960.png 2086 | 234_23457.png 2087 | 72_7250.png 2088 | 63_6325.png 2089 | 248_24858.png 2090 | 41_4170.png 2091 | 361_36146.png 2092 | 337_33701.png 2093 | 260_26042.png 2094 | 253_25351.png 2095 | 298_29833.png 2096 | 116_11677.png 2097 | 150_15044.png 2098 | 301_30100.png 2099 | 397_39774.png 2100 | 203_20355.png 2101 | 418_41833.png 2102 | 247_24756.png 2103 | 42_4202.png 2104 | 36_3624.png 2105 | 179_17933.png 2106 | 205_20586.png 2107 | 209_20942.png 2108 | 101_10119.png 2109 | 303_30392.png 2110 | 12_1222.png 2111 | 57_5700.png 2112 | 52_5292.png 2113 | 119_11988.png 2114 | 101_10195.png 2115 | 121_12165.png 2116 | 412_41290.png 2117 | 60_6069.png 2118 | 143_14388.png 2119 | 10_1091.png 2120 | 331_33103.png 2121 | 67_6782.png 2122 | 100_10013.png 2123 | 348_34858.png 2124 | 215_21562.png 2125 | 54_5472.png 2126 | 20_2034.png 2127 | 251_25108.png 2128 | 403_40389.png 2129 | 182_18204.png 2130 | 273_27321.png 2131 | 218_21886.png 2132 | 383_38358.png 2133 | 235_23518.png 2134 | 241_24120.png 2135 | 147_14786.png 2136 | 162_16208.png 2137 | 148_14823.png 2138 | 88_8888.png 2139 | 271_27129.png 2140 | 76_7643.png 2141 | 382_38251.png 2142 | 321_32160.png 2143 | 358_35887.png 2144 | 111_11190.png 2145 | 39_3939.png 2146 | 60_6032.png 2147 | 405_40533.png 2148 | 46_4634.png 2149 | 314_31422.png 2150 | 258_25813.png 2151 | 254_25479.png 2152 | 17_1743.png 2153 | 138_13859.png 2154 | 5_544.png 2155 | 194_19400.png -------------------------------------------------------------------------------- /loss.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | 5 | def trimap_adaptation_loss(pred_trimap, gt_trimap): 6 | loss = nn.CrossEntropyLoss() 7 | return loss(pred_trimap, gt_trimap) 8 | 9 | 10 | def alpha_estimation_loss(pred_alpha, gt_alpha, pred_trimap_argmax): 11 | """ 12 | pred_trimap_argmax 13 | 0: background 14 | 1: unknown 15 | 2: foreground 16 | """ 17 | mask = (pred_trimap_argmax == 1) 18 | mask = mask.float() 19 | num_unknown_pixel = torch.sum(mask) 20 | masked_pred_alpha = pred_alpha.mul(mask) 21 | masked_gt_alpha = gt_alpha.mul(mask) 22 | 23 | loss = nn.L1Loss() 24 | return loss(masked_pred_alpha, masked_gt_alpha) / (num_unknown_pixel + 1e-8) 25 | 26 | 27 | def task_uncertainty_loss(pred_trimap, pred_trimap_argmax, pred_alpha, gt_trimap, gt_alpha, log_sigma_t_sqr, log_sigma_a_sqr): 28 | Lt = trimap_adaptation_loss(pred_trimap, gt_trimap) 29 | La = alpha_estimation_loss(pred_alpha, gt_alpha, pred_trimap_argmax) 30 | overall = Lt / (2 * torch.exp(log_sigma_t_sqr)) + La / torch.exp(log_sigma_a_sqr) + log_sigma_t_sqr / 2 + log_sigma_a_sqr / 2 31 | return overall, Lt, La 32 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import os 2 | import torch 3 | import torchvision 4 | from tensorboardX import SummaryWriter 5 | from tqdm import tqdm 6 | 7 | from dataset.dataset import AdaMattingDataset 8 | from dataset.pre_process import composite_dataset, gen_train_valid_names 9 | from net.adamatting import AdaMatting 10 | from loss import task_uncertainty_loss 11 | from utility import get_args, get_logger, poly_lr_scheduler, save_checkpoint, AverageMeter, \ 12 | compute_mse, compute_sad 13 | 14 | 15 | def train(model, optimizer, device, args, logger, multi_gpu): 16 | torch.manual_seed(7) 17 | writer = SummaryWriter() 18 | 19 | logger.info("Initializing data loaders") 20 | train_dataset = AdaMattingDataset(args.raw_data_path, 'train') 21 | train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, 22 | num_workers=16, pin_memory=True) 23 | valid_dataset = AdaMattingDataset(args.raw_data_path, 'valid') 24 | valid_loader = torch.utils.data.DataLoader(valid_dataset, batch_size=args.batch_size, shuffle=False, 25 | num_workers=16, pin_memory=True) 26 | 27 | if args.resume: 28 | logger.info("Start training from saved ckpt") 29 | ckpt = torch.load(args.ckpt_path) 30 | model = ckpt["model"].module 31 | model = model.to(device) 32 | optimizer = ckpt["optimizer"] 33 | 34 | start_epoch = ckpt["epoch"] + 1 35 | max_iter = ckpt["max_iter"] 36 | cur_iter = ckpt["cur_iter"] 37 | init_lr = ckpt["init_lr"] 38 | best_loss = ckpt["best_loss"] 39 | else: 40 | logger.info("Start training from scratch") 41 | start_epoch = 0 42 | max_iter = 43100 * (1 - args.valid_portion) / args.batch_size * args.epochs 43 | cur_iter = 0 44 | init_lr = args.lr 45 | best_loss = float('inf') 46 | 47 | for epoch in range(start_epoch, args.epochs): 48 | # Training 49 | torch.set_grad_enabled(True) 50 | model.train() 51 | for index, (img, gt) in enumerate(train_loader): 52 | cur_lr = poly_lr_scheduler(optimizer=optimizer, init_lr=init_lr, iter=cur_iter, max_iter=max_iter) 53 | 54 | img = img.type(torch.FloatTensor).to(device) # [bs, 4, 320, 320] 55 | gt_alpha = (gt[:, 0, :, :].unsqueeze(1)).type(torch.FloatTensor).to(device) # [bs, 1, 320, 320] 56 | gt_trimap = gt[:, 1, :, :].type(torch.LongTensor).to(device) # [bs, 320, 320] 57 | 58 | optimizer.zero_grad() 59 | trimap_adaption, t_argmax, alpha_estimation = model(img) 60 | L_overall, L_t, L_a = task_uncertainty_loss(pred_trimap=trimap_adaption, pred_trimap_argmax=t_argmax, 61 | pred_alpha=alpha_estimation, gt_trimap=gt_trimap, 62 | gt_alpha=gt_alpha, log_sigma_t_sqr=model.log_sigma_t_sqr, log_sigma_a_sqr=model.log_sigma_a_sqr) 63 | # if multi_gpu: 64 | # L_overall, L_t, L_a = L_overall.mean(), L_t.mean(), L_a.mean() 65 | optimizer.zero_grad() 66 | L_overall.backward() 67 | optimizer.step() 68 | 69 | if cur_iter % 10 == 0: 70 | logger.info("Epoch: {:03d} | Iter: {:05d}/{} | Loss: {:.4e} | L_t: {:.4e} | L_a: {:.4e}" 71 | .format(epoch, index, len(train_loader), L_overall.item(), L_t.item(), L_a.item())) 72 | writer.add_scalar("loss/L_overall", L_overall.item(), cur_iter) 73 | writer.add_scalar("loss/L_t", L_t.item(), cur_iter) 74 | writer.add_scalar("loss/L_a", L_a.item(), cur_iter) 75 | sigma_t = torch.exp(model.log_sigma_t_sqr / 2) 76 | sigma_a = torch.exp(model.log_sigma_a_sqr / 2) 77 | writer.add_scalar("sigma/sigma_t", sigma_t, cur_iter) 78 | writer.add_scalar("sigma/sigma_a", sigma_a, cur_iter) 79 | writer.add_scalar("lr", cur_lr, cur_iter) 80 | 81 | cur_iter += 1 82 | 83 | # Validation 84 | logger.info("Validating after the {}th epoch".format(epoch)) 85 | avg_loss = AverageMeter() 86 | avg_l_t = AverageMeter() 87 | avg_l_a = AverageMeter() 88 | torch.cuda.empty_cache() 89 | torch.set_grad_enabled(False) 90 | model.eval() 91 | with tqdm(total=len(valid_loader)) as pbar: 92 | for index, (img, gt) in enumerate(valid_loader): 93 | img = img.type(torch.FloatTensor).to(device) # [bs, 4, 320, 320] 94 | gt_alpha = (gt[:, 0, :, :].unsqueeze(1)).type(torch.FloatTensor).to(device) # [bs, 1, 320, 320] 95 | gt_trimap = gt[:, 1, :, :].type(torch.LongTensor).to(device) # [bs, 320, 320] 96 | 97 | trimap_adaption, t_argmax, alpha_estimation = model(img) 98 | L_overall_valid, L_t_valid, L_a_valid = task_uncertainty_loss(pred_trimap=trimap_adaption, pred_trimap_argmax=t_argmax, 99 | pred_alpha=alpha_estimation, gt_trimap=gt_trimap, 100 | gt_alpha=gt_alpha, log_sigma_t_sqr=model.log_sigma_t_sqr, log_sigma_a_sqr=model.log_sigma_a_sqr) 101 | # if multi_gpu: 102 | # L_overall, L_t, L_a = L_overall.mean(), L_t.mean(), L_a.mean() 103 | avg_loss.update(L_overall_valid.item()) 104 | avg_l_t.update(L_t_valid.item()) 105 | avg_l_a.update(L_a_valid.item()) 106 | 107 | if index == 0: 108 | trimap_adaption_res = torchvision.utils.make_grid(t_argmax.type(torch.FloatTensor) / 2, normalize=True, scale_each=True) 109 | writer.add_image('valid_image/trimap_adaptation', trimap_adaption_res, cur_iter) 110 | alpha_estimation_res = torchvision.utils.make_grid(alpha_estimation, normalize=True, scale_each=True) 111 | writer.add_image('valid_image/alpha_estimation', alpha_estimation_res, cur_iter) 112 | 113 | pbar.update() 114 | 115 | logger.info("Average loss overall: {:.4e}".format(avg_loss.avg)) 116 | logger.info("Average loss of trimap adaptation: {:.4e}".format(avg_l_t.avg)) 117 | logger.info("Average loss of alpha estimation: {:.4e}".format(avg_l_a.avg)) 118 | writer.add_scalar("valid_loss/L_overall", avg_loss.avg, cur_iter) 119 | writer.add_scalar("valid_loss/L_t", avg_l_t.avg, cur_iter) 120 | writer.add_scalar("valid_loss/L_a", avg_l_a.avg, cur_iter) 121 | 122 | is_best = avg_loss.avg < best_loss 123 | best_loss = min(avg_loss.avg, best_loss) 124 | if is_best or (args.save_ckpt and epoch % 10 == 0): 125 | if not os.path.exists("ckpts"): 126 | os.makedirs("ckpts") 127 | logger.info("Checkpoint saved") 128 | if (is_best): 129 | logger.info("Best checkpoint saved") 130 | save_checkpoint(epoch, model, optimizer, cur_iter, max_iter, init_lr, avg_loss.avg, is_best, args.ckpt_path) 131 | 132 | writer.export_scalars_to_json("./all_scalars.json") 133 | writer.close() 134 | 135 | 136 | def test(): 137 | pass 138 | 139 | 140 | def main(): 141 | args = get_args() 142 | logger = get_logger(args.write_log) 143 | 144 | os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu 145 | device_ids_str = args.gpu.split(',') 146 | device_ids = [] 147 | for i in range(len(device_ids_str)): 148 | device_ids.append(i) 149 | 150 | multi_gpu = False 151 | if args.mode != "prep": 152 | logger.info("Loading network") 153 | model = AdaMatting(in_channel=4) 154 | optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=0) 155 | if args.cuda: 156 | device = torch.device("cuda:{}".format(device_ids[0])) 157 | if len(device_ids) > 1 and args.mode=="train": 158 | logger.info("Loading with multiple GPUs") 159 | model = torch.nn.DataParallel(model, device_ids=device_ids) 160 | multi_gpu = True 161 | model = model.cuda(device=device_ids[0]) 162 | else: 163 | device = torch.device("cpu") 164 | 165 | if args.mode == "train": 166 | logger.info("Program runs in train mode") 167 | train(model=model, optimizer=optimizer, device=device, args=args, logger=logger, multi_gpu=multi_gpu) 168 | elif args.mode == "test": 169 | logger.info("Program runs in test mode") 170 | test() 171 | elif args.mode == "prep": 172 | logger.info("Program runs in prep mode") 173 | # composite_dataset(args.raw_data_path, logger) 174 | gen_train_valid_names(args.valid_portion, logger) 175 | 176 | 177 | if __name__ == "__main__": 178 | main() 179 | -------------------------------------------------------------------------------- /net/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__init__.py -------------------------------------------------------------------------------- /net/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /net/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /net/__pycache__/adamatting.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/adamatting.cpython-36.pyc -------------------------------------------------------------------------------- /net/__pycache__/adamatting.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/adamatting.cpython-37.pyc -------------------------------------------------------------------------------- /net/__pycache__/gcn.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/gcn.cpython-36.pyc -------------------------------------------------------------------------------- /net/__pycache__/gcn.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/gcn.cpython-37.pyc -------------------------------------------------------------------------------- /net/__pycache__/propunit.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/propunit.cpython-36.pyc -------------------------------------------------------------------------------- /net/__pycache__/propunit.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/propunit.cpython-37.pyc -------------------------------------------------------------------------------- /net/__pycache__/resblock.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/resblock.cpython-36.pyc -------------------------------------------------------------------------------- /net/__pycache__/resblock.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/waterbearbee/AdaMatting/9cc88aeacbd7c95d006d9556d98a9395c90903b0/net/__pycache__/resblock.cpython-37.pyc -------------------------------------------------------------------------------- /net/adamatting.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | import math 5 | import os 6 | import sys 7 | 8 | cur_dir = os.getcwd() 9 | sys.path.append(os.path.join(cur_dir, "net")) 10 | from resblock import Bottleneck, make_resblock 11 | from gcn import GCN 12 | from propunit import PropUnit 13 | 14 | 15 | class AdaMatting(nn.Module): 16 | 17 | def __init__(self, in_channel): 18 | super(AdaMatting, self).__init__() 19 | 20 | # Encoder 21 | encoder_inplanes = 64 22 | self.encoder_conv = nn.Sequential( 23 | nn.Conv2d(in_channel, 64, kernel_size=7, stride=1, padding=3, bias=True), 24 | nn.BatchNorm2d(64), 25 | nn.ReLU(inplace=True) 26 | ) 27 | self.encoder_maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) 28 | self.encoder_resblock1, encoder_inplanes = make_resblock(encoder_inplanes, 64, blocks=2, stride=2, block=Bottleneck) 29 | self.encoder_resblock2, encoder_inplanes = make_resblock(encoder_inplanes, 64, blocks=2, stride=2, block=Bottleneck) 30 | self.encoder_resblock3, encoder_inplanes = make_resblock(encoder_inplanes, 64, blocks=2, stride=2, block=Bottleneck) 31 | for m in self.modules(): 32 | if isinstance(m, nn.Conv2d): 33 | n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 34 | m.weight.data.normal_(0, math.sqrt(2. / n)) 35 | elif isinstance(m, nn.BatchNorm2d): 36 | m.weight.data.fill_(1) 37 | m.bias.data.zero_() 38 | 39 | # Shortcuts 40 | self.shortcut_shallow = GCN(64, 16) 41 | self.shortcut_middle = GCN(64 * Bottleneck.expansion, 32) 42 | self.shortcut_deep = GCN(64 * Bottleneck.expansion, 64) 43 | 44 | # T-decoder 45 | self.t_decoder_upscale1 = nn.Sequential( 46 | nn.Conv2d(64 * Bottleneck.expansion, 64 * 4, kernel_size=7, stride=1, padding=3, bias=True), 47 | nn.PixelShuffle(2) 48 | ) 49 | self.t_decoder_upscale2 = nn.Sequential( 50 | nn.Conv2d(64, 32 * 4, kernel_size=7, stride=1, padding=3, bias=True), 51 | nn.PixelShuffle(2) 52 | ) 53 | self.t_decoder_upscale3 = nn.Sequential( 54 | nn.Conv2d(32, 16 * 4, kernel_size=7, stride=1, padding=3, bias=True), 55 | nn.PixelShuffle(2) 56 | ) 57 | self.t_decoder_upscale4 = nn.Sequential( 58 | nn.Conv2d(16, 3 * (2 ** 2), kernel_size=7, stride=1, padding=3, bias=True), 59 | nn.PixelShuffle(2) 60 | ) 61 | 62 | # A-deocder 63 | self.a_decoder_upscale1 = nn.Sequential( 64 | nn.Conv2d(64 * Bottleneck.expansion, 64 * 4, kernel_size=7, stride=1, padding=3, bias=True), 65 | nn.PixelShuffle(2) 66 | ) 67 | self.a_decoder_upscale2 = nn.Sequential( 68 | nn.Conv2d(64, 32 * 4, kernel_size=7, stride=1, padding=3, bias=True), 69 | nn.PixelShuffle(2) 70 | ) 71 | self.a_decoder_upscale3 = nn.Sequential( 72 | nn.Conv2d(32, 16 * 4, kernel_size=7, stride=1, padding=3, bias=True), 73 | nn.PixelShuffle(2) 74 | ) 75 | self.a_decoder_upscale4 = nn.Sequential( 76 | nn.Conv2d(16, 1 * (2 ** 2), kernel_size=7, stride=1, padding=3, bias=True), 77 | nn.PixelShuffle(2) 78 | ) 79 | 80 | # Propagation unit 81 | self.propunit = PropUnit( 82 | input_dim=4 + 1 + 1, 83 | hidden_dim=[1], 84 | kernel_size=(3, 3), 85 | num_layers=1, 86 | seq_len=3, 87 | bias=True) 88 | 89 | # Task uncertainty loss 90 | self.log_sigma_t_sqr = nn.Parameter(torch.log(torch.Tensor([16.0]))) 91 | self.log_sigma_a_sqr = nn.Parameter(torch.log(torch.Tensor([16.0]))) 92 | 93 | 94 | def forward(self, x): 95 | raw = x 96 | x = self.encoder_conv(x) # 64 97 | encoder_shallow = self.encoder_maxpool(x) # 64 98 | encoder_middle = self.encoder_resblock1(encoder_shallow) # 256 99 | encoder_deep = self.encoder_resblock2(encoder_middle) # 256 100 | encoder_result = self.encoder_resblock3(encoder_deep) # 256 101 | 102 | shortcut_deep = self.shortcut_deep(encoder_deep) # 256 103 | shortcut_middle = self.shortcut_middle(encoder_middle) # 64 104 | shortcut_shallow = self.shortcut_shallow(encoder_shallow) # 32 105 | 106 | t_decoder_deep = self.t_decoder_upscale1(encoder_result) + shortcut_deep # 64 107 | t_decoder_middle = self.t_decoder_upscale2(t_decoder_deep) + shortcut_middle # 32 108 | t_decoder_shallow = self.t_decoder_upscale3(t_decoder_middle) # 16 109 | trimap_adaption = self.t_decoder_upscale4(t_decoder_shallow) # 3 110 | t_argmax = trimap_adaption.argmax(dim=1) 111 | 112 | a_decoder_deep = self.a_decoder_upscale1(encoder_result) # 64 113 | a_decoder_middle = self.a_decoder_upscale2(a_decoder_deep) + shortcut_middle # 32 114 | a_decoder_shallow = self.a_decoder_upscale3(a_decoder_middle) + shortcut_shallow # 16 115 | a_decoder = self.a_decoder_upscale4(a_decoder_shallow) # 1 116 | 117 | propunit_input = torch.cat((raw, torch.unsqueeze(t_argmax, dim=1).float(), a_decoder), dim=1) # 118 | alpha_estimation = self.propunit(propunit_input) 119 | 120 | return trimap_adaption, t_argmax, alpha_estimation 121 | -------------------------------------------------------------------------------- /net/gcn.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | 5 | class GCN(nn.Module): 6 | def __init__(self,c,out_c,k=(7,7)): # out_Channel=21 in paper 7 | super(GCN, self).__init__() 8 | self.conv_l1 = nn.Conv2d(c, out_c, kernel_size=(k[0],1), padding =(int((k[0]-1)/2),0)) 9 | self.conv_l2 = nn.Conv2d(out_c, out_c, kernel_size=(1,k[0]), padding =(0,int((k[0]-1)/2))) 10 | self.conv_r1 = nn.Conv2d(c, out_c, kernel_size=(1,k[1]), padding =(0,int((k[1]-1)/2))) 11 | self.conv_r2 = nn.Conv2d(out_c, out_c, kernel_size=(k[1],1), padding =(int((k[1]-1)/2),0)) 12 | 13 | def forward(self, x): 14 | x_l = self.conv_l1(x) 15 | x_l = self.conv_l2(x_l) 16 | 17 | x_r = self.conv_r1(x) 18 | x_r = self.conv_r2(x_r) 19 | 20 | x = x_l + x_r 21 | 22 | return x -------------------------------------------------------------------------------- /net/propunit.py: -------------------------------------------------------------------------------- 1 | # this implementation is based on https://github.com/ndrplz/ConvLSTM_pytorch/blob/master/convlstm.py 2 | 3 | import torch.nn as nn 4 | import torch 5 | from resblock import Bottleneck, make_resblock 6 | 7 | 8 | class PropUnitCell(nn.Module): 9 | 10 | def __init__(self, input_dim, hidden_dim, kernel_size, bias): 11 | """ 12 | Initialize ConvLSTM cell. 13 | Parameters 14 | ---------- 15 | input_dim: int 16 | Number of channels of input tensor. 17 | hidden_dim: int 18 | Number of channels of hidden state. 19 | kernel_size: (int, int) 20 | Size of the convolutional kernel. 21 | bias: bool 22 | Whether or not to add the bias. 23 | """ 24 | 25 | super(PropUnitCell, self).__init__() 26 | 27 | self.input_dim = input_dim 28 | self.hidden_dim = hidden_dim 29 | 30 | self.kernel_size = kernel_size 31 | self.padding = kernel_size[0] // 2, kernel_size[1] // 2 32 | self.bias = bias 33 | 34 | self.resblock, _ = make_resblock(self.input_dim, 32, blocks=2, stride=1, block=Bottleneck) 35 | 36 | self.conv = nn.Conv2d(in_channels=32 * Bottleneck.expansion + self.hidden_dim, 37 | out_channels=4 * self.hidden_dim, 38 | kernel_size=self.kernel_size, 39 | padding=self.padding, 40 | bias=self.bias) 41 | 42 | 43 | def forward(self, input_tensor, cur_state): 44 | h_cur, c_cur = cur_state 45 | 46 | input_features = self.resblock(input_tensor) 47 | combined = torch.cat([input_features, h_cur], dim=1) # concatenate along channel axis 48 | combined_conv = self.conv(combined) 49 | cc_i, cc_f, cc_o, cc_g = torch.split(combined_conv, self.hidden_dim, dim=1) 50 | i = torch.sigmoid(cc_i) 51 | f = torch.sigmoid(cc_f) 52 | o = torch.sigmoid(cc_o) 53 | g = torch.tanh(cc_g) 54 | 55 | c_next = f * c_cur + i * g 56 | h_next = o * torch.tanh(c_next) 57 | 58 | return h_next, c_next 59 | 60 | 61 | def init_hidden(self, batch_size, image_size): 62 | height, width = image_size 63 | return (torch.zeros(batch_size, self.hidden_dim, height, width, device=self.conv.weight.device), 64 | torch.zeros(batch_size, self.hidden_dim, height, width, device=self.conv.weight.device)) 65 | 66 | 67 | class PropUnit(nn.Module): 68 | 69 | """ 70 | Parameters: 71 | input_dim: Number of channels in input 72 | hidden_dim: Number of hidden channels 73 | kernel_size: Size of kernel in convolutions 74 | num_layers: Number of LSTM layers stacked on each other 75 | batch_first: Whether or not dimension 0 is the batch or not 76 | bias: Bias or no bias in Convolution 77 | Note: Will do same padding. 78 | Input: 79 | A tensor of size B, T, C, H, W or T, B, C, H, W 80 | Output: 81 | A tuple of two lists of length num_layers (or length 1 if return_all_layers is False). 82 | 0 - layer_output_list is the list of lists of length T of each output 83 | 1 - last_state_list is the list of last states 84 | each element of the list is a tuple (h, c) for hidden state and memory 85 | Example: 86 | >> x = torch.rand((32, 10, 64, 128, 128)) 87 | >> convlstm = ConvLSTM(64, 16, 3, 1, True, True, False) 88 | >> _, last_states = convlstm(x) 89 | >> h = last_states[0][0] # 0 for layer index, 0 for h index 90 | """ 91 | 92 | def __init__(self, input_dim, hidden_dim, kernel_size, num_layers, seq_len, bias=True): 93 | super(PropUnit, self).__init__() 94 | 95 | self._check_kernel_size_consistency(kernel_size) 96 | 97 | # Make sure that both `kernel_size` and `hidden_dim` are lists having len == num_layers 98 | kernel_size = self._extend_for_multilayer(kernel_size, num_layers) 99 | hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers) 100 | if not len(kernel_size) == len(hidden_dim) == num_layers: 101 | raise ValueError('Inconsistent list length.') 102 | 103 | self.input_dim = input_dim 104 | self.hidden_dim = hidden_dim 105 | self.kernel_size = kernel_size 106 | self.num_layers = num_layers 107 | self.seq_len = seq_len 108 | self.bias = bias 109 | 110 | cell_list = [] 111 | for i in range(0, self.num_layers): 112 | cur_input_dim = self.input_dim if i == 0 else self.hidden_dim[i - 1] 113 | cell_list.append(PropUnitCell(input_dim=cur_input_dim, 114 | hidden_dim=self.hidden_dim[i], 115 | kernel_size=self.kernel_size[i], 116 | bias=self.bias)) 117 | 118 | self.cell_list = nn.ModuleList(cell_list) 119 | 120 | 121 | def forward(self, input_tensor, hidden_state=None): 122 | """ 123 | Parameters 124 | ---------- 125 | input_tensor: todo 126 | 5-D Tensor either of shape (t, b, c, h, w) or (b, t, c, h, w) 127 | hidden_state: todo 128 | None. todo implement stateful 129 | Returns 130 | ------- 131 | last_state_list, layer_output 132 | """ 133 | 134 | b, _, h, w = input_tensor.size() 135 | 136 | # Implement stateful ConvLSTM 137 | if hidden_state is not None: 138 | raise NotImplementedError() 139 | else: 140 | # Since the init is done in forward. Can send image size here 141 | hidden_state = self._init_hidden(batch_size=b, 142 | image_size=(h, w)) 143 | 144 | layer_output_list = [] 145 | last_state_list = [] 146 | cur_layer_input = torch.stack([input_tensor] * 3, dim = 1) 147 | 148 | for layer_idx in range(self.num_layers): 149 | 150 | h, c = hidden_state[layer_idx] 151 | output_inner = [] 152 | for t in range(self.seq_len): 153 | h, c = self.cell_list[layer_idx](input_tensor=cur_layer_input[:, t, :, :, :], 154 | cur_state=[h, c]) 155 | output_inner.append(h) 156 | 157 | layer_output = torch.stack(output_inner, dim=1) 158 | cur_layer_input = layer_output 159 | 160 | layer_output_list.append(layer_output) 161 | last_state_list.append([h, c]) 162 | 163 | output_layer = layer_output_list[-1] 164 | output_layer = output_layer[:, -1, :, :, :] 165 | output_layer.squeeze(dim=1) 166 | return output_layer 167 | 168 | 169 | def _init_hidden(self, batch_size, image_size): 170 | init_states = [] 171 | for i in range(self.num_layers): 172 | init_states.append(self.cell_list[i].init_hidden(batch_size, image_size)) 173 | return init_states 174 | 175 | 176 | @staticmethod 177 | def _check_kernel_size_consistency(kernel_size): 178 | if not (isinstance(kernel_size, tuple) or 179 | (isinstance(kernel_size, list) and all([isinstance(elem, tuple) for elem in kernel_size]))): 180 | raise ValueError('`kernel_size` must be tuple or list of tuples') 181 | 182 | 183 | @staticmethod 184 | def _extend_for_multilayer(param, num_layers): 185 | if not isinstance(param, list): 186 | param = [param] * num_layers 187 | return param 188 | -------------------------------------------------------------------------------- /net/resblock.py: -------------------------------------------------------------------------------- 1 | # this implementation is based on https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/fbresnet.py 2 | 3 | import torch 4 | import torch.nn as nn 5 | 6 | 7 | class Bottleneck(nn.Module): 8 | expansion = 4 9 | 10 | def __init__(self, inplanes, planes, stride=1, downsample=None): 11 | super(Bottleneck, self).__init__() 12 | self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=True) 13 | self.bn1 = nn.BatchNorm2d(planes) 14 | self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, 15 | padding=1, bias=True) 16 | self.bn2 = nn.BatchNorm2d(planes) 17 | self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=True) 18 | self.bn3 = nn.BatchNorm2d(planes * 4) 19 | self.relu = nn.ReLU(inplace=True) 20 | self.downsample = downsample 21 | self.stride = stride 22 | 23 | def forward(self, x): 24 | residual = x 25 | 26 | out = self.conv1(x) 27 | out = self.bn1(out) 28 | out = self.relu(out) 29 | 30 | out = self.conv2(out) 31 | out = self.bn2(out) 32 | out = self.relu(out) 33 | 34 | out = self.conv3(out) 35 | out = self.bn3(out) 36 | 37 | if self.downsample is not None: 38 | residual = self.downsample(x) 39 | 40 | out += residual 41 | out = self.relu(out) 42 | 43 | return out 44 | 45 | 46 | def make_resblock(inplanes, planes, blocks, stride=1, block=Bottleneck): 47 | downsample = None 48 | if stride != 1 or inplanes != planes * block.expansion: 49 | downsample = nn.Sequential( 50 | nn.Conv2d(inplanes, planes * block.expansion, 51 | kernel_size=1, stride=stride, bias=True), 52 | nn.BatchNorm2d(planes * block.expansion), 53 | ) 54 | 55 | layers = [] 56 | layers.append(block(inplanes, planes, stride, downsample)) 57 | inplanes = planes * block.expansion 58 | for _ in range(1, blocks): 59 | layers.append(block(inplanes, planes)) 60 | 61 | return nn.Sequential(*layers), inplanes 62 | -------------------------------------------------------------------------------- /train.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | 3 | python main.py \ 4 | --mode=train \ 5 | --valid_portion=5 \ 6 | --batch_size=4 \ 7 | --epochs=120 \ 8 | --lr=0.0001 \ 9 | --cuda \ 10 | --gpu=2 \ 11 | --write_log \ 12 | --save_ckpt 13 | 14 | #--resume=model/stage1/ckpt_e1.pth \ 15 | -------------------------------------------------------------------------------- /utility.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import argparse 3 | import logging 4 | import numpy as np 5 | 6 | 7 | def save_checkpoint(epoch, model, optimizer, cur_iter, max_iter, init_lr, loss, is_best, ckpt_path): 8 | state = {'epoch': epoch, 9 | 'model': model, 10 | 'optimizer': optimizer, 11 | 'cur_iter': cur_iter, 12 | 'max_iter': max_iter, 13 | 'best_loss': loss, 14 | 'init_lr': init_lr} 15 | filename = ckpt_path + "ckpt_{:03d}_{:.4f}.tar".format(epoch, loss) 16 | torch.save(state, filename) 17 | # If this checkpoint is the best so far, store a copy so it doesn't get overwritten by a worse checkpoint 18 | if is_best: 19 | filename = ckpt_path + "ckpt_{:03d}_{:.4f}_best.tar".format(epoch, loss) 20 | torch.save(state, filename) 21 | 22 | 23 | def poly_lr_scheduler(optimizer, init_lr, iter, max_iter=100, power=0.9): 24 | """Polynomial decay of learning rate 25 | :param init_lr is base learning rate 26 | :param iter is a current iteration 27 | :param max_iter is number of maximum iterations 28 | :param power is a polymomial power 29 | 30 | """ 31 | lr = init_lr * (1 - iter / max_iter) ** power 32 | for param_group in optimizer.param_groups: 33 | param_group['lr'] = lr 34 | 35 | return lr 36 | 37 | 38 | class AverageMeter(object): 39 | """ 40 | Keeps track of the most recent value, average, sum, and count of a metric. 41 | """ 42 | 43 | def __init__(self): 44 | self.reset() 45 | 46 | def reset(self): 47 | self.val = 0 48 | self.avg = 0 49 | self.sum = 0 50 | self.count = 0 51 | 52 | def update(self, val, n=1): 53 | self.val = val 54 | self.sum += val * n 55 | self.count += n 56 | self.avg = self.sum / self.count 57 | 58 | 59 | def compute_mse(pred, alpha, trimap): 60 | """ 61 | compute the MSE error given a prediction, a ground truth and a trimap. 62 | pred: the predicted alpha matte 63 | target: the ground truth alpha matte 64 | trimap: the given trimap 65 | """ 66 | num_pixels = float((trimap == 128).sum()) 67 | return ((pred - alpha) ** 2).sum() / num_pixels 68 | 69 | 70 | def compute_sad(pred, alpha): 71 | """ 72 | compute the SAD error given a prediction and a ground truth. 73 | """ 74 | diff = np.abs(pred - alpha) 75 | return np.sum(diff) / 1000 76 | 77 | 78 | def get_args(): 79 | # Training settings 80 | parser = argparse.ArgumentParser(description='set arguments') 81 | parser.add_argument('--mode', type=str, required=True, choices=["train", "test", "prep"], help="set the program to \'train\', \'test\', or \'prep\'") 82 | parser.add_argument('--valid_portion', type=int, required=True, help="percentage of valid data in all training samples") 83 | parser.add_argument('--batch_size', type=int, default=64, help='training batch size') 84 | parser.add_argument('--epochs', type=int, default=20, help='number of epochs to train for') 85 | parser.add_argument('--lr', type=float, default=0.001, help='Learning Rate. Default=0.01') 86 | parser.add_argument('--cuda', action='store_true', default=False, help='use cuda?') 87 | parser.add_argument('--gpu', type=str, default="0", help="choose gpus") 88 | parser.add_argument('--write_log', action="store_true", default=False, help="whether store log to log.txt") 89 | parser.add_argument('--raw_data_path', type=str, default="/data/datasets/im/AdaMatting/", help="dir where datasets are stored") 90 | parser.add_argument('--ckpt_path', type=str, default="ckpts/") 91 | parser.add_argument('--save_ckpt', action="store_true", default=False, help="whether save checkpoint every 10 epochs") 92 | parser.add_argument('--resume', action="store_true", default=False, help="whether resume training from a ckpt") 93 | args = parser.parse_args() 94 | return args 95 | 96 | 97 | def get_logger(flag): 98 | logger = logging.getLogger("AdaMatting") 99 | logger.setLevel(level=logging.INFO) 100 | formatter = logging.Formatter("[%(asctime)s] %(lineno)d: %(levelname)s - %(message)s") 101 | 102 | # log file stream 103 | if (flag): 104 | handler = logging.FileHandler("log.txt") 105 | handler.setLevel(logging.INFO) 106 | handler.setFormatter(formatter) 107 | logger.addHandler(handler) 108 | 109 | # log console stream 110 | console = logging.StreamHandler() 111 | console.setLevel(logging.INFO) 112 | console.setFormatter(formatter) 113 | 114 | logger.addHandler(console) 115 | 116 | return logger 117 | --------------------------------------------------------------------------------