├── .gitignore ├── LICENSE ├── README.md ├── align_faces.py ├── config.py ├── data_gen.py ├── demo.py ├── export.py ├── extract.py ├── focal_loss.py ├── image_aug.py ├── images └── theta_dist.png ├── lfw_eval.py ├── megaface.py ├── megaface_eval.py ├── megaface_utils.py ├── mobilefacenet.py ├── optimizer.py ├── pre_process.py ├── requirements.txt ├── retinaface ├── data │ ├── FDDB │ │ └── img_list.txt │ ├── __init__.py │ ├── config.py │ ├── data_augment.py │ └── wider_face.py ├── detector.py ├── layers │ ├── __init__.py │ ├── functions │ │ └── prior_box.py │ └── modules │ │ ├── __init__.py │ │ └── multibox_loss.py ├── loader.py ├── models │ ├── __init__.py │ ├── net.py │ └── retinaface.py ├── utils │ ├── __init__.py │ ├── box_utils.py │ ├── nms │ │ ├── __init__.py │ │ └── py_cpu_nms.py │ └── timer.py └── weights │ └── mobilenet0.25_Final.pth ├── train.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | runs/ 3 | data/CASIA-WebFace 4 | data/lfw_funneled 5 | __pycache__/ 6 | *.tar -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MobileFaceNets 2 | 3 | ![apm](https://img.shields.io/apm/l/vim-mode.svg) 4 | 5 | PyTorch implementation of MobileFaceNets: Efficient CNNs for Accurate Real-Time Face Verification on Mobile Devices. 6 | [paper](https://arxiv.org/abs/1804.07573). 
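
For quick experimentation, here is a minimal inference sketch (an illustrative example, not part of the original repo): it loads the released `mobilefacenet_scripted.pt` linked under Performance below, applies the same normalization as the `val` transform in `data_gen.py`, and embeds a single pre-aligned 112x112 face crop at a hypothetical path `aligned_face.jpg`.

```python
import torch
from PIL import Image
from torchvision import transforms

# TorchScript model produced by export.py / released on GitHub.
model = torch.jit.load('mobilefacenet_scripted.pt').eval()

# Same preprocessing as data_gen.py's 'val' transform.
transformer = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
])

img = Image.open('aligned_face.jpg').convert('RGB')  # hypothetical pre-aligned 112x112 crop
with torch.no_grad():
    feature = model(transformer(img).unsqueeze(0))[0]
feature = feature / feature.norm()  # unit-length 128-D embedding, as in lfw_eval.py
```

Comparing two such embeddings by the angle between them (as `lfw_eval.py` does) then gives a verification score.
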
7 | 8 | ## Performance 9 | 10 | |Accuracy|LFW|MegaFace|Download| 11 | |---|---|---|---| 12 | |paper|99.55%|92.59%|| 13 | |ours|99.48%|82.55%|[Link](https://github.com/foamliu/MobileFaceNet/releases/download/v1.0/mobilefacenet_scripted.pt)| 14 | 15 | ## Dataset 16 | ### Introduction 17 | 18 | Refined MS-Celeb-1M dataset for training, 3,804,846 faces over 85,164 identities. 19 | LFW and Megaface datasets for testing. 20 | 21 | ## Dependencies 22 | - Python 3.6.8 23 | - PyTorch 1.3.0 24 | 25 | ## Usage 26 | 27 | ### Data preprocess 28 | Extract images: 29 | ```bash 30 | $ python extract.py 31 | $ python pre_process.py 32 | ``` 33 | 34 | ### Train 35 | ```bash 36 | $ python train.py 37 | ``` 38 | 39 | To visualize the training process: 40 | ```bash 41 | $ tensorboard --logdir=runs 42 | ``` 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /align_faces.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon Apr 24 15:43:29 2017 4 | @author: zhaoy 5 | """ 6 | import cv2 7 | import numpy as np 8 | from skimage import transform as trans 9 | 10 | # reference facial points, a list of coordinates (x,y) 11 | REFERENCE_FACIAL_POINTS = [ 12 | [30.29459953, 51.69630051], 13 | [65.53179932, 51.50139999], 14 | [48.02519989, 71.73660278], 15 | [33.54930115, 92.3655014], 16 | [62.72990036, 92.20410156] 17 | ] 18 | 19 | DEFAULT_CROP_SIZE = (96, 112) 20 | 21 | 22 | class FaceWarpException(Exception): 23 | def __str__(self): 24 | return 'In File {}:{}'.format( 25 | __file__, super.__str__(self)) 26 | 27 | 28 | def get_reference_facial_points(output_size=None, 29 | inner_padding_factor=0.0, 30 | outer_padding=(0, 0), 31 | default_square=False): 32 | tmp_5pts = np.array(REFERENCE_FACIAL_POINTS) 33 | tmp_crop_size = np.array(DEFAULT_CROP_SIZE) 34 | 35 | # 0) make the inner region a square 36 | if default_square: 37 | size_diff = max(tmp_crop_size) - tmp_crop_size 38 | tmp_5pts += size_diff / 2 39 | tmp_crop_size += size_diff 40 | 41 | # print('---> default:') 42 | # print(' crop_size = ', tmp_crop_size) 43 | # print(' reference_5pts = ', tmp_5pts) 44 | 45 | if (output_size and 46 | output_size[0] == tmp_crop_size[0] and 47 | output_size[1] == tmp_crop_size[1]): 48 | # print('output_size == DEFAULT_CROP_SIZE {}: return default reference points'.format(tmp_crop_size)) 49 | return tmp_5pts 50 | 51 | if (inner_padding_factor == 0 and 52 | outer_padding == (0, 0)): 53 | if output_size is None: 54 | print('No paddings to do: return default reference points') 55 | return tmp_5pts 56 | else: 57 | raise FaceWarpException( 58 | 'No paddings to do, output_size must be None or {}'.format(tmp_crop_size)) 59 | 60 | # check output size 61 | if not (0 <= inner_padding_factor <= 1.0): 62 | raise FaceWarpException('Not (0 <= inner_padding_factor <= 1.0)') 63 | 64 | if ((inner_padding_factor > 0 or outer_padding[0] > 0 or outer_padding[1] > 0) 65 | and output_size is None): 66 | output_size = tmp_crop_size * \ 67 | (1 + inner_padding_factor * 2).astype(np.int32) 68 | output_size += np.array(outer_padding) 69 | print(' deduced from paddings, output_size = ', output_size) 70 | 71 | if not (outer_padding[0] < output_size[0] 72 | and outer_padding[1] < output_size[1]): 73 | raise FaceWarpException('Not (outer_padding[0] < output_size[0]' 74 | 'and outer_padding[1] < output_size[1])') 75 | 76 | # 1) pad the inner region according inner_padding_factor 77 | # print('---> STEP1: pad the inner region 
according inner_padding_factor') 78 | if inner_padding_factor > 0: 79 | size_diff = tmp_crop_size * inner_padding_factor * 2 80 | tmp_5pts += size_diff / 2 81 | tmp_crop_size += np.round(size_diff).astype(np.int32) 82 | 83 | # print(' crop_size = ', tmp_crop_size) 84 | # print(' reference_5pts = ', tmp_5pts) 85 | 86 | # 2) resize the padded inner region 87 | # print('---> STEP2: resize the padded inner region') 88 | size_bf_outer_pad = np.array(output_size) - np.array(outer_padding) * 2 89 | # print(' crop_size = ', tmp_crop_size) 90 | # print(' size_bf_outer_pad = ', size_bf_outer_pad) 91 | 92 | if size_bf_outer_pad[0] * tmp_crop_size[1] != size_bf_outer_pad[1] * tmp_crop_size[0]: 93 | raise FaceWarpException('Must have (output_size - outer_padding)' 94 | '= some_scale * (crop_size * (1.0 + inner_padding_factor)') 95 | 96 | scale_factor = size_bf_outer_pad[0].astype(np.float32) / tmp_crop_size[0] 97 | # print(' resize scale_factor = ', scale_factor) 98 | tmp_5pts = tmp_5pts * scale_factor 99 | # size_diff = tmp_crop_size * (scale_factor - min(scale_factor)) 100 | # tmp_5pts = tmp_5pts + size_diff / 2 101 | tmp_crop_size = size_bf_outer_pad 102 | # print(' crop_size = ', tmp_crop_size) 103 | # print(' reference_5pts = ', tmp_5pts) 104 | 105 | # 3) add outer_padding to make output_size 106 | reference_5point = tmp_5pts + np.array(outer_padding) 107 | tmp_crop_size = output_size 108 | # print('---> STEP3: add outer_padding to make output_size') 109 | # print(' crop_size = ', tmp_crop_size) 110 | # print(' reference_5pts = ', tmp_5pts) 111 | # 112 | # print('===> end get_reference_facial_points\n') 113 | 114 | return reference_5point 115 | 116 | 117 | def get_affine_transform_matrix(src_pts, dst_pts): 118 | tfm = np.float32([[1, 0, 0], [0, 1, 0]]) 119 | n_pts = src_pts.shape[0] 120 | ones = np.ones((n_pts, 1), src_pts.dtype) 121 | src_pts_ = np.hstack([src_pts, ones]) 122 | dst_pts_ = np.hstack([dst_pts, ones]) 123 | 124 | A, res, rank, s = np.linalg.lstsq(src_pts_, dst_pts_) 125 | 126 | if rank == 3: 127 | tfm = np.float32([ 128 | [A[0, 0], A[1, 0], A[2, 0]], 129 | [A[0, 1], A[1, 1], A[2, 1]] 130 | ]) 131 | elif rank == 2: 132 | tfm = np.float32([ 133 | [A[0, 0], A[1, 0], 0], 134 | [A[0, 1], A[1, 1], 0] 135 | ]) 136 | 137 | return tfm 138 | 139 | 140 | def warp_and_crop_face(src_img, # BGR 141 | facial_pts, 142 | reference_pts=None, 143 | crop_size=(96, 112), 144 | align_type='smilarity'): 145 | if reference_pts is None: 146 | if crop_size[0] == 96 and crop_size[1] == 112: 147 | reference_pts = REFERENCE_FACIAL_POINTS 148 | else: 149 | default_square = False 150 | inner_padding_factor = 0 151 | outer_padding = (0, 0) 152 | output_size = crop_size 153 | 154 | reference_pts = get_reference_facial_points(output_size, 155 | inner_padding_factor, 156 | outer_padding, 157 | default_square) 158 | 159 | ref_pts = np.float32(reference_pts) 160 | ref_pts_shp = ref_pts.shape 161 | if max(ref_pts_shp) < 3 or min(ref_pts_shp) != 2: 162 | raise FaceWarpException( 163 | 'reference_pts.shape must be (K,2) or (2,K) and K>2') 164 | 165 | if ref_pts_shp[0] == 2: 166 | ref_pts = ref_pts.T 167 | 168 | src_pts = np.float32(facial_pts) 169 | src_pts_shp = src_pts.shape 170 | if max(src_pts_shp) < 3 or min(src_pts_shp) != 2: 171 | raise FaceWarpException( 172 | 'facial_pts.shape must be (K,2) or (2,K) and K>2') 173 | 174 | if src_pts_shp[0] == 2: 175 | src_pts = src_pts.T 176 | 177 | if src_pts.shape != ref_pts.shape: 178 | raise FaceWarpException( 179 | 'facial_pts and reference_pts must have the same shape') 180 
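# The three alignment modes handled below: 'cv2_affine' fits an exact affine map
# from the first three point pairs, 'affine' solves a least-squares affine over
# all five pairs via get_affine_transform_matrix(), and the default path estimates
# a similarity transform (rotation + uniform scale + translation) with
# skimage.transform.SimilarityTransform, which preserves the face's aspect ratio.
# (Note: the branches compare strings with 'is'; '==' is the robust comparison.)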
| 181 | if align_type is 'cv2_affine': 182 | tfm = cv2.getAffineTransform(src_pts[0:3], ref_pts[0:3]) 183 | # print('cv2.getAffineTransform() returns tfm=\n' + str(tfm)) 184 | elif align_type is 'affine': 185 | tfm = get_affine_transform_matrix(src_pts, ref_pts) 186 | # print('get_affine_transform_matrix() returns tfm=\n' + str(tfm)) 187 | else: 188 | # tfm = get_similarity_transform_for_cv2(src_pts, ref_pts) 189 | tform = trans.SimilarityTransform() 190 | tform.estimate(src_pts, ref_pts) 191 | tfm = tform.params[0:2, :] 192 | 193 | face_img = cv2.warpAffine(src_img, tfm, (crop_size[0], crop_size[1])) 194 | 195 | return face_img # BGR 196 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import torch 4 | 5 | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # sets device for model and PyTorch tensors 6 | 7 | # Model parameters 8 | image_w = 112 9 | image_h = 112 10 | channel = 3 11 | emb_size = 128 12 | 13 | # Training parameters 14 | num_workers = 4 # for data-loading; right now, only 1 works with h5py 15 | grad_clip = 5. # clip gradients at an absolute value of 16 | print_freq = 100 # print training/validation stats every __ batches 17 | checkpoint = None # path to checkpoint, None if none 18 | 19 | # Data parameters 20 | num_classes = 85742 21 | num_samples = 5822653 22 | DATA_DIR = 'data' 23 | faces_ms1m_folder = 'data/faces_emore' 24 | path_imgidx = os.path.join(faces_ms1m_folder, 'train.idx') 25 | path_imgrec = os.path.join(faces_ms1m_folder, 'train.rec') 26 | IMG_DIR = 'data/images' 27 | pickle_file = 'data/faces_ms1m_112x112.pickle' 28 | -------------------------------------------------------------------------------- /data_gen.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | 4 | from PIL import Image 5 | from torch.utils.data import Dataset 6 | from torchvision import transforms 7 | 8 | from config import IMG_DIR, pickle_file 9 | 10 | # Data augmentation and normalization for training 11 | # Just normalization for validation 12 | data_transforms = { 13 | 'train': transforms.Compose([ 14 | transforms.RandomHorizontalFlip(), 15 | transforms.ColorJitter(brightness=0.125, contrast=0.125, saturation=0.125), 16 | transforms.ToTensor(), 17 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), 18 | ]), 19 | 'val': transforms.Compose([ 20 | transforms.ToTensor(), 21 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) 22 | ]), 23 | } 24 | 25 | 26 | class ArcFaceDataset(Dataset): 27 | def __init__(self, split): 28 | with open(pickle_file, 'rb') as file: 29 | data = pickle.load(file) 30 | 31 | self.split = split 32 | self.samples = data 33 | 34 | self.transformer = data_transforms['train'] 35 | 36 | def __getitem__(self, i): 37 | sample = self.samples[i] 38 | filename = sample['img'] 39 | filename = os.path.join(IMG_DIR, filename) 40 | img = Image.open(filename).convert('RGB') 41 | img = self.transformer(img) 42 | 43 | label = sample['label'] 44 | 45 | return img, label 46 | 47 | def __len__(self): 48 | return len(self.samples) 49 | -------------------------------------------------------------------------------- /demo.py: -------------------------------------------------------------------------------- 1 | import cv2 as cv 2 | import numpy as np 3 | import torch 4 | from torchvision import transforms 5 | 6 | data_transforms = 
{ 7 | 'train': transforms.Compose([ 8 | transforms.RandomHorizontalFlip(), 9 | transforms.ToTensor(), 10 | transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]) 11 | ]), 12 | 'val': transforms.Compose([ 13 | transforms.ToTensor(), 14 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) 15 | ]), 16 | } 17 | transformer = data_transforms['train'] 18 | 19 | if __name__ == "__main__": 20 | img = cv.imread('images/0_fn_0.jpg') 21 | img = transforms.ToPILImage()(img) 22 | arr = np.array(img) 23 | print(arr) 24 | print(np.max(arr)) 25 | print(np.min(arr)) 26 | print(np.mean(arr)) 27 | print(np.std(arr)) 28 | 29 | arr = arr.astype(np.float) 30 | arr = (arr - 127.5) / 128 31 | print(arr) 32 | print(np.max(arr)) 33 | print(np.min(arr)) 34 | print(np.mean(arr)) 35 | print(np.std(arr)) 36 | 37 | img = transformer(img) 38 | print(img) 39 | print(torch.max(img)) 40 | print(torch.min(img)) 41 | print(torch.mean(img)) 42 | print(torch.std(img)) 43 | -------------------------------------------------------------------------------- /export.py: -------------------------------------------------------------------------------- 1 | import time 2 | from mobilefacenet import MobileFaceNet 3 | import torch 4 | 5 | if __name__ == '__main__': 6 | checkpoint = 'BEST_checkpoint.tar' 7 | print('loading {}...'.format(checkpoint)) 8 | start = time.time() 9 | checkpoint = torch.load(checkpoint) 10 | print('elapsed {} sec'.format(time.time() - start)) 11 | model = checkpoint['model'].module 12 | print(type(model)) 13 | 14 | filename = 'mobilefacenet.pt' 15 | print('saving {}...'.format(filename)) 16 | start = time.time() 17 | torch.save(model.state_dict(), filename) 18 | print('elapsed {} sec'.format(time.time() - start)) 19 | 20 | print('loading {}...'.format(filename)) 21 | start = time.time() 22 | model = MobileFaceNet() 23 | model.load_state_dict(torch.load(filename)) 24 | print('elapsed {} sec'.format(time.time() - start)) 25 | 26 | scripted_model_file = 'mobilefacenet_scripted.pt' 27 | print('saving {}...'.format(scripted_model_file)) 28 | torch.jit.save(torch.jit.script(model), scripted_model_file) 29 | -------------------------------------------------------------------------------- /extract.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import zipfile 4 | 5 | 6 | def extract(filename): 7 | print('Extracting {}...'.format(filename)) 8 | zip_ref = zipfile.ZipFile(filename, 'r') 9 | zip_ref.extractall('data') 10 | zip_ref.close() 11 | 12 | 13 | if __name__ == "__main__": 14 | if not os.path.isdir('data/faces_emore'): 15 | extract('data/faces_emore.zip') 16 | -------------------------------------------------------------------------------- /focal_loss.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | 5 | class FocalLoss(nn.Module): 6 | 7 | def __init__(self, gamma=0): 8 | super(FocalLoss, self).__init__() 9 | self.gamma = gamma 10 | self.ce = torch.nn.CrossEntropyLoss() 11 | 12 | def forward(self, input, target): 13 | logp = self.ce(input, target) 14 | p = torch.exp(-logp) 15 | loss = (1 - p) ** self.gamma * logp 16 | return loss.mean() 17 | 18 | # class FocalLoss(nn.Module): 19 | # def __init__(self, gamma=0, size_average=True): 20 | # super(FocalLoss, self).__init__() 21 | # self.gamma = gamma 22 | # 23 | # def forward(self, input, target): 24 | # if input.dim() > 2: 25 | # input = input.view(input.size(0), input.size(1), -1) # N,C,H,W => N,C,H*W 26 | # input 
= input.transpose(1, 2) # N,C,H*W => N,H*W,C 27 | # input = input.contiguous().view(-1, input.size(2)) # N,H*W,C => N*H*W,C 28 | # target = target.view(-1, 1) 29 | # 30 | # logpt = F.log_softmax(input) 31 | # logpt = logpt.gather(1, target) 32 | # logpt = logpt.view(-1) 33 | # pt = Variable(logpt.data.exp()) 34 | # 35 | # loss = -1 * (1 - pt) ** self.gamma * logpt 36 | # return loss.mean() 37 | -------------------------------------------------------------------------------- /image_aug.py: -------------------------------------------------------------------------------- 1 | import cv2 as cv 2 | 3 | from utils import image_aug, get_central_face_attributes, align_face 4 | 5 | if __name__ == "__main__": 6 | filename = 'data/lfw_funneled/Aaron_Eckhart/Aaron_Eckhart_0001.jpg' 7 | print(filename) 8 | img = cv.imread(filename) # BGR 9 | cv.imwrite('1.jpg', img) 10 | 11 | is_valid, bounding_boxes, landmarks = get_central_face_attributes(filename) 12 | img = align_face(filename, landmarks) 13 | cv.imwrite('2.jpg', img) 14 | 15 | img = image_aug(img) # RGB 16 | cv.imwrite('3.jpg', img) 17 | 18 | img = cv.cvtColor(img, cv.COLOR_BGR2GRAY) 19 | cv.imwrite('4.jpg', img) 20 | -------------------------------------------------------------------------------- /images/theta_dist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/foamliu/MobileFaceNet-PyTorch/2c720d6875488e94f4d4eb870936cb05613b74d5/images/theta_dist.png -------------------------------------------------------------------------------- /lfw_eval.py: -------------------------------------------------------------------------------- 1 | import math 2 | import os 3 | import pickle 4 | import tarfile 5 | import time 6 | 7 | import cv2 as cv 8 | import numpy as np 9 | import scipy.stats 10 | import torch 11 | from PIL import Image 12 | from matplotlib import pyplot as plt 13 | from tqdm import tqdm 14 | 15 | from config import device 16 | from data_gen import data_transforms 17 | from utils import align_face, get_central_face_attributes, get_all_face_attributes, draw_bboxes, ensure_folder 18 | 19 | angles_file = 'data/angles.txt' 20 | lfw_pickle = 'data/lfw_funneled.pkl' 21 | transformer = data_transforms['val'] 22 | 23 | 24 | def extract(filename): 25 | with tarfile.open(filename, 'r') as tar: 26 | tar.extractall('data') 27 | 28 | 29 | def process(): 30 | subjects = [d for d in os.listdir('data/lfw_funneled') if os.path.isdir(os.path.join('data/lfw_funneled', d))] 31 | assert (len(subjects) == 5749), "Number of subjects is: {}!".format(len(subjects)) 32 | 33 | print('Collecting file names...') 34 | file_names = [] 35 | for i in tqdm(range(len(subjects))): 36 | sub = subjects[i] 37 | folder = os.path.join('data/lfw_funneled', sub) 38 | files = [f for f in os.listdir(folder) if 39 | os.path.isfile(os.path.join(folder, f)) and f.lower().endswith('.jpg')] 40 | for file in files: 41 | filename = os.path.join(folder, file) 42 | file_names.append({'filename': filename, 'class_id': i, 'subject': sub}) 43 | 44 | assert (len(file_names) == 13233), "Number of files is: {}!".format(len(file_names)) 45 | 46 | print('Aligning faces...') 47 | samples = [] 48 | for item in tqdm(file_names): 49 | filename = item['filename'] 50 | class_id = item['class_id'] 51 | sub = item['subject'] 52 | is_valid, bounding_boxes, landmarks = get_central_face_attributes(filename) 53 | 54 | if is_valid: 55 | samples.append( 56 | {'class_id': class_id, 'subject': sub, 'full_path': filename, 'bounding_boxes': 
bounding_boxes, 57 | 'landmarks': landmarks}) 58 | 59 | with open(lfw_pickle, 'wb') as file: 60 | save = { 61 | 'samples': samples 62 | } 63 | pickle.dump(save, file, pickle.HIGHEST_PROTOCOL) 64 | 65 | 66 | def get_image(samples, file): 67 | filtered = [sample for sample in samples if file in sample['full_path'].replace('\\', '/')] 68 | assert (len(filtered) == 1), 'len(filtered): {} file:{}'.format(len(filtered), file) 69 | sample = filtered[0] 70 | full_path = sample['full_path'] 71 | landmarks = sample['landmarks'] 72 | img = align_face(full_path, landmarks) # BGR 73 | return img 74 | 75 | 76 | def transform(img, flip=False): 77 | if flip: 78 | img = cv.flip(img, 1) 79 | img = img[..., ::-1] # RGB 80 | img = Image.fromarray(img, 'RGB') # RGB 81 | img = transformer(img) 82 | img = img.to(device) 83 | return img 84 | 85 | 86 | def get_feature(model, samples, file): 87 | imgs = torch.zeros([2, 3, 112, 112], dtype=torch.float, device=device) 88 | img = get_image(samples, file) 89 | imgs[0] = transform(img.copy(), False) 90 | imgs[1] = transform(img.copy(), True) 91 | with torch.no_grad(): 92 | output = model(imgs) 93 | feature_0 = output[0].cpu().numpy() 94 | feature_1 = output[1].cpu().numpy() 95 | feature = feature_0 + feature_1 96 | return feature / np.linalg.norm(feature) 97 | 98 | 99 | def evaluate(model): 100 | model.eval() 101 | 102 | with open(lfw_pickle, 'rb') as file: 103 | data = pickle.load(file) 104 | 105 | samples = data['samples'] 106 | 107 | filename = 'data/lfw_test_pair.txt' 108 | with open(filename, 'r') as file: 109 | lines = file.readlines() 110 | 111 | angles = [] 112 | 113 | elapsed = 0 114 | 115 | for line in tqdm(lines): 116 | tokens = line.split() 117 | 118 | start = time.time() 119 | x0 = get_feature(model, samples, tokens[0]) 120 | x1 = get_feature(model, samples, tokens[1]) 121 | end = time.time() 122 | elapsed += end - start 123 | 124 | cosine = np.dot(x0, x1) 125 | cosine = np.clip(cosine, -1.0, 1.0) 126 | theta = math.acos(cosine) 127 | theta = theta * 180 / math.pi 128 | is_same = tokens[2] 129 | angles.append('{} {}\n'.format(theta, is_same)) 130 | 131 | print('elapsed: {} ms'.format(elapsed / (6000 * 2) * 1000)) 132 | 133 | with open('data/angles.txt', 'w') as file: 134 | file.writelines(angles) 135 | 136 | 137 | def visualize(threshold): 138 | with open(angles_file) as file: 139 | lines = file.readlines() 140 | 141 | ones = [] 142 | zeros = [] 143 | 144 | for line in lines: 145 | tokens = line.split() 146 | angle = float(tokens[0]) 147 | type = int(tokens[1]) 148 | if type == 1: 149 | ones.append(angle) 150 | else: 151 | zeros.append(angle) 152 | 153 | bins = np.linspace(0, 180, 181) 154 | 155 | plt.hist(zeros, bins, density=True, alpha=0.5, label='0', facecolor='red') 156 | plt.hist(ones, bins, density=True, alpha=0.5, label='1', facecolor='blue') 157 | 158 | mu_0 = np.mean(zeros) 159 | sigma_0 = np.std(zeros) 160 | y_0 = scipy.stats.norm.pdf(bins, mu_0, sigma_0) 161 | plt.plot(bins, y_0, 'r--') 162 | mu_1 = np.mean(ones) 163 | sigma_1 = np.std(ones) 164 | y_1 = scipy.stats.norm.pdf(bins, mu_1, sigma_1) 165 | plt.plot(bins, y_1, 'b--') 166 | plt.xlabel('theta') 167 | plt.ylabel('theta j Distribution') 168 | plt.title( 169 | r'Histogram : mu_0={:.4f},sigma_0={:.4f}, mu_1={:.4f},sigma_1={:.4f}'.format(mu_0, sigma_0, mu_1, sigma_1)) 170 | 171 | print('threshold: ' + str(threshold)) 172 | print('mu_0: ' + str(mu_0)) 173 | print('sigma_0: ' + str(sigma_0)) 174 | print('mu_1: ' + str(mu_1)) 175 | print('sigma_1: ' + str(sigma_1)) 176 | 177 | 
plt.legend(loc='upper right') 178 | plt.plot([threshold, threshold], [0, 0.05], 'k-', lw=2) 179 | ensure_folder('images') 180 | plt.savefig('images/theta_dist.png') 181 | # plt.show() 182 | 183 | 184 | def accuracy(threshold): 185 | with open(angles_file) as file: 186 | lines = file.readlines() 187 | 188 | wrong = 0 189 | for line in lines: 190 | tokens = line.split() 191 | angle = float(tokens[0]) 192 | type = int(tokens[1]) 193 | if type == 1: 194 | if angle > threshold: 195 | wrong += 1 196 | else: 197 | if angle <= threshold: 198 | wrong += 1 199 | 200 | accuracy = 1 - wrong / 6000 201 | return accuracy 202 | 203 | 204 | def show_bboxes(folder): 205 | with open(lfw_pickle, 'rb') as file: 206 | data = pickle.load(file) 207 | 208 | samples = data['samples'] 209 | for sample in tqdm(samples): 210 | full_path = sample['full_path'] 211 | bounding_boxes = sample['bounding_boxes'] 212 | landmarks = sample['landmarks'] 213 | img = cv.imread(full_path) 214 | img = draw_bboxes(img, bounding_boxes, landmarks) 215 | filename = os.path.basename(full_path) 216 | filename = os.path.join(folder, filename) 217 | cv.imwrite(filename, img) 218 | 219 | 220 | def error_analysis(threshold): 221 | with open(angles_file) as file: 222 | angle_lines = file.readlines() 223 | 224 | fp = [] 225 | fn = [] 226 | for i, line in enumerate(angle_lines): 227 | tokens = line.split() 228 | angle = float(tokens[0]) 229 | type = int(tokens[1]) 230 | if angle <= threshold and type == 0: 231 | fp.append(i) 232 | if angle > threshold and type == 1: 233 | fn.append(i) 234 | 235 | print('len(fp): ' + str(len(fp))) 236 | print('len(fn): ' + str(len(fn))) 237 | 238 | num_fp = len(fp) 239 | num_fn = len(fn) 240 | 241 | filename = 'data/lfw_test_pair.txt' 242 | with open(filename, 'r') as file: 243 | pair_lines = file.readlines() 244 | 245 | for i in range(num_fp): 246 | fp_id = fp[i] 247 | fp_line = pair_lines[fp_id] 248 | tokens = fp_line.split() 249 | file0 = tokens[0] 250 | copy_file(file0, '{}_fp_0.jpg'.format(i)) 251 | save_aligned(file0, '{}_fp_0_aligned.jpg'.format(i)) 252 | file1 = tokens[1] 253 | copy_file(file1, '{}_fp_1.jpg'.format(i)) 254 | save_aligned(file1, '{}_fp_1_aligned.jpg'.format(i)) 255 | 256 | for i in range(num_fn): 257 | fn_id = fn[i] 258 | fn_line = pair_lines[fn_id] 259 | tokens = fn_line.split() 260 | file0 = tokens[0] 261 | copy_file(file0, '{}_fn_0.jpg'.format(i)) 262 | save_aligned(file0, '{}_fn_0_aligned.jpg'.format(i)) 263 | file1 = tokens[1] 264 | copy_file(file1, '{}_fn_1.jpg'.format(i)) 265 | save_aligned(file1, '{}_fn_1_aligned.jpg'.format(i)) 266 | 267 | 268 | def save_aligned(old_fn, new_fn): 269 | old_fn = os.path.join('data/lfw_funneled', old_fn) 270 | is_valid, bounding_boxes, landmarks = get_central_face_attributes(old_fn) 271 | img = align_face(old_fn, landmarks) 272 | new_fn = os.path.join('images', new_fn) 273 | cv.imwrite(new_fn, img) 274 | 275 | 276 | def copy_file(old, new): 277 | old_fn = os.path.join('data/lfw_funneled', old) 278 | img = cv.imread(old_fn) 279 | bounding_boxes, landmarks = get_all_face_attributes(old_fn) 280 | draw_bboxes(img, bounding_boxes, landmarks) 281 | cv.resize(img, (224, 224)) 282 | new_fn = os.path.join('images', new) 283 | cv.imwrite(new_fn, img) 284 | 285 | 286 | def get_threshold(): 287 | with open(angles_file, 'r') as file: 288 | lines = file.readlines() 289 | 290 | data = [] 291 | 292 | for line in lines: 293 | tokens = line.split() 294 | angle = float(tokens[0]) 295 | type = int(tokens[1]) 296 | data.append({'angle': angle, 'type': type}) 297 | 298 | 
min_error = 6000 299 | min_threshold = 0 300 | 301 | for d in data: 302 | threshold = d['angle'] 303 | type1 = len([s for s in data if s['angle'] <= threshold and s['type'] == 0]) 304 | type2 = len([s for s in data if s['angle'] > threshold and s['type'] == 1]) 305 | num_errors = type1 + type2 306 | if num_errors < min_error: 307 | min_error = num_errors 308 | min_threshold = threshold 309 | 310 | # print(min_error, min_threshold) 311 | return min_threshold 312 | 313 | 314 | def lfw_test(model): 315 | filename = 'data/lfw-funneled.tgz' 316 | if not os.path.isdir('data/lfw_funneled'): 317 | print('Extracting {}...'.format(filename)) 318 | extract(filename) 319 | 320 | # if not os.path.isfile(lfw_pickle): 321 | print('Processing {}...'.format(lfw_pickle)) 322 | process() 323 | 324 | # if not os.path.isfile(angles_file): 325 | print('Evaluating {}...'.format(angles_file)) 326 | evaluate(model) 327 | 328 | print('Calculating threshold...') 329 | # threshold = 70.36 330 | thres = get_threshold() 331 | print('Calculating accuracy...') 332 | acc = accuracy(thres) 333 | print('Accuracy: {}%, threshold: {}'.format(acc * 100, thres)) 334 | return acc, thres 335 | 336 | 337 | if __name__ == "__main__": 338 | # checkpoint = 'BEST_checkpoint.tar' 339 | # checkpoint = torch.load(checkpoint) 340 | # model = checkpoint['model'].module 341 | # model = model.to(device) 342 | # model.eval() 343 | 344 | scripted_model_file = 'mobilefacenet_scripted.pt' 345 | model = torch.jit.load(scripted_model_file) 346 | model = model.to(device) 347 | model.eval() 348 | 349 | acc, threshold = lfw_test(model) 350 | 351 | print('Visualizing {}...'.format(angles_file)) 352 | visualize(threshold) 353 | 354 | print('error analysis...') 355 | error_analysis(threshold) 356 | -------------------------------------------------------------------------------- /megaface.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import os 4 | import struct 5 | 6 | import cv2 as cv 7 | import numpy as np 8 | import torch 9 | import tqdm 10 | from PIL import Image 11 | from tqdm import tqdm 12 | 13 | from config import device 14 | from data_gen import data_transforms 15 | from utils import align_face, get_central_face_attributes 16 | 17 | checkpoint = 'BEST_checkpoint.tar' 18 | print('loading model: {}...'.format(checkpoint)) 19 | checkpoint = torch.load(checkpoint) 20 | model = checkpoint['model'].to(device) 21 | model.eval() 22 | transformer = data_transforms['val'] 23 | 24 | 25 | def walkdir(folder, ext): 26 | # Walk through each files in a directory 27 | for dirpath, dirs, files in os.walk(folder): 28 | for filename in [f for f in files if f.lower().endswith(ext)]: 29 | yield os.path.abspath(os.path.join(dirpath, filename)) 30 | 31 | 32 | def crop_one_image(filepath, oldkey, newkey): 33 | new_fn = filepath.replace(oldkey, newkey) 34 | tardir = os.path.dirname(new_fn) 35 | if not os.path.isdir(tardir): 36 | os.makedirs(tardir) 37 | 38 | if not os.path.exists(new_fn): 39 | is_valid, bounding_boxes, landmarks = get_central_face_attributes(filepath) 40 | if is_valid: 41 | img = align_face(filepath, landmarks) 42 | cv.imwrite(new_fn, img) 43 | 44 | 45 | def crop(path, oldkey, newkey): 46 | print('Counting images under {}...'.format(path)) 47 | # Preprocess the total files count 48 | filecounter = 0 49 | for filepath in walkdir(path, '.jpg'): 50 | filecounter += 1 51 | 52 | for filepath in tqdm(walkdir(path, '.jpg'), total=filecounter, unit="files"): 53 | 
crop_one_image(filepath, oldkey, newkey) 54 | 55 | print('{} images were cropped successfully.'.format(filecounter)) 56 | 57 | 58 | def gen_feature(path): 59 | print('gen features {}...'.format(path)) 60 | # Preprocess the total files count 61 | files = [] 62 | for filepath in walkdir(path, '.jpg'): 63 | files.append(filepath) 64 | file_count = len(files) 65 | 66 | batch_size = 128 67 | 68 | with torch.no_grad(): 69 | for start_idx in tqdm(range(0, file_count, batch_size)): 70 | end_idx = min(file_count, start_idx + batch_size) 71 | length = end_idx - start_idx 72 | 73 | imgs = torch.zeros([length, 3, 112, 112], dtype=torch.float) 74 | for idx in range(0, length): 75 | i = start_idx + idx 76 | filepath = files[i] 77 | imgs[idx] = get_image(cv.imread(filepath, True), transformer) 78 | 79 | features = model(imgs.to(device)).cpu().numpy() 80 | for idx in range(0, length): 81 | i = start_idx + idx 82 | filepath = files[i] 83 | tarfile = filepath + '_0.bin' 84 | feature = features[idx] 85 | write_feature(tarfile, feature / np.linalg.norm(feature)) 86 | 87 | 88 | def get_image(img, transformer): 89 | img = img[..., ::-1] # RGB 90 | img = Image.fromarray(img, 'RGB') # RGB 91 | img = transformer(img) 92 | return img.to(device) 93 | 94 | 95 | def read_feature(filename): 96 | f = open(filename, 'rb') 97 | rows, cols, stride, type_ = struct.unpack('iiii', f.read(4 * 4)) 98 | mat = np.fromstring(f.read(rows * 4), dtype=np.dtype('float32')) 99 | return mat.reshape(rows, 1) 100 | 101 | 102 | def write_feature(filename, m): 103 | header = struct.pack('iiii', m.shape[0], 1, 4, 5) 104 | f = open(filename, 'wb') 105 | f.write(header) 106 | f.write(m.data) 107 | 108 | 109 | def remove_noise(): 110 | for line in open('megaface/megaface_noises.txt', 'r'): 111 | filename = 'megaface/MegaFace_aligned/FlickrFinal2/' + line.strip() + '_0.bin' 112 | if os.path.exists(filename): 113 | print(filename) 114 | os.remove(filename) 115 | 116 | noise = set() 117 | for line in open('megaface/facescrub_noises.txt', 'r'): 118 | noise.add((line.strip().replace('png', 'jpg') + '0.bin').replace('_', '').replace(' ', '')) 119 | for root, dirs, files in os.walk('megaface/facescrub_images'): 120 | for f in files: 121 | if f.replace('_', '').replace(' ', '') in noise: 122 | filename = os.path.join(root, f) 123 | if os.path.exists(filename): 124 | print(filename) 125 | os.remove(filename) 126 | 127 | 128 | def test(): 129 | root1 = '/root/lin/data/FaceScrub_aligned/Benicio Del Toro' 130 | root2 = '/root/lin/data/FaceScrub_aligned/Ben Kingsley' 131 | for f1 in os.listdir(root1): 132 | for f2 in os.listdir(root2): 133 | if f1.lower().endswith('.bin') and f2.lower().endswith('.bin'): 134 | filename1 = os.path.join(root1, f1) 135 | filename2 = os.path.join(root2, f2) 136 | fea1 = read_feature(filename1) 137 | fea2 = read_feature(filename2) 138 | print(((fea1 - fea2) ** 2).sum() ** 0.5) 139 | 140 | 141 | def match_result(): 142 | with open('matches_facescrub_megaface_0_1000000_1.json', 'r') as load_f: 143 | load_dict = json.load(load_f) 144 | print(load_dict) 145 | for i in range(len(load_dict)): 146 | print(load_dict[i]['probes']) 147 | 148 | 149 | def pngtojpg(path): 150 | for root, dirs, files in os.walk(path): 151 | for f in files: 152 | if os.path.splitext(f)[1] == '.png': 153 | img = cv.imread(os.path.join(root, f)) 154 | newfilename = f.replace(".png", ".jpg") 155 | cv.imwrite(os.path.join(root, newfilename), img) 156 | 157 | 158 | def parse_args(): 159 | parser = argparse.ArgumentParser(description='Train face network') 160 | # 
general 161 | parser.add_argument('--action', default='crop_megaface', help='action') 162 | args = parser.parse_args() 163 | return args 164 | 165 | 166 | if __name__ == '__main__': 167 | args = parse_args() 168 | if args.action == 'crop_megaface': 169 | crop('megaface/MegaFace/FlickrFinal2', 'MegaFace', 'MegaFace_aligned') 170 | elif args.action == 'crop_facescrub': 171 | crop('megaface/facescrub_images', 'facescrub', 'facescrub_aligned') 172 | elif args.action == 'gen_features': 173 | gen_feature('megaface/facescrub_images') 174 | gen_feature('megaface/MegaFace_aligned/FlickrFinal2') 175 | remove_noise() 176 | elif args.action == 'pngtojpg': 177 | pngtojpg('megaface/facescrub_images') 178 | -------------------------------------------------------------------------------- /megaface_eval.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | import torch 4 | 5 | from config import device 6 | from megaface_utils import gen_feature, remove_noise 7 | 8 | 9 | # from torch import nn 10 | 11 | 12 | def megaface_test(model): 13 | cmd = 'find megaface/FaceScrub_aligned -name "*.bin" -type f -delete' 14 | print(cmd) 15 | output = subprocess.check_output(cmd, shell=True).decode("utf-8") 16 | print(output) 17 | 18 | cmd = 'find megaface/MegaFace_aligned/FlickrFinal2 -name "*.bin" -type f -delete' 19 | print(cmd) 20 | output = subprocess.check_output(cmd, shell=True).decode("utf-8") 21 | print(output) 22 | 23 | gen_feature('megaface/FaceScrub_aligned', model) 24 | gen_feature('megaface/MegaFace_aligned/FlickrFinal2', model) 25 | remove_noise() 26 | 27 | cmd = 'python megaface/devkit/experiments/run_experiment.py -p megaface/devkit/templatelists/facescrub_uncropped_features_list.json megaface/MegaFace_aligned/FlickrFinal2 megaface/FaceScrub_aligned _0.bin results -s 1000000' 28 | # print(cmd) 29 | output = subprocess.check_output(cmd, shell=True).decode("utf-8") 30 | # print(output) 31 | 32 | lines = output.split('\n') 33 | line = [l for l in lines if l.startswith('Rank 1: ')][0] 34 | accuracy = float(line[8:]) 35 | 36 | print('Megaface accuracy: ' + str(accuracy)) 37 | 38 | return accuracy 39 | 40 | 41 | if __name__ == '__main__': 42 | checkpoint = 'BEST_checkpoint.tar' 43 | print('loading model: {}...'.format(checkpoint)) 44 | checkpoint = torch.load(checkpoint) 45 | model = checkpoint['model'].module.to(device) 46 | model.eval() 47 | 48 | megaface_test(model) 49 | -------------------------------------------------------------------------------- /megaface_utils.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import os 4 | import struct 5 | 6 | import cv2 as cv 7 | import numpy as np 8 | import torch 9 | import tqdm 10 | from PIL import Image, ImageOps 11 | from tqdm import tqdm 12 | 13 | from config import device 14 | from data_gen import data_transforms 15 | from utils import align_face, get_central_face_attributes 16 | 17 | 18 | def walkdir(folder, ext): 19 | # Walk through each files in a directory 20 | for dirpath, dirs, files in os.walk(folder): 21 | for filename in [f for f in files if f.lower().endswith(ext)]: 22 | yield os.path.abspath(os.path.join(dirpath, filename)) 23 | 24 | 25 | def crop_one_image(filepath, oldkey, newkey): 26 | new_fn = filepath.replace(oldkey, newkey) 27 | tardir = os.path.dirname(new_fn) 28 | if not os.path.isdir(tardir): 29 | os.makedirs(tardir) 30 | 31 | if not os.path.exists(new_fn): 32 | is_valid, bounding_boxes, landmarks = 
get_central_face_attributes(filepath) 33 | if is_valid: 34 | img = align_face(filepath, landmarks) 35 | cv.imwrite(new_fn, img) 36 | 37 | 38 | def crop(path, oldkey, newkey): 39 | print('Counting images under {}...'.format(path)) 40 | # Preprocess the total files count 41 | filecounter = 0 42 | for filepath in walkdir(path, '.jpg'): 43 | filecounter += 1 44 | 45 | for filepath in tqdm(walkdir(path, '.jpg'), total=filecounter, unit="files"): 46 | crop_one_image(filepath, oldkey, newkey) 47 | 48 | print('{} images were cropped successfully.'.format(filecounter)) 49 | 50 | 51 | def get_image(transformer, filepath, flip=False): 52 | img = Image.open(filepath) 53 | if flip: 54 | img = ImageOps.flip(img) 55 | img = transformer(img) 56 | return img.to(device) 57 | 58 | 59 | def gen_feature(path, model): 60 | model.eval() 61 | 62 | print('gen features {}...'.format(path)) 63 | # Preprocess the total files count 64 | files = [] 65 | for filepath in walkdir(path, ('.jpg', '.png')): 66 | files.append(filepath) 67 | file_count = len(files) 68 | 69 | transformer = data_transforms['val'] 70 | 71 | batch_size = 128 72 | 73 | with torch.no_grad(): 74 | for start_idx in tqdm(range(0, file_count, batch_size)): 75 | end_idx = min(file_count, start_idx + batch_size) 76 | length = end_idx - start_idx 77 | 78 | imgs_0 = torch.zeros([length, 3, 112, 112], dtype=torch.float, device=device) 79 | for idx in range(0, length): 80 | i = start_idx + idx 81 | filepath = files[i] 82 | imgs_0[idx] = get_image(transformer, filepath, flip=False) 83 | 84 | features_0 = model(imgs_0.to(device)) 85 | features_0 = features_0.cpu().numpy() 86 | 87 | imgs_1 = torch.zeros([length, 3, 112, 112], dtype=torch.float, device=device) 88 | for idx in range(0, length): 89 | i = start_idx + idx 90 | filepath = files[i] 91 | imgs_1[idx] = get_image(transformer, filepath, flip=True) 92 | 93 | features_1 = model(imgs_1.to(device)) 94 | features_1 = features_1.cpu().numpy() 95 | 96 | for idx in range(0, length): 97 | i = start_idx + idx 98 | filepath = files[i] 99 | filepath = filepath.replace(' ', '_') 100 | tarfile = filepath + '_0.bin' 101 | feature = features_0[idx] + features_1[idx] 102 | write_feature(tarfile, feature / np.linalg.norm(feature)) 103 | 104 | 105 | def read_feature(filename): 106 | f = open(filename, 'rb') 107 | rows, cols, stride, type_ = struct.unpack('iiii', f.read(4 * 4)) 108 | mat = np.fromstring(f.read(rows * 4), dtype=np.dtype('float32')) 109 | return mat.reshape(rows, 1) 110 | 111 | 112 | def write_feature(filename, m): 113 | header = struct.pack('iiii', m.shape[0], 1, 4, 5) 114 | f = open(filename, 'wb') 115 | f.write(header) 116 | f.write(m.data) 117 | 118 | 119 | def remove_noise(): 120 | megaface_count = 0 121 | for line in open('megaface/megaface_noises.txt', 'r'): 122 | filename = 'megaface/MegaFace_aligned/FlickrFinal2/' + line.strip() + '_0.bin' 123 | if os.path.exists(filename): 124 | # print(filename) 125 | os.remove(filename) 126 | megaface_count += 1 127 | 128 | print('remove noise - megaface: ' + str(megaface_count)) 129 | 130 | facescrub_count = 0 131 | noise = set() 132 | for line in open('megaface/facescrub_noises.txt', 'r'): 133 | noise.add((line.strip().replace('.png', '.jpg') + '_0.bin')) 134 | 135 | for root, dirs, files in os.walk('megaface/FaceScrub_aligned'): 136 | for f in files: 137 | # print(f) 138 | if f in noise: 139 | filename = os.path.join(root, f) 140 | if os.path.exists(filename): 141 | # print(filename) 142 | os.remove(filename) 143 | facescrub_count += 1 144 | 145 | print('remove 
noise - facescrub: ' + str(facescrub_count)) 146 | 147 | 148 | def test(): 149 | root1 = '/root/lin/data/FaceScrub_aligned/Benicio Del Toro' 150 | root2 = '/root/lin/data/FaceScrub_aligned/Ben Kingsley' 151 | for f1 in os.listdir(root1): 152 | for f2 in os.listdir(root2): 153 | if f1.lower().endswith('.bin') and f2.lower().endswith('.bin'): 154 | filename1 = os.path.join(root1, f1) 155 | filename2 = os.path.join(root2, f2) 156 | fea1 = read_feature(filename1) 157 | fea2 = read_feature(filename2) 158 | print(((fea1 - fea2) ** 2).sum() ** 0.5) 159 | 160 | 161 | def match_result(): 162 | with open('matches_facescrub_megaface_0_1000000_1.json', 'r') as load_f: 163 | load_dict = json.load(load_f) 164 | print(load_dict) 165 | for i in range(len(load_dict)): 166 | print(load_dict[i]['probes']) 167 | 168 | 169 | def pngtojpg(path): 170 | for root, dirs, files in os.walk(path): 171 | for f in files: 172 | if os.path.splitext(f)[1] == '.png': 173 | img = cv.imread(os.path.join(root, f)) 174 | newfilename = f.replace(".png", ".jpg") 175 | cv.imwrite(os.path.join(root, newfilename), img) 176 | 177 | 178 | def parse_args(): 179 | parser = argparse.ArgumentParser(description='Train face network') 180 | # general 181 | parser.add_argument('--action', default='crop_megaface', help='action') 182 | args = parser.parse_args() 183 | return args 184 | 185 | 186 | if __name__ == '__main__': 187 | args = parse_args() 188 | if args.action == 'crop_megaface': 189 | crop('megaface/MegaFace/FlickrFinal2', 'MegaFace', 'MegaFace_aligned') 190 | elif args.action == 'crop_facescrub': 191 | crop('megaface/facescrub_images', 'facescrub', 'facescrub_aligned') 192 | elif args.action == 'gen_features': 193 | gen_feature('megaface/facescrub_images') 194 | gen_feature('megaface/MegaFace_aligned/FlickrFinal2') 195 | remove_noise() 196 | elif args.action == 'pngtojpg': 197 | pngtojpg('megaface/facescrub_images') 198 | elif args.action == 'remove_noise': 199 | remove_noise() 200 | -------------------------------------------------------------------------------- /mobilefacenet.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | import torch 4 | import torch.nn.functional as F 5 | from torch import nn 6 | from torch.nn import Parameter 7 | 8 | from config import device, num_classes, emb_size 9 | 10 | 11 | def _make_divisible(v, divisor, min_value=None): 12 | """ 13 | This function is taken from the original tf repo. 14 | It ensures that all layers have a channel number that is divisible by 8 15 | It can be seen here: 16 | https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py 17 | :param v: 18 | :param divisor: 19 | :param min_value: 20 | :return: 21 | """ 22 | if min_value is None: 23 | min_value = divisor 24 | new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) 25 | # Make sure that round down does not go down by more than 10%. 
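# Worked example of the rounding rule: _make_divisible(57.6, 8) first rounds to 56,
# which is within 10% of 57.6, so 56 is returned; _make_divisible(10, 8) would round
# down to 8, a drop of more than 10%, so it is bumped up one divisor step to 16.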
26 | if new_v < 0.9 * v: 27 | new_v += divisor 28 | return new_v 29 | 30 | 31 | class ConvBNReLU(nn.Sequential): 32 | def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1): 33 | padding = (kernel_size - 1) // 2 34 | super(ConvBNReLU, self).__init__( 35 | nn.Conv2d(in_planes, out_planes, kernel_size, stride, padding, groups=groups, bias=False), 36 | nn.BatchNorm2d(out_planes), 37 | nn.ReLU6(inplace=True) 38 | ) 39 | 40 | 41 | class DepthwiseSeparableConv(nn.Module): 42 | def __init__(self, in_planes, out_planes, kernel_size, padding, bias=False): 43 | super(DepthwiseSeparableConv, self).__init__() 44 | self.depthwise = nn.Conv2d(in_planes, in_planes, kernel_size=kernel_size, padding=padding, groups=in_planes, 45 | bias=bias) 46 | self.pointwise = nn.Conv2d(in_planes, out_planes, kernel_size=1, bias=bias) 47 | self.bn1 = nn.BatchNorm2d(in_planes) 48 | self.bn2 = nn.BatchNorm2d(out_planes) 49 | self.relu = nn.ReLU() 50 | 51 | def forward(self, x): 52 | x = self.depthwise(x) 53 | x = self.bn1(x) 54 | x = self.relu(x) 55 | 56 | x = self.pointwise(x) 57 | x = self.bn2(x) 58 | x = self.relu(x) 59 | return x 60 | 61 | 62 | class GDConv(nn.Module): 63 | def __init__(self, in_planes, out_planes, kernel_size, padding, bias=False): 64 | super(GDConv, self).__init__() 65 | self.depthwise = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, padding=padding, groups=in_planes, 66 | bias=bias) 67 | self.bn = nn.BatchNorm2d(in_planes) 68 | 69 | def forward(self, x): 70 | x = self.depthwise(x) 71 | x = self.bn(x) 72 | return x 73 | 74 | 75 | class InvertedResidual(nn.Module): 76 | def __init__(self, inp, oup, stride, expand_ratio): 77 | super(InvertedResidual, self).__init__() 78 | self.stride = stride 79 | assert stride in [1, 2] 80 | 81 | hidden_dim = int(round(inp * expand_ratio)) 82 | self.use_res_connect = self.stride == 1 and inp == oup 83 | 84 | layers = [] 85 | if expand_ratio != 1: 86 | # pw 87 | layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1)) 88 | layers.extend([ 89 | # dw 90 | ConvBNReLU(hidden_dim, hidden_dim, stride=stride, groups=hidden_dim), 91 | # pw-linear 92 | nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), 93 | nn.BatchNorm2d(oup), 94 | ]) 95 | self.conv = nn.Sequential(*layers) 96 | 97 | def forward(self, x): 98 | if self.use_res_connect: 99 | return x + self.conv(x) 100 | else: 101 | return self.conv(x) 102 | 103 | 104 | class MobileFaceNet(nn.Module): 105 | def __init__(self, width_mult=1.0, inverted_residual_setting=None, round_nearest=8): 106 | """ 107 | MobileNet V2 main class 108 | Args: 109 | num_classes (int): Number of classes 110 | width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount 111 | inverted_residual_setting: Network structure 112 | round_nearest (int): Round the number of channels in each layer to be a multiple of this number 113 | Set to 1 to turn off rounding 114 | """ 115 | super(MobileFaceNet, self).__init__() 116 | block = InvertedResidual 117 | input_channel = 64 118 | last_channel = 512 119 | 120 | if inverted_residual_setting is None: 121 | inverted_residual_setting = [ 122 | # t, c, n, s 123 | [2, 64, 5, 2], 124 | [4, 128, 1, 2], 125 | [2, 128, 6, 1], 126 | [4, 128, 1, 2], 127 | [2, 128, 2, 1], 128 | ] 129 | 130 | # only check the first element, assuming user knows t,c,n,s are required 131 | if len(inverted_residual_setting) == 0 or len(inverted_residual_setting[0]) != 4: 132 | raise ValueError("inverted_residual_setting should be non-empty " 133 | "or a 4-element list, got 
{}".format(inverted_residual_setting)) 134 | 135 | # building first layer 136 | # input_channel = _make_divisible(input_channel * width_mult, round_nearest) 137 | self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest) 138 | self.conv1 = ConvBNReLU(3, input_channel, stride=2) 139 | self.dw_conv = DepthwiseSeparableConv(in_planes=64, out_planes=64, kernel_size=3, padding=1) 140 | features = list() 141 | # building inverted residual blocks 142 | for t, c, n, s in inverted_residual_setting: 143 | output_channel = _make_divisible(c * width_mult, round_nearest) 144 | for i in range(n): 145 | stride = s if i == 0 else 1 146 | features.append(block(input_channel, output_channel, stride, expand_ratio=t)) 147 | input_channel = output_channel 148 | # building last several layers 149 | self.conv2 = ConvBNReLU(input_channel, self.last_channel, kernel_size=1) 150 | self.gdconv = GDConv(in_planes=512, out_planes=512, kernel_size=7, padding=0) 151 | self.conv3 = nn.Conv2d(512, 128, kernel_size=1) 152 | self.bn = nn.BatchNorm2d(128) 153 | # make it nn.Sequential 154 | self.features = nn.Sequential(*features) 155 | 156 | # weight initialization 157 | for m in self.modules(): 158 | if isinstance(m, nn.Conv2d): 159 | nn.init.kaiming_normal_(m.weight, mode='fan_out') 160 | if m.bias is not None: 161 | nn.init.zeros_(m.bias) 162 | elif isinstance(m, nn.BatchNorm2d): 163 | nn.init.ones_(m.weight) 164 | nn.init.zeros_(m.bias) 165 | elif isinstance(m, nn.Linear): 166 | nn.init.normal_(m.weight, 0, 0.01) 167 | nn.init.zeros_(m.bias) 168 | 169 | def forward(self, x): 170 | x = self.conv1(x) 171 | x = self.dw_conv(x) 172 | x = self.features(x) 173 | x = self.conv2(x) 174 | x = self.gdconv(x) 175 | x = self.conv3(x) 176 | x = self.bn(x) 177 | x = x.view(x.size(0), -1) 178 | return x 179 | 180 | 181 | class ArcMarginModel(nn.Module): 182 | def __init__(self, args): 183 | super(ArcMarginModel, self).__init__() 184 | 185 | self.weight = Parameter(torch.FloatTensor(num_classes, emb_size)) 186 | nn.init.xavier_uniform_(self.weight) 187 | 188 | self.easy_margin = args.easy_margin 189 | self.m = args.margin_m 190 | self.s = args.margin_s 191 | 192 | self.cos_m = math.cos(self.m) 193 | self.sin_m = math.sin(self.m) 194 | self.th = math.cos(math.pi - self.m) 195 | self.mm = math.sin(math.pi - self.m) * self.m 196 | 197 | def forward(self, input, label): 198 | x = F.normalize(input) 199 | W = F.normalize(self.weight) 200 | cosine = F.linear(x, W) 201 | sine = torch.sqrt(1.0 - torch.pow(cosine, 2)) 202 | phi = cosine * self.cos_m - sine * self.sin_m # cos(theta + m) 203 | if self.easy_margin: 204 | phi = torch.where(cosine > 0, phi, cosine) 205 | else: 206 | phi = torch.where(cosine > self.th, phi, cosine - self.mm) 207 | one_hot = torch.zeros(cosine.size(), device=device) 208 | one_hot.scatter_(1, label.view(-1, 1).long(), 1) 209 | output = (one_hot * phi) + ((1.0 - one_hot) * cosine) 210 | output *= self.s 211 | return output 212 | 213 | 214 | if __name__ == "__main__": 215 | from torchscope import scope 216 | 217 | model = MobileFaceNet() 218 | # print(model) 219 | scope(model, input_size=(3, 112, 112)) 220 | -------------------------------------------------------------------------------- /optimizer.py: -------------------------------------------------------------------------------- 1 | class MFNptimizer(object): 2 | """A simple wrapper class for learning rate scheduling""" 3 | 4 | def __init__(self, optimizer): 5 | self.optimizer = optimizer 6 | self.lr = 0.1 7 | self.step_num = 0 8 | 9 | def 
zero_grad(self): 10 | self.optimizer.zero_grad() 11 | 12 | def step(self): 13 | self._update_lr() 14 | self.optimizer.step() 15 | 16 | def _update_lr(self): 17 | self.step_num += 1 18 | if self.step_num in [36000, 52000, 78000, 100000]: 19 | self.lr = self.lr / 10 20 | for param_group in self.optimizer.param_groups: 21 | param_group['lr'] = self.lr 22 | 23 | def clip_gradient(self, grad_clip): 24 | """ 25 | Clips gradients computed during backpropagation to avoid explosion of gradients. 26 | :param optimizer: optimizer with the gradients to be clipped 27 | :param grad_clip: clip value 28 | """ 29 | for group in self.optimizer.param_groups: 30 | for param in group['params']: 31 | if param.grad is not None: 32 | param.grad.data.clamp_(-grad_clip, grad_clip) 33 | -------------------------------------------------------------------------------- /pre_process.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | 4 | import cv2 as cv 5 | import mxnet as mx 6 | from mxnet import recordio 7 | from tqdm import tqdm 8 | 9 | from config import path_imgidx, path_imgrec, IMG_DIR, pickle_file 10 | from utils import ensure_folder 11 | 12 | if __name__ == "__main__": 13 | ensure_folder(IMG_DIR) 14 | imgrec = recordio.MXIndexedRecordIO(path_imgidx, path_imgrec, 'r') 15 | # print(len(imgrec)) 16 | 17 | samples = [] 18 | class_ids = set() 19 | 20 | # # %% 1 ~ 5179510 21 | 22 | try: 23 | for i in tqdm(range(10000000)): 24 | # print(i) 25 | header, s = recordio.unpack(imgrec.read_idx(i + 1)) 26 | img = mx.image.imdecode(s).asnumpy() 27 | # print(img.shape) 28 | img = cv.cvtColor(img, cv.COLOR_RGB2BGR) 29 | # print(header.label) 30 | # print(type(header.label)) 31 | label = int(header.label) 32 | class_ids.add(label) 33 | filename = '{}.jpg'.format(i) 34 | samples.append({'img': filename, 'label': label}) 35 | filename = os.path.join(IMG_DIR, filename) 36 | cv.imwrite(filename, img) 37 | # except KeyboardInterrupt: 38 | # raise 39 | except Exception as err: 40 | print(err) 41 | 42 | with open(pickle_file, 'wb') as file: 43 | pickle.dump(samples, file) 44 | 45 | print('num_samples: ' + str(len(samples))) 46 | 47 | class_ids = list(class_ids) 48 | print(len(class_ids)) 49 | print(max(class_ids)) 50 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib 2 | scipy 3 | tqdm 4 | opencv-python 5 | pillow 6 | torch 7 | torchvision 8 | numpy 9 | scikit-image 10 | torchsummary 11 | imgaug 12 | -------------------------------------------------------------------------------- /retinaface/data/FDDB/img_list.txt: -------------------------------------------------------------------------------- 1 | 2002/08/11/big/img_591 2 | 2002/08/26/big/img_265 3 | 2002/07/19/big/img_423 4 | 2002/08/24/big/img_490 5 | 2002/08/31/big/img_17676 6 | 2002/07/31/big/img_228 7 | 2002/07/24/big/img_402 8 | 2002/08/04/big/img_769 9 | 2002/07/19/big/img_581 10 | 2002/08/13/big/img_723 11 | 2002/08/12/big/img_821 12 | 2003/01/17/big/img_610 13 | 2002/08/13/big/img_1116 14 | 2002/08/28/big/img_19238 15 | 2002/08/21/big/img_660 16 | 2002/08/14/big/img_607 17 | 2002/08/05/big/img_3708 18 | 2002/08/19/big/img_511 19 | 2002/08/07/big/img_1316 20 | 2002/07/25/big/img_1047 21 | 2002/07/23/big/img_474 22 | 2002/07/27/big/img_970 23 | 2002/09/02/big/img_15752 24 | 2002/09/01/big/img_16378 25 | 2002/09/01/big/img_16189 26 | 2002/08/26/big/img_276 27 | 
2002/07/24/big/img_518 28 | 2002/08/14/big/img_1027 29 | 2002/08/24/big/img_733 30 | 2002/08/15/big/img_249 31 | 2003/01/15/big/img_1371 32 | 2002/08/07/big/img_1348 33 | 2003/01/01/big/img_331 34 | 2002/08/23/big/img_536 35 | 2002/07/30/big/img_224 36 | 2002/08/10/big/img_763 37 | 2002/08/21/big/img_293 38 | 2002/08/15/big/img_1211 39 | 2002/08/15/big/img_1194 40 | 2003/01/15/big/img_390 41 | 2002/08/06/big/img_2893 42 | 2002/08/17/big/img_691 43 | 2002/08/07/big/img_1695 44 | 2002/08/16/big/img_829 45 | 2002/07/25/big/img_201 46 | 2002/08/23/big/img_36 47 | 2003/01/15/big/img_763 48 | 2003/01/15/big/img_637 49 | 2002/08/22/big/img_592 50 | 2002/07/25/big/img_817 51 | 2003/01/15/big/img_1219 52 | 2002/08/05/big/img_3508 53 | 2002/08/15/big/img_1108 54 | 2002/07/19/big/img_488 55 | 2003/01/16/big/img_704 56 | 2003/01/13/big/img_1087 57 | 2002/08/10/big/img_670 58 | 2002/07/24/big/img_104 59 | 2002/08/27/big/img_19823 60 | 2002/09/01/big/img_16229 61 | 2003/01/13/big/img_846 62 | 2002/08/04/big/img_412 63 | 2002/07/22/big/img_554 64 | 2002/08/12/big/img_331 65 | 2002/08/02/big/img_533 66 | 2002/08/12/big/img_259 67 | 2002/08/18/big/img_328 68 | 2003/01/14/big/img_630 69 | 2002/08/05/big/img_3541 70 | 2002/08/06/big/img_2390 71 | 2002/08/20/big/img_150 72 | 2002/08/02/big/img_1231 73 | 2002/08/16/big/img_710 74 | 2002/08/19/big/img_591 75 | 2002/07/22/big/img_725 76 | 2002/07/24/big/img_820 77 | 2003/01/13/big/img_568 78 | 2002/08/22/big/img_853 79 | 2002/08/09/big/img_648 80 | 2002/08/23/big/img_528 81 | 2003/01/14/big/img_888 82 | 2002/08/30/big/img_18201 83 | 2002/08/13/big/img_965 84 | 2003/01/14/big/img_660 85 | 2002/07/19/big/img_517 86 | 2003/01/14/big/img_406 87 | 2002/08/30/big/img_18433 88 | 2002/08/07/big/img_1630 89 | 2002/08/06/big/img_2717 90 | 2002/08/21/big/img_470 91 | 2002/07/23/big/img_633 92 | 2002/08/20/big/img_915 93 | 2002/08/16/big/img_893 94 | 2002/07/29/big/img_644 95 | 2002/08/15/big/img_529 96 | 2002/08/16/big/img_668 97 | 2002/08/07/big/img_1871 98 | 2002/07/25/big/img_192 99 | 2002/07/31/big/img_961 100 | 2002/08/19/big/img_738 101 | 2002/07/31/big/img_382 102 | 2002/08/19/big/img_298 103 | 2003/01/17/big/img_608 104 | 2002/08/21/big/img_514 105 | 2002/07/23/big/img_183 106 | 2003/01/17/big/img_536 107 | 2002/07/24/big/img_478 108 | 2002/08/06/big/img_2997 109 | 2002/09/02/big/img_15380 110 | 2002/08/07/big/img_1153 111 | 2002/07/31/big/img_967 112 | 2002/07/31/big/img_711 113 | 2002/08/26/big/img_664 114 | 2003/01/01/big/img_326 115 | 2002/08/24/big/img_775 116 | 2002/08/08/big/img_961 117 | 2002/08/16/big/img_77 118 | 2002/08/12/big/img_296 119 | 2002/07/22/big/img_905 120 | 2003/01/13/big/img_284 121 | 2002/08/13/big/img_887 122 | 2002/08/24/big/img_849 123 | 2002/07/30/big/img_345 124 | 2002/08/18/big/img_419 125 | 2002/08/01/big/img_1347 126 | 2002/08/05/big/img_3670 127 | 2002/07/21/big/img_479 128 | 2002/08/08/big/img_913 129 | 2002/09/02/big/img_15828 130 | 2002/08/30/big/img_18194 131 | 2002/08/08/big/img_471 132 | 2002/08/22/big/img_734 133 | 2002/08/09/big/img_586 134 | 2002/08/09/big/img_454 135 | 2002/07/29/big/img_47 136 | 2002/07/19/big/img_381 137 | 2002/07/29/big/img_733 138 | 2002/08/20/big/img_327 139 | 2002/07/21/big/img_96 140 | 2002/08/06/big/img_2680 141 | 2002/07/25/big/img_919 142 | 2002/07/21/big/img_158 143 | 2002/07/22/big/img_801 144 | 2002/07/22/big/img_567 145 | 2002/07/24/big/img_804 146 | 2002/07/24/big/img_690 147 | 2003/01/15/big/img_576 148 | 2002/08/14/big/img_335 149 | 2003/01/13/big/img_390 150 | 2002/08/11/big/img_258 151 
| 2002/07/23/big/img_917 152 | 2002/08/15/big/img_525 153 | 2003/01/15/big/img_505 154 | 2002/07/30/big/img_886 155 | 2003/01/16/big/img_640 156 | 2003/01/14/big/img_642 157 | 2003/01/17/big/img_844 158 | 2002/08/04/big/img_571 159 | 2002/08/29/big/img_18702 160 | 2003/01/15/big/img_240 161 | 2002/07/29/big/img_553 162 | 2002/08/10/big/img_354 163 | 2002/08/18/big/img_17 164 | 2003/01/15/big/img_782 165 | 2002/07/27/big/img_382 166 | 2002/08/14/big/img_970 167 | 2003/01/16/big/img_70 168 | 2003/01/16/big/img_625 169 | 2002/08/18/big/img_341 170 | 2002/08/26/big/img_188 171 | 2002/08/09/big/img_405 172 | 2002/08/02/big/img_37 173 | 2002/08/13/big/img_748 174 | 2002/07/22/big/img_399 175 | 2002/07/25/big/img_844 176 | 2002/08/12/big/img_340 177 | 2003/01/13/big/img_815 178 | 2002/08/26/big/img_5 179 | 2002/08/10/big/img_158 180 | 2002/08/18/big/img_95 181 | 2002/07/29/big/img_1297 182 | 2003/01/13/big/img_508 183 | 2002/09/01/big/img_16680 184 | 2003/01/16/big/img_338 185 | 2002/08/13/big/img_517 186 | 2002/07/22/big/img_626 187 | 2002/08/06/big/img_3024 188 | 2002/07/26/big/img_499 189 | 2003/01/13/big/img_387 190 | 2002/08/31/big/img_18025 191 | 2002/08/13/big/img_520 192 | 2003/01/16/big/img_576 193 | 2002/07/26/big/img_121 194 | 2002/08/25/big/img_703 195 | 2002/08/26/big/img_615 196 | 2002/08/17/big/img_434 197 | 2002/08/02/big/img_677 198 | 2002/08/18/big/img_276 199 | 2002/08/05/big/img_3672 200 | 2002/07/26/big/img_700 201 | 2002/07/31/big/img_277 202 | 2003/01/14/big/img_220 203 | 2002/08/23/big/img_232 204 | 2002/08/31/big/img_17422 205 | 2002/07/22/big/img_508 206 | 2002/08/13/big/img_681 207 | 2003/01/15/big/img_638 208 | 2002/08/30/big/img_18408 209 | 2003/01/14/big/img_533 210 | 2003/01/17/big/img_12 211 | 2002/08/28/big/img_19388 212 | 2002/08/08/big/img_133 213 | 2002/07/26/big/img_885 214 | 2002/08/19/big/img_387 215 | 2002/08/27/big/img_19976 216 | 2002/08/26/big/img_118 217 | 2002/08/28/big/img_19146 218 | 2002/08/05/big/img_3259 219 | 2002/08/15/big/img_536 220 | 2002/07/22/big/img_279 221 | 2002/07/22/big/img_9 222 | 2002/08/13/big/img_301 223 | 2002/08/15/big/img_974 224 | 2002/08/06/big/img_2355 225 | 2002/08/01/big/img_1526 226 | 2002/08/03/big/img_417 227 | 2002/08/04/big/img_407 228 | 2002/08/15/big/img_1029 229 | 2002/07/29/big/img_700 230 | 2002/08/01/big/img_1463 231 | 2002/08/31/big/img_17365 232 | 2002/07/28/big/img_223 233 | 2002/07/19/big/img_827 234 | 2002/07/27/big/img_531 235 | 2002/07/19/big/img_845 236 | 2002/08/20/big/img_382 237 | 2002/07/31/big/img_268 238 | 2002/08/27/big/img_19705 239 | 2002/08/02/big/img_830 240 | 2002/08/23/big/img_250 241 | 2002/07/20/big/img_777 242 | 2002/08/21/big/img_879 243 | 2002/08/26/big/img_20146 244 | 2002/08/23/big/img_789 245 | 2002/08/06/big/img_2683 246 | 2002/08/25/big/img_576 247 | 2002/08/09/big/img_498 248 | 2002/08/08/big/img_384 249 | 2002/08/26/big/img_592 250 | 2002/07/29/big/img_1470 251 | 2002/08/21/big/img_452 252 | 2002/08/30/big/img_18395 253 | 2002/08/15/big/img_215 254 | 2002/07/21/big/img_643 255 | 2002/07/22/big/img_209 256 | 2003/01/17/big/img_346 257 | 2002/08/25/big/img_658 258 | 2002/08/21/big/img_221 259 | 2002/08/14/big/img_60 260 | 2003/01/17/big/img_885 261 | 2003/01/16/big/img_482 262 | 2002/08/19/big/img_593 263 | 2002/08/08/big/img_233 264 | 2002/07/30/big/img_458 265 | 2002/07/23/big/img_384 266 | 2003/01/15/big/img_670 267 | 2003/01/15/big/img_267 268 | 2002/08/26/big/img_540 269 | 2002/07/29/big/img_552 270 | 2002/07/30/big/img_997 271 | 2003/01/17/big/img_377 272 | 
2002/08/21/big/img_265 273 | 2002/08/09/big/img_561 274 | 2002/07/31/big/img_945 275 | 2002/09/02/big/img_15252 276 | 2002/08/11/big/img_276 277 | 2002/07/22/big/img_491 278 | 2002/07/26/big/img_517 279 | 2002/08/14/big/img_726 280 | 2002/08/08/big/img_46 281 | 2002/08/28/big/img_19458 282 | 2002/08/06/big/img_2935 283 | 2002/07/29/big/img_1392 284 | 2002/08/13/big/img_776 285 | 2002/08/24/big/img_616 286 | 2002/08/14/big/img_1065 287 | 2002/07/29/big/img_889 288 | 2002/08/18/big/img_188 289 | 2002/08/07/big/img_1453 290 | 2002/08/02/big/img_760 291 | 2002/07/28/big/img_416 292 | 2002/08/07/big/img_1393 293 | 2002/08/26/big/img_292 294 | 2002/08/26/big/img_301 295 | 2003/01/13/big/img_195 296 | 2002/07/26/big/img_532 297 | 2002/08/20/big/img_550 298 | 2002/08/05/big/img_3658 299 | 2002/08/26/big/img_738 300 | 2002/09/02/big/img_15750 301 | 2003/01/17/big/img_451 302 | 2002/07/23/big/img_339 303 | 2002/08/16/big/img_637 304 | 2002/08/14/big/img_748 305 | 2002/08/06/big/img_2739 306 | 2002/07/25/big/img_482 307 | 2002/08/19/big/img_191 308 | 2002/08/26/big/img_537 309 | 2003/01/15/big/img_716 310 | 2003/01/15/big/img_767 311 | 2002/08/02/big/img_452 312 | 2002/08/08/big/img_1011 313 | 2002/08/10/big/img_144 314 | 2003/01/14/big/img_122 315 | 2002/07/24/big/img_586 316 | 2002/07/24/big/img_762 317 | 2002/08/20/big/img_369 318 | 2002/07/30/big/img_146 319 | 2002/08/23/big/img_396 320 | 2003/01/15/big/img_200 321 | 2002/08/15/big/img_1183 322 | 2003/01/14/big/img_698 323 | 2002/08/09/big/img_792 324 | 2002/08/06/big/img_2347 325 | 2002/07/31/big/img_911 326 | 2002/08/26/big/img_722 327 | 2002/08/23/big/img_621 328 | 2002/08/05/big/img_3790 329 | 2003/01/13/big/img_633 330 | 2002/08/09/big/img_224 331 | 2002/07/24/big/img_454 332 | 2002/07/21/big/img_202 333 | 2002/08/02/big/img_630 334 | 2002/08/30/big/img_18315 335 | 2002/07/19/big/img_491 336 | 2002/09/01/big/img_16456 337 | 2002/08/09/big/img_242 338 | 2002/07/25/big/img_595 339 | 2002/07/22/big/img_522 340 | 2002/08/01/big/img_1593 341 | 2002/07/29/big/img_336 342 | 2002/08/15/big/img_448 343 | 2002/08/28/big/img_19281 344 | 2002/07/29/big/img_342 345 | 2002/08/12/big/img_78 346 | 2003/01/14/big/img_525 347 | 2002/07/28/big/img_147 348 | 2002/08/11/big/img_353 349 | 2002/08/22/big/img_513 350 | 2002/08/04/big/img_721 351 | 2002/08/17/big/img_247 352 | 2003/01/14/big/img_891 353 | 2002/08/20/big/img_853 354 | 2002/07/19/big/img_414 355 | 2002/08/01/big/img_1530 356 | 2003/01/14/big/img_924 357 | 2002/08/22/big/img_468 358 | 2002/08/18/big/img_354 359 | 2002/08/30/big/img_18193 360 | 2002/08/23/big/img_492 361 | 2002/08/15/big/img_871 362 | 2002/08/12/big/img_494 363 | 2002/08/06/big/img_2470 364 | 2002/07/23/big/img_923 365 | 2002/08/26/big/img_155 366 | 2002/08/08/big/img_669 367 | 2002/07/23/big/img_404 368 | 2002/08/28/big/img_19421 369 | 2002/08/29/big/img_18993 370 | 2002/08/25/big/img_416 371 | 2003/01/17/big/img_434 372 | 2002/07/29/big/img_1370 373 | 2002/07/28/big/img_483 374 | 2002/08/11/big/img_50 375 | 2002/08/10/big/img_404 376 | 2002/09/02/big/img_15057 377 | 2003/01/14/big/img_911 378 | 2002/09/01/big/img_16697 379 | 2003/01/16/big/img_665 380 | 2002/09/01/big/img_16708 381 | 2002/08/22/big/img_612 382 | 2002/08/28/big/img_19471 383 | 2002/08/02/big/img_198 384 | 2003/01/16/big/img_527 385 | 2002/08/22/big/img_209 386 | 2002/08/30/big/img_18205 387 | 2003/01/14/big/img_114 388 | 2003/01/14/big/img_1028 389 | 2003/01/16/big/img_894 390 | 2003/01/14/big/img_837 391 | 2002/07/30/big/img_9 392 | 2002/08/06/big/img_2821 393 | 
2002/08/04/big/img_85 394 | 2003/01/13/big/img_884 395 | 2002/07/22/big/img_570 396 | 2002/08/07/big/img_1773 397 | 2002/07/26/big/img_208 398 | 2003/01/17/big/img_946 399 | 2002/07/19/big/img_930 400 | 2003/01/01/big/img_698 401 | 2003/01/17/big/img_612 402 | 2002/07/19/big/img_372 403 | 2002/07/30/big/img_721 404 | 2003/01/14/big/img_649 405 | 2002/08/19/big/img_4 406 | 2002/07/25/big/img_1024 407 | 2003/01/15/big/img_601 408 | 2002/08/30/big/img_18470 409 | 2002/07/22/big/img_29 410 | 2002/08/07/big/img_1686 411 | 2002/07/20/big/img_294 412 | 2002/08/14/big/img_800 413 | 2002/08/19/big/img_353 414 | 2002/08/19/big/img_350 415 | 2002/08/05/big/img_3392 416 | 2002/08/09/big/img_622 417 | 2003/01/15/big/img_236 418 | 2002/08/11/big/img_643 419 | 2002/08/05/big/img_3458 420 | 2002/08/12/big/img_413 421 | 2002/08/22/big/img_415 422 | 2002/08/13/big/img_635 423 | 2002/08/07/big/img_1198 424 | 2002/08/04/big/img_873 425 | 2002/08/12/big/img_407 426 | 2003/01/15/big/img_346 427 | 2002/08/02/big/img_275 428 | 2002/08/17/big/img_997 429 | 2002/08/21/big/img_958 430 | 2002/08/20/big/img_579 431 | 2002/07/29/big/img_142 432 | 2003/01/14/big/img_1115 433 | 2002/08/16/big/img_365 434 | 2002/07/29/big/img_1414 435 | 2002/08/17/big/img_489 436 | 2002/08/13/big/img_1010 437 | 2002/07/31/big/img_276 438 | 2002/07/25/big/img_1000 439 | 2002/08/23/big/img_524 440 | 2002/08/28/big/img_19147 441 | 2003/01/13/big/img_433 442 | 2002/08/20/big/img_205 443 | 2003/01/01/big/img_458 444 | 2002/07/29/big/img_1449 445 | 2003/01/16/big/img_696 446 | 2002/08/28/big/img_19296 447 | 2002/08/29/big/img_18688 448 | 2002/08/21/big/img_767 449 | 2002/08/20/big/img_532 450 | 2002/08/26/big/img_187 451 | 2002/07/26/big/img_183 452 | 2002/07/27/big/img_890 453 | 2003/01/13/big/img_576 454 | 2002/07/30/big/img_15 455 | 2002/07/31/big/img_889 456 | 2002/08/31/big/img_17759 457 | 2003/01/14/big/img_1114 458 | 2002/07/19/big/img_445 459 | 2002/08/03/big/img_593 460 | 2002/07/24/big/img_750 461 | 2002/07/30/big/img_133 462 | 2002/08/25/big/img_671 463 | 2002/07/20/big/img_351 464 | 2002/08/31/big/img_17276 465 | 2002/08/05/big/img_3231 466 | 2002/09/02/big/img_15882 467 | 2002/08/14/big/img_115 468 | 2002/08/02/big/img_1148 469 | 2002/07/25/big/img_936 470 | 2002/07/31/big/img_639 471 | 2002/08/04/big/img_427 472 | 2002/08/22/big/img_843 473 | 2003/01/17/big/img_17 474 | 2003/01/13/big/img_690 475 | 2002/08/13/big/img_472 476 | 2002/08/09/big/img_425 477 | 2002/08/05/big/img_3450 478 | 2003/01/17/big/img_439 479 | 2002/08/13/big/img_539 480 | 2002/07/28/big/img_35 481 | 2002/08/16/big/img_241 482 | 2002/08/06/big/img_2898 483 | 2003/01/16/big/img_429 484 | 2002/08/05/big/img_3817 485 | 2002/08/27/big/img_19919 486 | 2002/07/19/big/img_422 487 | 2002/08/15/big/img_560 488 | 2002/07/23/big/img_750 489 | 2002/07/30/big/img_353 490 | 2002/08/05/big/img_43 491 | 2002/08/23/big/img_305 492 | 2002/08/01/big/img_2137 493 | 2002/08/30/big/img_18097 494 | 2002/08/01/big/img_1389 495 | 2002/08/02/big/img_308 496 | 2003/01/14/big/img_652 497 | 2002/08/01/big/img_1798 498 | 2003/01/14/big/img_732 499 | 2003/01/16/big/img_294 500 | 2002/08/26/big/img_213 501 | 2002/07/24/big/img_842 502 | 2003/01/13/big/img_630 503 | 2003/01/13/big/img_634 504 | 2002/08/06/big/img_2285 505 | 2002/08/01/big/img_2162 506 | 2002/08/30/big/img_18134 507 | 2002/08/02/big/img_1045 508 | 2002/08/01/big/img_2143 509 | 2002/07/25/big/img_135 510 | 2002/07/20/big/img_645 511 | 2002/08/05/big/img_3666 512 | 2002/08/14/big/img_523 513 | 2002/08/04/big/img_425 514 | 
2003/01/14/big/img_137 515 | 2003/01/01/big/img_176 516 | 2002/08/15/big/img_505 517 | 2002/08/24/big/img_386 518 | 2002/08/05/big/img_3187 519 | 2002/08/15/big/img_419 520 | 2003/01/13/big/img_520 521 | 2002/08/04/big/img_444 522 | 2002/08/26/big/img_483 523 | 2002/08/05/big/img_3449 524 | 2002/08/30/big/img_18409 525 | 2002/08/28/big/img_19455 526 | 2002/08/27/big/img_20090 527 | 2002/07/23/big/img_625 528 | 2002/08/24/big/img_205 529 | 2002/08/08/big/img_938 530 | 2003/01/13/big/img_527 531 | 2002/08/07/big/img_1712 532 | 2002/07/24/big/img_801 533 | 2002/08/09/big/img_579 534 | 2003/01/14/big/img_41 535 | 2003/01/15/big/img_1130 536 | 2002/07/21/big/img_672 537 | 2002/08/07/big/img_1590 538 | 2003/01/01/big/img_532 539 | 2002/08/02/big/img_529 540 | 2002/08/05/big/img_3591 541 | 2002/08/23/big/img_5 542 | 2003/01/14/big/img_882 543 | 2002/08/28/big/img_19234 544 | 2002/07/24/big/img_398 545 | 2003/01/14/big/img_592 546 | 2002/08/22/big/img_548 547 | 2002/08/12/big/img_761 548 | 2003/01/16/big/img_497 549 | 2002/08/18/big/img_133 550 | 2002/08/08/big/img_874 551 | 2002/07/19/big/img_247 552 | 2002/08/15/big/img_170 553 | 2002/08/27/big/img_19679 554 | 2002/08/20/big/img_246 555 | 2002/08/24/big/img_358 556 | 2002/07/29/big/img_599 557 | 2002/08/01/big/img_1555 558 | 2002/07/30/big/img_491 559 | 2002/07/30/big/img_371 560 | 2003/01/16/big/img_682 561 | 2002/07/25/big/img_619 562 | 2003/01/15/big/img_587 563 | 2002/08/02/big/img_1212 564 | 2002/08/01/big/img_2152 565 | 2002/07/25/big/img_668 566 | 2003/01/16/big/img_574 567 | 2002/08/28/big/img_19464 568 | 2002/08/11/big/img_536 569 | 2002/07/24/big/img_201 570 | 2002/08/05/big/img_3488 571 | 2002/07/25/big/img_887 572 | 2002/07/22/big/img_789 573 | 2002/07/30/big/img_432 574 | 2002/08/16/big/img_166 575 | 2002/09/01/big/img_16333 576 | 2002/07/26/big/img_1010 577 | 2002/07/21/big/img_793 578 | 2002/07/22/big/img_720 579 | 2002/07/31/big/img_337 580 | 2002/07/27/big/img_185 581 | 2002/08/23/big/img_440 582 | 2002/07/31/big/img_801 583 | 2002/07/25/big/img_478 584 | 2003/01/14/big/img_171 585 | 2002/08/07/big/img_1054 586 | 2002/09/02/big/img_15659 587 | 2002/07/29/big/img_1348 588 | 2002/08/09/big/img_337 589 | 2002/08/26/big/img_684 590 | 2002/07/31/big/img_537 591 | 2002/08/15/big/img_808 592 | 2003/01/13/big/img_740 593 | 2002/08/07/big/img_1667 594 | 2002/08/03/big/img_404 595 | 2002/08/06/big/img_2520 596 | 2002/07/19/big/img_230 597 | 2002/07/19/big/img_356 598 | 2003/01/16/big/img_627 599 | 2002/08/04/big/img_474 600 | 2002/07/29/big/img_833 601 | 2002/07/25/big/img_176 602 | 2002/08/01/big/img_1684 603 | 2002/08/21/big/img_643 604 | 2002/08/27/big/img_19673 605 | 2002/08/02/big/img_838 606 | 2002/08/06/big/img_2378 607 | 2003/01/15/big/img_48 608 | 2002/07/30/big/img_470 609 | 2002/08/15/big/img_963 610 | 2002/08/24/big/img_444 611 | 2002/08/16/big/img_662 612 | 2002/08/15/big/img_1209 613 | 2002/07/24/big/img_25 614 | 2002/08/06/big/img_2740 615 | 2002/07/29/big/img_996 616 | 2002/08/31/big/img_18074 617 | 2002/08/04/big/img_343 618 | 2003/01/17/big/img_509 619 | 2003/01/13/big/img_726 620 | 2002/08/07/big/img_1466 621 | 2002/07/26/big/img_307 622 | 2002/08/10/big/img_598 623 | 2002/08/13/big/img_890 624 | 2002/08/14/big/img_997 625 | 2002/07/19/big/img_392 626 | 2002/08/02/big/img_475 627 | 2002/08/29/big/img_19038 628 | 2002/07/29/big/img_538 629 | 2002/07/29/big/img_502 630 | 2002/08/02/big/img_364 631 | 2002/08/31/big/img_17353 632 | 2002/08/08/big/img_539 633 | 2002/08/01/big/img_1449 634 | 2002/07/22/big/img_363 635 | 
2002/08/02/big/img_90 636 | 2002/09/01/big/img_16867 637 | 2002/08/05/big/img_3371 638 | 2002/07/30/big/img_342 639 | 2002/08/07/big/img_1363 640 | 2002/08/22/big/img_790 641 | 2003/01/15/big/img_404 642 | 2002/08/05/big/img_3447 643 | 2002/09/01/big/img_16167 644 | 2003/01/13/big/img_840 645 | 2002/08/22/big/img_1001 646 | 2002/08/09/big/img_431 647 | 2002/07/27/big/img_618 648 | 2002/07/31/big/img_741 649 | 2002/07/30/big/img_964 650 | 2002/07/25/big/img_86 651 | 2002/07/29/big/img_275 652 | 2002/08/21/big/img_921 653 | 2002/07/26/big/img_892 654 | 2002/08/21/big/img_663 655 | 2003/01/13/big/img_567 656 | 2003/01/14/big/img_719 657 | 2002/07/28/big/img_251 658 | 2003/01/15/big/img_1123 659 | 2002/07/29/big/img_260 660 | 2002/08/24/big/img_337 661 | 2002/08/01/big/img_1914 662 | 2002/08/13/big/img_373 663 | 2003/01/15/big/img_589 664 | 2002/08/13/big/img_906 665 | 2002/07/26/big/img_270 666 | 2002/08/26/big/img_313 667 | 2002/08/25/big/img_694 668 | 2003/01/01/big/img_327 669 | 2002/07/23/big/img_261 670 | 2002/08/26/big/img_642 671 | 2002/07/29/big/img_918 672 | 2002/07/23/big/img_455 673 | 2002/07/24/big/img_612 674 | 2002/07/23/big/img_534 675 | 2002/07/19/big/img_534 676 | 2002/07/19/big/img_726 677 | 2002/08/01/big/img_2146 678 | 2002/08/02/big/img_543 679 | 2003/01/16/big/img_777 680 | 2002/07/30/big/img_484 681 | 2002/08/13/big/img_1161 682 | 2002/07/21/big/img_390 683 | 2002/08/06/big/img_2288 684 | 2002/08/21/big/img_677 685 | 2002/08/13/big/img_747 686 | 2002/08/15/big/img_1248 687 | 2002/07/31/big/img_416 688 | 2002/09/02/big/img_15259 689 | 2002/08/16/big/img_781 690 | 2002/08/24/big/img_754 691 | 2002/07/24/big/img_803 692 | 2002/08/20/big/img_609 693 | 2002/08/28/big/img_19571 694 | 2002/09/01/big/img_16140 695 | 2002/08/26/big/img_769 696 | 2002/07/20/big/img_588 697 | 2002/08/02/big/img_898 698 | 2002/07/21/big/img_466 699 | 2002/08/14/big/img_1046 700 | 2002/07/25/big/img_212 701 | 2002/08/26/big/img_353 702 | 2002/08/19/big/img_810 703 | 2002/08/31/big/img_17824 704 | 2002/08/12/big/img_631 705 | 2002/07/19/big/img_828 706 | 2002/07/24/big/img_130 707 | 2002/08/25/big/img_580 708 | 2002/07/31/big/img_699 709 | 2002/07/23/big/img_808 710 | 2002/07/31/big/img_377 711 | 2003/01/16/big/img_570 712 | 2002/09/01/big/img_16254 713 | 2002/07/21/big/img_471 714 | 2002/08/01/big/img_1548 715 | 2002/08/18/big/img_252 716 | 2002/08/19/big/img_576 717 | 2002/08/20/big/img_464 718 | 2002/07/27/big/img_735 719 | 2002/08/21/big/img_589 720 | 2003/01/15/big/img_1192 721 | 2002/08/09/big/img_302 722 | 2002/07/31/big/img_594 723 | 2002/08/23/big/img_19 724 | 2002/08/29/big/img_18819 725 | 2002/08/19/big/img_293 726 | 2002/07/30/big/img_331 727 | 2002/08/23/big/img_607 728 | 2002/07/30/big/img_363 729 | 2002/08/16/big/img_766 730 | 2003/01/13/big/img_481 731 | 2002/08/06/big/img_2515 732 | 2002/09/02/big/img_15913 733 | 2002/09/02/big/img_15827 734 | 2002/09/02/big/img_15053 735 | 2002/08/07/big/img_1576 736 | 2002/07/23/big/img_268 737 | 2002/08/21/big/img_152 738 | 2003/01/15/big/img_578 739 | 2002/07/21/big/img_589 740 | 2002/07/20/big/img_548 741 | 2002/08/27/big/img_19693 742 | 2002/08/31/big/img_17252 743 | 2002/07/31/big/img_138 744 | 2002/07/23/big/img_372 745 | 2002/08/16/big/img_695 746 | 2002/07/27/big/img_287 747 | 2002/08/15/big/img_315 748 | 2002/08/10/big/img_361 749 | 2002/07/29/big/img_899 750 | 2002/08/13/big/img_771 751 | 2002/08/21/big/img_92 752 | 2003/01/15/big/img_425 753 | 2003/01/16/big/img_450 754 | 2002/09/01/big/img_16942 755 | 2002/08/02/big/img_51 756 | 
2002/09/02/big/img_15379 757 | 2002/08/24/big/img_147 758 | 2002/08/30/big/img_18122 759 | 2002/07/26/big/img_950 760 | 2002/08/07/big/img_1400 761 | 2002/08/17/big/img_468 762 | 2002/08/15/big/img_470 763 | 2002/07/30/big/img_318 764 | 2002/07/22/big/img_644 765 | 2002/08/27/big/img_19732 766 | 2002/07/23/big/img_601 767 | 2002/08/26/big/img_398 768 | 2002/08/21/big/img_428 769 | 2002/08/06/big/img_2119 770 | 2002/08/29/big/img_19103 771 | 2003/01/14/big/img_933 772 | 2002/08/11/big/img_674 773 | 2002/08/28/big/img_19420 774 | 2002/08/03/big/img_418 775 | 2002/08/17/big/img_312 776 | 2002/07/25/big/img_1044 777 | 2003/01/17/big/img_671 778 | 2002/08/30/big/img_18297 779 | 2002/07/25/big/img_755 780 | 2002/07/23/big/img_471 781 | 2002/08/21/big/img_39 782 | 2002/07/26/big/img_699 783 | 2003/01/14/big/img_33 784 | 2002/07/31/big/img_411 785 | 2002/08/16/big/img_645 786 | 2003/01/17/big/img_116 787 | 2002/09/02/big/img_15903 788 | 2002/08/20/big/img_120 789 | 2002/08/22/big/img_176 790 | 2002/07/29/big/img_1316 791 | 2002/08/27/big/img_19914 792 | 2002/07/22/big/img_719 793 | 2002/08/28/big/img_19239 794 | 2003/01/13/big/img_385 795 | 2002/08/08/big/img_525 796 | 2002/07/19/big/img_782 797 | 2002/08/13/big/img_843 798 | 2002/07/30/big/img_107 799 | 2002/08/11/big/img_752 800 | 2002/07/29/big/img_383 801 | 2002/08/26/big/img_249 802 | 2002/08/29/big/img_18860 803 | 2002/07/30/big/img_70 804 | 2002/07/26/big/img_194 805 | 2002/08/15/big/img_530 806 | 2002/08/08/big/img_816 807 | 2002/07/31/big/img_286 808 | 2003/01/13/big/img_294 809 | 2002/07/31/big/img_251 810 | 2002/07/24/big/img_13 811 | 2002/08/31/big/img_17938 812 | 2002/07/22/big/img_642 813 | 2003/01/14/big/img_728 814 | 2002/08/18/big/img_47 815 | 2002/08/22/big/img_306 816 | 2002/08/20/big/img_348 817 | 2002/08/15/big/img_764 818 | 2002/08/08/big/img_163 819 | 2002/07/23/big/img_531 820 | 2002/07/23/big/img_467 821 | 2003/01/16/big/img_743 822 | 2003/01/13/big/img_535 823 | 2002/08/02/big/img_523 824 | 2002/08/22/big/img_120 825 | 2002/08/11/big/img_496 826 | 2002/08/29/big/img_19075 827 | 2002/08/08/big/img_465 828 | 2002/08/09/big/img_790 829 | 2002/08/19/big/img_588 830 | 2002/08/23/big/img_407 831 | 2003/01/17/big/img_435 832 | 2002/08/24/big/img_398 833 | 2002/08/27/big/img_19899 834 | 2003/01/15/big/img_335 835 | 2002/08/13/big/img_493 836 | 2002/09/02/big/img_15460 837 | 2002/07/31/big/img_470 838 | 2002/08/05/big/img_3550 839 | 2002/07/28/big/img_123 840 | 2002/08/01/big/img_1498 841 | 2002/08/04/big/img_504 842 | 2003/01/17/big/img_427 843 | 2002/08/27/big/img_19708 844 | 2002/07/27/big/img_861 845 | 2002/07/25/big/img_685 846 | 2002/07/31/big/img_207 847 | 2003/01/14/big/img_745 848 | 2002/08/31/big/img_17756 849 | 2002/08/24/big/img_288 850 | 2002/08/18/big/img_181 851 | 2002/08/10/big/img_520 852 | 2002/08/25/big/img_705 853 | 2002/08/23/big/img_226 854 | 2002/08/04/big/img_727 855 | 2002/07/24/big/img_625 856 | 2002/08/28/big/img_19157 857 | 2002/08/23/big/img_586 858 | 2002/07/31/big/img_232 859 | 2003/01/13/big/img_240 860 | 2003/01/14/big/img_321 861 | 2003/01/15/big/img_533 862 | 2002/07/23/big/img_480 863 | 2002/07/24/big/img_371 864 | 2002/08/21/big/img_702 865 | 2002/08/31/big/img_17075 866 | 2002/09/02/big/img_15278 867 | 2002/07/29/big/img_246 868 | 2003/01/15/big/img_829 869 | 2003/01/15/big/img_1213 870 | 2003/01/16/big/img_441 871 | 2002/08/14/big/img_921 872 | 2002/07/23/big/img_425 873 | 2002/08/15/big/img_296 874 | 2002/07/19/big/img_135 875 | 2002/07/26/big/img_402 876 | 2003/01/17/big/img_88 877 | 
2002/08/20/big/img_872 878 | 2002/08/13/big/img_1110 879 | 2003/01/16/big/img_1040 880 | 2002/07/23/big/img_9 881 | 2002/08/13/big/img_700 882 | 2002/08/16/big/img_371 883 | 2002/08/27/big/img_19966 884 | 2003/01/17/big/img_391 885 | 2002/08/18/big/img_426 886 | 2002/08/01/big/img_1618 887 | 2002/07/21/big/img_754 888 | 2003/01/14/big/img_1101 889 | 2003/01/16/big/img_1022 890 | 2002/07/22/big/img_275 891 | 2002/08/24/big/img_86 892 | 2002/08/17/big/img_582 893 | 2003/01/15/big/img_765 894 | 2003/01/17/big/img_449 895 | 2002/07/28/big/img_265 896 | 2003/01/13/big/img_552 897 | 2002/07/28/big/img_115 898 | 2003/01/16/big/img_56 899 | 2002/08/02/big/img_1232 900 | 2003/01/17/big/img_925 901 | 2002/07/22/big/img_445 902 | 2002/07/25/big/img_957 903 | 2002/07/20/big/img_589 904 | 2002/08/31/big/img_17107 905 | 2002/07/29/big/img_483 906 | 2002/08/14/big/img_1063 907 | 2002/08/07/big/img_1545 908 | 2002/08/14/big/img_680 909 | 2002/09/01/big/img_16694 910 | 2002/08/14/big/img_257 911 | 2002/08/11/big/img_726 912 | 2002/07/26/big/img_681 913 | 2002/07/25/big/img_481 914 | 2003/01/14/big/img_737 915 | 2002/08/28/big/img_19480 916 | 2003/01/16/big/img_362 917 | 2002/08/27/big/img_19865 918 | 2003/01/01/big/img_547 919 | 2002/09/02/big/img_15074 920 | 2002/08/01/big/img_1453 921 | 2002/08/22/big/img_594 922 | 2002/08/28/big/img_19263 923 | 2002/08/13/big/img_478 924 | 2002/07/29/big/img_1358 925 | 2003/01/14/big/img_1022 926 | 2002/08/16/big/img_450 927 | 2002/08/02/big/img_159 928 | 2002/07/26/big/img_781 929 | 2003/01/13/big/img_601 930 | 2002/08/20/big/img_407 931 | 2002/08/15/big/img_468 932 | 2002/08/31/big/img_17902 933 | 2002/08/16/big/img_81 934 | 2002/07/25/big/img_987 935 | 2002/07/25/big/img_500 936 | 2002/08/02/big/img_31 937 | 2002/08/18/big/img_538 938 | 2002/08/08/big/img_54 939 | 2002/07/23/big/img_686 940 | 2002/07/24/big/img_836 941 | 2003/01/17/big/img_734 942 | 2002/08/16/big/img_1055 943 | 2003/01/16/big/img_521 944 | 2002/07/25/big/img_612 945 | 2002/08/22/big/img_778 946 | 2002/08/03/big/img_251 947 | 2002/08/12/big/img_436 948 | 2002/08/23/big/img_705 949 | 2002/07/28/big/img_243 950 | 2002/07/25/big/img_1029 951 | 2002/08/20/big/img_287 952 | 2002/08/29/big/img_18739 953 | 2002/08/05/big/img_3272 954 | 2002/07/27/big/img_214 955 | 2003/01/14/big/img_5 956 | 2002/08/01/big/img_1380 957 | 2002/08/29/big/img_19097 958 | 2002/07/30/big/img_486 959 | 2002/08/29/big/img_18707 960 | 2002/08/10/big/img_559 961 | 2002/08/15/big/img_365 962 | 2002/08/09/big/img_525 963 | 2002/08/10/big/img_689 964 | 2002/07/25/big/img_502 965 | 2002/08/03/big/img_667 966 | 2002/08/10/big/img_855 967 | 2002/08/10/big/img_706 968 | 2002/08/18/big/img_603 969 | 2003/01/16/big/img_1055 970 | 2002/08/31/big/img_17890 971 | 2002/08/15/big/img_761 972 | 2003/01/15/big/img_489 973 | 2002/08/26/big/img_351 974 | 2002/08/01/big/img_1772 975 | 2002/08/31/big/img_17729 976 | 2002/07/25/big/img_609 977 | 2003/01/13/big/img_539 978 | 2002/07/27/big/img_686 979 | 2002/07/31/big/img_311 980 | 2002/08/22/big/img_799 981 | 2003/01/16/big/img_936 982 | 2002/08/31/big/img_17813 983 | 2002/08/04/big/img_862 984 | 2002/08/09/big/img_332 985 | 2002/07/20/big/img_148 986 | 2002/08/12/big/img_426 987 | 2002/07/24/big/img_69 988 | 2002/07/27/big/img_685 989 | 2002/08/02/big/img_480 990 | 2002/08/26/big/img_154 991 | 2002/07/24/big/img_598 992 | 2002/08/01/big/img_1881 993 | 2002/08/20/big/img_667 994 | 2003/01/14/big/img_495 995 | 2002/07/21/big/img_744 996 | 2002/07/30/big/img_150 997 | 2002/07/23/big/img_924 998 | 
2002/08/08/big/img_272 999 | 2002/07/23/big/img_310 1000 | 2002/07/25/big/img_1011 1001 | 2002/09/02/big/img_15725 1002 | 2002/07/19/big/img_814 1003 | 2002/08/20/big/img_936 1004 | 2002/07/25/big/img_85 1005 | 2002/08/24/big/img_662 1006 | 2002/08/09/big/img_495 1007 | 2003/01/15/big/img_196 1008 | 2002/08/16/big/img_707 1009 | 2002/08/28/big/img_19370 1010 | 2002/08/06/big/img_2366 1011 | 2002/08/06/big/img_3012 1012 | 2002/08/01/big/img_1452 1013 | 2002/07/31/big/img_742 1014 | 2002/07/27/big/img_914 1015 | 2003/01/13/big/img_290 1016 | 2002/07/31/big/img_288 1017 | 2002/08/02/big/img_171 1018 | 2002/08/22/big/img_191 1019 | 2002/07/27/big/img_1066 1020 | 2002/08/12/big/img_383 1021 | 2003/01/17/big/img_1018 1022 | 2002/08/01/big/img_1785 1023 | 2002/08/11/big/img_390 1024 | 2002/08/27/big/img_20037 1025 | 2002/08/12/big/img_38 1026 | 2003/01/15/big/img_103 1027 | 2002/08/26/big/img_31 1028 | 2002/08/18/big/img_660 1029 | 2002/07/22/big/img_694 1030 | 2002/08/15/big/img_24 1031 | 2002/07/27/big/img_1077 1032 | 2002/08/01/big/img_1943 1033 | 2002/07/22/big/img_292 1034 | 2002/09/01/big/img_16857 1035 | 2002/07/22/big/img_892 1036 | 2003/01/14/big/img_46 1037 | 2002/08/09/big/img_469 1038 | 2002/08/09/big/img_414 1039 | 2003/01/16/big/img_40 1040 | 2002/08/28/big/img_19231 1041 | 2002/07/27/big/img_978 1042 | 2002/07/23/big/img_475 1043 | 2002/07/25/big/img_92 1044 | 2002/08/09/big/img_799 1045 | 2002/07/25/big/img_491 1046 | 2002/08/03/big/img_654 1047 | 2003/01/15/big/img_687 1048 | 2002/08/11/big/img_478 1049 | 2002/08/07/big/img_1664 1050 | 2002/08/20/big/img_362 1051 | 2002/08/01/big/img_1298 1052 | 2003/01/13/big/img_500 1053 | 2002/08/06/big/img_2896 1054 | 2002/08/30/big/img_18529 1055 | 2002/08/16/big/img_1020 1056 | 2002/07/29/big/img_892 1057 | 2002/08/29/big/img_18726 1058 | 2002/07/21/big/img_453 1059 | 2002/08/17/big/img_437 1060 | 2002/07/19/big/img_665 1061 | 2002/07/22/big/img_440 1062 | 2002/07/19/big/img_582 1063 | 2002/07/21/big/img_233 1064 | 2003/01/01/big/img_82 1065 | 2002/07/25/big/img_341 1066 | 2002/07/29/big/img_864 1067 | 2002/08/02/big/img_276 1068 | 2002/08/29/big/img_18654 1069 | 2002/07/27/big/img_1024 1070 | 2002/08/19/big/img_373 1071 | 2003/01/15/big/img_241 1072 | 2002/07/25/big/img_84 1073 | 2002/08/13/big/img_834 1074 | 2002/08/10/big/img_511 1075 | 2002/08/01/big/img_1627 1076 | 2002/08/08/big/img_607 1077 | 2002/08/06/big/img_2083 1078 | 2002/08/01/big/img_1486 1079 | 2002/08/08/big/img_700 1080 | 2002/08/01/big/img_1954 1081 | 2002/08/21/big/img_54 1082 | 2002/07/30/big/img_847 1083 | 2002/08/28/big/img_19169 1084 | 2002/07/21/big/img_549 1085 | 2002/08/03/big/img_693 1086 | 2002/07/31/big/img_1002 1087 | 2003/01/14/big/img_1035 1088 | 2003/01/16/big/img_622 1089 | 2002/07/30/big/img_1201 1090 | 2002/08/10/big/img_444 1091 | 2002/07/31/big/img_374 1092 | 2002/08/21/big/img_301 1093 | 2002/08/13/big/img_1095 1094 | 2003/01/13/big/img_288 1095 | 2002/07/25/big/img_232 1096 | 2003/01/13/big/img_967 1097 | 2002/08/26/big/img_360 1098 | 2002/08/05/big/img_67 1099 | 2002/08/29/big/img_18969 1100 | 2002/07/28/big/img_16 1101 | 2002/08/16/big/img_515 1102 | 2002/07/20/big/img_708 1103 | 2002/08/18/big/img_178 1104 | 2003/01/15/big/img_509 1105 | 2002/07/25/big/img_430 1106 | 2002/08/21/big/img_738 1107 | 2002/08/16/big/img_886 1108 | 2002/09/02/big/img_15605 1109 | 2002/09/01/big/img_16242 1110 | 2002/08/24/big/img_711 1111 | 2002/07/25/big/img_90 1112 | 2002/08/09/big/img_491 1113 | 2002/07/30/big/img_534 1114 | 2003/01/13/big/img_474 1115 | 
2002/08/25/big/img_510 1116 | 2002/08/15/big/img_555 1117 | 2002/08/02/big/img_775 1118 | 2002/07/23/big/img_975 1119 | 2002/08/19/big/img_229 1120 | 2003/01/17/big/img_860 1121 | 2003/01/02/big/img_10 1122 | 2002/07/23/big/img_542 1123 | 2002/08/06/big/img_2535 1124 | 2002/07/22/big/img_37 1125 | 2002/08/06/big/img_2342 1126 | 2002/08/25/big/img_515 1127 | 2002/08/25/big/img_336 1128 | 2002/08/18/big/img_837 1129 | 2002/08/21/big/img_616 1130 | 2003/01/17/big/img_24 1131 | 2002/07/26/big/img_936 1132 | 2002/08/14/big/img_896 1133 | 2002/07/29/big/img_465 1134 | 2002/07/31/big/img_543 1135 | 2002/08/01/big/img_1411 1136 | 2002/08/02/big/img_423 1137 | 2002/08/21/big/img_44 1138 | 2002/07/31/big/img_11 1139 | 2003/01/15/big/img_628 1140 | 2003/01/15/big/img_605 1141 | 2002/07/30/big/img_571 1142 | 2002/07/23/big/img_428 1143 | 2002/08/15/big/img_942 1144 | 2002/07/26/big/img_531 1145 | 2003/01/16/big/img_59 1146 | 2002/08/02/big/img_410 1147 | 2002/07/31/big/img_230 1148 | 2002/08/19/big/img_806 1149 | 2003/01/14/big/img_462 1150 | 2002/08/16/big/img_370 1151 | 2002/08/13/big/img_380 1152 | 2002/08/16/big/img_932 1153 | 2002/07/19/big/img_393 1154 | 2002/08/20/big/img_764 1155 | 2002/08/15/big/img_616 1156 | 2002/07/26/big/img_267 1157 | 2002/07/27/big/img_1069 1158 | 2002/08/14/big/img_1041 1159 | 2003/01/13/big/img_594 1160 | 2002/09/01/big/img_16845 1161 | 2002/08/09/big/img_229 1162 | 2003/01/16/big/img_639 1163 | 2002/08/19/big/img_398 1164 | 2002/08/18/big/img_978 1165 | 2002/08/24/big/img_296 1166 | 2002/07/29/big/img_415 1167 | 2002/07/30/big/img_923 1168 | 2002/08/18/big/img_575 1169 | 2002/08/22/big/img_182 1170 | 2002/07/25/big/img_806 1171 | 2002/07/22/big/img_49 1172 | 2002/07/29/big/img_989 1173 | 2003/01/17/big/img_789 1174 | 2003/01/15/big/img_503 1175 | 2002/09/01/big/img_16062 1176 | 2003/01/17/big/img_794 1177 | 2002/08/15/big/img_564 1178 | 2003/01/15/big/img_222 1179 | 2002/08/01/big/img_1656 1180 | 2003/01/13/big/img_432 1181 | 2002/07/19/big/img_426 1182 | 2002/08/17/big/img_244 1183 | 2002/08/13/big/img_805 1184 | 2002/09/02/big/img_15067 1185 | 2002/08/11/big/img_58 1186 | 2002/08/22/big/img_636 1187 | 2002/07/22/big/img_416 1188 | 2002/08/13/big/img_836 1189 | 2002/08/26/big/img_363 1190 | 2002/07/30/big/img_917 1191 | 2003/01/14/big/img_206 1192 | 2002/08/12/big/img_311 1193 | 2002/08/31/big/img_17623 1194 | 2002/07/29/big/img_661 1195 | 2003/01/13/big/img_417 1196 | 2002/08/02/big/img_463 1197 | 2002/08/02/big/img_669 1198 | 2002/08/26/big/img_670 1199 | 2002/08/02/big/img_375 1200 | 2002/07/19/big/img_209 1201 | 2002/08/08/big/img_115 1202 | 2002/08/21/big/img_399 1203 | 2002/08/20/big/img_911 1204 | 2002/08/07/big/img_1212 1205 | 2002/08/20/big/img_578 1206 | 2002/08/22/big/img_554 1207 | 2002/08/21/big/img_484 1208 | 2002/07/25/big/img_450 1209 | 2002/08/03/big/img_542 1210 | 2002/08/15/big/img_561 1211 | 2002/07/23/big/img_360 1212 | 2002/08/30/big/img_18137 1213 | 2002/07/25/big/img_250 1214 | 2002/08/03/big/img_647 1215 | 2002/08/20/big/img_375 1216 | 2002/08/14/big/img_387 1217 | 2002/09/01/big/img_16990 1218 | 2002/08/28/big/img_19341 1219 | 2003/01/15/big/img_239 1220 | 2002/08/20/big/img_528 1221 | 2002/08/12/big/img_130 1222 | 2002/09/02/big/img_15108 1223 | 2003/01/15/big/img_372 1224 | 2002/08/16/big/img_678 1225 | 2002/08/04/big/img_623 1226 | 2002/07/23/big/img_477 1227 | 2002/08/28/big/img_19590 1228 | 2003/01/17/big/img_978 1229 | 2002/09/01/big/img_16692 1230 | 2002/07/20/big/img_109 1231 | 2002/08/06/big/img_2660 1232 | 2003/01/14/big/img_464 
1233 | 2002/08/09/big/img_618 1234 | 2002/07/22/big/img_722 1235 | 2002/08/25/big/img_419 1236 | 2002/08/03/big/img_314 1237 | 2002/08/25/big/img_40 1238 | 2002/07/27/big/img_430 1239 | 2002/08/10/big/img_569 1240 | 2002/08/23/big/img_398 1241 | 2002/07/23/big/img_893 1242 | 2002/08/16/big/img_261 1243 | 2002/08/06/big/img_2668 1244 | 2002/07/22/big/img_835 1245 | 2002/09/02/big/img_15093 1246 | 2003/01/16/big/img_65 1247 | 2002/08/21/big/img_448 1248 | 2003/01/14/big/img_351 1249 | 2003/01/17/big/img_133 1250 | 2002/07/28/big/img_493 1251 | 2003/01/15/big/img_640 1252 | 2002/09/01/big/img_16880 1253 | 2002/08/15/big/img_350 1254 | 2002/08/20/big/img_624 1255 | 2002/08/25/big/img_604 1256 | 2002/08/06/big/img_2200 1257 | 2002/08/23/big/img_290 1258 | 2002/08/13/big/img_1152 1259 | 2003/01/14/big/img_251 1260 | 2002/08/02/big/img_538 1261 | 2002/08/22/big/img_613 1262 | 2003/01/13/big/img_351 1263 | 2002/08/18/big/img_368 1264 | 2002/07/23/big/img_392 1265 | 2002/07/25/big/img_198 1266 | 2002/07/25/big/img_418 1267 | 2002/08/26/big/img_614 1268 | 2002/07/23/big/img_405 1269 | 2003/01/14/big/img_445 1270 | 2002/07/25/big/img_326 1271 | 2002/08/10/big/img_734 1272 | 2003/01/14/big/img_530 1273 | 2002/08/08/big/img_561 1274 | 2002/08/29/big/img_18990 1275 | 2002/08/10/big/img_576 1276 | 2002/07/29/big/img_1494 1277 | 2002/07/19/big/img_198 1278 | 2002/08/10/big/img_562 1279 | 2002/07/22/big/img_901 1280 | 2003/01/14/big/img_37 1281 | 2002/09/02/big/img_15629 1282 | 2003/01/14/big/img_58 1283 | 2002/08/01/big/img_1364 1284 | 2002/07/27/big/img_636 1285 | 2003/01/13/big/img_241 1286 | 2002/09/01/big/img_16988 1287 | 2003/01/13/big/img_560 1288 | 2002/08/09/big/img_533 1289 | 2002/07/31/big/img_249 1290 | 2003/01/17/big/img_1007 1291 | 2002/07/21/big/img_64 1292 | 2003/01/13/big/img_537 1293 | 2003/01/15/big/img_606 1294 | 2002/08/18/big/img_651 1295 | 2002/08/24/big/img_405 1296 | 2002/07/26/big/img_837 1297 | 2002/08/09/big/img_562 1298 | 2002/08/01/big/img_1983 1299 | 2002/08/03/big/img_514 1300 | 2002/07/29/big/img_314 1301 | 2002/08/12/big/img_493 1302 | 2003/01/14/big/img_121 1303 | 2003/01/14/big/img_479 1304 | 2002/08/04/big/img_410 1305 | 2002/07/22/big/img_607 1306 | 2003/01/17/big/img_417 1307 | 2002/07/20/big/img_547 1308 | 2002/08/13/big/img_396 1309 | 2002/08/31/big/img_17538 1310 | 2002/08/13/big/img_187 1311 | 2002/08/12/big/img_328 1312 | 2003/01/14/big/img_569 1313 | 2002/07/27/big/img_1081 1314 | 2002/08/14/big/img_504 1315 | 2002/08/23/big/img_785 1316 | 2002/07/26/big/img_339 1317 | 2002/08/07/big/img_1156 1318 | 2002/08/07/big/img_1456 1319 | 2002/08/23/big/img_378 1320 | 2002/08/27/big/img_19719 1321 | 2002/07/31/big/img_39 1322 | 2002/07/31/big/img_883 1323 | 2003/01/14/big/img_676 1324 | 2002/07/29/big/img_214 1325 | 2002/07/26/big/img_669 1326 | 2002/07/25/big/img_202 1327 | 2002/08/08/big/img_259 1328 | 2003/01/17/big/img_943 1329 | 2003/01/15/big/img_512 1330 | 2002/08/05/big/img_3295 1331 | 2002/08/27/big/img_19685 1332 | 2002/08/08/big/img_277 1333 | 2002/08/30/big/img_18154 1334 | 2002/07/22/big/img_663 1335 | 2002/08/29/big/img_18914 1336 | 2002/07/31/big/img_908 1337 | 2002/08/27/big/img_19926 1338 | 2003/01/13/big/img_791 1339 | 2003/01/15/big/img_827 1340 | 2002/08/18/big/img_878 1341 | 2002/08/14/big/img_670 1342 | 2002/07/20/big/img_182 1343 | 2002/08/15/big/img_291 1344 | 2002/08/06/big/img_2600 1345 | 2002/07/23/big/img_587 1346 | 2002/08/14/big/img_577 1347 | 2003/01/15/big/img_585 1348 | 2002/07/30/big/img_310 1349 | 2002/08/03/big/img_658 1350 | 
2002/08/10/big/img_157 1351 | 2002/08/19/big/img_811 1352 | 2002/07/29/big/img_1318 1353 | 2002/08/04/big/img_104 1354 | 2002/07/30/big/img_332 1355 | 2002/07/24/big/img_789 1356 | 2002/07/29/big/img_516 1357 | 2002/07/23/big/img_843 1358 | 2002/08/01/big/img_1528 1359 | 2002/08/13/big/img_798 1360 | 2002/08/07/big/img_1729 1361 | 2002/08/28/big/img_19448 1362 | 2003/01/16/big/img_95 1363 | 2002/08/12/big/img_473 1364 | 2002/07/27/big/img_269 1365 | 2003/01/16/big/img_621 1366 | 2002/07/29/big/img_772 1367 | 2002/07/24/big/img_171 1368 | 2002/07/19/big/img_429 1369 | 2002/08/07/big/img_1933 1370 | 2002/08/27/big/img_19629 1371 | 2002/08/05/big/img_3688 1372 | 2002/08/07/big/img_1691 1373 | 2002/07/23/big/img_600 1374 | 2002/07/29/big/img_666 1375 | 2002/08/25/big/img_566 1376 | 2002/08/06/big/img_2659 1377 | 2002/08/29/big/img_18929 1378 | 2002/08/16/big/img_407 1379 | 2002/08/18/big/img_774 1380 | 2002/08/19/big/img_249 1381 | 2002/08/06/big/img_2427 1382 | 2002/08/29/big/img_18899 1383 | 2002/08/01/big/img_1818 1384 | 2002/07/31/big/img_108 1385 | 2002/07/29/big/img_500 1386 | 2002/08/11/big/img_115 1387 | 2002/07/19/big/img_521 1388 | 2002/08/02/big/img_1163 1389 | 2002/07/22/big/img_62 1390 | 2002/08/13/big/img_466 1391 | 2002/08/21/big/img_956 1392 | 2002/08/23/big/img_602 1393 | 2002/08/20/big/img_858 1394 | 2002/07/25/big/img_690 1395 | 2002/07/19/big/img_130 1396 | 2002/08/04/big/img_874 1397 | 2002/07/26/big/img_489 1398 | 2002/07/22/big/img_548 1399 | 2002/08/10/big/img_191 1400 | 2002/07/25/big/img_1051 1401 | 2002/08/18/big/img_473 1402 | 2002/08/12/big/img_755 1403 | 2002/08/18/big/img_413 1404 | 2002/08/08/big/img_1044 1405 | 2002/08/17/big/img_680 1406 | 2002/08/26/big/img_235 1407 | 2002/08/20/big/img_330 1408 | 2002/08/22/big/img_344 1409 | 2002/08/09/big/img_593 1410 | 2002/07/31/big/img_1006 1411 | 2002/08/14/big/img_337 1412 | 2002/08/16/big/img_728 1413 | 2002/07/24/big/img_834 1414 | 2002/08/04/big/img_552 1415 | 2002/09/02/big/img_15213 1416 | 2002/07/25/big/img_725 1417 | 2002/08/30/big/img_18290 1418 | 2003/01/01/big/img_475 1419 | 2002/07/27/big/img_1083 1420 | 2002/08/29/big/img_18955 1421 | 2002/08/31/big/img_17232 1422 | 2002/08/08/big/img_480 1423 | 2002/08/01/big/img_1311 1424 | 2002/07/30/big/img_745 1425 | 2002/08/03/big/img_649 1426 | 2002/08/12/big/img_193 1427 | 2002/07/29/big/img_228 1428 | 2002/07/25/big/img_836 1429 | 2002/08/20/big/img_400 1430 | 2002/07/30/big/img_507 1431 | 2002/09/02/big/img_15072 1432 | 2002/07/26/big/img_658 1433 | 2002/07/28/big/img_503 1434 | 2002/08/05/big/img_3814 1435 | 2002/08/24/big/img_745 1436 | 2003/01/13/big/img_817 1437 | 2002/08/08/big/img_579 1438 | 2002/07/22/big/img_251 1439 | 2003/01/13/big/img_689 1440 | 2002/07/25/big/img_407 1441 | 2002/08/13/big/img_1050 1442 | 2002/08/14/big/img_733 1443 | 2002/07/24/big/img_82 1444 | 2003/01/17/big/img_288 1445 | 2003/01/15/big/img_475 1446 | 2002/08/14/big/img_620 1447 | 2002/08/21/big/img_167 1448 | 2002/07/19/big/img_300 1449 | 2002/07/26/big/img_219 1450 | 2002/08/01/big/img_1468 1451 | 2002/07/23/big/img_260 1452 | 2002/08/09/big/img_555 1453 | 2002/07/19/big/img_160 1454 | 2002/08/02/big/img_1060 1455 | 2003/01/14/big/img_149 1456 | 2002/08/15/big/img_346 1457 | 2002/08/24/big/img_597 1458 | 2002/08/22/big/img_502 1459 | 2002/08/30/big/img_18228 1460 | 2002/07/21/big/img_766 1461 | 2003/01/15/big/img_841 1462 | 2002/07/24/big/img_516 1463 | 2002/08/02/big/img_265 1464 | 2002/08/15/big/img_1243 1465 | 2003/01/15/big/img_223 1466 | 2002/08/04/big/img_236 1467 | 
2002/07/22/big/img_309 1468 | 2002/07/20/big/img_656 1469 | 2002/07/31/big/img_412 1470 | 2002/09/01/big/img_16462 1471 | 2003/01/16/big/img_431 1472 | 2002/07/22/big/img_793 1473 | 2002/08/15/big/img_877 1474 | 2002/07/26/big/img_282 1475 | 2002/07/25/big/img_529 1476 | 2002/08/24/big/img_613 1477 | 2003/01/17/big/img_700 1478 | 2002/08/06/big/img_2526 1479 | 2002/08/24/big/img_394 1480 | 2002/08/21/big/img_521 1481 | 2002/08/25/big/img_560 1482 | 2002/07/29/big/img_966 1483 | 2002/07/25/big/img_448 1484 | 2003/01/13/big/img_782 1485 | 2002/08/21/big/img_296 1486 | 2002/09/01/big/img_16755 1487 | 2002/08/05/big/img_3552 1488 | 2002/09/02/big/img_15823 1489 | 2003/01/14/big/img_193 1490 | 2002/07/21/big/img_159 1491 | 2002/08/02/big/img_564 1492 | 2002/08/16/big/img_300 1493 | 2002/07/19/big/img_269 1494 | 2002/08/13/big/img_676 1495 | 2002/07/28/big/img_57 1496 | 2002/08/05/big/img_3318 1497 | 2002/07/31/big/img_218 1498 | 2002/08/21/big/img_898 1499 | 2002/07/29/big/img_109 1500 | 2002/07/19/big/img_854 1501 | 2002/08/23/big/img_311 1502 | 2002/08/14/big/img_318 1503 | 2002/07/25/big/img_523 1504 | 2002/07/21/big/img_678 1505 | 2003/01/17/big/img_690 1506 | 2002/08/28/big/img_19503 1507 | 2002/08/18/big/img_251 1508 | 2002/08/22/big/img_672 1509 | 2002/08/20/big/img_663 1510 | 2002/08/02/big/img_148 1511 | 2002/09/02/big/img_15580 1512 | 2002/07/25/big/img_778 1513 | 2002/08/14/big/img_565 1514 | 2002/08/12/big/img_374 1515 | 2002/08/13/big/img_1018 1516 | 2002/08/20/big/img_474 1517 | 2002/08/25/big/img_33 1518 | 2002/08/02/big/img_1190 1519 | 2002/08/08/big/img_864 1520 | 2002/08/14/big/img_1071 1521 | 2002/08/30/big/img_18103 1522 | 2002/08/18/big/img_533 1523 | 2003/01/16/big/img_650 1524 | 2002/07/25/big/img_108 1525 | 2002/07/26/big/img_81 1526 | 2002/07/27/big/img_543 1527 | 2002/07/29/big/img_521 1528 | 2003/01/13/big/img_434 1529 | 2002/08/26/big/img_674 1530 | 2002/08/06/big/img_2932 1531 | 2002/08/07/big/img_1262 1532 | 2003/01/15/big/img_201 1533 | 2003/01/16/big/img_673 1534 | 2002/09/02/big/img_15988 1535 | 2002/07/29/big/img_1306 1536 | 2003/01/14/big/img_1072 1537 | 2002/08/30/big/img_18232 1538 | 2002/08/05/big/img_3711 1539 | 2002/07/23/big/img_775 1540 | 2002/08/01/big/img_16 1541 | 2003/01/16/big/img_630 1542 | 2002/08/22/big/img_695 1543 | 2002/08/14/big/img_51 1544 | 2002/08/14/big/img_782 1545 | 2002/08/24/big/img_742 1546 | 2003/01/14/big/img_512 1547 | 2003/01/15/big/img_1183 1548 | 2003/01/15/big/img_714 1549 | 2002/08/01/big/img_2078 1550 | 2002/07/31/big/img_682 1551 | 2002/09/02/big/img_15687 1552 | 2002/07/26/big/img_518 1553 | 2002/08/27/big/img_19676 1554 | 2002/09/02/big/img_15969 1555 | 2002/08/02/big/img_931 1556 | 2002/08/25/big/img_508 1557 | 2002/08/29/big/img_18616 1558 | 2002/07/22/big/img_839 1559 | 2002/07/28/big/img_313 1560 | 2003/01/14/big/img_155 1561 | 2002/08/02/big/img_1105 1562 | 2002/08/09/big/img_53 1563 | 2002/08/16/big/img_469 1564 | 2002/08/15/big/img_502 1565 | 2002/08/20/big/img_575 1566 | 2002/07/25/big/img_138 1567 | 2003/01/16/big/img_579 1568 | 2002/07/19/big/img_352 1569 | 2003/01/14/big/img_762 1570 | 2003/01/01/big/img_588 1571 | 2002/08/02/big/img_981 1572 | 2002/08/21/big/img_447 1573 | 2002/09/01/big/img_16151 1574 | 2003/01/14/big/img_769 1575 | 2002/08/23/big/img_461 1576 | 2002/08/17/big/img_240 1577 | 2002/09/02/big/img_15220 1578 | 2002/07/19/big/img_408 1579 | 2002/09/02/big/img_15496 1580 | 2002/07/29/big/img_758 1581 | 2002/08/28/big/img_19392 1582 | 2002/08/06/big/img_2723 1583 | 2002/08/31/big/img_17752 1584 | 
2002/08/23/big/img_469 1585 | 2002/08/13/big/img_515 1586 | 2002/09/02/big/img_15551 1587 | 2002/08/03/big/img_462 1588 | 2002/07/24/big/img_613 1589 | 2002/07/22/big/img_61 1590 | 2002/08/08/big/img_171 1591 | 2002/08/21/big/img_177 1592 | 2003/01/14/big/img_105 1593 | 2002/08/02/big/img_1017 1594 | 2002/08/22/big/img_106 1595 | 2002/07/27/big/img_542 1596 | 2002/07/21/big/img_665 1597 | 2002/07/23/big/img_595 1598 | 2002/08/04/big/img_657 1599 | 2002/08/29/big/img_19002 1600 | 2003/01/15/big/img_550 1601 | 2002/08/14/big/img_662 1602 | 2002/07/20/big/img_425 1603 | 2002/08/30/big/img_18528 1604 | 2002/07/26/big/img_611 1605 | 2002/07/22/big/img_849 1606 | 2002/08/07/big/img_1655 1607 | 2002/08/21/big/img_638 1608 | 2003/01/17/big/img_732 1609 | 2003/01/01/big/img_496 1610 | 2002/08/18/big/img_713 1611 | 2002/08/08/big/img_109 1612 | 2002/07/27/big/img_1008 1613 | 2002/07/20/big/img_559 1614 | 2002/08/16/big/img_699 1615 | 2002/08/31/big/img_17702 1616 | 2002/07/31/big/img_1013 1617 | 2002/08/01/big/img_2027 1618 | 2002/08/02/big/img_1001 1619 | 2002/08/03/big/img_210 1620 | 2002/08/01/big/img_2087 1621 | 2003/01/14/big/img_199 1622 | 2002/07/29/big/img_48 1623 | 2002/07/19/big/img_727 1624 | 2002/08/09/big/img_249 1625 | 2002/08/04/big/img_632 1626 | 2002/08/22/big/img_620 1627 | 2003/01/01/big/img_457 1628 | 2002/08/05/big/img_3223 1629 | 2002/07/27/big/img_240 1630 | 2002/07/25/big/img_797 1631 | 2002/08/13/big/img_430 1632 | 2002/07/25/big/img_615 1633 | 2002/08/12/big/img_28 1634 | 2002/07/30/big/img_220 1635 | 2002/07/24/big/img_89 1636 | 2002/08/21/big/img_357 1637 | 2002/08/09/big/img_590 1638 | 2003/01/13/big/img_525 1639 | 2002/08/17/big/img_818 1640 | 2003/01/02/big/img_7 1641 | 2002/07/26/big/img_636 1642 | 2003/01/13/big/img_1122 1643 | 2002/07/23/big/img_810 1644 | 2002/08/20/big/img_888 1645 | 2002/07/27/big/img_3 1646 | 2002/08/15/big/img_451 1647 | 2002/09/02/big/img_15787 1648 | 2002/07/31/big/img_281 1649 | 2002/08/05/big/img_3274 1650 | 2002/08/07/big/img_1254 1651 | 2002/07/31/big/img_27 1652 | 2002/08/01/big/img_1366 1653 | 2002/07/30/big/img_182 1654 | 2002/08/27/big/img_19690 1655 | 2002/07/29/big/img_68 1656 | 2002/08/23/big/img_754 1657 | 2002/07/30/big/img_540 1658 | 2002/08/27/big/img_20063 1659 | 2002/08/14/big/img_471 1660 | 2002/08/02/big/img_615 1661 | 2002/07/30/big/img_186 1662 | 2002/08/25/big/img_150 1663 | 2002/07/27/big/img_626 1664 | 2002/07/20/big/img_225 1665 | 2003/01/15/big/img_1252 1666 | 2002/07/19/big/img_367 1667 | 2003/01/15/big/img_582 1668 | 2002/08/09/big/img_572 1669 | 2002/08/08/big/img_428 1670 | 2003/01/15/big/img_639 1671 | 2002/08/28/big/img_19245 1672 | 2002/07/24/big/img_321 1673 | 2002/08/02/big/img_662 1674 | 2002/08/08/big/img_1033 1675 | 2003/01/17/big/img_867 1676 | 2002/07/22/big/img_652 1677 | 2003/01/14/big/img_224 1678 | 2002/08/18/big/img_49 1679 | 2002/07/26/big/img_46 1680 | 2002/08/31/big/img_18021 1681 | 2002/07/25/big/img_151 1682 | 2002/08/23/big/img_540 1683 | 2002/08/25/big/img_693 1684 | 2002/07/23/big/img_340 1685 | 2002/07/28/big/img_117 1686 | 2002/09/02/big/img_15768 1687 | 2002/08/26/big/img_562 1688 | 2002/07/24/big/img_480 1689 | 2003/01/15/big/img_341 1690 | 2002/08/10/big/img_783 1691 | 2002/08/20/big/img_132 1692 | 2003/01/14/big/img_370 1693 | 2002/07/20/big/img_720 1694 | 2002/08/03/big/img_144 1695 | 2002/08/20/big/img_538 1696 | 2002/08/01/big/img_1745 1697 | 2002/08/11/big/img_683 1698 | 2002/08/03/big/img_328 1699 | 2002/08/10/big/img_793 1700 | 2002/08/14/big/img_689 1701 | 
2002/08/02/big/img_162 1702 | 2003/01/17/big/img_411 1703 | 2002/07/31/big/img_361 1704 | 2002/08/15/big/img_289 1705 | 2002/08/08/big/img_254 1706 | 2002/08/15/big/img_996 1707 | 2002/08/20/big/img_785 1708 | 2002/07/24/big/img_511 1709 | 2002/08/06/big/img_2614 1710 | 2002/08/29/big/img_18733 1711 | 2002/08/17/big/img_78 1712 | 2002/07/30/big/img_378 1713 | 2002/08/31/big/img_17947 1714 | 2002/08/26/big/img_88 1715 | 2002/07/30/big/img_558 1716 | 2002/08/02/big/img_67 1717 | 2003/01/14/big/img_325 1718 | 2002/07/29/big/img_1357 1719 | 2002/07/19/big/img_391 1720 | 2002/07/30/big/img_307 1721 | 2003/01/13/big/img_219 1722 | 2002/07/24/big/img_807 1723 | 2002/08/23/big/img_543 1724 | 2002/08/29/big/img_18620 1725 | 2002/07/22/big/img_769 1726 | 2002/08/26/big/img_503 1727 | 2002/07/30/big/img_78 1728 | 2002/08/14/big/img_1036 1729 | 2002/08/09/big/img_58 1730 | 2002/07/24/big/img_616 1731 | 2002/08/02/big/img_464 1732 | 2002/07/26/big/img_576 1733 | 2002/07/22/big/img_273 1734 | 2003/01/16/big/img_470 1735 | 2002/07/29/big/img_329 1736 | 2002/07/30/big/img_1086 1737 | 2002/07/31/big/img_353 1738 | 2002/09/02/big/img_15275 1739 | 2003/01/17/big/img_555 1740 | 2002/08/26/big/img_212 1741 | 2002/08/01/big/img_1692 1742 | 2003/01/15/big/img_600 1743 | 2002/07/29/big/img_825 1744 | 2002/08/08/big/img_68 1745 | 2002/08/10/big/img_719 1746 | 2002/07/31/big/img_636 1747 | 2002/07/29/big/img_325 1748 | 2002/07/21/big/img_515 1749 | 2002/07/22/big/img_705 1750 | 2003/01/13/big/img_818 1751 | 2002/08/09/big/img_486 1752 | 2002/08/22/big/img_141 1753 | 2002/07/22/big/img_303 1754 | 2002/08/09/big/img_393 1755 | 2002/07/29/big/img_963 1756 | 2002/08/02/big/img_1215 1757 | 2002/08/19/big/img_674 1758 | 2002/08/12/big/img_690 1759 | 2002/08/21/big/img_637 1760 | 2002/08/21/big/img_841 1761 | 2002/08/24/big/img_71 1762 | 2002/07/25/big/img_596 1763 | 2002/07/24/big/img_864 1764 | 2002/08/18/big/img_293 1765 | 2003/01/14/big/img_657 1766 | 2002/08/15/big/img_411 1767 | 2002/08/16/big/img_348 1768 | 2002/08/05/big/img_3157 1769 | 2002/07/20/big/img_663 1770 | 2003/01/13/big/img_654 1771 | 2003/01/16/big/img_433 1772 | 2002/08/30/big/img_18200 1773 | 2002/08/12/big/img_226 1774 | 2003/01/16/big/img_491 1775 | 2002/08/08/big/img_666 1776 | 2002/07/19/big/img_576 1777 | 2003/01/15/big/img_776 1778 | 2003/01/16/big/img_899 1779 | 2002/07/19/big/img_397 1780 | 2002/08/14/big/img_44 1781 | 2003/01/15/big/img_762 1782 | 2002/08/02/big/img_982 1783 | 2002/09/02/big/img_15234 1784 | 2002/08/17/big/img_556 1785 | 2002/08/21/big/img_410 1786 | 2002/08/21/big/img_386 1787 | 2002/07/19/big/img_690 1788 | 2002/08/05/big/img_3052 1789 | 2002/08/14/big/img_219 1790 | 2002/08/16/big/img_273 1791 | 2003/01/15/big/img_752 1792 | 2002/08/08/big/img_184 1793 | 2002/07/31/big/img_743 1794 | 2002/08/23/big/img_338 1795 | 2003/01/14/big/img_1055 1796 | 2002/08/05/big/img_3405 1797 | 2003/01/15/big/img_17 1798 | 2002/08/03/big/img_141 1799 | 2002/08/14/big/img_549 1800 | 2002/07/27/big/img_1034 1801 | 2002/07/31/big/img_932 1802 | 2002/08/30/big/img_18487 1803 | 2002/09/02/big/img_15814 1804 | 2002/08/01/big/img_2086 1805 | 2002/09/01/big/img_16535 1806 | 2002/07/22/big/img_500 1807 | 2003/01/13/big/img_400 1808 | 2002/08/25/big/img_607 1809 | 2002/08/30/big/img_18384 1810 | 2003/01/14/big/img_951 1811 | 2002/08/13/big/img_1150 1812 | 2002/08/08/big/img_1022 1813 | 2002/08/10/big/img_428 1814 | 2002/08/28/big/img_19242 1815 | 2002/08/05/big/img_3098 1816 | 2002/07/23/big/img_400 1817 | 2002/08/26/big/img_365 1818 | 
2002/07/20/big/img_318 1819 | 2002/08/13/big/img_740 1820 | 2003/01/16/big/img_37 1821 | 2002/08/26/big/img_274 1822 | 2002/08/02/big/img_205 1823 | 2002/08/21/big/img_695 1824 | 2002/08/06/big/img_2289 1825 | 2002/08/20/big/img_794 1826 | 2002/08/18/big/img_438 1827 | 2002/08/07/big/img_1380 1828 | 2002/08/02/big/img_737 1829 | 2002/08/07/big/img_1651 1830 | 2002/08/15/big/img_1238 1831 | 2002/08/01/big/img_1681 1832 | 2002/08/06/big/img_3017 1833 | 2002/07/23/big/img_706 1834 | 2002/07/31/big/img_392 1835 | 2002/08/09/big/img_539 1836 | 2002/07/29/big/img_835 1837 | 2002/08/26/big/img_723 1838 | 2002/08/28/big/img_19235 1839 | 2003/01/16/big/img_353 1840 | 2002/08/10/big/img_150 1841 | 2002/08/29/big/img_19025 1842 | 2002/08/21/big/img_310 1843 | 2002/08/10/big/img_823 1844 | 2002/07/26/big/img_981 1845 | 2002/08/11/big/img_288 1846 | 2002/08/19/big/img_534 1847 | 2002/08/21/big/img_300 1848 | 2002/07/31/big/img_49 1849 | 2002/07/30/big/img_469 1850 | 2002/08/28/big/img_19197 1851 | 2002/08/25/big/img_205 1852 | 2002/08/10/big/img_390 1853 | 2002/08/23/big/img_291 1854 | 2002/08/26/big/img_230 1855 | 2002/08/18/big/img_76 1856 | 2002/07/23/big/img_409 1857 | 2002/08/14/big/img_1053 1858 | 2003/01/14/big/img_291 1859 | 2002/08/10/big/img_503 1860 | 2002/08/27/big/img_19928 1861 | 2002/08/03/big/img_563 1862 | 2002/08/17/big/img_250 1863 | 2002/08/06/big/img_2381 1864 | 2002/08/17/big/img_948 1865 | 2002/08/06/big/img_2710 1866 | 2002/07/22/big/img_696 1867 | 2002/07/31/big/img_670 1868 | 2002/08/12/big/img_594 1869 | 2002/07/29/big/img_624 1870 | 2003/01/17/big/img_934 1871 | 2002/08/03/big/img_584 1872 | 2002/08/22/big/img_1003 1873 | 2002/08/05/big/img_3396 1874 | 2003/01/13/big/img_570 1875 | 2002/08/02/big/img_219 1876 | 2002/09/02/big/img_15774 1877 | 2002/08/16/big/img_818 1878 | 2002/08/23/big/img_402 1879 | 2003/01/14/big/img_552 1880 | 2002/07/29/big/img_71 1881 | 2002/08/05/big/img_3592 1882 | 2002/08/16/big/img_80 1883 | 2002/07/27/big/img_672 1884 | 2003/01/13/big/img_470 1885 | 2003/01/16/big/img_702 1886 | 2002/09/01/big/img_16130 1887 | 2002/08/08/big/img_240 1888 | 2002/09/01/big/img_16338 1889 | 2002/07/26/big/img_312 1890 | 2003/01/14/big/img_538 1891 | 2002/07/20/big/img_695 1892 | 2002/08/30/big/img_18098 1893 | 2002/08/25/big/img_259 1894 | 2002/08/16/big/img_1042 1895 | 2002/08/09/big/img_837 1896 | 2002/08/31/big/img_17760 1897 | 2002/07/31/big/img_14 1898 | 2002/08/09/big/img_361 1899 | 2003/01/16/big/img_107 1900 | 2002/08/14/big/img_124 1901 | 2002/07/19/big/img_463 1902 | 2003/01/15/big/img_275 1903 | 2002/07/25/big/img_1151 1904 | 2002/07/29/big/img_1501 1905 | 2002/08/27/big/img_19889 1906 | 2002/08/29/big/img_18603 1907 | 2003/01/17/big/img_601 1908 | 2002/08/25/big/img_355 1909 | 2002/08/08/big/img_297 1910 | 2002/08/20/big/img_290 1911 | 2002/07/31/big/img_195 1912 | 2003/01/01/big/img_336 1913 | 2002/08/18/big/img_369 1914 | 2002/07/25/big/img_621 1915 | 2002/08/11/big/img_508 1916 | 2003/01/14/big/img_458 1917 | 2003/01/15/big/img_795 1918 | 2002/08/12/big/img_498 1919 | 2002/08/01/big/img_1734 1920 | 2002/08/02/big/img_246 1921 | 2002/08/16/big/img_565 1922 | 2002/08/11/big/img_475 1923 | 2002/08/22/big/img_408 1924 | 2002/07/28/big/img_78 1925 | 2002/07/21/big/img_81 1926 | 2003/01/14/big/img_697 1927 | 2002/08/14/big/img_661 1928 | 2002/08/15/big/img_507 1929 | 2002/08/19/big/img_55 1930 | 2002/07/22/big/img_152 1931 | 2003/01/14/big/img_470 1932 | 2002/08/03/big/img_379 1933 | 2002/08/22/big/img_506 1934 | 2003/01/16/big/img_966 1935 | 
2002/08/18/big/img_698 1936 | 2002/08/24/big/img_528 1937 | 2002/08/23/big/img_10 1938 | 2002/08/01/big/img_1655 1939 | 2002/08/22/big/img_953 1940 | 2002/07/19/big/img_630 1941 | 2002/07/22/big/img_889 1942 | 2002/08/16/big/img_351 1943 | 2003/01/16/big/img_83 1944 | 2002/07/19/big/img_805 1945 | 2002/08/14/big/img_704 1946 | 2002/07/19/big/img_389 1947 | 2002/08/31/big/img_17765 1948 | 2002/07/29/big/img_606 1949 | 2003/01/17/big/img_939 1950 | 2002/09/02/big/img_15081 1951 | 2002/08/21/big/img_181 1952 | 2002/07/29/big/img_1321 1953 | 2002/07/21/big/img_497 1954 | 2002/07/20/big/img_539 1955 | 2002/08/24/big/img_119 1956 | 2002/08/01/big/img_1281 1957 | 2002/07/26/big/img_207 1958 | 2002/07/26/big/img_432 1959 | 2002/07/27/big/img_1006 1960 | 2002/08/05/big/img_3087 1961 | 2002/08/14/big/img_252 1962 | 2002/08/14/big/img_798 1963 | 2002/07/24/big/img_538 1964 | 2002/09/02/big/img_15507 1965 | 2002/08/08/big/img_901 1966 | 2003/01/14/big/img_557 1967 | 2002/08/07/big/img_1819 1968 | 2002/08/04/big/img_470 1969 | 2002/08/01/big/img_1504 1970 | 2002/08/16/big/img_1070 1971 | 2002/08/16/big/img_372 1972 | 2002/08/23/big/img_416 1973 | 2002/08/30/big/img_18208 1974 | 2002/08/01/big/img_2043 1975 | 2002/07/22/big/img_385 1976 | 2002/08/22/big/img_466 1977 | 2002/08/21/big/img_869 1978 | 2002/08/28/big/img_19429 1979 | 2002/08/02/big/img_770 1980 | 2002/07/23/big/img_433 1981 | 2003/01/14/big/img_13 1982 | 2002/07/27/big/img_953 1983 | 2002/09/02/big/img_15728 1984 | 2002/08/01/big/img_1361 1985 | 2002/08/29/big/img_18897 1986 | 2002/08/26/big/img_534 1987 | 2002/08/11/big/img_121 1988 | 2002/08/26/big/img_20130 1989 | 2002/07/31/big/img_363 1990 | 2002/08/13/big/img_978 1991 | 2002/07/25/big/img_835 1992 | 2002/08/02/big/img_906 1993 | 2003/01/14/big/img_548 1994 | 2002/07/30/big/img_80 1995 | 2002/07/26/big/img_982 1996 | 2003/01/16/big/img_99 1997 | 2002/08/19/big/img_362 1998 | 2002/08/24/big/img_376 1999 | 2002/08/07/big/img_1264 2000 | 2002/07/27/big/img_938 2001 | 2003/01/17/big/img_535 2002 | 2002/07/26/big/img_457 2003 | 2002/08/08/big/img_848 2004 | 2003/01/15/big/img_859 2005 | 2003/01/15/big/img_622 2006 | 2002/07/30/big/img_403 2007 | 2002/07/29/big/img_217 2008 | 2002/07/26/big/img_891 2009 | 2002/07/24/big/img_70 2010 | 2002/08/25/big/img_619 2011 | 2002/08/05/big/img_3375 2012 | 2002/08/01/big/img_2160 2013 | 2002/08/06/big/img_2227 2014 | 2003/01/14/big/img_117 2015 | 2002/08/14/big/img_227 2016 | 2002/08/13/big/img_565 2017 | 2002/08/19/big/img_625 2018 | 2002/08/03/big/img_812 2019 | 2002/07/24/big/img_41 2020 | 2002/08/16/big/img_235 2021 | 2002/07/29/big/img_759 2022 | 2002/07/21/big/img_433 2023 | 2002/07/29/big/img_190 2024 | 2003/01/16/big/img_435 2025 | 2003/01/13/big/img_708 2026 | 2002/07/30/big/img_57 2027 | 2002/08/22/big/img_162 2028 | 2003/01/01/big/img_558 2029 | 2003/01/15/big/img_604 2030 | 2002/08/16/big/img_935 2031 | 2002/08/20/big/img_394 2032 | 2002/07/28/big/img_465 2033 | 2002/09/02/big/img_15534 2034 | 2002/08/16/big/img_87 2035 | 2002/07/22/big/img_469 2036 | 2002/08/12/big/img_245 2037 | 2003/01/13/big/img_236 2038 | 2002/08/06/big/img_2736 2039 | 2002/08/03/big/img_348 2040 | 2003/01/14/big/img_218 2041 | 2002/07/26/big/img_232 2042 | 2003/01/15/big/img_244 2043 | 2002/07/25/big/img_1121 2044 | 2002/08/01/big/img_1484 2045 | 2002/07/26/big/img_541 2046 | 2002/08/07/big/img_1244 2047 | 2002/07/31/big/img_3 2048 | 2002/08/30/big/img_18437 2049 | 2002/08/29/big/img_19094 2050 | 2002/08/01/big/img_1355 2051 | 2002/08/19/big/img_338 2052 | 
2002/07/19/big/img_255 2053 | 2002/07/21/big/img_76 2054 | 2002/08/25/big/img_199 2055 | 2002/08/12/big/img_740 2056 | 2002/07/30/big/img_852 2057 | 2002/08/15/big/img_599 2058 | 2002/08/23/big/img_254 2059 | 2002/08/19/big/img_125 2060 | 2002/07/24/big/img_2 2061 | 2002/08/04/big/img_145 2062 | 2002/08/05/big/img_3137 2063 | 2002/07/28/big/img_463 2064 | 2003/01/14/big/img_801 2065 | 2002/07/23/big/img_366 2066 | 2002/08/26/big/img_600 2067 | 2002/08/26/big/img_649 2068 | 2002/09/02/big/img_15849 2069 | 2002/07/26/big/img_248 2070 | 2003/01/13/big/img_200 2071 | 2002/08/07/big/img_1794 2072 | 2002/08/31/big/img_17270 2073 | 2002/08/23/big/img_608 2074 | 2003/01/13/big/img_837 2075 | 2002/08/23/big/img_581 2076 | 2002/08/20/big/img_754 2077 | 2002/08/18/big/img_183 2078 | 2002/08/20/big/img_328 2079 | 2002/07/22/big/img_494 2080 | 2002/07/29/big/img_399 2081 | 2002/08/28/big/img_19284 2082 | 2002/08/08/big/img_566 2083 | 2002/07/25/big/img_376 2084 | 2002/07/23/big/img_138 2085 | 2002/07/25/big/img_435 2086 | 2002/08/17/big/img_685 2087 | 2002/07/19/big/img_90 2088 | 2002/07/20/big/img_716 2089 | 2002/08/31/big/img_17458 2090 | 2002/08/26/big/img_461 2091 | 2002/07/25/big/img_355 2092 | 2002/08/06/big/img_2152 2093 | 2002/07/27/big/img_932 2094 | 2002/07/23/big/img_232 2095 | 2002/08/08/big/img_1020 2096 | 2002/07/31/big/img_366 2097 | 2002/08/06/big/img_2667 2098 | 2002/08/21/big/img_465 2099 | 2002/08/15/big/img_305 2100 | 2002/08/02/big/img_247 2101 | 2002/07/28/big/img_46 2102 | 2002/08/27/big/img_19922 2103 | 2002/08/23/big/img_643 2104 | 2003/01/13/big/img_624 2105 | 2002/08/23/big/img_625 2106 | 2002/08/05/big/img_3787 2107 | 2003/01/13/big/img_627 2108 | 2002/09/01/big/img_16381 2109 | 2002/08/05/big/img_3668 2110 | 2002/07/21/big/img_535 2111 | 2002/08/27/big/img_19680 2112 | 2002/07/22/big/img_413 2113 | 2002/07/29/big/img_481 2114 | 2003/01/15/big/img_496 2115 | 2002/07/23/big/img_701 2116 | 2002/08/29/big/img_18670 2117 | 2002/07/28/big/img_319 2118 | 2003/01/14/big/img_517 2119 | 2002/07/26/big/img_256 2120 | 2003/01/16/big/img_593 2121 | 2002/07/30/big/img_956 2122 | 2002/07/30/big/img_667 2123 | 2002/07/25/big/img_100 2124 | 2002/08/11/big/img_570 2125 | 2002/07/26/big/img_745 2126 | 2002/08/04/big/img_834 2127 | 2002/08/25/big/img_521 2128 | 2002/08/01/big/img_2148 2129 | 2002/09/02/big/img_15183 2130 | 2002/08/22/big/img_514 2131 | 2002/08/23/big/img_477 2132 | 2002/07/23/big/img_336 2133 | 2002/07/26/big/img_481 2134 | 2002/08/20/big/img_409 2135 | 2002/07/23/big/img_918 2136 | 2002/08/09/big/img_474 2137 | 2002/08/02/big/img_929 2138 | 2002/08/31/big/img_17932 2139 | 2002/08/19/big/img_161 2140 | 2002/08/09/big/img_667 2141 | 2002/07/31/big/img_805 2142 | 2002/09/02/big/img_15678 2143 | 2002/08/31/big/img_17509 2144 | 2002/08/29/big/img_18998 2145 | 2002/07/23/big/img_301 2146 | 2002/08/07/big/img_1612 2147 | 2002/08/06/big/img_2472 2148 | 2002/07/23/big/img_466 2149 | 2002/08/27/big/img_19634 2150 | 2003/01/16/big/img_16 2151 | 2002/08/14/big/img_193 2152 | 2002/08/21/big/img_340 2153 | 2002/08/27/big/img_19799 2154 | 2002/08/01/big/img_1345 2155 | 2002/08/07/big/img_1448 2156 | 2002/08/11/big/img_324 2157 | 2003/01/16/big/img_754 2158 | 2002/08/13/big/img_418 2159 | 2003/01/16/big/img_544 2160 | 2002/08/19/big/img_135 2161 | 2002/08/10/big/img_455 2162 | 2002/08/10/big/img_693 2163 | 2002/08/31/big/img_17967 2164 | 2002/08/28/big/img_19229 2165 | 2002/08/04/big/img_811 2166 | 2002/09/01/big/img_16225 2167 | 2003/01/16/big/img_428 2168 | 2002/09/02/big/img_15295 2169 | 
2002/07/26/big/img_108 2170 | 2002/07/21/big/img_477 2171 | 2002/08/07/big/img_1354 2172 | 2002/08/23/big/img_246 2173 | 2002/08/16/big/img_652 2174 | 2002/07/27/big/img_553 2175 | 2002/07/31/big/img_346 2176 | 2002/08/04/big/img_537 2177 | 2002/08/08/big/img_498 2178 | 2002/08/29/big/img_18956 2179 | 2003/01/13/big/img_922 2180 | 2002/08/31/big/img_17425 2181 | 2002/07/26/big/img_438 2182 | 2002/08/19/big/img_185 2183 | 2003/01/16/big/img_33 2184 | 2002/08/10/big/img_252 2185 | 2002/07/29/big/img_598 2186 | 2002/08/27/big/img_19820 2187 | 2002/08/06/big/img_2664 2188 | 2002/08/20/big/img_705 2189 | 2003/01/14/big/img_816 2190 | 2002/08/03/big/img_552 2191 | 2002/07/25/big/img_561 2192 | 2002/07/25/big/img_934 2193 | 2002/08/01/big/img_1893 2194 | 2003/01/14/big/img_746 2195 | 2003/01/16/big/img_519 2196 | 2002/08/03/big/img_681 2197 | 2002/07/24/big/img_808 2198 | 2002/08/14/big/img_803 2199 | 2002/08/25/big/img_155 2200 | 2002/07/30/big/img_1107 2201 | 2002/08/29/big/img_18882 2202 | 2003/01/15/big/img_598 2203 | 2002/08/19/big/img_122 2204 | 2002/07/30/big/img_428 2205 | 2002/07/24/big/img_684 2206 | 2002/08/22/big/img_192 2207 | 2002/08/22/big/img_543 2208 | 2002/08/07/big/img_1318 2209 | 2002/08/18/big/img_25 2210 | 2002/07/26/big/img_583 2211 | 2002/07/20/big/img_464 2212 | 2002/08/19/big/img_664 2213 | 2002/08/24/big/img_861 2214 | 2002/09/01/big/img_16136 2215 | 2002/08/22/big/img_400 2216 | 2002/08/12/big/img_445 2217 | 2003/01/14/big/img_174 2218 | 2002/08/27/big/img_19677 2219 | 2002/08/31/big/img_17214 2220 | 2002/08/30/big/img_18175 2221 | 2003/01/17/big/img_402 2222 | 2002/08/06/big/img_2396 2223 | 2002/08/18/big/img_448 2224 | 2002/08/21/big/img_165 2225 | 2002/08/31/big/img_17609 2226 | 2003/01/01/big/img_151 2227 | 2002/08/26/big/img_372 2228 | 2002/09/02/big/img_15994 2229 | 2002/07/26/big/img_660 2230 | 2002/09/02/big/img_15197 2231 | 2002/07/29/big/img_258 2232 | 2002/08/30/big/img_18525 2233 | 2003/01/13/big/img_368 2234 | 2002/07/29/big/img_1538 2235 | 2002/07/21/big/img_787 2236 | 2002/08/18/big/img_152 2237 | 2002/08/06/big/img_2379 2238 | 2003/01/17/big/img_864 2239 | 2002/08/27/big/img_19998 2240 | 2002/08/01/big/img_1634 2241 | 2002/07/25/big/img_414 2242 | 2002/08/22/big/img_627 2243 | 2002/08/07/big/img_1669 2244 | 2002/08/16/big/img_1052 2245 | 2002/08/31/big/img_17796 2246 | 2002/08/18/big/img_199 2247 | 2002/09/02/big/img_15147 2248 | 2002/08/09/big/img_460 2249 | 2002/08/14/big/img_581 2250 | 2002/08/30/big/img_18286 2251 | 2002/07/26/big/img_337 2252 | 2002/08/18/big/img_589 2253 | 2003/01/14/big/img_866 2254 | 2002/07/20/big/img_624 2255 | 2002/08/01/big/img_1801 2256 | 2002/07/24/big/img_683 2257 | 2002/08/09/big/img_725 2258 | 2003/01/14/big/img_34 2259 | 2002/07/30/big/img_144 2260 | 2002/07/30/big/img_706 2261 | 2002/08/08/big/img_394 2262 | 2002/08/19/big/img_619 2263 | 2002/08/06/big/img_2703 2264 | 2002/08/29/big/img_19034 2265 | 2002/07/24/big/img_67 2266 | 2002/08/27/big/img_19841 2267 | 2002/08/19/big/img_427 2268 | 2003/01/14/big/img_333 2269 | 2002/09/01/big/img_16406 2270 | 2002/07/19/big/img_882 2271 | 2002/08/17/big/img_238 2272 | 2003/01/14/big/img_739 2273 | 2002/07/22/big/img_151 2274 | 2002/08/21/big/img_743 2275 | 2002/07/25/big/img_1048 2276 | 2002/07/30/big/img_395 2277 | 2003/01/13/big/img_584 2278 | 2002/08/13/big/img_742 2279 | 2002/08/13/big/img_1168 2280 | 2003/01/14/big/img_147 2281 | 2002/07/26/big/img_803 2282 | 2002/08/05/big/img_3298 2283 | 2002/08/07/big/img_1451 2284 | 2002/08/16/big/img_424 2285 | 2002/07/29/big/img_1069 
2286 | 2002/09/01/big/img_16735 2287 | 2002/07/21/big/img_637 2288 | 2003/01/14/big/img_585 2289 | 2002/08/02/big/img_358 2290 | 2003/01/13/big/img_358 2291 | 2002/08/14/big/img_198 2292 | 2002/08/17/big/img_935 2293 | 2002/08/04/big/img_42 2294 | 2002/08/30/big/img_18245 2295 | 2002/07/25/big/img_158 2296 | 2002/08/22/big/img_744 2297 | 2002/08/06/big/img_2291 2298 | 2002/08/05/big/img_3044 2299 | 2002/07/30/big/img_272 2300 | 2002/08/23/big/img_641 2301 | 2002/07/24/big/img_797 2302 | 2002/07/30/big/img_392 2303 | 2003/01/14/big/img_447 2304 | 2002/07/31/big/img_898 2305 | 2002/08/06/big/img_2812 2306 | 2002/08/13/big/img_564 2307 | 2002/07/22/big/img_43 2308 | 2002/07/26/big/img_634 2309 | 2002/07/19/big/img_843 2310 | 2002/08/26/big/img_58 2311 | 2002/07/21/big/img_375 2312 | 2002/08/25/big/img_729 2313 | 2002/07/19/big/img_561 2314 | 2003/01/15/big/img_884 2315 | 2002/07/25/big/img_891 2316 | 2002/08/09/big/img_558 2317 | 2002/08/26/big/img_587 2318 | 2002/08/13/big/img_1146 2319 | 2002/09/02/big/img_15153 2320 | 2002/07/26/big/img_316 2321 | 2002/08/01/big/img_1940 2322 | 2002/08/26/big/img_90 2323 | 2003/01/13/big/img_347 2324 | 2002/07/25/big/img_520 2325 | 2002/08/29/big/img_18718 2326 | 2002/08/28/big/img_19219 2327 | 2002/08/13/big/img_375 2328 | 2002/07/20/big/img_719 2329 | 2002/08/31/big/img_17431 2330 | 2002/07/28/big/img_192 2331 | 2002/08/26/big/img_259 2332 | 2002/08/18/big/img_484 2333 | 2002/07/29/big/img_580 2334 | 2002/07/26/big/img_84 2335 | 2002/08/02/big/img_302 2336 | 2002/08/31/big/img_17007 2337 | 2003/01/15/big/img_543 2338 | 2002/09/01/big/img_16488 2339 | 2002/08/22/big/img_798 2340 | 2002/07/30/big/img_383 2341 | 2002/08/04/big/img_668 2342 | 2002/08/13/big/img_156 2343 | 2002/08/07/big/img_1353 2344 | 2002/07/25/big/img_281 2345 | 2003/01/14/big/img_587 2346 | 2003/01/15/big/img_524 2347 | 2002/08/19/big/img_726 2348 | 2002/08/21/big/img_709 2349 | 2002/08/26/big/img_465 2350 | 2002/07/31/big/img_658 2351 | 2002/08/28/big/img_19148 2352 | 2002/07/23/big/img_423 2353 | 2002/08/16/big/img_758 2354 | 2002/08/22/big/img_523 2355 | 2002/08/16/big/img_591 2356 | 2002/08/23/big/img_845 2357 | 2002/07/26/big/img_678 2358 | 2002/08/09/big/img_806 2359 | 2002/08/06/big/img_2369 2360 | 2002/07/29/big/img_457 2361 | 2002/07/19/big/img_278 2362 | 2002/08/30/big/img_18107 2363 | 2002/07/26/big/img_444 2364 | 2002/08/20/big/img_278 2365 | 2002/08/26/big/img_92 2366 | 2002/08/26/big/img_257 2367 | 2002/07/25/big/img_266 2368 | 2002/08/05/big/img_3829 2369 | 2002/07/26/big/img_757 2370 | 2002/07/29/big/img_1536 2371 | 2002/08/09/big/img_472 2372 | 2003/01/17/big/img_480 2373 | 2002/08/28/big/img_19355 2374 | 2002/07/26/big/img_97 2375 | 2002/08/06/big/img_2503 2376 | 2002/07/19/big/img_254 2377 | 2002/08/01/big/img_1470 2378 | 2002/08/21/big/img_42 2379 | 2002/08/20/big/img_217 2380 | 2002/08/06/big/img_2459 2381 | 2002/07/19/big/img_552 2382 | 2002/08/13/big/img_717 2383 | 2002/08/12/big/img_586 2384 | 2002/08/20/big/img_411 2385 | 2003/01/13/big/img_768 2386 | 2002/08/07/big/img_1747 2387 | 2002/08/15/big/img_385 2388 | 2002/08/01/big/img_1648 2389 | 2002/08/15/big/img_311 2390 | 2002/08/21/big/img_95 2391 | 2002/08/09/big/img_108 2392 | 2002/08/21/big/img_398 2393 | 2002/08/17/big/img_340 2394 | 2002/08/14/big/img_474 2395 | 2002/08/13/big/img_294 2396 | 2002/08/24/big/img_840 2397 | 2002/08/09/big/img_808 2398 | 2002/08/23/big/img_491 2399 | 2002/07/28/big/img_33 2400 | 2003/01/13/big/img_664 2401 | 2002/08/02/big/img_261 2402 | 2002/08/09/big/img_591 2403 | 
2002/07/26/big/img_309 2404 | 2003/01/14/big/img_372 2405 | 2002/08/19/big/img_581 2406 | 2002/08/19/big/img_168 2407 | 2002/08/26/big/img_422 2408 | 2002/07/24/big/img_106 2409 | 2002/08/01/big/img_1936 2410 | 2002/08/05/big/img_3764 2411 | 2002/08/21/big/img_266 2412 | 2002/08/31/big/img_17968 2413 | 2002/08/01/big/img_1941 2414 | 2002/08/15/big/img_550 2415 | 2002/08/14/big/img_13 2416 | 2002/07/30/big/img_171 2417 | 2003/01/13/big/img_490 2418 | 2002/07/25/big/img_427 2419 | 2002/07/19/big/img_770 2420 | 2002/08/12/big/img_759 2421 | 2003/01/15/big/img_1360 2422 | 2002/08/05/big/img_3692 2423 | 2003/01/16/big/img_30 2424 | 2002/07/25/big/img_1026 2425 | 2002/07/22/big/img_288 2426 | 2002/08/29/big/img_18801 2427 | 2002/07/24/big/img_793 2428 | 2002/08/13/big/img_178 2429 | 2002/08/06/big/img_2322 2430 | 2003/01/14/big/img_560 2431 | 2002/08/18/big/img_408 2432 | 2003/01/16/big/img_915 2433 | 2003/01/16/big/img_679 2434 | 2002/08/07/big/img_1552 2435 | 2002/08/29/big/img_19050 2436 | 2002/08/01/big/img_2172 2437 | 2002/07/31/big/img_30 2438 | 2002/07/30/big/img_1019 2439 | 2002/07/30/big/img_587 2440 | 2003/01/13/big/img_773 2441 | 2002/07/30/big/img_410 2442 | 2002/07/28/big/img_65 2443 | 2002/08/05/big/img_3138 2444 | 2002/07/23/big/img_541 2445 | 2002/08/22/big/img_963 2446 | 2002/07/27/big/img_657 2447 | 2002/07/30/big/img_1051 2448 | 2003/01/16/big/img_150 2449 | 2002/07/31/big/img_519 2450 | 2002/08/01/big/img_1961 2451 | 2002/08/05/big/img_3752 2452 | 2002/07/23/big/img_631 2453 | 2003/01/14/big/img_237 2454 | 2002/07/28/big/img_21 2455 | 2002/07/22/big/img_813 2456 | 2002/08/05/big/img_3563 2457 | 2003/01/17/big/img_620 2458 | 2002/07/19/big/img_523 2459 | 2002/07/30/big/img_904 2460 | 2002/08/29/big/img_18642 2461 | 2002/08/11/big/img_492 2462 | 2002/08/01/big/img_2130 2463 | 2002/07/25/big/img_618 2464 | 2002/08/17/big/img_305 2465 | 2003/01/16/big/img_520 2466 | 2002/07/26/big/img_495 2467 | 2002/08/17/big/img_164 2468 | 2002/08/03/big/img_440 2469 | 2002/07/24/big/img_441 2470 | 2002/08/06/big/img_2146 2471 | 2002/08/11/big/img_558 2472 | 2002/08/02/big/img_545 2473 | 2002/08/31/big/img_18090 2474 | 2003/01/01/big/img_136 2475 | 2002/07/25/big/img_1099 2476 | 2003/01/13/big/img_728 2477 | 2003/01/16/big/img_197 2478 | 2002/07/26/big/img_651 2479 | 2002/08/11/big/img_676 2480 | 2003/01/15/big/img_10 2481 | 2002/08/21/big/img_250 2482 | 2002/08/14/big/img_325 2483 | 2002/08/04/big/img_390 2484 | 2002/07/24/big/img_554 2485 | 2003/01/16/big/img_333 2486 | 2002/07/31/big/img_922 2487 | 2002/09/02/big/img_15586 2488 | 2003/01/16/big/img_184 2489 | 2002/07/22/big/img_766 2490 | 2002/07/21/big/img_608 2491 | 2002/08/07/big/img_1578 2492 | 2002/08/17/big/img_961 2493 | 2002/07/27/big/img_324 2494 | 2002/08/05/big/img_3765 2495 | 2002/08/23/big/img_462 2496 | 2003/01/16/big/img_382 2497 | 2002/08/27/big/img_19838 2498 | 2002/08/01/big/img_1505 2499 | 2002/08/21/big/img_662 2500 | 2002/08/14/big/img_605 2501 | 2002/08/19/big/img_816 2502 | 2002/07/29/big/img_136 2503 | 2002/08/20/big/img_719 2504 | 2002/08/06/big/img_2826 2505 | 2002/08/10/big/img_630 2506 | 2003/01/17/big/img_973 2507 | 2002/08/14/big/img_116 2508 | 2002/08/02/big/img_666 2509 | 2002/08/21/big/img_710 2510 | 2002/08/05/big/img_55 2511 | 2002/07/31/big/img_229 2512 | 2002/08/01/big/img_1549 2513 | 2002/07/23/big/img_432 2514 | 2002/07/21/big/img_430 2515 | 2002/08/21/big/img_549 2516 | 2002/08/08/big/img_985 2517 | 2002/07/20/big/img_610 2518 | 2002/07/23/big/img_978 2519 | 2002/08/23/big/img_219 2520 | 
2002/07/25/big/img_175 2521 | 2003/01/15/big/img_230 2522 | 2002/08/23/big/img_385 2523 | 2002/07/31/big/img_879 2524 | 2002/08/12/big/img_495 2525 | 2002/08/22/big/img_499 2526 | 2002/08/30/big/img_18322 2527 | 2002/08/15/big/img_795 2528 | 2002/08/13/big/img_835 2529 | 2003/01/17/big/img_930 2530 | 2002/07/30/big/img_873 2531 | 2002/08/11/big/img_257 2532 | 2002/07/31/big/img_593 2533 | 2002/08/21/big/img_916 2534 | 2003/01/13/big/img_814 2535 | 2002/07/25/big/img_722 2536 | 2002/08/16/big/img_379 2537 | 2002/07/31/big/img_497 2538 | 2002/07/22/big/img_602 2539 | 2002/08/21/big/img_642 2540 | 2002/08/21/big/img_614 2541 | 2002/08/23/big/img_482 2542 | 2002/07/29/big/img_603 2543 | 2002/08/13/big/img_705 2544 | 2002/07/23/big/img_833 2545 | 2003/01/14/big/img_511 2546 | 2002/07/24/big/img_376 2547 | 2002/08/17/big/img_1030 2548 | 2002/08/05/big/img_3576 2549 | 2002/08/16/big/img_540 2550 | 2002/07/22/big/img_630 2551 | 2002/08/10/big/img_180 2552 | 2002/08/14/big/img_905 2553 | 2002/08/29/big/img_18777 2554 | 2002/08/22/big/img_693 2555 | 2003/01/16/big/img_933 2556 | 2002/08/20/big/img_555 2557 | 2002/08/15/big/img_549 2558 | 2003/01/14/big/img_830 2559 | 2003/01/16/big/img_64 2560 | 2002/08/27/big/img_19670 2561 | 2002/08/22/big/img_729 2562 | 2002/07/27/big/img_981 2563 | 2002/08/09/big/img_458 2564 | 2003/01/17/big/img_884 2565 | 2002/07/25/big/img_639 2566 | 2002/08/31/big/img_18008 2567 | 2002/08/22/big/img_249 2568 | 2002/08/17/big/img_971 2569 | 2002/08/04/big/img_308 2570 | 2002/07/28/big/img_362 2571 | 2002/08/12/big/img_142 2572 | 2002/08/26/big/img_61 2573 | 2002/08/14/big/img_422 2574 | 2002/07/19/big/img_607 2575 | 2003/01/15/big/img_717 2576 | 2002/08/01/big/img_1475 2577 | 2002/08/29/big/img_19061 2578 | 2003/01/01/big/img_346 2579 | 2002/07/20/big/img_315 2580 | 2003/01/15/big/img_756 2581 | 2002/08/15/big/img_879 2582 | 2002/08/08/big/img_615 2583 | 2003/01/13/big/img_431 2584 | 2002/08/05/big/img_3233 2585 | 2002/08/24/big/img_526 2586 | 2003/01/13/big/img_717 2587 | 2002/09/01/big/img_16408 2588 | 2002/07/22/big/img_217 2589 | 2002/07/31/big/img_960 2590 | 2002/08/21/big/img_610 2591 | 2002/08/05/big/img_3753 2592 | 2002/08/03/big/img_151 2593 | 2002/08/21/big/img_267 2594 | 2002/08/01/big/img_2175 2595 | 2002/08/04/big/img_556 2596 | 2002/08/21/big/img_527 2597 | 2002/09/02/big/img_15800 2598 | 2002/07/27/big/img_156 2599 | 2002/07/20/big/img_590 2600 | 2002/08/15/big/img_700 2601 | 2002/08/08/big/img_444 2602 | 2002/07/25/big/img_94 2603 | 2002/07/24/big/img_778 2604 | 2002/08/14/big/img_694 2605 | 2002/07/20/big/img_666 2606 | 2002/08/02/big/img_200 2607 | 2002/08/02/big/img_578 2608 | 2003/01/17/big/img_332 2609 | 2002/09/01/big/img_16352 2610 | 2002/08/27/big/img_19668 2611 | 2002/07/23/big/img_823 2612 | 2002/08/13/big/img_431 2613 | 2003/01/16/big/img_463 2614 | 2002/08/27/big/img_19711 2615 | 2002/08/23/big/img_154 2616 | 2002/07/31/big/img_360 2617 | 2002/08/23/big/img_555 2618 | 2002/08/10/big/img_561 2619 | 2003/01/14/big/img_550 2620 | 2002/08/07/big/img_1370 2621 | 2002/07/30/big/img_1184 2622 | 2002/08/01/big/img_1445 2623 | 2002/08/23/big/img_22 2624 | 2002/07/30/big/img_606 2625 | 2003/01/17/big/img_271 2626 | 2002/08/31/big/img_17316 2627 | 2002/08/16/big/img_973 2628 | 2002/07/26/big/img_77 2629 | 2002/07/20/big/img_788 2630 | 2002/08/06/big/img_2426 2631 | 2002/08/07/big/img_1498 2632 | 2002/08/16/big/img_358 2633 | 2002/08/06/big/img_2851 2634 | 2002/08/12/big/img_359 2635 | 2002/08/01/big/img_1521 2636 | 2002/08/02/big/img_709 2637 | 
2002/08/20/big/img_935 2638 | 2002/08/12/big/img_188 2639 | 2002/08/24/big/img_411 2640 | 2002/08/22/big/img_680 2641 | 2002/08/06/big/img_2480 2642 | 2002/07/20/big/img_627 2643 | 2002/07/30/big/img_214 2644 | 2002/07/25/big/img_354 2645 | 2002/08/02/big/img_636 2646 | 2003/01/15/big/img_661 2647 | 2002/08/07/big/img_1327 2648 | 2002/08/01/big/img_2108 2649 | 2002/08/31/big/img_17919 2650 | 2002/08/29/big/img_18768 2651 | 2002/08/05/big/img_3840 2652 | 2002/07/26/big/img_242 2653 | 2003/01/14/big/img_451 2654 | 2002/08/20/big/img_923 2655 | 2002/08/27/big/img_19908 2656 | 2002/08/16/big/img_282 2657 | 2002/08/19/big/img_440 2658 | 2003/01/01/big/img_230 2659 | 2002/08/08/big/img_212 2660 | 2002/07/20/big/img_443 2661 | 2002/08/25/big/img_635 2662 | 2003/01/13/big/img_1169 2663 | 2002/07/26/big/img_998 2664 | 2002/08/15/big/img_995 2665 | 2002/08/06/big/img_3002 2666 | 2002/07/29/big/img_460 2667 | 2003/01/14/big/img_925 2668 | 2002/07/23/big/img_539 2669 | 2002/08/16/big/img_694 2670 | 2003/01/13/big/img_459 2671 | 2002/07/23/big/img_249 2672 | 2002/08/20/big/img_539 2673 | 2002/08/04/big/img_186 2674 | 2002/08/26/big/img_264 2675 | 2002/07/22/big/img_704 2676 | 2002/08/25/big/img_277 2677 | 2002/08/22/big/img_988 2678 | 2002/07/29/big/img_504 2679 | 2002/08/05/big/img_3600 2680 | 2002/08/30/big/img_18380 2681 | 2003/01/14/big/img_937 2682 | 2002/08/21/big/img_254 2683 | 2002/08/10/big/img_130 2684 | 2002/08/20/big/img_339 2685 | 2003/01/14/big/img_428 2686 | 2002/08/20/big/img_889 2687 | 2002/08/31/big/img_17637 2688 | 2002/07/26/big/img_644 2689 | 2002/09/01/big/img_16776 2690 | 2002/08/06/big/img_2239 2691 | 2002/08/06/big/img_2646 2692 | 2003/01/13/big/img_491 2693 | 2002/08/10/big/img_579 2694 | 2002/08/21/big/img_713 2695 | 2002/08/22/big/img_482 2696 | 2002/07/22/big/img_167 2697 | 2002/07/24/big/img_539 2698 | 2002/08/14/big/img_721 2699 | 2002/07/25/big/img_389 2700 | 2002/09/01/big/img_16591 2701 | 2002/08/13/big/img_543 2702 | 2003/01/14/big/img_432 2703 | 2002/08/09/big/img_287 2704 | 2002/07/26/big/img_126 2705 | 2002/08/23/big/img_412 2706 | 2002/08/15/big/img_1034 2707 | 2002/08/28/big/img_19485 2708 | 2002/07/31/big/img_236 2709 | 2002/07/30/big/img_523 2710 | 2002/07/19/big/img_141 2711 | 2003/01/17/big/img_957 2712 | 2002/08/04/big/img_81 2713 | 2002/07/25/big/img_206 2714 | 2002/08/15/big/img_716 2715 | 2002/08/13/big/img_403 2716 | 2002/08/15/big/img_685 2717 | 2002/07/26/big/img_884 2718 | 2002/07/19/big/img_499 2719 | 2002/07/23/big/img_772 2720 | 2002/07/27/big/img_752 2721 | 2003/01/14/big/img_493 2722 | 2002/08/25/big/img_664 2723 | 2002/07/31/big/img_334 2724 | 2002/08/26/big/img_678 2725 | 2002/09/01/big/img_16541 2726 | 2003/01/14/big/img_347 2727 | 2002/07/23/big/img_187 2728 | 2002/07/30/big/img_1163 2729 | 2002/08/05/big/img_35 2730 | 2002/08/22/big/img_944 2731 | 2002/08/07/big/img_1239 2732 | 2002/07/29/big/img_1215 2733 | 2002/08/03/big/img_312 2734 | 2002/08/05/big/img_3523 2735 | 2002/07/29/big/img_218 2736 | 2002/08/13/big/img_672 2737 | 2002/08/16/big/img_205 2738 | 2002/08/17/big/img_594 2739 | 2002/07/29/big/img_1411 2740 | 2002/07/30/big/img_942 2741 | 2003/01/16/big/img_312 2742 | 2002/08/08/big/img_312 2743 | 2002/07/25/big/img_15 2744 | 2002/08/09/big/img_839 2745 | 2002/08/01/big/img_2069 2746 | 2002/08/31/big/img_17512 2747 | 2002/08/01/big/img_3 2748 | 2002/07/31/big/img_320 2749 | 2003/01/15/big/img_1265 2750 | 2002/08/14/big/img_563 2751 | 2002/07/31/big/img_167 2752 | 2002/08/20/big/img_374 2753 | 2002/08/13/big/img_406 2754 | 
2002/08/08/big/img_625 2755 | 2002/08/02/big/img_314 2756 | 2002/08/27/big/img_19964 2757 | 2002/09/01/big/img_16670 2758 | 2002/07/31/big/img_599 2759 | 2002/08/29/big/img_18906 2760 | 2002/07/24/big/img_373 2761 | 2002/07/26/big/img_513 2762 | 2002/09/02/big/img_15497 2763 | 2002/08/19/big/img_117 2764 | 2003/01/01/big/img_158 2765 | 2002/08/24/big/img_178 2766 | 2003/01/13/big/img_935 2767 | 2002/08/13/big/img_609 2768 | 2002/08/30/big/img_18341 2769 | 2002/08/25/big/img_674 2770 | 2003/01/13/big/img_209 2771 | 2002/08/13/big/img_258 2772 | 2002/08/05/big/img_3543 2773 | 2002/08/07/big/img_1970 2774 | 2002/08/06/big/img_3004 2775 | 2003/01/17/big/img_487 2776 | 2002/08/24/big/img_873 2777 | 2002/08/29/big/img_18730 2778 | 2002/08/09/big/img_375 2779 | 2003/01/16/big/img_751 2780 | 2002/08/02/big/img_603 2781 | 2002/08/19/big/img_325 2782 | 2002/09/01/big/img_16420 2783 | 2002/08/05/big/img_3633 2784 | 2002/08/21/big/img_516 2785 | 2002/07/19/big/img_501 2786 | 2002/07/26/big/img_688 2787 | 2002/07/24/big/img_256 2788 | 2002/07/25/big/img_438 2789 | 2002/07/31/big/img_1017 2790 | 2002/08/22/big/img_512 2791 | 2002/07/21/big/img_543 2792 | 2002/08/08/big/img_223 2793 | 2002/08/19/big/img_189 2794 | 2002/08/12/big/img_630 2795 | 2002/07/30/big/img_958 2796 | 2002/07/28/big/img_208 2797 | 2002/08/31/big/img_17691 2798 | 2002/07/22/big/img_542 2799 | 2002/07/19/big/img_741 2800 | 2002/07/19/big/img_158 2801 | 2002/08/15/big/img_399 2802 | 2002/08/01/big/img_2159 2803 | 2002/08/14/big/img_455 2804 | 2002/08/17/big/img_1011 2805 | 2002/08/26/big/img_744 2806 | 2002/08/12/big/img_624 2807 | 2003/01/17/big/img_821 2808 | 2002/08/16/big/img_980 2809 | 2002/07/28/big/img_281 2810 | 2002/07/25/big/img_171 2811 | 2002/08/03/big/img_116 2812 | 2002/07/22/big/img_467 2813 | 2002/07/31/big/img_750 2814 | 2002/07/26/big/img_435 2815 | 2002/07/19/big/img_822 2816 | 2002/08/13/big/img_626 2817 | 2002/08/11/big/img_344 2818 | 2002/08/02/big/img_473 2819 | 2002/09/01/big/img_16817 2820 | 2002/08/01/big/img_1275 2821 | 2002/08/28/big/img_19270 2822 | 2002/07/23/big/img_607 2823 | 2002/08/09/big/img_316 2824 | 2002/07/29/big/img_626 2825 | 2002/07/24/big/img_824 2826 | 2002/07/22/big/img_342 2827 | 2002/08/08/big/img_794 2828 | 2002/08/07/big/img_1209 2829 | 2002/07/19/big/img_18 2830 | 2002/08/25/big/img_634 2831 | 2002/07/24/big/img_730 2832 | 2003/01/17/big/img_356 2833 | 2002/07/23/big/img_305 2834 | 2002/07/30/big/img_453 2835 | 2003/01/13/big/img_972 2836 | 2002/08/06/big/img_2610 2837 | 2002/08/29/big/img_18920 2838 | 2002/07/31/big/img_123 2839 | 2002/07/26/big/img_979 2840 | 2002/08/24/big/img_635 2841 | 2002/08/05/big/img_3704 2842 | 2002/08/07/big/img_1358 2843 | 2002/07/22/big/img_306 2844 | 2002/08/13/big/img_619 2845 | 2002/08/02/big/img_366 2846 | -------------------------------------------------------------------------------- /retinaface/data/__init__.py: -------------------------------------------------------------------------------- 1 | from .wider_face import WiderFaceDetection, detection_collate 2 | from .data_augment import * 3 | from .config import * 4 | -------------------------------------------------------------------------------- /retinaface/data/config.py: -------------------------------------------------------------------------------- 1 | # config.py 2 | 3 | cfg_mnet = { 4 | 'name': 'mobilenet0.25', 5 | 'min_sizes': [[16, 32], [64, 128], [256, 512]], 6 | 'steps': [8, 16, 32], 7 | 'variance': [0.1, 0.2], 8 | 'clip': False, 9 | 'loc_weight': 2.0, 10 | 'gpu_train': True, 11 | 
'batch_size': 32, 12 | 'ngpu': 1, 13 | 'epoch': 250, 14 | 'decay1': 190, 15 | 'decay2': 220, 16 | 'image_size': 640, 17 | 'pretrain': False, 18 | 'return_layers': {'stage1': 1, 'stage2': 2, 'stage3': 3}, 19 | 'in_channel': 32, 20 | 'out_channel': 64 21 | } 22 | 23 | cfg_re50 = { 24 | 'name': 'Resnet50', 25 | 'min_sizes': [[16, 32], [64, 128], [256, 512]], 26 | 'steps': [8, 16, 32], 27 | 'variance': [0.1, 0.2], 28 | 'clip': False, 29 | 'loc_weight': 2.0, 30 | 'gpu_train': True, 31 | 'batch_size': 24, 32 | 'ngpu': 4, 33 | 'epoch': 100, 34 | 'decay1': 70, 35 | 'decay2': 90, 36 | 'image_size': 840, 37 | 'pretrain': False, 38 | 'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3}, 39 | 'in_channel': 256, 40 | 'out_channel': 256 41 | } 42 | 43 | -------------------------------------------------------------------------------- /retinaface/data/data_augment.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import random 4 | from retinaface.utils.box_utils import matrix_iof 5 | 6 | 7 | def _crop(image, boxes, labels, landm, img_dim): 8 | height, width, _ = image.shape 9 | pad_image_flag = True 10 | 11 | for _ in range(250): 12 | """ 13 | if random.uniform(0, 1) <= 0.2: 14 | scale = 1.0 15 | else: 16 | scale = random.uniform(0.3, 1.0) 17 | """ 18 | PRE_SCALES = [0.3, 0.45, 0.6, 0.8, 1.0] 19 | scale = random.choice(PRE_SCALES) 20 | short_side = min(width, height) 21 | w = int(scale * short_side) 22 | h = w 23 | 24 | if width == w: 25 | l = 0 26 | else: 27 | l = random.randrange(width - w) 28 | if height == h: 29 | t = 0 30 | else: 31 | t = random.randrange(height - h) 32 | roi = np.array((l, t, l + w, t + h)) 33 | 34 | value = matrix_iof(boxes, roi[np.newaxis]) 35 | flag = (value >= 1) 36 | if not flag.any(): 37 | continue 38 | 39 | centers = (boxes[:, :2] + boxes[:, 2:]) / 2 40 | mask_a = np.logical_and(roi[:2] < centers, centers < roi[2:]).all(axis=1) 41 | boxes_t = boxes[mask_a].copy() 42 | labels_t = labels[mask_a].copy() 43 | landms_t = landm[mask_a].copy() 44 | landms_t = landms_t.reshape([-1, 5, 2]) 45 | 46 | if boxes_t.shape[0] == 0: 47 | continue 48 | 49 | image_t = image[roi[1]:roi[3], roi[0]:roi[2]] 50 | 51 | boxes_t[:, :2] = np.maximum(boxes_t[:, :2], roi[:2]) 52 | boxes_t[:, :2] -= roi[:2] 53 | boxes_t[:, 2:] = np.minimum(boxes_t[:, 2:], roi[2:]) 54 | boxes_t[:, 2:] -= roi[:2] 55 | 56 | # landm 57 | landms_t[:, :, :2] = landms_t[:, :, :2] - roi[:2] 58 | landms_t[:, :, :2] = np.maximum(landms_t[:, :, :2], np.array([0, 0])) 59 | landms_t[:, :, :2] = np.minimum(landms_t[:, :, :2], roi[2:] - roi[:2]) 60 | landms_t = landms_t.reshape([-1, 10]) 61 | 62 | 63 | # make sure that the cropped image contains at least one face > 16 pixel at training image scale 64 | b_w_t = (boxes_t[:, 2] - boxes_t[:, 0] + 1) / w * img_dim 65 | b_h_t = (boxes_t[:, 3] - boxes_t[:, 1] + 1) / h * img_dim 66 | mask_b = np.minimum(b_w_t, b_h_t) > 0.0 67 | boxes_t = boxes_t[mask_b] 68 | labels_t = labels_t[mask_b] 69 | landms_t = landms_t[mask_b] 70 | 71 | if boxes_t.shape[0] == 0: 72 | continue 73 | 74 | pad_image_flag = False 75 | 76 | return image_t, boxes_t, labels_t, landms_t, pad_image_flag 77 | return image, boxes, labels, landm, pad_image_flag 78 | 79 | 80 | def _distort(image): 81 | 82 | def _convert(image, alpha=1, beta=0): 83 | tmp = image.astype(float) * alpha + beta 84 | tmp[tmp < 0] = 0 85 | tmp[tmp > 255] = 255 86 | image[:] = tmp 87 | 88 | image = image.copy() 89 | 90 | if random.randrange(2): 91 | 92 | #brightness distortion 93 
| if random.randrange(2): 94 | _convert(image, beta=random.uniform(-32, 32)) 95 | 96 | #contrast distortion 97 | if random.randrange(2): 98 | _convert(image, alpha=random.uniform(0.5, 1.5)) 99 | 100 | image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) 101 | 102 | #saturation distortion 103 | if random.randrange(2): 104 | _convert(image[:, :, 1], alpha=random.uniform(0.5, 1.5)) 105 | 106 | #hue distortion 107 | if random.randrange(2): 108 | tmp = image[:, :, 0].astype(int) + random.randint(-18, 18) 109 | tmp %= 180 110 | image[:, :, 0] = tmp 111 | 112 | image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR) 113 | 114 | else: 115 | 116 | #brightness distortion 117 | if random.randrange(2): 118 | _convert(image, beta=random.uniform(-32, 32)) 119 | 120 | image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) 121 | 122 | #saturation distortion 123 | if random.randrange(2): 124 | _convert(image[:, :, 1], alpha=random.uniform(0.5, 1.5)) 125 | 126 | #hue distortion 127 | if random.randrange(2): 128 | tmp = image[:, :, 0].astype(int) + random.randint(-18, 18) 129 | tmp %= 180 130 | image[:, :, 0] = tmp 131 | 132 | image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR) 133 | 134 | #contrast distortion 135 | if random.randrange(2): 136 | _convert(image, alpha=random.uniform(0.5, 1.5)) 137 | 138 | return image 139 | 140 | 141 | def _expand(image, boxes, fill, p): 142 | if random.randrange(2): 143 | return image, boxes 144 | 145 | height, width, depth = image.shape 146 | 147 | scale = random.uniform(1, p) 148 | w = int(scale * width) 149 | h = int(scale * height) 150 | 151 | left = random.randint(0, w - width) 152 | top = random.randint(0, h - height) 153 | 154 | boxes_t = boxes.copy() 155 | boxes_t[:, :2] += (left, top) 156 | boxes_t[:, 2:] += (left, top) 157 | expand_image = np.empty( 158 | (h, w, depth), 159 | dtype=image.dtype) 160 | expand_image[:, :] = fill 161 | expand_image[top:top + height, left:left + width] = image 162 | image = expand_image 163 | 164 | return image, boxes_t 165 | 166 | 167 | def _mirror(image, boxes, landms): 168 | _, width, _ = image.shape 169 | if random.randrange(2): 170 | image = image[:, ::-1] 171 | boxes = boxes.copy() 172 | boxes[:, 0::2] = width - boxes[:, 2::-2] 173 | 174 | # landm 175 | landms = landms.copy() 176 | landms = landms.reshape([-1, 5, 2]) 177 | landms[:, :, 0] = width - landms[:, :, 0] 178 | tmp = landms[:, 1, :].copy() 179 | landms[:, 1, :] = landms[:, 0, :] 180 | landms[:, 0, :] = tmp 181 | tmp1 = landms[:, 4, :].copy() 182 | landms[:, 4, :] = landms[:, 3, :] 183 | landms[:, 3, :] = tmp1 184 | landms = landms.reshape([-1, 10]) 185 | 186 | return image, boxes, landms 187 | 188 | 189 | def _pad_to_square(image, rgb_mean, pad_image_flag): 190 | if not pad_image_flag: 191 | return image 192 | height, width, _ = image.shape 193 | long_side = max(width, height) 194 | image_t = np.empty((long_side, long_side, 3), dtype=image.dtype) 195 | image_t[:, :] = rgb_mean 196 | image_t[0:0 + height, 0:0 + width] = image 197 | return image_t 198 | 199 | 200 | def _resize_subtract_mean(image, insize, rgb_mean): 201 | interp_methods = [cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_AREA, cv2.INTER_NEAREST, cv2.INTER_LANCZOS4] 202 | interp_method = interp_methods[random.randrange(5)] 203 | image = cv2.resize(image, (insize, insize), interpolation=interp_method) 204 | image = image.astype(np.float32) 205 | image -= rgb_mean 206 | return image.transpose(2, 0, 1) 207 | 208 | 209 | class preproc(object): 210 | 211 | def __init__(self, img_dim, rgb_means): 212 | self.img_dim = img_dim 213 | self.rgb_means 
= rgb_means 214 | 215 | def __call__(self, image, targets): 216 | assert targets.shape[0] > 0, "this image does not have gt" 217 | 218 | boxes = targets[:, :4].copy() 219 | labels = targets[:, -1].copy() 220 | landm = targets[:, 4:-1].copy() 221 | 222 | image_t, boxes_t, labels_t, landm_t, pad_image_flag = _crop(image, boxes, labels, landm, self.img_dim) 223 | image_t = _distort(image_t) 224 | image_t = _pad_to_square(image_t, self.rgb_means, pad_image_flag) 225 | image_t, boxes_t, landm_t = _mirror(image_t, boxes_t, landm_t) 226 | height, width, _ = image_t.shape 227 | image_t = _resize_subtract_mean(image_t, self.img_dim, self.rgb_means) 228 | boxes_t[:, 0::2] /= width 229 | boxes_t[:, 1::2] /= height 230 | 231 | landm_t[:, 0::2] /= width 232 | landm_t[:, 1::2] /= height 233 | 234 | labels_t = np.expand_dims(labels_t, 1) 235 | targets_t = np.hstack((boxes_t, landm_t, labels_t)) 236 | 237 | return image_t, targets_t 238 | -------------------------------------------------------------------------------- /retinaface/data/wider_face.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import torch 4 | import torch.utils.data as data 5 | 6 | class WiderFaceDetection(data.Dataset): 7 | def __init__(self, txt_path, preproc=None): 8 | self.preproc = preproc 9 | self.imgs_path = [] 10 | self.words = [] 11 | f = open(txt_path, 'r') 12 | lines = f.readlines() 13 | isFirst = True 14 | labels = [] 15 | for line in lines: 16 | line = line.rstrip() 17 | if line.startswith('#'): 18 | if isFirst is True: 19 | isFirst = False 20 | else: 21 | labels_copy = labels.copy() 22 | self.words.append(labels_copy) 23 | labels.clear() 24 | path = line[2:] 25 | path = txt_path.replace('label.txt', 'images/') + path 26 | self.imgs_path.append(path) 27 | else: 28 | line = line.split(' ') 29 | label = [float(x) for x in line] 30 | labels.append(label) 31 | 32 | self.words.append(labels) 33 | 34 | def __len__(self): 35 | return len(self.imgs_path) 36 | 37 | def __getitem__(self, index): 38 | img = cv2.imread(self.imgs_path[index]) 39 | height, width, _ = img.shape 40 | 41 | labels = self.words[index] 42 | annotations = np.zeros((0, 15)) 43 | if len(labels) == 0: 44 | return annotations 45 | for idx, label in enumerate(labels): 46 | annotation = np.zeros((1, 15)) 47 | # bbox 48 | annotation[0, 0] = label[0] # x1 49 | annotation[0, 1] = label[1] # y1 50 | annotation[0, 2] = label[0] + label[2] # x2 51 | annotation[0, 3] = label[1] + label[3] # y2 52 | 53 | # landmarks 54 | annotation[0, 4] = label[4] # l0_x 55 | annotation[0, 5] = label[5] # l0_y 56 | annotation[0, 6] = label[7] # l1_x 57 | annotation[0, 7] = label[8] # l1_y 58 | annotation[0, 8] = label[10] # l2_x 59 | annotation[0, 9] = label[11] # l2_y 60 | annotation[0, 10] = label[13] # l3_x 61 | annotation[0, 11] = label[14] # l3_y 62 | annotation[0, 12] = label[16] # l4_x 63 | annotation[0, 13] = label[17] # l4_y 64 | if (annotation[0, 4] < 0): 65 | annotation[0, 14] = -1 66 | else: 67 | annotation[0, 14] = 1 68 | 69 | annotations = np.append(annotations, annotation, axis=0) 70 | target = np.array(annotations) 71 | if self.preproc is not None: 72 | img, target = self.preproc(img, target) 73 | 74 | return torch.from_numpy(img), target 75 | 76 | 77 | def detection_collate(batch): 78 | """Custom collate fn for dealing with batches of images that have a different 79 | number of associated object annotations (bounding boxes).
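    The default collate cannot stack per-image annotation arrays of different lengths,
    so images are stacked into a single tensor while targets remain a list of tensors.
    Example (illustrative wiring only; the label-file path is assumed, and `preproc`
    comes from retinaface.data.data_augment):
        >>> dataset = WiderFaceDetection('data/widerface/train/label.txt', preproc(640, (104, 117, 123)))
        >>> loader = data.DataLoader(dataset, batch_size=32, shuffle=True, collate_fn=detection_collate)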
80 | 81 | Arguments: 82 | batch: (tuple) A tuple of tensor images and lists of annotations 83 | 84 | Return: 85 | A tuple containing: 86 | 1) (tensor) batch of images stacked on their 0 dim 87 | 2) (list of tensors) annotations for a given image are stacked on 0 dim 88 | """ 89 | targets = [] 90 | imgs = [] 91 | for _, sample in enumerate(batch): 92 | for _, tup in enumerate(sample): 93 | if torch.is_tensor(tup): 94 | imgs.append(tup) 95 | elif isinstance(tup, type(np.empty(0))): 96 | annos = torch.from_numpy(tup).float() 97 | targets.append(annos) 98 | 99 | return (torch.stack(imgs, 0), targets) 100 | -------------------------------------------------------------------------------- /retinaface/detector.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import numpy as np 4 | import torch 5 | import torch.backends.cudnn as cudnn 6 | 7 | from retinaface.data import cfg_mnet 8 | from retinaface.layers.functions.prior_box import PriorBox 9 | from retinaface.loader import load_model 10 | from retinaface.utils.box_utils import decode, decode_landm 11 | from retinaface.utils.nms.py_cpu_nms import py_cpu_nms 12 | 13 | 14 | class RetinafaceDetector: 15 | def __init__(self, net='mnet', type='cuda'): 16 | cudnn.benchmark = True 17 | self.net = net 18 | self.device = torch.device(type) 19 | self.model = load_model(net).to(self.device) 20 | self.model.eval() 21 | 22 | def detect_faces(self, img_raw, confidence_threshold=0.9, top_k=5000, nms_threshold=0.4, keep_top_k=750, resize=1): 23 | img = np.float32(img_raw) 24 | im_height, im_width = img.shape[:2] 25 | scale = torch.Tensor([img.shape[1], img.shape[0], img.shape[1], img.shape[0]]) 26 | img -= (104, 117, 123) 27 | img = img.transpose(2, 0, 1) 28 | img = torch.from_numpy(img).unsqueeze(0) 29 | img = img.to(self.device) 30 | scale = scale.to(self.device) 31 | 32 | # tic = time.time() 33 | with torch.no_grad(): 34 | loc, conf, landms = self.model(img) # forward pass 35 | # print('net forward time: {:.4f}'.format(time.time() - tic)) 36 | 37 | priorbox = PriorBox(cfg_mnet, image_size=(im_height, im_width)) 38 | priors = priorbox.forward() 39 | priors = priors.to(self.device) 40 | prior_data = priors.data 41 | boxes = decode(loc.data.squeeze(0), prior_data, cfg_mnet['variance']) 42 | boxes = boxes * scale / resize 43 | boxes = boxes.cpu().numpy() 44 | scores = conf.squeeze(0).data.cpu().numpy()[:, 1] 45 | landms = decode_landm(landms.data.squeeze(0), prior_data, cfg_mnet['variance']) 46 | scale1 = torch.Tensor([img.shape[3], img.shape[2], img.shape[3], img.shape[2], 47 | img.shape[3], img.shape[2], img.shape[3], img.shape[2], 48 | img.shape[3], img.shape[2]]) 49 | scale1 = scale1.to(self.device) 50 | landms = landms * scale1 / resize 51 | landms = landms.cpu().numpy() 52 | 53 | # ignore low scores 54 | inds = np.where(scores > confidence_threshold)[0] 55 | boxes = boxes[inds] 56 | landms = landms[inds] 57 | scores = scores[inds] 58 | 59 | # keep top-K before NMS 60 | order = scores.argsort()[::-1][:top_k] 61 | boxes = boxes[order] 62 | landms = landms[order] 63 | scores = scores[order] 64 | 65 | # do NMS 66 | dets = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False) 67 | keep = py_cpu_nms(dets, nms_threshold) 68 | # keep = nms(dets, args.nms_threshold,force_cpu=args.cpu) 69 | dets = dets[keep, :] 70 | landms = landms[keep] 71 | 72 | # keep top-K faster NMS 73 | dets = dets[:keep_top_k, :] 74 | landms = landms[:keep_top_k, :] 75 | # print(landms.shape) 
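        # decoded landmarks are laid out per face as (x1, y1, ..., x5, y5);
        # the reshape/transpose below regroups them as (x1..x5, y1..y5), i.e.
        # (N, 10) -> (N, 5, 2) -> (N, 2, 5) -> (N, 10)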
76 | landms = landms.reshape((-1, 5, 2)) 77 | # print(landms.shape) 78 | landms = landms.transpose((0, 2, 1)) 79 | # print(landms.shape) 80 | landms = landms.reshape(-1, 10, ) 81 | # print(landms.shape) 82 | 83 | return dets, landms 84 | 85 | 86 | detector = RetinafaceDetector(net='mnet') 87 | -------------------------------------------------------------------------------- /retinaface/layers/__init__.py: -------------------------------------------------------------------------------- 1 | from .functions import * 2 | from .modules import * 3 | -------------------------------------------------------------------------------- /retinaface/layers/functions/prior_box.py: -------------------------------------------------------------------------------- 1 | from itertools import product as product 2 | from math import ceil 3 | 4 | import torch 5 | 6 | 7 | class PriorBox(object): 8 | def __init__(self, cfg, image_size=None, phase='train'): 9 | super(PriorBox, self).__init__() 10 | self.min_sizes = cfg['min_sizes'] 11 | self.steps = cfg['steps'] 12 | self.clip = cfg['clip'] 13 | self.image_size = image_size 14 | self.feature_maps = [[ceil(self.image_size[0] / step), ceil(self.image_size[1] / step)] for step in self.steps] 15 | self.name = "s" 16 | 17 | def forward(self): 18 | anchors = [] 19 | for k, f in enumerate(self.feature_maps): 20 | min_sizes = self.min_sizes[k] 21 | for i, j in product(range(f[0]), range(f[1])): 22 | for min_size in min_sizes: 23 | s_kx = min_size / self.image_size[1] 24 | s_ky = min_size / self.image_size[0] 25 | dense_cx = [x * self.steps[k] / self.image_size[1] for x in [j + 0.5]] 26 | dense_cy = [y * self.steps[k] / self.image_size[0] for y in [i + 0.5]] 27 | for cy, cx in product(dense_cy, dense_cx): 28 | anchors += [cx, cy, s_kx, s_ky] 29 | 30 | # back to torch land 31 | output = torch.Tensor(anchors).view(-1, 4) 32 | if self.clip: 33 | output.clamp_(max=1, min=0) 34 | return output 35 | -------------------------------------------------------------------------------- /retinaface/layers/modules/__init__.py: -------------------------------------------------------------------------------- 1 | from .multibox_loss import MultiBoxLoss 2 | 3 | __all__ = ['MultiBoxLoss'] 4 | -------------------------------------------------------------------------------- /retinaface/layers/modules/multibox_loss.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | from retinaface.data import cfg_mnet 6 | from retinaface.utils.box_utils import match, log_sum_exp 7 | 8 | GPU = cfg_mnet['gpu_train'] 9 | 10 | 11 | class MultiBoxLoss(nn.Module): 12 | """SSD Weighted Loss Function 13 | Compute Targets: 14 | 1) Produce Confidence Target Indices by matching ground truth boxes 15 | with (default) 'priorboxes' that have jaccard index > threshold parameter 16 | (default threshold: 0.5). 17 | 2) Produce localization target by 'encoding' variance into offsets of ground 18 | truth boxes and their matched 'priorboxes'. 19 | 3) Hard negative mining to filter the excessive number of negative examples 20 | that comes with using a large number of default bounding boxes. 21 | (default negative:positive ratio 3:1) 22 | Objective Loss: 23 | L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N 24 | Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss 25 | weighted by α which is set to 1 by cross val. 
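        This RetinaFace variant additionally regresses five facial landmarks with a
        Smooth L1 term (loss_landm), computed only over anchors whose matched face has
        valid landmark annotations and normalized by their count N1, so forward()
        returns (loss_l, loss_c, loss_landm).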
26 | Args: 27 | c: class confidences, 28 | l: predicted boxes, 29 | g: ground truth boxes 30 | N: number of matched default boxes 31 | See: https://arxiv.org/pdf/1512.02325.pdf for more details. 32 | """ 33 | 34 | def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, 35 | encode_target): 36 | super(MultiBoxLoss, self).__init__() 37 | self.num_classes = num_classes 38 | self.threshold = overlap_thresh 39 | self.background_label = bkg_label 40 | self.encode_target = encode_target 41 | self.use_prior_for_matching = prior_for_matching 42 | self.do_neg_mining = neg_mining 43 | self.negpos_ratio = neg_pos 44 | self.neg_overlap = neg_overlap 45 | self.variance = [0.1, 0.2] 46 | 47 | def forward(self, predictions, priors, targets): 48 | """Multibox Loss 49 | Args: 50 | predictions (tuple): A tuple containing loc preds, conf preds, 51 | and prior boxes from SSD net. 52 | conf shape: torch.size(batch_size,num_priors,num_classes) 53 | loc shape: torch.size(batch_size,num_priors,4) 54 | priors shape: torch.size(num_priors,4) 55 | 56 | ground_truth (tensor): Ground truth boxes and labels for a batch, 57 | shape: [batch_size,num_objs,5] (last idx is the label). 58 | """ 59 | 60 | loc_data, conf_data, landm_data = predictions 61 | priors = priors 62 | num = loc_data.size(0) 63 | num_priors = (priors.size(0)) 64 | 65 | # match priors (default boxes) and ground truth boxes 66 | loc_t = torch.Tensor(num, num_priors, 4) 67 | landm_t = torch.Tensor(num, num_priors, 10) 68 | conf_t = torch.LongTensor(num, num_priors) 69 | for idx in range(num): 70 | truths = targets[idx][:, :4].data 71 | labels = targets[idx][:, -1].data 72 | landms = targets[idx][:, 4:14].data 73 | defaults = priors.data 74 | match(self.threshold, truths, defaults, self.variance, labels, landms, loc_t, conf_t, landm_t, idx) 75 | if GPU: 76 | loc_t = loc_t.cuda() 77 | conf_t = conf_t.cuda() 78 | landm_t = landm_t.cuda() 79 | 80 | zeros = torch.tensor(0).cuda() 81 | # landm Loss (Smooth L1) 82 | # Shape: [batch,num_priors,10] 83 | pos1 = conf_t > zeros 84 | num_pos_landm = pos1.long().sum(1, keepdim=True) 85 | N1 = max(num_pos_landm.data.sum().float(), 1) 86 | pos_idx1 = pos1.unsqueeze(pos1.dim()).expand_as(landm_data) 87 | landm_p = landm_data[pos_idx1].view(-1, 10) 88 | landm_t = landm_t[pos_idx1].view(-1, 10) 89 | loss_landm = F.smooth_l1_loss(landm_p, landm_t, reduction='sum') 90 | 91 | pos = conf_t != zeros 92 | conf_t[pos] = 1 93 | 94 | # Localization Loss (Smooth L1) 95 | # Shape: [batch,num_priors,4] 96 | pos_idx = pos.unsqueeze(pos.dim()).expand_as(loc_data) 97 | loc_p = loc_data[pos_idx].view(-1, 4) 98 | loc_t = loc_t[pos_idx].view(-1, 4) 99 | loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='sum') 100 | 101 | # Compute max conf across batch for hard negative mining 102 | batch_conf = conf_data.view(-1, self.num_classes) 103 | loss_c = log_sum_exp(batch_conf) - batch_conf.gather(1, conf_t.view(-1, 1)) 104 | 105 | # Hard Negative Mining 106 | loss_c[pos.view(-1, 1)] = 0 # filter out pos boxes for now 107 | loss_c = loss_c.view(num, -1) 108 | _, loss_idx = loss_c.sort(1, descending=True) 109 | _, idx_rank = loss_idx.sort(1) 110 | num_pos = pos.long().sum(1, keepdim=True) 111 | num_neg = torch.clamp(self.negpos_ratio * num_pos, max=pos.size(1) - 1) 112 | neg = idx_rank < num_neg.expand_as(idx_rank) 113 | 114 | # Confidence Loss Including Positive and Negative Examples 115 | pos_idx = pos.unsqueeze(2).expand_as(conf_data) 116 | neg_idx = neg.unsqueeze(2).expand_as(conf_data) 117 
| conf_p = conf_data[(pos_idx + neg_idx).gt(0)].view(-1, self.num_classes) 118 | targets_weighted = conf_t[(pos + neg).gt(0)] 119 | loss_c = F.cross_entropy(conf_p, targets_weighted, reduction='sum') 120 | 121 | # Sum of losses: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N 122 | N = max(num_pos.data.sum().float(), 1) 123 | loss_l /= N 124 | loss_c /= N 125 | loss_landm /= N1 126 | 127 | return loss_l, loss_c, loss_landm 128 | -------------------------------------------------------------------------------- /retinaface/loader.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import torch 4 | 5 | from retinaface.data import cfg_mnet, cfg_re50 6 | from retinaface.models.retinaface import RetinaFace 7 | 8 | 9 | def check_keys(model, pretrained_state_dict): 10 | ckpt_keys = set(pretrained_state_dict.keys()) 11 | model_keys = set(model.state_dict().keys()) 12 | used_pretrained_keys = model_keys & ckpt_keys 13 | unused_pretrained_keys = ckpt_keys - model_keys 14 | missing_keys = model_keys - ckpt_keys 15 | # print('Missing keys:{}'.format(len(missing_keys))) 16 | # print('Unused checkpoint keys:{}'.format(len(unused_pretrained_keys))) 17 | # print('Used keys:{}'.format(len(used_pretrained_keys))) 18 | assert len(used_pretrained_keys) > 0, 'load NONE from pretrained checkpoint' 19 | return True 20 | 21 | 22 | def remove_prefix(state_dict, prefix): 23 | ''' Old style model is stored with all names of parameters sharing common prefix 'module.' ''' 24 | # print('remove prefix \'{}\''.format(prefix)) 25 | f = lambda x: x.split(prefix, 1)[-1] if x.startswith(prefix) else x 26 | return {f(key): value for key, value in state_dict.items()} 27 | 28 | 29 | def load_model(net='mnet'): 30 | if net == 'mnet': 31 | pretrained_path = 'retinaface/weights/mobilenet0.25_Final.pth' 32 | # print('Loading pretrained model from {}'.format(pretrained_path)) 33 | model = RetinaFace(cfg=cfg_mnet, phase='test') 34 | else: 35 | pretrained_path = 'retinaface/weights/Resnet50_Final.pth' 36 | # print('Loading pretrained model from {}'.format(pretrained_path)) 37 | model = RetinaFace(cfg=cfg_re50, phase='test') 38 | 39 | device = torch.cuda.current_device() 40 | pretrained_dict = torch.load(pretrained_path, map_location=lambda storage, loc: storage.cuda(device)) 41 | if "state_dict" in pretrained_dict.keys(): 42 | pretrained_dict = remove_prefix(pretrained_dict['state_dict'], 'module.') 43 | else: 44 | pretrained_dict = remove_prefix(pretrained_dict, 'module.') 45 | check_keys(model, pretrained_dict) 46 | model.load_state_dict(pretrained_dict, strict=False) 47 | # print('Finished loading model!') 48 | return model 49 | -------------------------------------------------------------------------------- /retinaface/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/foamliu/MobileFaceNet-PyTorch/2c720d6875488e94f4d4eb870936cb05613b74d5/retinaface/models/__init__.py -------------------------------------------------------------------------------- /retinaface/models/net.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | 6 | def conv_bn(inp, oup, stride=1, leaky=0): 7 | return nn.Sequential( 8 | nn.Conv2d(inp, oup, 3, stride, 1, bias=False), 9 | nn.BatchNorm2d(oup), 10 | nn.LeakyReLU(negative_slope=leaky, inplace=True) 11 | ) 12 | 13 | 14 | def 
conv_bn_no_relu(inp, oup, stride): 15 | return nn.Sequential( 16 | nn.Conv2d(inp, oup, 3, stride, 1, bias=False), 17 | nn.BatchNorm2d(oup), 18 | ) 19 | 20 | 21 | def conv_bn1X1(inp, oup, stride, leaky=0): 22 | return nn.Sequential( 23 | nn.Conv2d(inp, oup, 1, stride, padding=0, bias=False), 24 | nn.BatchNorm2d(oup), 25 | nn.LeakyReLU(negative_slope=leaky, inplace=True) 26 | ) 27 | 28 | 29 | def conv_dw(inp, oup, stride, leaky=0.1): 30 | return nn.Sequential( 31 | nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False), 32 | nn.BatchNorm2d(inp), 33 | nn.LeakyReLU(negative_slope=leaky, inplace=True), 34 | 35 | nn.Conv2d(inp, oup, 1, 1, 0, bias=False), 36 | nn.BatchNorm2d(oup), 37 | nn.LeakyReLU(negative_slope=leaky, inplace=True), 38 | ) 39 | 40 | 41 | class SSH(nn.Module): 42 | def __init__(self, in_channel, out_channel): 43 | super(SSH, self).__init__() 44 | assert out_channel % 4 == 0 45 | leaky = 0 46 | if (out_channel <= 64): 47 | leaky = 0.1 48 | self.conv3X3 = conv_bn_no_relu(in_channel, out_channel // 2, stride=1) 49 | 50 | self.conv5X5_1 = conv_bn(in_channel, out_channel // 4, stride=1, leaky=leaky) 51 | self.conv5X5_2 = conv_bn_no_relu(out_channel // 4, out_channel // 4, stride=1) 52 | 53 | self.conv7X7_2 = conv_bn(out_channel // 4, out_channel // 4, stride=1, leaky=leaky) 54 | self.conv7x7_3 = conv_bn_no_relu(out_channel // 4, out_channel // 4, stride=1) 55 | 56 | def forward(self, input): 57 | conv3X3 = self.conv3X3(input) 58 | 59 | conv5X5_1 = self.conv5X5_1(input) 60 | conv5X5 = self.conv5X5_2(conv5X5_1) 61 | 62 | conv7X7_2 = self.conv7X7_2(conv5X5_1) 63 | conv7X7 = self.conv7x7_3(conv7X7_2) 64 | 65 | out = torch.cat([conv3X3, conv5X5, conv7X7], dim=1) 66 | out = F.relu(out) 67 | return out 68 | 69 | 70 | class FPN(nn.Module): 71 | def __init__(self, in_channels_list, out_channels): 72 | super(FPN, self).__init__() 73 | leaky = 0 74 | if (out_channels <= 64): 75 | leaky = 0.1 76 | self.output1 = conv_bn1X1(in_channels_list[0], out_channels, stride=1, leaky=leaky) 77 | self.output2 = conv_bn1X1(in_channels_list[1], out_channels, stride=1, leaky=leaky) 78 | self.output3 = conv_bn1X1(in_channels_list[2], out_channels, stride=1, leaky=leaky) 79 | 80 | self.merge1 = conv_bn(out_channels, out_channels, leaky=leaky) 81 | self.merge2 = conv_bn(out_channels, out_channels, leaky=leaky) 82 | 83 | def forward(self, input): 84 | # names = list(input.keys()) 85 | input = list(input.values()) 86 | 87 | output1 = self.output1(input[0]) 88 | output2 = self.output2(input[1]) 89 | output3 = self.output3(input[2]) 90 | 91 | up3 = F.interpolate(output3, size=[output2.size(2), output2.size(3)], mode="nearest") 92 | output2 = output2 + up3 93 | output2 = self.merge2(output2) 94 | 95 | up2 = F.interpolate(output2, size=[output1.size(2), output1.size(3)], mode="nearest") 96 | output1 = output1 + up2 97 | output1 = self.merge1(output1) 98 | 99 | out = [output1, output2, output3] 100 | return out 101 | 102 | 103 | class MobileNetV1(nn.Module): 104 | def __init__(self): 105 | super(MobileNetV1, self).__init__() 106 | self.stage1 = nn.Sequential( 107 | conv_bn(3, 8, 2, leaky=0.1), # 3 108 | conv_dw(8, 16, 1), # 7 109 | conv_dw(16, 32, 2), # 11 110 | conv_dw(32, 32, 1), # 19 111 | conv_dw(32, 64, 2), # 27 112 | conv_dw(64, 64, 1), # 43 113 | ) 114 | self.stage2 = nn.Sequential( 115 | conv_dw(64, 128, 2), # 43 + 16 = 59 116 | conv_dw(128, 128, 1), # 59 + 32 = 91 117 | conv_dw(128, 128, 1), # 91 + 32 = 123 118 | conv_dw(128, 128, 1), # 123 + 32 = 155 119 | conv_dw(128, 128, 1), # 155 + 32 = 187 120 | 
conv_dw(128, 128, 1), # 187 + 32 = 219 121 | ) 122 | self.stage3 = nn.Sequential( 123 | conv_dw(128, 256, 2), # 219 +3 2 = 241 124 | conv_dw(256, 256, 1), # 241 + 64 = 301 125 | ) 126 | self.avg = nn.AdaptiveAvgPool2d((1, 1)) 127 | self.fc = nn.Linear(256, 1000) 128 | 129 | def forward(self, x): 130 | x = self.stage1(x) 131 | x = self.stage2(x) 132 | x = self.stage3(x) 133 | x = self.avg(x) 134 | # x = self.model(x) 135 | x = x.view(-1, 256) 136 | x = self.fc(x) 137 | return x 138 | -------------------------------------------------------------------------------- /retinaface/models/retinaface.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | import torchvision.models._utils as _utils 5 | 6 | from retinaface.models.net import FPN as FPN 7 | from retinaface.models.net import MobileNetV1 as MobileNetV1 8 | from retinaface.models.net import SSH as SSH 9 | 10 | 11 | class ClassHead(nn.Module): 12 | def __init__(self, inchannels=512, num_anchors=3): 13 | super(ClassHead, self).__init__() 14 | self.num_anchors = num_anchors 15 | self.conv1x1 = nn.Conv2d(inchannels, self.num_anchors * 2, kernel_size=(1, 1), stride=1, padding=0) 16 | 17 | def forward(self, x): 18 | out = self.conv1x1(x) 19 | out = out.permute(0, 2, 3, 1).contiguous() 20 | 21 | return out.view(out.shape[0], -1, 2) 22 | 23 | 24 | class BboxHead(nn.Module): 25 | def __init__(self, inchannels=512, num_anchors=3): 26 | super(BboxHead, self).__init__() 27 | self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 4, kernel_size=(1, 1), stride=1, padding=0) 28 | 29 | def forward(self, x): 30 | out = self.conv1x1(x) 31 | out = out.permute(0, 2, 3, 1).contiguous() 32 | 33 | return out.view(out.shape[0], -1, 4) 34 | 35 | 36 | class LandmarkHead(nn.Module): 37 | def __init__(self, inchannels=512, num_anchors=3): 38 | super(LandmarkHead, self).__init__() 39 | self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 10, kernel_size=(1, 1), stride=1, padding=0) 40 | 41 | def forward(self, x): 42 | out = self.conv1x1(x) 43 | out = out.permute(0, 2, 3, 1).contiguous() 44 | 45 | return out.view(out.shape[0], -1, 10) 46 | 47 | 48 | class RetinaFace(nn.Module): 49 | def __init__(self, cfg=None, phase='train'): 50 | """ 51 | :param cfg: Network related settings. 52 | :param phase: train or test. 53 | """ 54 | super(RetinaFace, self).__init__() 55 | self.phase = phase 56 | # backbone = MobileNetV1() 57 | if cfg['name'] == 'mobilenet0.25': 58 | backbone = MobileNetV1() 59 | if cfg['pretrain']: 60 | checkpoint = torch.load("./weights/mobilenetV1X0.25_pretrain.tar", map_location=torch.device('cpu')) 61 | from collections import OrderedDict 62 | new_state_dict = OrderedDict() 63 | for k, v in checkpoint['state_dict'].items(): 64 | name = k[7:] # remove module. 
65 | new_state_dict[name] = v 66 | # load params 67 | backbone.load_state_dict(new_state_dict) 68 | elif cfg['name'] == 'Resnet50': 69 | import torchvision.models as models 70 | backbone = models.resnet50(pretrained=cfg['pretrain']) 71 | 72 | self.body = _utils.IntermediateLayerGetter(backbone, cfg['return_layers']) 73 | in_channels_stage2 = cfg['in_channel'] 74 | in_channels_list = [ 75 | in_channels_stage2 * 2, 76 | in_channels_stage2 * 4, 77 | in_channels_stage2 * 8, 78 | ] 79 | out_channels = cfg['out_channel'] 80 | self.fpn = FPN(in_channels_list, out_channels) 81 | self.ssh1 = SSH(out_channels, out_channels) 82 | self.ssh2 = SSH(out_channels, out_channels) 83 | self.ssh3 = SSH(out_channels, out_channels) 84 | 85 | self.ClassHead = self._make_class_head(fpn_num=3, inchannels=cfg['out_channel']) 86 | self.BboxHead = self._make_bbox_head(fpn_num=3, inchannels=cfg['out_channel']) 87 | self.LandmarkHead = self._make_landmark_head(fpn_num=3, inchannels=cfg['out_channel']) 88 | 89 | def _make_class_head(self, fpn_num=3, inchannels=64, anchor_num=2): 90 | classhead = nn.ModuleList() 91 | for i in range(fpn_num): 92 | classhead.append(ClassHead(inchannels, anchor_num)) 93 | return classhead 94 | 95 | def _make_bbox_head(self, fpn_num=3, inchannels=64, anchor_num=2): 96 | bboxhead = nn.ModuleList() 97 | for i in range(fpn_num): 98 | bboxhead.append(BboxHead(inchannels, anchor_num)) 99 | return bboxhead 100 | 101 | def _make_landmark_head(self, fpn_num=3, inchannels=64, anchor_num=2): 102 | landmarkhead = nn.ModuleList() 103 | for i in range(fpn_num): 104 | landmarkhead.append(LandmarkHead(inchannels, anchor_num)) 105 | return landmarkhead 106 | 107 | def forward(self, inputs): 108 | out = self.body(inputs) 109 | 110 | # FPN 111 | fpn = self.fpn(out) 112 | 113 | # SSH 114 | feature1 = self.ssh1(fpn[0]) 115 | feature2 = self.ssh2(fpn[1]) 116 | feature3 = self.ssh3(fpn[2]) 117 | features = [feature1, feature2, feature3] 118 | 119 | bbox_regressions = torch.cat([self.BboxHead[i](feature) for i, feature in enumerate(features)], dim=1) 120 | classifications = torch.cat([self.ClassHead[i](feature) for i, feature in enumerate(features)], dim=1) 121 | ldm_regressions = torch.cat([self.LandmarkHead[i](feature) for i, feature in enumerate(features)], dim=1) 122 | 123 | if self.phase == 'train': 124 | output = (bbox_regressions, classifications, ldm_regressions) 125 | else: 126 | output = (bbox_regressions, F.softmax(classifications, dim=-1), ldm_regressions) 127 | return output 128 | -------------------------------------------------------------------------------- /retinaface/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/foamliu/MobileFaceNet-PyTorch/2c720d6875488e94f4d4eb870936cb05613b74d5/retinaface/utils/__init__.py -------------------------------------------------------------------------------- /retinaface/utils/box_utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | 4 | 5 | def point_form(boxes): 6 | """ Convert prior_boxes to (xmin, ymin, xmax, ymax) 7 | representation for comparison to point form ground truth data. 8 | Args: 9 | boxes: (tensor) center-size default boxes from priorbox layers. 10 | Return: 11 | boxes: (tensor) Converted xmin, ymin, xmax, ymax form of boxes. 
12 |     """ 13 |     return torch.cat((boxes[:, :2] - boxes[:, 2:]/2,     # xmin, ymin 14 |                      boxes[:, :2] + boxes[:, 2:]/2), 1)  # xmax, ymax 15 | 16 | 17 | def center_size(boxes): 18 |     """ Convert prior_boxes to (cx, cy, w, h) 19 |     representation for comparison to center-size form ground truth data. 20 |     Args: 21 |         boxes: (tensor) point_form boxes 22 |     Return: 23 |         boxes: (tensor) Converted (cx, cy, w, h) form of boxes. 24 |     """ 25 |     return torch.cat(((boxes[:, 2:] + boxes[:, :2])/2,  # cx, cy 26 |                       boxes[:, 2:] - boxes[:, :2]), 1)  # w, h 27 | 28 | 29 | def intersect(box_a, box_b): 30 |     """ We resize both tensors to [A,B,2] without new malloc: 31 |     [A,2] -> [A,1,2] -> [A,B,2] 32 |     [B,2] -> [1,B,2] -> [A,B,2] 33 |     Then we compute the area of intersect between box_a and box_b. 34 |     Args: 35 |       box_a: (tensor) bounding boxes, Shape: [A,4]. 36 |       box_b: (tensor) bounding boxes, Shape: [B,4]. 37 |     Return: 38 |       (tensor) intersection area, Shape: [A,B]. 39 |     """ 40 |     A = box_a.size(0) 41 |     B = box_b.size(0) 42 |     max_xy = torch.min(box_a[:, 2:].unsqueeze(1).expand(A, B, 2), 43 |                        box_b[:, 2:].unsqueeze(0).expand(A, B, 2)) 44 |     min_xy = torch.max(box_a[:, :2].unsqueeze(1).expand(A, B, 2), 45 |                        box_b[:, :2].unsqueeze(0).expand(A, B, 2)) 46 |     inter = torch.clamp((max_xy - min_xy), min=0) 47 |     return inter[:, :, 0] * inter[:, :, 1] 48 | 49 | 50 | def jaccard(box_a, box_b): 51 |     """Compute the jaccard overlap of two sets of boxes. The jaccard overlap 52 |     is simply the intersection over union of two boxes. Here we operate on 53 |     ground truth boxes and default boxes. 54 |     E.g.: 55 |         A ∩ B / A ∪ B = A ∩ B / (area(A) + area(B) - A ∩ B) 56 |     Args: 57 |         box_a: (tensor) Ground truth bounding boxes, Shape: [num_objects,4] 58 |         box_b: (tensor) Prior boxes from priorbox layers, Shape: [num_priors,4] 59 |     Return: 60 |         jaccard overlap: (tensor) Shape: [box_a.size(0), box_b.size(0)] 61 |     """ 62 |     inter = intersect(box_a, box_b) 63 |     area_a = ((box_a[:, 2]-box_a[:, 0]) * 64 |               (box_a[:, 3]-box_a[:, 1])).unsqueeze(1).expand_as(inter)  # [A,B] 65 |     area_b = ((box_b[:, 2]-box_b[:, 0]) * 66 |               (box_b[:, 3]-box_b[:, 1])).unsqueeze(0).expand_as(inter)  # [A,B] 67 |     union = area_a + area_b - inter 68 |     return inter / union  # [A,B] 69 | 70 | 71 | def matrix_iou(a, b): 72 |     """ 73 |     return iou of a and b, numpy version for data augmentation 74 |     """ 75 |     lt = np.maximum(a[:, np.newaxis, :2], b[:, :2]) 76 |     rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:]) 77 | 78 |     area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2) 79 |     area_a = np.prod(a[:, 2:] - a[:, :2], axis=1) 80 |     area_b = np.prod(b[:, 2:] - b[:, :2], axis=1) 81 |     return area_i / (area_a[:, np.newaxis] + area_b - area_i) 82 | 83 | 84 | def matrix_iof(a, b): 85 |     """ 86 |     return iof of a and b, numpy version for data augmentation 87 |     """ 88 |     lt = np.maximum(a[:, np.newaxis, :2], b[:, :2]) 89 |     rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:]) 90 | 91 |     area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2) 92 |     area_a = np.prod(a[:, 2:] - a[:, :2], axis=1) 93 |     return area_i / np.maximum(area_a[:, np.newaxis], 1) 94 | 95 | 96 | def match(threshold, truths, priors, variances, labels, landms, loc_t, conf_t, landm_t, idx): 97 |     """Match each prior box with the ground truth box of the highest jaccard 98 |     overlap, encode the bounding boxes, then return the matched indices 99 |     corresponding to both confidence and location preds. 100 |     Args: 101 |         threshold: (float) The overlap threshold used when matching boxes. 102 |         truths: (tensor) Ground truth boxes, Shape: [num_obj, 4]. 
103 |         priors: (tensor) Prior boxes from priorbox layers, Shape: [n_priors,4]. 104 |         variances: (tensor) Variances corresponding to each prior coord, 105 |             Shape: [num_priors, 4]. 106 |         labels: (tensor) All the class labels for the image, Shape: [num_obj]. 107 |         landms: (tensor) Ground truth landms, Shape [num_obj, 10]. 108 |         loc_t: (tensor) Tensor to be filled w/ encoded location targets. 109 |         conf_t: (tensor) Tensor to be filled w/ matched indices for conf preds. 110 |         landm_t: (tensor) Tensor to be filled w/ encoded landm targets. 111 |         idx: (int) current batch index 112 |     Return: 113 |         The matched indices corresponding to 1)location 2)confidence 3)landm preds. 114 |     """ 115 |     # jaccard index 116 |     overlaps = jaccard( 117 |         truths, 118 |         point_form(priors) 119 |     ) 120 |     # (Bipartite Matching) 121 |     # [1,num_objects] best prior for each ground truth 122 |     best_prior_overlap, best_prior_idx = overlaps.max(1, keepdim=True) 123 | 124 |     # ignore hard gt 125 |     valid_gt_idx = best_prior_overlap[:, 0] >= 0.2 126 |     best_prior_idx_filter = best_prior_idx[valid_gt_idx, :] 127 |     if best_prior_idx_filter.shape[0] <= 0: 128 |         loc_t[idx] = 0 129 |         conf_t[idx] = 0 130 |         return 131 | 132 |     # [1,num_priors] best ground truth for each prior 133 |     best_truth_overlap, best_truth_idx = overlaps.max(0, keepdim=True) 134 |     best_truth_idx.squeeze_(0) 135 |     best_truth_overlap.squeeze_(0) 136 |     best_prior_idx.squeeze_(1) 137 |     best_prior_idx_filter.squeeze_(1) 138 |     best_prior_overlap.squeeze_(1) 139 |     best_truth_overlap.index_fill_(0, best_prior_idx_filter, 2)  # ensure best prior 140 |     # TODO refactor: index best_prior_idx with long tensor 141 |     # ensure every gt matches with its prior of max overlap 142 |     for j in range(best_prior_idx.size(0)):     # decide which ground-truth box this anchor is assigned to predict 143 |         best_truth_idx[best_prior_idx[j]] = j 144 |     matches = truths[best_truth_idx]            # Shape: [num_priors,4]  gather the matched gt box for every anchor 145 |     conf = labels[best_truth_idx]               # Shape: [num_priors]  gather the matched gt label for every anchor 146 |     conf[best_truth_overlap < threshold] = 0    # label as background: anchors with overlap < 0.35 all become negative samples 147 |     loc = encode(matches, priors, variances) 148 | 149 |     matches_landm = landms[best_truth_idx] 150 |     landm = encode_landm(matches_landm, priors, variances) 151 |     loc_t[idx] = loc    # [num_priors,4] encoded offsets to learn 152 |     conf_t[idx] = conf  # [num_priors] top class label for each prior 153 |     landm_t[idx] = landm 154 | 155 | 156 | def encode(matched, priors, variances): 157 |     """Encode the variances from the priorbox layers into the ground truth boxes 158 |     we have matched (based on jaccard overlap) with the prior boxes. 159 |     Args: 160 |         matched: (tensor) Coords of ground truth for each prior in point-form 161 |             Shape: [num_priors, 4]. 162 |         priors: (tensor) Prior boxes in center-offset form 163 |             Shape: [num_priors,4]. 
164 | variances: (list[float]) Variances of priorboxes 165 | Return: 166 | encoded boxes (tensor), Shape: [num_priors, 4] 167 | """ 168 | 169 | # dist b/t match center and prior's center 170 | g_cxcy = (matched[:, :2] + matched[:, 2:])/2 - priors[:, :2] 171 | # encode variance 172 | g_cxcy /= (variances[0] * priors[:, 2:]) 173 | # match wh / prior wh 174 | g_wh = (matched[:, 2:] - matched[:, :2]) / priors[:, 2:] 175 | g_wh = torch.log(g_wh) / variances[1] 176 | # return target for smooth_l1_loss 177 | return torch.cat([g_cxcy, g_wh], 1) # [num_priors,4] 178 | 179 | def encode_landm(matched, priors, variances): 180 | """Encode the variances from the priorbox layers into the ground truth boxes 181 | we have matched (based on jaccard overlap) with the prior boxes. 182 | Args: 183 | matched: (tensor) Coords of ground truth for each prior in point-form 184 | Shape: [num_priors, 10]. 185 | priors: (tensor) Prior boxes in center-offset form 186 | Shape: [num_priors,4]. 187 | variances: (list[float]) Variances of priorboxes 188 | Return: 189 | encoded landm (tensor), Shape: [num_priors, 10] 190 | """ 191 | 192 | # dist b/t match center and prior's center 193 | matched = torch.reshape(matched, (matched.size(0), 5, 2)) 194 | priors_cx = priors[:, 0].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) 195 | priors_cy = priors[:, 1].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) 196 | priors_w = priors[:, 2].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) 197 | priors_h = priors[:, 3].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) 198 | priors = torch.cat([priors_cx, priors_cy, priors_w, priors_h], dim=2) 199 | g_cxcy = matched[:, :, :2] - priors[:, :, :2] 200 | # encode variance 201 | g_cxcy /= (variances[0] * priors[:, :, 2:]) 202 | # g_cxcy /= priors[:, :, 2:] 203 | g_cxcy = g_cxcy.reshape(g_cxcy.size(0), -1) 204 | # return target for smooth_l1_loss 205 | return g_cxcy 206 | 207 | 208 | # Adapted from https://github.com/Hakuyume/chainer-ssd 209 | def decode(loc, priors, variances): 210 | """Decode locations from predictions using priors to undo 211 | the encoding we did for offset regression at train time. 212 | Args: 213 | loc (tensor): location predictions for loc layers, 214 | Shape: [num_priors,4] 215 | priors (tensor): Prior boxes in center-offset form. 216 | Shape: [num_priors,4]. 217 | variances: (list[float]) Variances of priorboxes 218 | Return: 219 | decoded bounding box predictions 220 | """ 221 | 222 | boxes = torch.cat(( 223 | priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:], 224 | priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1) 225 | boxes[:, :2] -= boxes[:, 2:] / 2 226 | boxes[:, 2:] += boxes[:, :2] 227 | return boxes 228 | 229 | def decode_landm(pre, priors, variances): 230 | """Decode landm from predictions using priors to undo 231 | the encoding we did for offset regression at train time. 232 | Args: 233 | pre (tensor): landm predictions for loc layers, 234 | Shape: [num_priors,10] 235 | priors (tensor): Prior boxes in center-offset form. 236 | Shape: [num_priors,4]. 
237 | variances: (list[float]) Variances of priorboxes 238 | Return: 239 | decoded landm predictions 240 | """ 241 | landms = torch.cat((priors[:, :2] + pre[:, :2] * variances[0] * priors[:, 2:], 242 | priors[:, :2] + pre[:, 2:4] * variances[0] * priors[:, 2:], 243 | priors[:, :2] + pre[:, 4:6] * variances[0] * priors[:, 2:], 244 | priors[:, :2] + pre[:, 6:8] * variances[0] * priors[:, 2:], 245 | priors[:, :2] + pre[:, 8:10] * variances[0] * priors[:, 2:], 246 | ), dim=1) 247 | return landms 248 | 249 | 250 | def log_sum_exp(x): 251 | """Utility function for computing log_sum_exp while determining 252 | This will be used to determine unaveraged confidence loss across 253 | all examples in a batch. 254 | Args: 255 | x (Variable(tensor)): conf_preds from conf layers 256 | """ 257 | x_max = x.data.max() 258 | return torch.log(torch.sum(torch.exp(x-x_max), 1, keepdim=True)) + x_max 259 | 260 | 261 | # Original author: Francisco Massa: 262 | # https://github.com/fmassa/object-detection.torch 263 | # Ported to PyTorch by Max deGroot (02/01/2017) 264 | def nms(boxes, scores, overlap=0.5, top_k=200): 265 | """Apply non-maximum suppression at test time to avoid detecting too many 266 | overlapping bounding boxes for a given object. 267 | Args: 268 | boxes: (tensor) The location preds for the img, Shape: [num_priors,4]. 269 | scores: (tensor) The class predscores for the img, Shape:[num_priors]. 270 | overlap: (float) The overlap thresh for suppressing unnecessary boxes. 271 | top_k: (int) The Maximum number of box preds to consider. 272 | Return: 273 | The indices of the kept boxes with respect to num_priors. 274 | """ 275 | 276 | keep = torch.Tensor(scores.size(0)).fill_(0).long() 277 | if boxes.numel() == 0: 278 | return keep 279 | x1 = boxes[:, 0] 280 | y1 = boxes[:, 1] 281 | x2 = boxes[:, 2] 282 | y2 = boxes[:, 3] 283 | area = torch.mul(x2 - x1, y2 - y1) 284 | v, idx = scores.sort(0) # sort in ascending order 285 | # I = I[v >= 0.01] 286 | idx = idx[-top_k:] # indices of the top-k largest vals 287 | xx1 = boxes.new() 288 | yy1 = boxes.new() 289 | xx2 = boxes.new() 290 | yy2 = boxes.new() 291 | w = boxes.new() 292 | h = boxes.new() 293 | 294 | # keep = torch.Tensor() 295 | count = 0 296 | while idx.numel() > 0: 297 | i = idx[-1] # index of current largest val 298 | # keep.append(i) 299 | keep[count] = i 300 | count += 1 301 | if idx.size(0) == 1: 302 | break 303 | idx = idx[:-1] # remove kept element from view 304 | # load bboxes of next highest vals 305 | torch.index_select(x1, 0, idx, out=xx1) 306 | torch.index_select(y1, 0, idx, out=yy1) 307 | torch.index_select(x2, 0, idx, out=xx2) 308 | torch.index_select(y2, 0, idx, out=yy2) 309 | # store element-wise max with next highest score 310 | xx1 = torch.clamp(xx1, min=x1[i]) 311 | yy1 = torch.clamp(yy1, min=y1[i]) 312 | xx2 = torch.clamp(xx2, max=x2[i]) 313 | yy2 = torch.clamp(yy2, max=y2[i]) 314 | w.resize_as_(xx2) 315 | h.resize_as_(yy2) 316 | w = xx2 - xx1 317 | h = yy2 - yy1 318 | # check sizes of xx1 and xx2.. 
after each iteration 319 | w = torch.clamp(w, min=0.0) 320 | h = torch.clamp(h, min=0.0) 321 | inter = w*h 322 | # IoU = i / (area(a) + area(b) - i) 323 | rem_areas = torch.index_select(area, 0, idx) # load remaining areas) 324 | union = (rem_areas - inter) + area[i] 325 | IoU = inter/union # store result in iou 326 | # keep only elements with an IoU <= overlap 327 | idx = idx[IoU.le(overlap)] 328 | return keep, count 329 | 330 | 331 | -------------------------------------------------------------------------------- /retinaface/utils/nms/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/foamliu/MobileFaceNet-PyTorch/2c720d6875488e94f4d4eb870936cb05613b74d5/retinaface/utils/nms/__init__.py -------------------------------------------------------------------------------- /retinaface/utils/nms/py_cpu_nms.py: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------- 2 | # Fast R-CNN 3 | # Copyright (c) 2015 Microsoft 4 | # Licensed under The MIT License [see LICENSE for details] 5 | # Written by Ross Girshick 6 | # -------------------------------------------------------- 7 | 8 | import numpy as np 9 | 10 | def py_cpu_nms(dets, thresh): 11 | """Pure Python NMS baseline.""" 12 | x1 = dets[:, 0] 13 | y1 = dets[:, 1] 14 | x2 = dets[:, 2] 15 | y2 = dets[:, 3] 16 | scores = dets[:, 4] 17 | 18 | areas = (x2 - x1 + 1) * (y2 - y1 + 1) 19 | order = scores.argsort()[::-1] 20 | 21 | keep = [] 22 | while order.size > 0: 23 | i = order[0] 24 | keep.append(i) 25 | xx1 = np.maximum(x1[i], x1[order[1:]]) 26 | yy1 = np.maximum(y1[i], y1[order[1:]]) 27 | xx2 = np.minimum(x2[i], x2[order[1:]]) 28 | yy2 = np.minimum(y2[i], y2[order[1:]]) 29 | 30 | w = np.maximum(0.0, xx2 - xx1 + 1) 31 | h = np.maximum(0.0, yy2 - yy1 + 1) 32 | inter = w * h 33 | ovr = inter / (areas[i] + areas[order[1:]] - inter) 34 | 35 | inds = np.where(ovr <= thresh)[0] 36 | order = order[inds + 1] 37 | 38 | return keep 39 | -------------------------------------------------------------------------------- /retinaface/utils/timer.py: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------- 2 | # Fast R-CNN 3 | # Copyright (c) 2015 Microsoft 4 | # Licensed under The MIT License [see LICENSE for details] 5 | # Written by Ross Girshick 6 | # -------------------------------------------------------- 7 | 8 | import time 9 | 10 | 11 | class Timer(object): 12 | """A simple timer.""" 13 | def __init__(self): 14 | self.total_time = 0. 15 | self.calls = 0 16 | self.start_time = 0. 17 | self.diff = 0. 18 | self.average_time = 0. 19 | 20 | def tic(self): 21 | # using time.time instead of time.clock because time time.clock 22 | # does not normalize for multithreading 23 | self.start_time = time.time() 24 | 25 | def toc(self, average=True): 26 | self.diff = time.time() - self.start_time 27 | self.total_time += self.diff 28 | self.calls += 1 29 | self.average_time = self.total_time / self.calls 30 | if average: 31 | return self.average_time 32 | else: 33 | return self.diff 34 | 35 | def clear(self): 36 | self.total_time = 0. 37 | self.calls = 0 38 | self.start_time = 0. 39 | self.diff = 0. 40 | self.average_time = 0. 
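For reference, a minimal usage sketch (not part of the repository) for the pure-Python NMS shown above in py_cpu_nms.py. Each row of dets is [x1, y1, x2, y2, score], matching the column indexing inside py_cpu_nms, and thresh is the IoU above which lower-scored boxes are suppressed; the sample values below are made up for illustration.

import numpy as np
from retinaface.utils.nms.py_cpu_nms import py_cpu_nms

# two heavily overlapping boxes plus one separate box; the last column is the score
dets = np.array([[10, 10, 100, 100, 0.9],
                 [12, 12, 98, 102, 0.6],
                 [200, 200, 260, 260, 0.8]], dtype=np.float32)

keep = py_cpu_nms(dets, thresh=0.4)  # indices of the boxes that survive suppression
print(keep)  # [0, 2] -- the lower-scored near-duplicate of box 0 is dropped
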
41 | -------------------------------------------------------------------------------- /retinaface/weights/mobilenet0.25_Final.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/foamliu/MobileFaceNet-PyTorch/2c720d6875488e94f4d4eb870936cb05613b74d5/retinaface/weights/mobilenet0.25_Final.pth -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | from torch import nn 4 | from torch.optim.lr_scheduler import MultiStepLR 5 | from torch.utils.tensorboard import SummaryWriter 6 | 7 | from config import device, grad_clip, print_freq 8 | from data_gen import ArcFaceDataset 9 | from focal_loss import FocalLoss 10 | from megaface_eval import megaface_test 11 | from mobilefacenet import MobileFaceNet, ArcMarginModel 12 | from utils import parse_args, save_checkpoint, AverageMeter, accuracy, get_logger, clip_gradient 13 | 14 | 15 | def train_net(args): 16 | torch.manual_seed(7) 17 | np.random.seed(7) 18 | checkpoint = args.checkpoint 19 | start_epoch = 0 20 | best_acc = float('-inf') 21 | writer = SummaryWriter() 22 | epochs_since_improvement = 0 23 | 24 | # Initialize / load checkpoint 25 | if checkpoint is None: 26 | model = MobileFaceNet() 27 | metric_fc = ArcMarginModel(args) 28 | 29 | optimizer = torch.optim.SGD([{'params': model.conv1.parameters()}, 30 | {'params': model.dw_conv.parameters()}, 31 | {'params': model.features.parameters()}, 32 | {'params': model.conv2.parameters()}, 33 | {'params': model.gdconv.parameters()}, 34 | {'params': model.conv3.parameters(), 'weight_decay': 4e-4}, 35 | {'params': model.bn.parameters()}, 36 | {'params': metric_fc.parameters()}], 37 | lr=args.lr, momentum=args.mom, weight_decay=args.weight_decay, nesterov=True) 38 | 39 | model = nn.DataParallel(model) 40 | metric_fc = nn.DataParallel(metric_fc) 41 | 42 | else: 43 | checkpoint = torch.load(checkpoint) 44 | start_epoch = checkpoint['epoch'] + 1 45 | epochs_since_improvement = checkpoint['epochs_since_improvement'] 46 | model = checkpoint['model'] 47 | metric_fc = checkpoint['metric_fc'] 48 | optimizer = checkpoint['optimizer'] 49 | 50 | logger = get_logger() 51 | 52 | # Move to GPU, if available 53 | model = model.to(device) 54 | metric_fc = metric_fc.to(device) 55 | 56 | # Loss function 57 | if args.focal_loss: 58 | criterion = FocalLoss(gamma=args.gamma).to(device) 59 | else: 60 | criterion = nn.CrossEntropyLoss().to(device) 61 | 62 | # Custom dataloaders 63 | train_dataset = ArcFaceDataset('train') 64 | train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=4) 65 | 66 | scheduler = MultiStepLR(optimizer, milestones=[5, 10, 15, 20], gamma=0.1) 67 | 68 | # Epochs 69 | for epoch in range(start_epoch, args.end_epoch): 70 | # One epoch's training 71 | train_loss, train_acc = train(train_loader=train_loader, 72 | model=model, 73 | metric_fc=metric_fc, 74 | criterion=criterion, 75 | optimizer=optimizer, 76 | epoch=epoch, 77 | logger=logger) 78 | 79 | lr = optimizer.param_groups[0]['lr'] 80 | print('\nLearning rate={}\n'.format(lr)) 81 | 82 | writer.add_scalar('model/train_loss', train_loss, epoch) 83 | writer.add_scalar('model/train_acc', train_acc, epoch) 84 | writer.add_scalar('model/learning_rate', lr, epoch) 85 | 86 | # One epoch's validation 87 | megaface_acc = megaface_test(model) 88 | 
writer.add_scalar('model/megaface_accuracy', megaface_acc, epoch) 89 | 90 | # Check if there was an improvement 91 | is_best = megaface_acc > best_acc 92 | best_acc = max(megaface_acc, best_acc) 93 | if not is_best: 94 | epochs_since_improvement += 1 95 | print("\nEpochs since last improvement: %d\n" % (epochs_since_improvement,)) 96 | else: 97 | epochs_since_improvement = 0 98 | 99 | # Save checkpoint 100 | save_checkpoint(epoch, epochs_since_improvement, model, metric_fc, optimizer, best_acc, is_best) 101 | scheduler.step(epoch) 102 | 103 | 104 | def train(train_loader, model, metric_fc, criterion, optimizer, epoch, logger): 105 | model.train() # train mode (dropout and batchnorm is used) 106 | metric_fc.train() 107 | 108 | losses = AverageMeter() 109 | top5_accs = AverageMeter() 110 | 111 | # Batches 112 | for i, (img, label) in enumerate(train_loader): 113 | # Move to GPU, if available 114 | img = img.to(device) 115 | label = label.to(device) # [N, 1] 116 | 117 | # Forward prop. 118 | feature = model(img) # embedding => [N, 512] 119 | output = metric_fc(feature, label) # class_id_out => [N, 10575] 120 | 121 | # Calculate loss 122 | loss = criterion(output, label) 123 | 124 | # Back prop. 125 | optimizer.zero_grad() 126 | loss.backward() 127 | 128 | # Clip gradients 129 | clip_gradient(optimizer, grad_clip) 130 | 131 | # Update weights 132 | optimizer.step() 133 | 134 | # Keep track of metrics 135 | losses.update(loss.item()) 136 | top5_accuracy = accuracy(output, label, 5) 137 | top5_accs.update(top5_accuracy) 138 | 139 | # Print status 140 | if i % print_freq == 0: 141 | logger.info('Epoch: [{0}][{1}/{2}]\t' 142 | 'Loss {loss.val:.4f} ({loss.avg:.4f})\t' 143 | 'Top5 Accuracy {top5_accs.val:.3f} ({top5_accs.avg:.3f})'.format(epoch, i, len(train_loader), 144 | loss=losses, 145 | top5_accs=top5_accs)) 146 | 147 | return losses.avg, top5_accs.avg 148 | 149 | 150 | def main(): 151 | global args 152 | args = parse_args() 153 | train_net(args) 154 | 155 | 156 | if __name__ == '__main__': 157 | main() 158 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | 4 | import cv2 as cv 5 | import numpy as np 6 | import torch 7 | from PIL import Image 8 | 9 | from align_faces import get_reference_facial_points, warp_and_crop_face 10 | from config import image_h, image_w 11 | from retinaface.detector import detector 12 | 13 | 14 | def save_checkpoint(epoch, epochs_since_improvement, model, metric_fc, optimizer, acc, is_best): 15 | print('saving checkpoint ...') 16 | state = {'epoch': epoch, 17 | 'epochs_since_improvement': epochs_since_improvement, 18 | 'acc': acc, 19 | 'model': model, 20 | 'metric_fc': metric_fc, 21 | 'optimizer': optimizer} 22 | # filename = 'checkpoint_' + str(epoch) + '_' + str(loss) + '.tar' 23 | filename = 'checkpoint.tar' 24 | torch.save(state, filename) 25 | # If this checkpoint is the best so far, store a copy so it doesn't get overwritten by a worse checkpoint 26 | if is_best: 27 | torch.save(state, 'BEST_checkpoint.tar') 28 | 29 | 30 | class AverageMeter(object): 31 | """ 32 | Keeps track of most recent, average, sum, and count of a metric. 
33 | """ 34 | 35 | def __init__(self): 36 | self.reset() 37 | 38 | def reset(self): 39 | self.val = 0 40 | self.avg = 0 41 | self.sum = 0 42 | self.count = 0 43 | 44 | def update(self, val, n=1): 45 | self.val = val 46 | self.sum += val * n 47 | self.count += n 48 | self.avg = self.sum / self.count 49 | 50 | 51 | def clip_gradient(optimizer, grad_clip): 52 | """ 53 | Clips gradients computed during backpropagation to avoid explosion of gradients. 54 | :param optimizer: optimizer with the gradients to be clipped 55 | :param grad_clip: clip value 56 | """ 57 | for group in optimizer.param_groups: 58 | for param in group['params']: 59 | if param.grad is not None: 60 | param.grad.data.clamp_(-grad_clip, grad_clip) 61 | 62 | 63 | def adjust_learning_rate(optimizer, shrink_factor): 64 | """ 65 | Shrinks learning rate by a specified factor. 66 | :param optimizer: optimizer whose learning rate must be shrunk. 67 | :param shrink_factor: factor in interval (0, 1) to multiply learning rate with. 68 | """ 69 | 70 | print("\nDECAYING learning rate.") 71 | for param_group in optimizer.param_groups: 72 | param_group['lr'] = param_group['lr'] * shrink_factor 73 | print("The new learning rate is %f\n" % (optimizer.param_groups[0]['lr'],)) 74 | 75 | 76 | def accuracy(scores, targets, k=1): 77 | batch_size = targets.size(0) 78 | _, ind = scores.topk(k, 1, True, True) 79 | correct = ind.eq(targets.view(-1, 1).expand_as(ind)) 80 | correct_total = correct.view(-1).float().sum() # 0D tensor 81 | return correct_total.item() * (100.0 / batch_size) 82 | 83 | 84 | def align_face(img_fn, facial5points): 85 | raw = cv.imread(img_fn, True) # BGR 86 | facial5points = np.reshape(facial5points, (2, 5)) 87 | 88 | crop_size = (image_h, image_w) 89 | 90 | default_square = True 91 | inner_padding_factor = 0.25 92 | outer_padding = (0, 0) 93 | output_size = (image_h, image_w) 94 | 95 | # get the reference 5 landmarks position in the crop settings 96 | reference_5pts = get_reference_facial_points( 97 | output_size, inner_padding_factor, outer_padding, default_square) 98 | 99 | # dst_img = warp_and_crop_face(raw, facial5points) 100 | dst_img = warp_and_crop_face(raw, facial5points, reference_pts=reference_5pts, crop_size=crop_size) 101 | return dst_img 102 | 103 | 104 | def get_face_attributes(full_path): 105 | try: 106 | img = Image.open(full_path).convert('RGB') 107 | bounding_boxes, landmarks = detector.detect_faces(img) 108 | 109 | if len(landmarks) > 0: 110 | landmarks = [int(round(x)) for x in landmarks[0]] 111 | return True, landmarks 112 | 113 | except KeyboardInterrupt: 114 | raise 115 | except: 116 | pass 117 | return False, None 118 | 119 | 120 | def select_significant_face(bounding_boxes): 121 | best_index = -1 122 | best_rank = float('-inf') 123 | for i, b in enumerate(bounding_boxes): 124 | bbox_w, bbox_h = b[2] - b[0], b[3] - b[1] 125 | area = bbox_w * bbox_h 126 | score = b[4] 127 | rank = score * area 128 | if rank > best_rank: 129 | best_rank = rank 130 | best_index = i 131 | 132 | return best_index 133 | 134 | 135 | def get_central_face_attributes(full_path): 136 | try: 137 | img = cv.imread(full_path) 138 | bounding_boxes, landmarks = detector.detect_faces(img) 139 | 140 | if len(landmarks) > 0: 141 | i = select_significant_face(bounding_boxes) 142 | return True, [bounding_boxes[i]], [landmarks[i]] 143 | 144 | except KeyboardInterrupt: 145 | raise 146 | except ValueError: 147 | pass 148 | except IOError: 149 | pass 150 | return False, None, None 151 | 152 | 153 | def get_all_face_attributes(full_path): 154 | 
img = Image.open(full_path).convert('RGB') 155 | bounding_boxes, landmarks = detector.detect_faces(img) 156 | return bounding_boxes, landmarks 157 | 158 | 159 | def draw_bboxes(img, bounding_boxes, facial_landmarks=[]): 160 | for b in bounding_boxes: 161 | cv.rectangle(img, (int(b[0]), int(b[1])), (int(b[2]), int(b[3])), (255, 255, 255), 1) 162 | 163 | for p in facial_landmarks: 164 | for i in range(5): 165 | cv.circle(img, (int(p[i]), int(p[i + 5])), 1, (0, 255, 0), -1) 166 | 167 | break # only first 168 | 169 | return img 170 | 171 | 172 | def parse_args(): 173 | parser = argparse.ArgumentParser(description='Train face network') 174 | # general 175 | parser.add_argument('--pretrained', type=bool, default=False, help='pretrained model') 176 | parser.add_argument('--end-epoch', type=int, default=1000, help='training epoch size.') 177 | parser.add_argument('--lr', type=float, default=0.1, help='start learning rate') 178 | parser.add_argument('--optimizer', default='sgd', help='optimizer') 179 | parser.add_argument('--weight-decay', type=float, default=4e-5, help='weight decay') 180 | parser.add_argument('--mom', type=float, default=0.9, help='momentum') 181 | parser.add_argument('--emb-size', type=int, default=512, help='embedding length') 182 | parser.add_argument('--batch-size', type=int, default=256, help='batch size in each context') 183 | parser.add_argument('--margin-m', type=float, default=0.5, help='angular margin m') 184 | parser.add_argument('--margin-s', type=float, default=64.0, help='feature scale s') 185 | parser.add_argument('--easy-margin', type=bool, default=False, help='easy margin') 186 | parser.add_argument('--focal-loss', type=bool, default=False, help='focal loss') 187 | parser.add_argument('--gamma', type=float, default=2.0, help='focusing parameter gamma') 188 | parser.add_argument('--checkpoint', type=str, default=None, help='checkpoint') 189 | args = parser.parse_args() 190 | return args 191 | 192 | 193 | def get_logger(): 194 | logger = logging.getLogger() 195 | handler = logging.StreamHandler() 196 | formatter = logging.Formatter("%(asctime)s %(levelname)s \t%(message)s") 197 | handler.setFormatter(formatter) 198 | logger.addHandler(handler) 199 | logger.setLevel(logging.INFO) 200 | return logger 201 | 202 | 203 | def ensure_folder(folder): 204 | import os 205 | if not os.path.isdir(folder): 206 | os.mkdir(folder) 207 | --------------------------------------------------------------------------------
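
Taken together, the detector in retinaface/, the alignment helpers in utils.py, and the MobileFaceNet embedder trained by train.py form a detect-align-embed pipeline. The sketch below shows one way these pieces could be wired up at inference time; it is illustrative only. The path 'test.jpg' is a placeholder, a CUDA device is assumed (the checkpoint stores a DataParallel-wrapped model), and the plain [0, 1] scaling is an assumption -- the exact train-time preprocessing lives in data_gen.py / pre_process.py, which are not reproduced here.

import torch
import torch.nn.functional as F

from config import device
from utils import align_face, get_central_face_attributes

img_fn = 'test.jpg'  # placeholder path -- substitute a real image

# detect the most prominent face and its 5 landmarks (see get_central_face_attributes above)
found, boxes, landmarks = get_central_face_attributes(img_fn)
assert found, 'no face detected'

# warp/crop the face to the (image_h, image_w) template defined in config.py
aligned = align_face(img_fn, landmarks[0])  # BGR uint8, HxWx3

# save_checkpoint() stores the whole model object under the 'model' key,
# mirroring how train.py resumes from a checkpoint
model = torch.load('BEST_checkpoint.tar')['model'].to(device).eval()

# ASSUMPTION: plain [0, 1] scaling stands in for the real transforms in data_gen.py
x = torch.from_numpy(aligned.transpose(2, 0, 1)).float().unsqueeze(0) / 255.0

with torch.no_grad():
    embedding = model(x.to(device))  # [1, 512], as in train.py

# embeddings are typically L2-normalized and compared with cosine similarity for verification
embedding = F.normalize(embedding, dim=1)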