├── LICENSE ├── README.md ├── bin ├── __init__.py ├── downstream.py └── self_learning.py ├── config ├── __init__.py ├── default_hparas.py ├── downstream │ ├── phn_clf_example.yml │ └── spk_clf_example.yml └── self_supervised │ ├── apc_example.yml │ ├── npc_example.yml │ └── vqapc_example.yml ├── dataset ├── __init__.py ├── librispeech.py ├── wsj_phn.py └── wsj_spk.py ├── eg.sh ├── main.py ├── model ├── __init__.py ├── apc.py ├── classifier.py ├── npc.py └── vq.py ├── preprocess ├── README.md ├── phn_split │ ├── dev93_phn.csv │ ├── phones.txt │ ├── si284-0.1_phn.csv │ └── si284-0.9_phn.csv ├── refactor_wsj.py └── spk_split │ ├── dev_spk │ ├── test_spk │ └── train_spk ├── requirements.txt └── src ├── __init__.py ├── audio.py ├── data.py ├── optim.py ├── solver.py └── util.py /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Alexander H. Liu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Non-Autoregressive Predictive Coding 2 | 3 | This repository contains the implementation of Non-Autoregressive Predictive Coding (NPC) as described in [the preprint paper](https://arxiv.org/abs/2011.00406) submitted to ICASSP 2021. 4 | 5 | 6 | A quick example for training NPC 7 | ``` 8 | python main.py --config config/self_supervised/npc_example.yml \ 9 | --task self-learning 10 | ``` 11 | 12 | - For more complete examples including downstream tasks, please see [the example script](eg.sh). 13 | 14 | - For preparing data, please visit [preprocess](preprocess/). 15 | 16 | - For detailed hyperparameters setting and description, please checkout [example config file of NPC](config/self_supervised/npc_example.yml). 17 | 18 | - For all run-time options, use `-h` flag. 19 | 20 | - Implementation of [Autoregressive Predictive Coding](https://arxiv.org/abs/1910.12607) (APC, 2019, Chung *et al*.) and [Vector-Quantized APC](https://arxiv.org/abs/2005.08392) (VQ-APC, 2020, Chung *et al*.) are also available using similar training/downstream execution with example config files [here](config/self_supervised/vqapc_example.yml). 21 | 22 | ## Some notes 23 | 24 | - We found the unmasked feature produced by the last ConvBlock layer a better representation. In the phone classification tasks, switching to the unmasked feature (PER 25.6%) provided a 1.6% improvement over the masked feature (PER 27.2%). Currently, this is not included in the preprint version and will be updated to the paper in the future. Please refer to [downstream examples](config/downstream) to activate this option. 25 | 26 | - APC/VQ-APC are implemented with the following modifications for improvement (for the unmodified version, please visit the official implementation of [APC](https://github.com/iamyuanchung/Autoregressive-Predictive-Coding) / [VQAPC](https://github.com/iamyuanchung/VQ-APC/tree/96230cc358b174b736b4c0e7664b3e72b304d9b0)) 27 | 28 | - Multi-group VQ available for VQ-APC, but with VQ on last layer only 29 | 30 | - Using utterance-wised CMVN surface feature(just as NPC did) 31 | 32 | - Using Gumbel Softmax from [official API](https://pytorch.org/docs/stable/nn.functional.html#gumbel-softmax) of pytorch 33 | 34 | - See [package requirement](requirements.txt) for toolkits used, `tensorboard` can be used to access log files in `--logdir`. 35 | 36 | 37 | ## Contact 38 | 39 | Feel free to contact me for questions or feedbacks, my email can be found in the paper or my [personal page](https://alexander-h-liu.github.io). 40 | 41 | ## Citation 42 | 43 | If you find our work and/or this repository helpful, please do consider citing us 44 | 45 | ``` 46 | @article{liu2020nonautoregressive, 47 | title = {Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies}, 48 | author = {Liu, Alexander and Chung, Yu-An and Glass, James}, 49 | journal = {arXiv preprint arXiv:2011.00406}, 50 | year = {2020} 51 | } 52 | ``` 53 | -------------------------------------------------------------------------------- /bin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Alexander-H-Liu/NPC/0d7d189faa21176c5e2d0e2fe3ff986717b1e926/bin/__init__.py -------------------------------------------------------------------------------- /bin/downstream.py: -------------------------------------------------------------------------------- 1 | import os 2 | import yaml 3 | import copy 4 | import torch 5 | from src.optim import Optimizer 6 | from src.data import prepare_data 7 | from src.util import human_format, cal_per 8 | from src.solver import BaseSolver # Some basic functions 9 | from model.classifier import CLF 10 | 11 | class Solver(BaseSolver): 12 | ''' Solver for training''' 13 | def __init__(self, config, paras): 14 | super().__init__(config, paras) 15 | # Logger settings 16 | self.best_dev_er = 1.0 17 | self.cur_epoch = 0 18 | # Configs following self-supervised learning 19 | self.task = self.paras.task 20 | assert self.task in ['phn-clf','spk-clf'], 'unsupported task' 21 | self.ssl_config = yaml.load( 22 | open(self.config['model']['feat']['config'], 'r'), 23 | Loader=yaml.FullLoader) 24 | self.feature = self.ssl_config['model']['method'] 25 | if self.feature == 'npc' and 'spec' in self.config['model']['feat']: 26 | # NPC has additional option to use unmasked feature 27 | self.feat_spec = self.config['model']['feat']['spec'] 28 | else: 29 | self.feat_spec = None 30 | self.config['data']['audio'] = self.ssl_config['data']['audio'] 31 | 32 | def fetch_data(self, data, train=True): 33 | ''' Move data to device ''' 34 | file_id, audio_feat, audio_len, label = data 35 | if self.gpu: 36 | audio_feat = audio_feat.cuda() 37 | label = label.cuda() 38 | # Extract feature 39 | with torch.no_grad(): 40 | if self.feat_spec is not None: 41 | # Get unmasked feature from particular NPC layer 42 | n_layer_feat = int(self.feat_spec.split('-')[-1]) 43 | audio_feat = self.feat_extractor.get_unmasked_feat(audio_feat,n_layer_feat) 44 | elif self.feature == 'npc': 45 | # Get masked feature from NPC 46 | _, audio_feat = self.feat_extractor(audio_feat,testing=True) 47 | else: 48 | # Get feature from APC based model 49 | _, audio_feat = self.feat_extractor(audio_feat, audio_len, 50 | testing=True) 51 | # Mean pool feature for spkr classification 52 | if self.task == 'spk-clf': 53 | single_feat = [] 54 | for a_feat, a_len in zip(audio_feat,audio_len): 55 | single_feat.append(a_feat[:a_len].mean(dim=0)) 56 | audio_feat = torch.stack(single_feat, dim=0) 57 | return file_id, audio_feat, audio_len, label 58 | 59 | def load_data(self): 60 | ''' Load data for training/validation ''' 61 | self.tr_set, self.dv_set, self.tt_set, self.audio_dim, msg = \ 62 | prepare_data(self.paras.njobs,self.paras.dev_njobs,self.paras.gpu, 63 | self.paras.pin_memory, **self.config['data']) 64 | self.verbose(msg) 65 | 66 | def set_model(self): 67 | ''' Setup model and optimizer ''' 68 | # Load SSL models for feature extraction 69 | self.verbose([' Load feat. extractor ckpt from '\ 70 | +self.config['model']['feat']['ckpt']]) 71 | if self.feature in ['apc','vqapc']: 72 | from model.apc import APC as Net 73 | elif self.feature == 'npc': 74 | from model.npc import NPC as Net 75 | if self.feat_spec is not None: 76 | self.verbose([' Using specific feature: '+self.feat_spec]) 77 | else: 78 | raise NotImplementedError 79 | self.feat_extractor = Net(input_size=self.audio_dim, 80 | **self.ssl_config['model']['paras']) 81 | ckpt = torch.load( self.config['model']['feat']['ckpt'], 82 | map_location=self.device if self.mode == 'train' else 'cpu') 83 | ckpt['model'] = {k.replace('module.','',1):v \ 84 | for k,v in ckpt['model'].items()} 85 | self.feat_extractor.load_state_dict(ckpt['model']) 86 | 87 | # Classifier model 88 | self.model = CLF(feat_dim=self.feat_extractor.code_dim, 89 | **self.config['model']['clf']) 90 | if self.gpu: 91 | self.feat_extractor = self.feat_extractor.cuda() 92 | self.feat_extractor.eval() 93 | self.model = self.model.cuda() 94 | model_paras = [{'params': self.model.parameters()}] 95 | 96 | # Losses 97 | ignore_idx = 0 if self.task == 'phn-clf' else -1 98 | self.loss = torch.nn.CrossEntropyLoss(ignore_index=ignore_idx) 99 | if self.gpu: 100 | self.loss = self.loss.cuda() 101 | 102 | # Optimizer 103 | self.optimizer = Optimizer(model_paras, **self.config['hparas']) 104 | self.verbose(self.optimizer.create_msg()) 105 | 106 | self.load_ckpt() 107 | self.model.train() 108 | 109 | 110 | def exec(self): 111 | ''' Training End-to-end ASR system ''' 112 | if self.paras.mode =='train': 113 | self.verbose('Total training epoch {}.'.format( 114 | human_format(self.epoch))) 115 | self.timer.set() 116 | ep_len = len(self.tr_set) 117 | for ep in range(self.epoch): 118 | if ep>0: 119 | # Lr decay if needed 120 | self.optimizer.decay() 121 | for data in self.tr_set: 122 | # Pre-step : do zero_grad 123 | self.optimizer.pre_step(self.step) 124 | 125 | # Fetch data 126 | self.timer.cnt('rd') 127 | _, audio_feat, audio_len, label = self.fetch_data(data) 128 | 129 | # Forward 130 | pred = self.model(audio_feat) 131 | if self.task =='phn-clf': 132 | pred = pred.permute(0,2,1) # BxCxT for phn clf 133 | loss = self.loss(pred, label) 134 | self.timer.cnt('fw') 135 | 136 | # Backprop 137 | grad_norm = self.backward(loss) 138 | self.step += 1 139 | 140 | # Logger 141 | if (self.step == 1) or (self.step%self.PROGRESS_STEP == 0): 142 | self.progress(' {:2.1f} % | Loss - {:.2f} | Grad. Norm - {:.2f} | {}' 143 | .format(100*float(self.step%ep_len)/ep_len, 144 | loss.cpu().item(), 145 | grad_norm, 146 | self.timer.show())) 147 | self.write_log(self.task+'_loss', {'tr': loss}) 148 | if self.task == 'phn-clf': 149 | tr_er = cal_per(pred,label,audio_len)[0] 150 | else: 151 | tr_er = (pred.argmax(dim=-1)!=label) 152 | tr_er = tr_er.sum().detach().cpu().float()/len(label) 153 | self.write_log(self.task+'_er',{'tr':tr_er}) 154 | # End of step 155 | self.timer.set() 156 | # End of epoch 157 | self.cur_epoch += 1 158 | self.validate() 159 | 160 | # Test at the end 161 | self.validate(test=True) 162 | self.log.close() 163 | 164 | def validate(self, test=False): 165 | # Eval mode 166 | self.model.eval() 167 | val_loss = [] 168 | split = 'dev' 169 | val_hit,val_total = 0.0, 0.0 170 | ds = self.tt_set if test else self.dv_set 171 | 172 | # In training mode, best model is stored in RAM for test 173 | # ToDo: load ckpt 174 | if test: 175 | split = 'test' 176 | if self.paras.mode =='train': 177 | self.model = self.best_model 178 | if self.gpu: 179 | self.model = self.model.cuda() 180 | 181 | for i, data in enumerate(ds): 182 | self.progress('Valid step - {}/{}'.format(i+1, len(ds))) 183 | # Fetch data 184 | _, audio_feat, audio_len, label = self.fetch_data(data) 185 | 186 | # Forward model 187 | with torch.no_grad(): 188 | # Prediction 189 | pred = self.model(audio_feat) 190 | if self.task == 'phn-clf': 191 | pred = pred.permute(0,2,1) # BxCxT 192 | # Accumulate batch result 193 | val_loss.append(self.loss(pred, label)) 194 | if self.task == 'phn-clf': 195 | _, hit, total = cal_per(pred, label, audio_len) 196 | val_hit += hit 197 | val_total += total 198 | else: 199 | hit = (pred.argmax(dim=-1)==label).sum() 200 | val_hit += hit.detach().cpu().float() 201 | val_total += len(label) 202 | # Write testing prediction if needed 203 | if test and self.paras.write_test: 204 | if self.task == 'phn-clf': 205 | pred = pred.argmax(dim=1).detach().cpu() 206 | label = label.cpu() 207 | with open(os.path.join(self.ckpdir,self.task+'.csv'),'a') as f: 208 | for p,l,a_len in zip(pred,label,audio_len): 209 | for x, y in zip(p[:a_len].tolist(),l[:a_len].tolist()): 210 | f.write('{}\t{}\n'.format(x,y)) 211 | 212 | # Record metric, store ckpt by dev error rate 213 | val_loss = sum(val_loss)/len(val_loss) 214 | val_er = 1.0-val_hit/val_total 215 | self.write_log(self.task+'_loss', {split:val_loss}) 216 | self.write_log(self.task+'_er', {split:val_er}) 217 | if split=='dev' and self.best_dev_er > val_er: 218 | self.best_dev_er = val_er 219 | self.save_checkpoint('best.pth',self.task+'_er',val_er) 220 | self.best_model = copy.deepcopy(self.model.cpu()) # Clone for test 221 | 222 | # Resume training 223 | if self.gpu: 224 | self.model = self.model.cuda() 225 | self.model.train() -------------------------------------------------------------------------------- /bin/self_learning.py: -------------------------------------------------------------------------------- 1 | import os 2 | import torch 3 | from src.optim import Optimizer 4 | from src.data import prepare_data 5 | from src.util import human_format, draw 6 | from src.solver import BaseSolver # Some basic functions 7 | 8 | class Solver(BaseSolver): 9 | ''' Solver for training''' 10 | def __init__(self, config, paras): 11 | super().__init__(config, paras) 12 | # Logger settings 13 | self.val_loss = 1000 14 | self.cur_epoch = 0 15 | 16 | def fetch_data(self, data): 17 | ''' Move data to device ''' 18 | file_id, audio_feat, audio_len = data 19 | if self.gpu: 20 | audio_feat = audio_feat.cuda() 21 | return file_id, audio_feat, audio_len 22 | 23 | def load_data(self): 24 | ''' Load data for training/validation ''' 25 | self.tr_set, self.dv_set, _, self.audio_dim, msg = \ 26 | prepare_data(self.paras.njobs, self.paras.dev_njobs, self.paras.gpu, 27 | self.paras.pin_memory, **self.config['data']) 28 | self.verbose(msg) 29 | 30 | def set_model(self): 31 | ''' Setup model and optimizer ''' 32 | # Model 33 | self.method = self.config['model']['method'] 34 | if self.method in ['apc','vqapc']: 35 | self.n_future = self.config['model']['n_future'] 36 | from model.apc import APC as Net 37 | elif self.method == 'npc': 38 | from model.npc import NPC as Net 39 | else: 40 | raise NotImplementedError 41 | self.model = Net(input_size=self.audio_dim, **self.config['model']['paras']) 42 | if self.gpu: 43 | self.model = self.model.cuda() 44 | self.verbose(self.model.create_msg()) 45 | model_paras = [{'params': self.model.parameters()}] 46 | 47 | # Loss 48 | if 'npc' in self.method: 49 | # Avoid reduction for NPC for zero-padding 50 | self.loss = torch.nn.L1Loss(reduction='none') 51 | else: 52 | # APC family have zero-padding with torch API 53 | self.loss = torch.nn.L1Loss() 54 | if self.gpu: 55 | self.loss = self.loss.cuda() 56 | 57 | # Optimizer 58 | self.optimizer = Optimizer(model_paras, **self.config['hparas']) 59 | self.verbose(self.optimizer.create_msg()) 60 | 61 | # Automatically load pre-trained model if self.paras.load is given 62 | self.load_ckpt() 63 | 64 | # ToDo: Data Parallel? 65 | # self.model = torch.nn.DataParallel(self.model) 66 | self.model.train() 67 | 68 | def exec(self): 69 | ''' Training End-to-end ASR system ''' 70 | self.verbose('Total training epoch {}.'.format( 71 | human_format(self.epoch))) 72 | self.timer.set() 73 | aug_loss = None 74 | ep_len = len(self.tr_set) 75 | 76 | for ep in range(self.epoch): 77 | # Pre-step, decay 78 | if ep>0: 79 | self.optimizer.decay() 80 | 81 | for data in self.tr_set: 82 | # Pre-step : update tf_rate/lr_rate and do zero_grad 83 | self.optimizer.pre_step(self.step) 84 | 85 | # Fetch data 86 | _, audio_feat, audio_len = self.fetch_data(data) 87 | self.timer.cnt('rd') 88 | 89 | # Forward real data 90 | if 'npc' in self.method: 91 | # NPC: input = target 92 | pred, _ = self.model(audio_feat) 93 | loss = self.loss(pred, audio_feat) 94 | # Compute loss on valid part only 95 | effective_loss = 0 96 | for i,a_len in enumerate(audio_len): 97 | effective_loss += loss[i,:a_len,:].mean(dim=-1).sum() 98 | loss = effective_loss/sum(audio_len) 99 | else: 100 | # APC: input = shifted target 101 | audio_len = [l-self.n_future for l in audio_len] 102 | pred, _ = self.model(audio_feat[:,:-self.n_future,:], audio_len, testing=False) 103 | loss = self.loss(pred, audio_feat[:,self.n_future:,:]) 104 | self.timer.cnt('fw') 105 | # Backprop 106 | grad_norm = self.backward(loss) 107 | self.step += 1 108 | 109 | # Logger 110 | if (self.step == 1) or (self.step % self.PROGRESS_STEP == 0): 111 | self.progress(' {:2.1f} % | Loss - {:.2f} | Grad. Norm - {:.2f} | {}' 112 | .format(100*float(self.step%ep_len)/ep_len, 113 | loss.cpu().item(), 114 | grad_norm, 115 | self.timer.show())) 116 | self.write_log('loss', {'tr': loss}) 117 | 118 | if (self.step == 1) or (self.step % self.PLOT_STEP == 0): 119 | # Perplexity of P(token) 120 | g1_ppx, g2_ppx = self.model.report_ppx() 121 | self.write_log('ppx', {'group 1':g1_ppx, 122 | 'group 2':g2_ppx}) 123 | g1_usg, g2_usg = self.model.report_usg() # Empty cache 124 | # Plots 125 | if self.paras.draw: 126 | g1_hist = draw(g1_usg, hist=True) 127 | g2_hist = draw(g2_usg, hist=True) 128 | self.write_log('VQ Group 1 Hist.',g1_hist) 129 | self.write_log('VQ Group 2 Hist.',g2_hist) 130 | # Some spectrograms 131 | plt_idx = 0 132 | self.write_log('Spectrogram (raw)', draw(audio_feat[plt_idx])) 133 | self.write_log('Spectrogram (pred)', draw(pred[plt_idx])) 134 | 135 | # End of step 136 | self.timer.set() 137 | # End of epoch 138 | self.cur_epoch += 1 139 | self.validate() 140 | self.log.close() 141 | 142 | def validate(self): 143 | # Eval mode 144 | self.model.eval() 145 | dev_loss = [] 146 | for i, data in enumerate(self.dv_set): 147 | self.progress('Valid step - {}/{}'.format(i+1, len(self.dv_set))) 148 | # Fetch data 149 | _, audio_feat, audio_len = self.fetch_data(data) 150 | 151 | # Forward model 152 | with torch.no_grad(): 153 | if 'npc' in self.method: 154 | pred, _ = self.model(audio_feat, testing=True) 155 | loss = self.loss(pred, audio_feat) 156 | # Compute loss on valid part only 157 | effective_loss = 0 158 | for i,a_len in enumerate(audio_len): 159 | effective_loss += loss[i,:a_len,:].mean(dim=-1).sum() 160 | loss = effective_loss/sum(audio_len) 161 | else: 162 | audio_len = [l-self.n_future for l in audio_len] 163 | pred, _ = self.model(audio_feat[:,:-self.n_future,:], audio_len, testing=True) 164 | loss = self.loss(pred, audio_feat[:,self.n_future:,:]) 165 | dev_loss.append(loss.cpu().item()) 166 | 167 | # Record metric 168 | dev_loss = sum(dev_loss)/len(dev_loss) 169 | self.write_log('loss', {'dev':dev_loss}) 170 | if dev_loss < self.val_loss: 171 | self.val_loss = dev_loss 172 | self.save_checkpoint('best_loss.pth', 'loss', dev_loss) 173 | # Resume training 174 | self.model.train() -------------------------------------------------------------------------------- /config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Alexander-H-Liu/NPC/0d7d189faa21176c5e2d0e2fe3ff986717b1e926/config/__init__.py -------------------------------------------------------------------------------- /config/default_hparas.py: -------------------------------------------------------------------------------- 1 | # Default parameters which will set as attr. of solver 2 | default_hparas = { 3 | 'GRAD_CLIP': 5.0, # Grad. clip threshold 4 | 'PROGRESS_STEP': 10, # Std. output refresh freq. 5 | 'PLOT_STEP': 500, 6 | 'TB_FLUSH_FREQ': 180 # Update frequency of tensorboard (secs) 7 | } 8 | 9 | WINDOW_TYPE = 'hamming' -------------------------------------------------------------------------------- /config/downstream/phn_clf_example.yml: -------------------------------------------------------------------------------- 1 | data: 2 | # Dataset-related setting 3 | dataset: 4 | name: 'wsj_phn' # Specify dataset, must match to dataset/.py 5 | path: '/path/to/preprocessed/wsj' # Path to preprocessed WSJ dataset 6 | train_split: ['si284-0.9'] # Splits to be used as training set 7 | dev_split: ['si284-0.1'] # Splits to be used as valid. set 8 | test_split: ['dev93'] # Splits to be used as testing set 9 | batch_size: 32 # Batch sizes 10 | audio_max_frames: 3000 # Max length of spectrogram to ensure batch size 11 | 12 | model: 13 | feat: 14 | config: '/path/to/config.yml' # Config file of representation model 15 | ckpt: '/path/to/ckpt.pth' # Path to stored ckpt of model 16 | spec: 'unmasked-3' # Improved result from unmasked feature of last layer, 17 | # this isn't updated to the preprint paper yet. Remove 18 | # this option to use masked feature as described in 19 | # preprint version of NPC paper. 20 | clf: 21 | num_layers: 0 # 0 layer indicated linear classifier 22 | hidden_size: 0 # No hidden size for linear classifier 23 | n_class: 43 # 43 phones (including padding) 24 | 25 | hparas: 26 | optimizer: 'Adam' 27 | lr: 0.001 28 | epoch: 100 29 | decay: 0.9 30 | -------------------------------------------------------------------------------- /config/downstream/spk_clf_example.yml: -------------------------------------------------------------------------------- 1 | # See phn_clf_example, the only differences are dataset name and splits used 2 | data: 3 | dataset: 4 | name: 'wsj_spk' 5 | path: '/path/to/preprocessed/wsj' 6 | train_split: ['train_spk'] 7 | dev_split: ['dev_spk'] 8 | test_split: ['test_spk'] 9 | batch_size: 32 10 | audio_max_frames: 3000 11 | 12 | model: 13 | feat: 14 | config: '/path/to/config.yml' 15 | ckpt: /path/to/ckpt.pth' 16 | spec: 'unmasked-3' 17 | clf: 18 | num_layers: 0 19 | hidden_size: 0 20 | n_class: 259 21 | 22 | hparas: 23 | optimizer: 'Adam' 24 | lr: 0.001 25 | epoch: 100 26 | -------------------------------------------------------------------------------- /config/self_supervised/apc_example.yml: -------------------------------------------------------------------------------- 1 | # See vqapc_example, the only difference is that vq is disabled 2 | data: 3 | dataset: 4 | name: 'LibriSpeech' 5 | path: '/path/to/LibriSpeech' 6 | train_split: ['train-clean-360'] 7 | dev_split: ['dev-clean'] 8 | batch_size: 32 9 | audio_max_frames: 1500 10 | audio: 11 | feat_type: 'fbank' 12 | feat_dim: 80 13 | frame_length: 25 14 | frame_shift: 10 15 | decode_wav: False 16 | 17 | model: 18 | method: 'apc' 19 | n_future: 5 20 | paras: 21 | num_layers: 3 22 | hidden_size: 512 23 | dropout: 0.1 24 | residual: True 25 | 26 | hparas: 27 | optimizer: 'Adam' 28 | lr: 0.0001 29 | epoch: 100 30 | -------------------------------------------------------------------------------- /config/self_supervised/npc_example.yml: -------------------------------------------------------------------------------- 1 | data: 2 | # Dataset-related setting 3 | dataset: 4 | name: 'LibriSpeech' # Specify dataset, must match to dataset/.py 5 | path: '/path/to/LibriSpeech' # Path to unzipped LibriSpeech dataset 6 | train_split: ['train-clean-360'] # Splits to be used as training set 7 | dev_split: ['dev-clean'] # Splits to be used as valid. set 8 | batch_size: 32 # Batch sizes 9 | audio_max_frames: 1500 # Max length of spectrogram to ensure batch size 10 | # Attributes of audio feature 11 | audio: 12 | feat_type: 'fbank' # Feature type 13 | feat_dim: 80 # Feature dimension 14 | frame_length: 25 # Window size in ms 15 | frame_shift: 10 # Hop size in ms 16 | cmvn: True # Apply uttr.-wised CMVN on Mel spectrogram 17 | 18 | model: 19 | method: 'npc' # Accepts npc/apc/vqapc 20 | paras: 21 | kernel_size: 15 # Receptive field size (R) = kernel_size + 2*(n_blocks) 22 | mask_size: 5 # Desired input mask size (M_in) as described in NPC paper 23 | n_blocks: 4 # Number of ConvBlocks stacked in NPC model 24 | hidden_size: 512 # Dimension of feature of all layers 25 | dropout: 0.1 # Dropout in ConvBlock 26 | residual: True # Residual connection in ConvBlock 27 | batch_norm: True # Apply BatchNorm in ConvBlock 28 | activate: 'relu' # Activation function of ConvBlock 29 | disable_cross_layer: False # Apply Masked ConvBlock at last layer only 30 | vq: 31 | codebook_size: [64,64,64,64] # Codebook size of each group in VQ-layer 32 | code_dim: [128,128,128,128] # Dim of each group summing up to hidden_size 33 | gumbel_temperature: 1.0 # Temperature of Gumbel Softmax in VQ-layer 34 | 35 | hparas: 36 | optimizer: 'Adam' 37 | lr: 0.001 38 | epoch: 100 39 | -------------------------------------------------------------------------------- /config/self_supervised/vqapc_example.yml: -------------------------------------------------------------------------------- 1 | # All hyper-parameters taken from the official implementation at 2 | # https://github.com/iamyuanchung/VQ-APC/tree/96230cc358b174b736b4c0e7664b3e72b304d9b0 3 | data: 4 | # Dataset-related setting 5 | dataset: 6 | name: 'LibriSpeech' # Specify dataset, must match to dataset/.py 7 | path: '/path/to/LibriSpeech' # Path to unzipped LibriSpeech dataset 8 | train_split: ['train-clean-360'] # Splits to be used as training set 9 | dev_split: ['dev-clean'] # Splits to be used as valid. set 10 | batch_size: 32 # Batch sizes 11 | audio_max_frames: 1500 # Max length of spectrogram to ensure batch size 12 | # Attributes of audio feature 13 | audio: 14 | feat_type: 'fbank' # Feature type 15 | feat_dim: 80 # Feature dimension 16 | frame_length: 25 # Window size in ms 17 | frame_shift: 10 # Hop size in ms 18 | cmvn: True # Apply uttr.-wised CMVN on Mel spectrogram 19 | 20 | model: 21 | method: 'vqapc' # Accepts npc/apc/vqapc 22 | n_future: 5 # Target the next n-th frame in future for learning 23 | paras: 24 | num_layers: 3 # Number of GRU layers to be stacked in APC model 25 | hidden_size: 512 # Dimension of feature of all layers 26 | dropout: 0.1 # Dropout between GRU layers 27 | residual: True # Residual Connection between GRU layers 28 | vq: # Remove this part to train APC 29 | codebook_size: [512] # Codebook size of each group in VQ-layer 30 | code_dim: [512] # Dim of each group summing up to hidden_size 31 | gumbel_temperature: 0.5 # Temperature of Gumbel Softmax in VQ-layer 32 | 33 | hparas: 34 | optimizer: 'Adam' 35 | lr: 0.0001 36 | epoch: 100 37 | -------------------------------------------------------------------------------- /dataset/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Alexander-H-Liu/NPC/0d7d189faa21176c5e2d0e2fe3ff986717b1e926/dataset/__init__.py -------------------------------------------------------------------------------- /dataset/librispeech.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | from os.path import join 4 | from pathlib import Path 5 | from torch.utils.data import Dataset 6 | from torch.nn.utils.rnn import pad_sequence 7 | 8 | class DSet(Dataset): 9 | ''' LibriSpeech parser which takes raw LibriSpeech structure''' 10 | def __init__(self, path, split): 11 | # Setup 12 | self.path = path 13 | # List all wave files 14 | self.file_list = [] 15 | for s in split: 16 | split_list = list(Path(join(path, s)).rglob("*.flac")) 17 | assert len(split_list) > 0, "No data found @ {}".format(join(path,s)) 18 | self.file_list += split_list 19 | 20 | def __getitem__(self, index): 21 | return self.file_list[index] 22 | 23 | def __len__(self): 24 | return len(self.file_list) 25 | 26 | def collect_batch(batch, audio_transform, audio_max_frames, mode): 27 | '''Collects a batch, should be list of file_path ''' 28 | # Load Batch 29 | file_id, audio_feat, audio_len = [], [], [] 30 | with torch.no_grad(): 31 | # Load each uttr. in batch 32 | for f_path in batch: 33 | file_id.append(f_path) 34 | # Audio feature (sequence) on-the-fly 35 | y = audio_transform(filepath=f_path) 36 | if mode=='train': 37 | # Crop to avoid OOM 38 | y = _crop(y,audio_max_frames) 39 | audio_feat.append(y) 40 | audio_len.append(len(y)) 41 | # Descending audio length within each batch 42 | audio_len, file_id, audio_feat = zip(*[(aud_l, f_id, feat) 43 | for aud_l, f_id, feat in sorted(zip(audio_len, file_id, audio_feat), 44 | reverse=True, key=lambda x:x[0])]) 45 | # Zero padding 46 | audio_feat = pad_sequence(audio_feat, batch_first=True) 47 | return file_id, audio_feat, audio_len 48 | 49 | def _crop(y,max_len): 50 | if len(y) > max_len: 51 | offset = np.random.randint(len(y)-max_len) 52 | return y[offset:offset+max_len] 53 | else: 54 | return y -------------------------------------------------------------------------------- /dataset/wsj_phn.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import pandas as pd 4 | from os.path import join 5 | from pathlib import Path 6 | from torch.utils.data import Dataset 7 | from torch.nn.utils.rnn import pad_sequence 8 | 9 | class DSet(Dataset): 10 | ''' This is the WSJ parser ''' 11 | def __init__(self, path, split): 12 | # Setup 13 | self.path = path 14 | self.wav_form = join(path, 'wav', '{}.wav') 15 | self.phn_form = join(path, 'phn', '{}.pt') 16 | # List all wave files 17 | self.file_list = [] 18 | for s in split: 19 | s_list = pd.read_csv(join(path,'meta',s+'_phn.csv'),header=None)[0].tolist() 20 | assert len(s_list) > 0, "No data found @ {}".format(join(path,s)) 21 | self.file_list += s_list 22 | 23 | def __getitem__(self, index): 24 | fid = self.file_list[index] 25 | return self.wav_form.format(fid), self.phn_form.format(fid) 26 | 27 | def __len__(self): 28 | return len(self.file_list) 29 | 30 | def collect_batch(batch, audio_transform, audio_max_frames, mode): 31 | '''Collects a batch, should be list of file_path ''' 32 | # Load Batch 33 | file_id, audio_feat, phn_seq, audio_len = [], [], [], [] 34 | with torch.no_grad(): 35 | for wav,phn in batch: 36 | file_id.append(wav.rsplit('/',1)[-1].replace('.wav','')) 37 | # Audio feature (sequence) on-the-fly 38 | x = audio_transform(filepath=wav) 39 | # Phn label sequence (test set shouldn't be cropped) 40 | if mode =='test': 41 | phn = phn.replace('.pt','_nocrop.pt') 42 | y = torch.load(phn)+1 # 0 = pad 43 | # Crop to avoid batch too large 44 | x,y = _crop(x,y,audio_max_frames, mode) 45 | audio_feat.append(x) 46 | audio_len.append(len(x)) 47 | phn_seq.append(y[:len(x)]) 48 | # Descending audio length within each batch 49 | audio_len, audio_feat, phn_seq, file_id = zip(*[(fl, f, phn, fid) 50 | for fl, f, phn, fid in sorted(zip(audio_len, audio_feat, phn_seq, file_id), 51 | reverse=True, key=lambda x:x[0])]) 52 | # Zero padding 53 | audio_feat = pad_sequence(audio_feat, batch_first=True) 54 | phn_seq = pad_sequence(phn_seq, batch_first=True) 55 | return file_id, audio_feat, audio_len, phn_seq 56 | 57 | def _crop(x, y, max_len, mode): 58 | if len(x)>len(y): 59 | if mode == 'test': 60 | raise NotImplementedError('Test set are not supposed to be cropped') 61 | else: 62 | # Crop files that are too long 63 | x = x[:len(y)] 64 | if len(x) > max_len: 65 | return x[:max_len],y[:max_len] 66 | else: 67 | return x,y -------------------------------------------------------------------------------- /dataset/wsj_spk.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import pandas as pd 4 | from os.path import join 5 | from pathlib import Path 6 | from torch.utils.data import Dataset 7 | from torch.nn.utils.rnn import pad_sequence 8 | 9 | MAX_SPKR_CNT = 259 10 | 11 | 12 | class DSet(Dataset): 13 | ''' This is the raw WSJ parser ''' 14 | def __init__(self, path, split): 15 | # Setup 16 | self.path = path 17 | self.wav_form = join(path, 'wav', '{}.wav') 18 | # List all wave files 19 | self.file_list = [] 20 | spk2id = {} 21 | self.utt2id = {} 22 | spk_cnt = 0 23 | for s in split: 24 | with open(join(path,'spk',s), 'r') as f: 25 | # Notes on using setting from Yu-An 26 | # All speakers 27 | # - Must be sorted in same order in different split 28 | # - Must appear at least once in all split 29 | # Using only up to 259 spkrs 30 | for line in f: 31 | uttr,spk = line.strip().split() 32 | if spk not in spk2id: 33 | spk2id[spk] = spk_cnt 34 | spk_cnt +=1 35 | if spk_cnt >= MAX_SPKR_CNT: 36 | break 37 | self.utt2id[uttr] = spk2id[spk] 38 | self.file_list = list(self.utt2id.keys()) 39 | 40 | def __getitem__(self, index): 41 | fid = self.file_list[index] 42 | return self.wav_form.format(fid), self.utt2id[fid] 43 | 44 | def __len__(self): 45 | return len(self.file_list) 46 | 47 | def collect_batch(batch, audio_transform, audio_max_frames, mode): 48 | '''Collects a batch, should be list of file_path ''' 49 | # Load Batch 50 | file_id, audio_feat, spkr_label, audio_len = [], [], [], [] 51 | with torch.no_grad(): 52 | for wav,spkr in batch: 53 | file_id.append(wav.rsplit('/',1)[-1].replace('.wav','')) 54 | # Audio feature (sequence) on-the-fly 55 | x = audio_transform(filepath=wav) 56 | # Crop to avoid batch too large 57 | if len(x)>audio_max_frames: 58 | x = x[:audio_max_frames] 59 | audio_feat.append(x) 60 | audio_len.append(len(x)) 61 | spkr_label.append(spkr) 62 | # Descending audio length within each batch 63 | audio_len, audio_feat, spkr_label, file_id = zip(*[(fl, f, spk, fid) 64 | for fl, f, spk, fid in sorted(zip(audio_len, audio_feat, spkr_label, file_id), 65 | reverse=True, key=lambda x:x[0])]) 66 | # Zero padding 67 | audio_feat = pad_sequence(audio_feat, batch_first=True) 68 | spkr_label = torch.LongTensor(spkr_label) 69 | return file_id, audio_feat, audio_len, spkr_label 70 | 71 | -------------------------------------------------------------------------------- /eg.sh: -------------------------------------------------------------------------------- 1 | # NPC self-supervised learning example 2 | python main.py --config config/self_supervised/npc_example.yml \ 3 | --njobs 32 \ 4 | --dev_njobs 4 \ 5 | --task self-learning\ 6 | --ckpdir ckpt/ \ 7 | --logdir log/ \ 8 | --seed 0 9 | 10 | # Using NPC representation in phone classification 11 | # (change all phn to spk for speaker classification) 12 | python main.py --config config/downstream/phn_clf_example.yml \ 13 | --njobs 24 \ 14 | --dev_njobs 8 \ 15 | --task phn-clf\ 16 | --ckpdir ckpt/ \ 17 | --logdir log/ \ 18 | --seed 0 -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | from __future__ import absolute_import 4 | from __future__ import division 5 | from __future__ import unicode_literals 6 | from __future__ import print_function 7 | import yaml 8 | import torch 9 | import random 10 | import argparse 11 | import numpy as np 12 | 13 | 14 | 15 | # Experiment arguments 16 | parser = argparse.ArgumentParser(description='VQ-APC learning framework') 17 | parser.add_argument('--config', type=str, help='Path to experiment config.') 18 | parser.add_argument('--name', default=None, type=str, help='Name for logging.') 19 | parser.add_argument('--load', default=None, type=str, 20 | help='Load pre-trained model (for training only)', required=False) 21 | parser.add_argument('--seed', default=0, type=int, 22 | help='Random seed for reproducable results.', required=False) 23 | parser.add_argument('--task', choices=['self-learning', 'phn-clf', 'spk-clf'], 24 | help='Choice of task to be performed', required=True) 25 | parser.add_argument('--mode', choices=['train', 'test'], default='train', 26 | help='Test mode will load model and test only', required=False) 27 | 28 | # Hardware related 29 | parser.add_argument('--njobs', default=6, type=int, 30 | help='Number of threads for dataloader/decoding.', required=False) 31 | parser.add_argument('--dev_njobs', default=1, type=int, 32 | help='Number of threads for dev set dataloader (used in training mode only)', 33 | required=False) 34 | parser.add_argument('--cpu', action='store_true', help='Disable GPU training.') 35 | parser.add_argument('--no-pin', action='store_true', 36 | help='Disable pin-memory for dataloader') 37 | 38 | # Misc. 39 | parser.add_argument('--logdir', default='log/', type=str, 40 | help='Logging path.', required=False) 41 | parser.add_argument('--ckpdir', default='ckpt/', type=str, 42 | help='Checkpoint path.', required=False) 43 | parser.add_argument('--draw', action='store_true', 44 | help='Plot spectrogram / histogram to tensorboard', required=False) 45 | parser.add_argument('--write-test', action='store_true', 46 | help='Store phn classification result.', required=False) 47 | parser.add_argument('--no-msg', action='store_true', help='Hide all messages.') 48 | 49 | paras = parser.parse_args() 50 | setattr(paras, 'gpu', not paras.cpu) 51 | setattr(paras, 'pin_memory', not paras.no_pin) 52 | setattr(paras, 'verbose', not paras.no_msg) 53 | config = yaml.load(open(paras.config, 'r'), Loader=yaml.FullLoader) 54 | 55 | 56 | # For reproducibility, comment these to speed up training 57 | torch.backends.cudnn.deterministic = True 58 | torch.backends.cudnn.benchmark = False 59 | random.seed(paras.seed) 60 | np.random.seed(paras.seed) 61 | torch.manual_seed(paras.seed) 62 | if torch.cuda.is_available(): 63 | torch.cuda.manual_seed_all(paras.seed) 64 | 65 | # Create Solver to deploy task 66 | if paras.task == 'self-learning': 67 | # Train speech representation models 68 | from bin.self_learning import Solver 69 | assert paras.mode == 'train', 'self-learning does not have testing mode' 70 | elif paras.task in ['phn-clf','spk-clf']: 71 | from bin.downstream import Solver 72 | else: 73 | raise NotImplementedError 74 | 75 | # Execution 76 | solver = Solver(config, paras) 77 | solver.load_data() 78 | solver.set_model() 79 | solver.exec() 80 | -------------------------------------------------------------------------------- /model/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Alexander-H-Liu/NPC/0d7d189faa21176c5e2d0e2fe3ff986717b1e926/model/__init__.py -------------------------------------------------------------------------------- /model/apc.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import torch 3 | import torch.nn as nn 4 | from model.vq import VQLayer 5 | from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence 6 | 7 | 8 | class APC(nn.Module): 9 | ''' 10 | APC/VQAPC modified from 11 | https://github.com/iamyuanchung/VQ-APC/blob/283d338/vqapc_model.py 12 | ''' 13 | def __init__(self, input_size, hidden_size, num_layers, dropout, residual, vq=None): 14 | """ 15 | input_size: an int indicating the input feature size, e.g., 80 for Mel. 16 | hidden_size: an int indicating the RNN hidden size. 17 | num_layers: an int indicating the number of RNN layers. 18 | dropout: a float indicating the RNN dropout rate. 19 | residual: a bool indicating whether to apply residual connections. 20 | """ 21 | super(APC, self).__init__() 22 | 23 | assert num_layers > 0 24 | self.hidden_size = hidden_size 25 | self.code_dim = hidden_size # ToDo: different size? 26 | self.num_layers = num_layers 27 | in_sizes = [input_size] + [hidden_size] * (num_layers - 1) 28 | out_sizes = [hidden_size] * num_layers 29 | self.rnn_layers = nn.ModuleList( 30 | [nn.GRU(input_size=in_size, hidden_size=out_size, batch_first=True) 31 | for (in_size, out_size) in zip(in_sizes, out_sizes)]) 32 | 33 | self.rnn_dropout = nn.Dropout(dropout) 34 | 35 | self.rnn_residual = residual 36 | 37 | # Create N-group VQ layers (Last layer only) 38 | self.apply_vq = vq is not None 39 | if self.apply_vq: 40 | self.vq_layers = [] 41 | vq_config = copy.deepcopy(vq) 42 | codebook_size = vq_config.pop('codebook_size') 43 | self.vq_code_dims = vq_config.pop('code_dim') 44 | assert len(self.vq_code_dims)==len(codebook_size) 45 | assert sum(self.vq_code_dims)==hidden_size 46 | for cs,cd in zip(codebook_size,self.vq_code_dims): 47 | self.vq_layers.append(VQLayer(input_size=cd, 48 | code_dim=cd, 49 | codebook_size=cs, 50 | **vq_config)) 51 | self.vq_layers = nn.ModuleList(self.vq_layers) 52 | 53 | # TODO: Start with a high temperature and anneal to a small one. 54 | # Final regression layer 55 | self.postnet = nn.Linear(hidden_size, input_size) 56 | 57 | def create_msg(self): 58 | msg_list = [] 59 | msg_list.append('Model spec.| Method = APC\t| Apply VQ = {}\t'\ 60 | .format(self.apply_vq)) 61 | msg_list.append(' | n layers = {}\t| Hidden dim = {}'\ 62 | .format(self.num_layers, self.hidden_size)) 63 | return msg_list 64 | 65 | def report_ppx(self): 66 | if self.apply_vq: 67 | # ToDo: support more than 2 groups 68 | ppx = [m.report_ppx() for m in self.vq_layers] + [None] 69 | return ppx[0], ppx[1] 70 | else: 71 | return None, None 72 | 73 | def report_usg(self): 74 | if self.apply_vq: 75 | # ToDo: support more than 2 groups 76 | usg = [m.report_usg() for m in self.vq_layers] + [None] 77 | return usg[0], usg[1] 78 | else: 79 | return None, None 80 | 81 | def forward(self, frames_BxLxM, seq_lengths_B, testing): 82 | """ 83 | Input: 84 | frames_BxLxM: a 3d-tensor representing the input features. 85 | seq_lengths_B: sequence length of frames_BxLxM. 86 | testing: a bool indicating training or testing phase. 87 | Return: 88 | predicted_BxLxM: the predicted output; used for training. 89 | hiddens_NxBxLxH: the RNN hidden representations across all layers. 90 | """ 91 | max_seq_len = frames_BxLxM.size(1) 92 | 93 | # N is the number of RNN layers. 94 | hiddens_NxBxLxH = [] 95 | 96 | # RNN 97 | # Prepare initial packed RNN input. 98 | packed_rnn_inputs = pack_padded_sequence(frames_BxLxM, seq_lengths_B, 99 | batch_first=True, 100 | enforce_sorted=True) 101 | for i, rnn_layer in enumerate(self.rnn_layers): 102 | # https://discuss.pytorch.org/t/rnn-module-weights-are-not-part-of-single-contiguous-chunk-of-memory/6011/14 103 | rnn_layer.flatten_parameters() 104 | packed_rnn_outputs, _ = rnn_layer(packed_rnn_inputs) 105 | 106 | # Unpack RNN output of current layer. 107 | rnn_outputs_BxLxH, _ = pad_packed_sequence(packed_rnn_outputs, 108 | batch_first=True, 109 | total_length=max_seq_len) 110 | # Apply dropout to output. 111 | rnn_outputs_BxLxH = self.rnn_dropout(rnn_outputs_BxLxH) 112 | 113 | # Apply residual connections. 114 | if self.rnn_residual and i > 0: 115 | # Unpack the original input. 116 | rnn_inputs_BxLxH, _ = pad_packed_sequence(packed_rnn_inputs, 117 | batch_first=True, 118 | total_length=max_seq_len) 119 | rnn_outputs_BxLxH += rnn_inputs_BxLxH 120 | 121 | hiddens_NxBxLxH.append(rnn_outputs_BxLxH) 122 | 123 | # VQ at last layer only 124 | if self.apply_vq and (i == len(self.rnn_layers)-1): 125 | q_feat = [] 126 | offet = 0 127 | for vq_layer,cd in zip(self.vq_layers,self.vq_code_dims): 128 | _, q_f = vq_layer(rnn_outputs_BxLxH[:,:,offet:offet+cd], testing) 129 | q_feat.append(q_f) 130 | offet += cd 131 | rnn_outputs_BxLxH = torch.cat(q_feat,dim=-1) 132 | 133 | # Prepare packed input for the next layer. 134 | # Note : enforce sorted = False might lead to CUDNN_STATUS_EXECUTION_FAILED 135 | if i < len(self.rnn_layers)-1: 136 | packed_rnn_inputs = pack_padded_sequence(rnn_outputs_BxLxH, 137 | seq_lengths_B, batch_first=True, 138 | enforce_sorted=True) 139 | # Only return last layer feature 140 | feature = hiddens_NxBxLxH[-1] 141 | 142 | # Generate final output from codes. 143 | predicted_BxLxM = self.postnet(rnn_outputs_BxLxH) 144 | return predicted_BxLxM, feature -------------------------------------------------------------------------------- /model/classifier.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | class CLF(nn.Module): 5 | def __init__(self, feat_dim, num_layers, hidden_size, n_class): 6 | """ Simple MLP Classifier (num_layers=0 for linear classifier) """ 7 | super(CLF, self).__init__() 8 | input_size = feat_dim 9 | self.num_layers = num_layers 10 | if num_layers>0: 11 | input_sizes = [input_size] + [hidden_size] * (num_layers - 1) 12 | output_sizes = [hidden_size] * num_layers 13 | self.layers = nn.ModuleList( 14 | [nn.Linear(in_features=in_size, out_features=out_size) 15 | for (in_size, out_size) in zip(input_sizes, output_sizes)]) 16 | self.output = nn.Linear(hidden_size, num_classes) 17 | self.relu = nn.ReLU() 18 | else: 19 | self.output = nn.Linear(input_size, n_class) 20 | 21 | def forward(self, input_feature): 22 | if self.num_layers>0: 23 | for layer in self.layers: 24 | input_feature = self.relu(layer(input_feature)) 25 | pred = self.output(input_feature) 26 | return pred 27 | -------------------------------------------------------------------------------- /model/npc.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import torch 3 | import torch.nn as nn 4 | from model.vq import VQLayer 5 | 6 | class MaskConvBlock(nn.Module): 7 | """ Masked Convolution Blocks as described in NPC paper """ 8 | def __init__(self, input_size, hidden_size, kernel_size, mask_size): 9 | super(MaskConvBlock, self).__init__() 10 | assert kernel_size-mask_size>0,"Mask > kernel somewhere in the model" 11 | # CNN for computing feature (ToDo: other activation?) 12 | self.act = nn.Tanh() 13 | self.pad_size = (kernel_size-1)//2 14 | self.conv = nn.Conv1d(in_channels=input_size, 15 | out_channels=hidden_size, 16 | kernel_size=kernel_size, 17 | padding=self.pad_size 18 | ) 19 | # Fixed mask for NPC 20 | mask_head = (kernel_size-mask_size)//2 21 | mask_tail = mask_head + mask_size 22 | conv_mask = torch.ones_like(self.conv.weight) 23 | conv_mask[:,:,mask_head:mask_tail] = 0 24 | self.register_buffer('conv_mask', conv_mask) 25 | 26 | def forward(self, feat): 27 | feat = nn.functional.conv1d(feat, 28 | self.conv_mask*self.conv.weight, 29 | bias=self.conv.bias, 30 | padding=self.pad_size 31 | ) 32 | feat = feat.permute(0,2,1) # BxCxT -> BxTxC 33 | feat = self.act(feat) 34 | return feat 35 | 36 | class ConvBlock(nn.Module): 37 | """ Convolution Blocks as described in NPC paper """ 38 | def __init__(self, input_size, hidden_size, residual, dropout, 39 | batch_norm, activate): 40 | super(ConvBlock, self).__init__() 41 | self.residual = residual 42 | if activate == 'relu': 43 | self.act = nn.ReLU() 44 | elif activate == 'tanh': 45 | self.act = nn.Tanh() 46 | else: 47 | raise NotImplementedError 48 | self.conv = nn.Conv1d(input_size, 49 | hidden_size, 50 | kernel_size=3, 51 | stride=1, 52 | padding=1 53 | ) 54 | self.linear = nn.Conv1d(hidden_size, 55 | hidden_size, 56 | kernel_size=1, 57 | stride=1, 58 | padding=0 59 | ) 60 | self.batch_norm = batch_norm 61 | if batch_norm: 62 | self.bn1 = nn.BatchNorm1d(hidden_size) 63 | self.bn2 = nn.BatchNorm1d(hidden_size) 64 | self.dropout = nn.Dropout(dropout) 65 | 66 | def forward(self, feat): 67 | res = feat 68 | out = self.conv(feat) 69 | if self.batch_norm: 70 | out = self.bn1(out) 71 | out = self.act(out) 72 | out = self.linear(out) 73 | if self.batch_norm: 74 | out = self.bn2(out) 75 | out = self.dropout(out) 76 | if self.residual: 77 | out = out + res 78 | return self.act(out) 79 | 80 | 81 | class NPC(nn.Module): 82 | """ NPC model with stacked ConvBlocks & Masked ConvBlocks """ 83 | def __init__(self, input_size, hidden_size, n_blocks, dropout, residual, 84 | kernel_size, mask_size, vq=None, batch_norm=True, 85 | activate='relu', disable_cross_layer=False, 86 | dim_bottleneck=None): 87 | super(NPC, self).__init__() 88 | 89 | # Setup 90 | assert kernel_size%2==1,'Kernel size can only be odd numbers' 91 | assert mask_size%2==1,'Mask size can only be odd numbers' 92 | assert n_blocks>=1,'At least 1 block needed' 93 | self.code_dim = hidden_size 94 | self.n_blocks = n_blocks 95 | self.input_mask_size = mask_size 96 | self.kernel_size = kernel_size 97 | self.disable_cross_layer = disable_cross_layer 98 | self.apply_vq = vq is not None 99 | self.apply_ae = dim_bottleneck is not None 100 | if self.apply_ae: 101 | assert not self.apply_vq 102 | self.dim_bottleneck = dim_bottleneck 103 | 104 | # Build blocks 105 | self.blocks, self.masked_convs = [], [] 106 | cur_mask_size = mask_size 107 | for i in range(n_blocks): 108 | h_dim = input_size if i==0 else hidden_size 109 | res = False if i==0 else residual 110 | # ConvBlock 111 | self.blocks.append(ConvBlock(h_dim, hidden_size, res, 112 | dropout, batch_norm, activate)) 113 | # Masked ConvBlock on each or last layer 114 | cur_mask_size = cur_mask_size + 2 115 | if self.disable_cross_layer and (i!=(n_blocks-1)): 116 | self.masked_convs.append(None) 117 | else: 118 | self.masked_convs.append(MaskConvBlock(hidden_size, 119 | hidden_size, 120 | kernel_size, 121 | cur_mask_size)) 122 | self.blocks = nn.ModuleList(self.blocks) 123 | self.masked_convs = nn.ModuleList(self.masked_convs) 124 | 125 | # Creates N-group VQ 126 | if self.apply_vq: 127 | self.vq_layers = [] 128 | vq_config = copy.deepcopy(vq) 129 | codebook_size = vq_config.pop('codebook_size') 130 | self.vq_code_dims = vq_config.pop('code_dim') 131 | assert len(self.vq_code_dims)==len(codebook_size) 132 | assert sum(self.vq_code_dims)==hidden_size 133 | for cs,cd in zip(codebook_size,self.vq_code_dims): 134 | self.vq_layers.append(VQLayer(input_size=cd, 135 | code_dim=cd, 136 | codebook_size=cs, 137 | **vq_config)) 138 | self.vq_layers = nn.ModuleList(self.vq_layers) 139 | 140 | # Back to spectrogram 141 | if self.apply_ae: 142 | self.ae_bottleneck = nn.Linear(hidden_size, 143 | self.dim_bottleneck,bias=False) 144 | self.postnet = nn.Linear(self.dim_bottleneck, input_size) 145 | else: 146 | self.postnet = nn.Linear(hidden_size, input_size) 147 | 148 | def create_msg(self): 149 | msg_list = [] 150 | msg_list.append('Model spec.| Method = NPC\t| # of Blocks = {}\t'\ 151 | .format(self.n_blocks)) 152 | msg_list.append(' | Desired input mask size = {}'\ 153 | .format(self.input_mask_size)) 154 | msg_list.append(' | Receptive field size = {}'\ 155 | .format(self.kernel_size+2*self.n_blocks)) 156 | return msg_list 157 | 158 | 159 | def report_ppx(self): 160 | ''' Returns perplexity of VQ distribution ''' 161 | if self.apply_vq: 162 | # ToDo: support more than 2 groups 163 | rt = [vq_layer.report_ppx() for vq_layer in self.vq_layers]+[None] 164 | return rt[0],rt[1] 165 | else: 166 | return None, None 167 | 168 | def report_usg(self): 169 | ''' Returns usage of VQ codebook ''' 170 | if self.apply_vq: 171 | # ToDo: support more than 2 groups 172 | rt = [vq_layer.report_usg() for vq_layer in self.vq_layers]+[None] 173 | return rt[0],rt[1] 174 | else: 175 | return None, None 176 | 177 | def get_unmasked_feat(self, sp_seq, n_layer): 178 | ''' Returns unmasked features from n-th layer ConvBlock ''' 179 | unmasked_feat = sp_seq.permute(0,2,1) # BxTxC -> BxCxT 180 | for i in range(self.n_blocks): 181 | unmasked_feat = self.blocks[i](unmasked_feat) 182 | if i == n_layer: 183 | unmasked_feat = unmasked_feat.permute(0,2,1) 184 | break 185 | return unmasked_feat 186 | 187 | def forward(self, sp_seq, testing=False): 188 | # BxTxC -> BxCxT (reversed in Masked ConvBlock) 189 | unmasked_feat = sp_seq.permute(0,2,1) 190 | # Forward through each layer 191 | for i in range(self.n_blocks): 192 | unmasked_feat = self.blocks[i](unmasked_feat) 193 | if self.disable_cross_layer: 194 | # Last layer masked feature only 195 | if i==(self.n_blocks-1): 196 | feat = self.masked_convs[i](unmasked_feat) 197 | else: 198 | # Masked feature aggregation 199 | masked_feat = self.masked_convs[i](unmasked_feat) 200 | if i == 0: 201 | feat = masked_feat 202 | else: 203 | feat = feat + masked_feat 204 | # Apply bottleneck and predict spectrogram 205 | if self.apply_vq: 206 | q_feat = [] 207 | offet = 0 208 | for vq_layer,cd in zip(self.vq_layers,self.vq_code_dims): 209 | _, q_f = vq_layer(feat[:,:,offet:offet+cd], testing) 210 | q_feat.append(q_f) 211 | offet += cd 212 | q_feat = torch.cat(q_feat,dim=-1) 213 | pred = self.postnet(q_feat) 214 | elif self.apply_ae: 215 | feat = self.ae_bottleneck(feat) 216 | pred = self.postnet(feat) 217 | else: 218 | pred = self.postnet(feat) 219 | return pred, feat -------------------------------------------------------------------------------- /model/vq.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import torch.nn as nn 4 | from torch.nn.functional import gumbel_softmax 5 | 6 | EPS = 1e-10 7 | 8 | class VQLayer(nn.Module): 9 | ''' 10 | VQ-layer modified from 11 | https://github.com/iamyuanchung/VQ-APC/blob/283d338/vqapc_model.py 12 | ''' 13 | def __init__(self, input_size, codebook_size, code_dim, gumbel_temperature): 14 | ''' 15 | Defines a VQ layer that follows an RNN layer. 16 | input_size: an int indicating the pre-quantized input feature size, 17 | usually the hidden size of RNN. 18 | codebook_size: an int indicating the number of codes. 19 | code_dim: an int indicating the size of each code. If not the last layer, 20 | then must equal to the RNN hidden size. 21 | gumbel_temperature: a float indicating the temperature for gumbel-softmax. 22 | ''' 23 | super(VQLayer, self).__init__() 24 | # Directly map to logits without any transformation. 25 | self.codebook_size = codebook_size 26 | self.vq_logits = nn.Linear(input_size, codebook_size) 27 | self.gumbel_temperature = gumbel_temperature 28 | self.codebook_CxE = nn.Linear(codebook_size, code_dim, bias=False) 29 | self.token_usg = np.zeros(codebook_size) 30 | 31 | def forward(self, inputs_BxLxI, testing, lens=None): 32 | logits_BxLxC = self.vq_logits(inputs_BxLxI) 33 | if testing: 34 | # During inference, just take the max index. 35 | shape = logits_BxLxC.size() 36 | _, ind = logits_BxLxC.max(dim=-1) 37 | onehot_BxLxC = torch.zeros_like(logits_BxLxC).view(-1, shape[-1]) 38 | onehot_BxLxC.scatter_(1, ind.view(-1, 1), 1) 39 | onehot_BxLxC = onehot_BxLxC.view(*shape) 40 | else: 41 | onehot_BxLxC = gumbel_softmax(logits_BxLxC, tau=self.gumbel_temperature, 42 | hard=True, eps=EPS, dim=-1) 43 | self.token_usg += onehot_BxLxC.detach().cpu()\ 44 | .reshape(-1,self.codebook_size).sum(dim=0).numpy() 45 | codes_BxLxE = self.codebook_CxE(onehot_BxLxC) 46 | 47 | return logits_BxLxC, codes_BxLxE 48 | 49 | def report_ppx(self): 50 | ''' Computes perplexity of distribution over codebook ''' 51 | acc_usg = self.token_usg/sum(self.token_usg) 52 | return 2**sum(-acc_usg * np.log2(acc_usg+EPS)) 53 | 54 | def report_usg(self): 55 | ''' Computes usage each entry in codebook ''' 56 | acc_usg = self.token_usg/sum(self.token_usg) 57 | # Reset 58 | self.token_usg = np.zeros(self.codebook_size) 59 | return acc_usg -------------------------------------------------------------------------------- /preprocess/README.md: -------------------------------------------------------------------------------- 1 | ## Preparing LibriSpeech 2 | 3 | Simply download LibriSpeech from [OpenSLR](http://www.openslr.org/12/) and unzip it. Fill in the `path` in config file for self-supervised learning with the path to unzipped LibriSpeech. 4 | 5 | ## Preparing WSJ 6 | 7 | 0. Download WSJ dataset (requires [LDC](https://ldc.upenn.edu) license) 8 | 9 | 1. Download and compile [sph2pipe_v2.5](https://www.openslr.org/3/) to read WSJ dataset 10 | 11 | ``` 12 | wget http://www.openslr.org/resources/3/sph2pipe_v2.5.tar.gz 13 | tar xzf sph2pipe_v2.5.tar.gz 14 | cd sph2pipe_v2.5; gcc -o sph2pipe *.c -lm 15 | ``` 16 | 17 | 2. Refactor (generate wav files and place them all together) WSJ with 18 | 19 | ``` 20 | python refactor_wsj.py --wsj_root /path/to/downloaded/wsj/ \ 21 | --dest /path/to/store/new/wsj/ 22 | ``` 23 | 24 | 4. (For phone classification only.) For each utterance, please use Kaldi to obtain force aligment and store the corresponding phone [index](phone.txt) sequence with `torch.save` at `/path/to/store/new/wsj/phn/fileid.pt` (or `fileid_nocrop.pt` for `dev93` split) where `fileid.wav` can be found at `/path/to/store/new/wsj/wav/` after previous step. Last, copy the list of `fileid` of different splits to he refactored wsj dataset for use with 25 | 26 | ``` 27 | cp -r phn_split/ /path/to/store/new/wsj/wav/meta/ 28 | ``` 29 | 30 | 5. (For speaker classification only.) The list of `fileid` & `speaker` pairs used in different splits are stored at `spk/`. Copy them to the refactored wsj dataset for use with 31 | 32 | ``` 33 | cp -r spk_split/ /path/to/store/new/wsj/wav/spk/ 34 | ``` 35 | 36 | 6. Modify the `path` in config file for downstream tasks to `/path/to/store/new/wsj/` -------------------------------------------------------------------------------- /preprocess/phn_split/dev93_phn.csv: -------------------------------------------------------------------------------- 1 | 4k0c0301 2 | 4k0c0302 3 | 4k0c0303 4 | 4k0c0304 5 | 4k0c0305 6 | 4k0c0306 7 | 4k0c0307 8 | 4k0c0308 9 | 4k0c0309 10 | 4k0c030a 11 | 4k0c030b 12 | 4k0c030c 13 | 4k0c030d 14 | 4k0c030e 15 | 4k0c030f 16 | 4k0c030g 17 | 4k0c030h 18 | 4k0c030i 19 | 4k0c030j 20 | 4k0c030k 21 | 4k0c030l 22 | 4k0c030m 23 | 4k0c030n 24 | 4k0c030o 25 | 4k0c030p 26 | 4k0c030q 27 | 4k0c030r 28 | 4k0c030s 29 | 4k0c030t 30 | 4k0c030u 31 | 4k0c030v 32 | 4k0c030w 33 | 4k0c030x 34 | 4k0c030y 35 | 4k0c030z 36 | 4k0c0310 37 | 4k0c0311 38 | 4k0c0312 39 | 4k0c0313 40 | 4k0c0314 41 | 4k0c0315 42 | 4k0c0316 43 | 4k0c0317 44 | 4k0c0318 45 | 4k0c0319 46 | 4k0c031a 47 | 4k0c031b 48 | 4k0c031c 49 | 4k0c031d 50 | 4k0c031e 51 | 4k1c0301 52 | 4k1c0302 53 | 4k1c0303 54 | 4k1c0304 55 | 4k1c0305 56 | 4k1c0306 57 | 4k1c0307 58 | 4k1c0308 59 | 4k1c0309 60 | 4k1c030a 61 | 4k1c030b 62 | 4k1c030c 63 | 4k1c030d 64 | 4k1c030e 65 | 4k1c030f 66 | 4k1c030g 67 | 4k1c030h 68 | 4k1c030i 69 | 4k1c030j 70 | 4k1c030k 71 | 4k1c030l 72 | 4k1c030m 73 | 4k1c030n 74 | 4k1c030o 75 | 4k1c030p 76 | 4k1c030q 77 | 4k1c030r 78 | 4k1c030s 79 | 4k1c030t 80 | 4k1c030u 81 | 4k1c030v 82 | 4k1c030w 83 | 4k1c030x 84 | 4k1c030y 85 | 4k1c030z 86 | 4k1c0310 87 | 4k1c0311 88 | 4k1c0312 89 | 4k1c0313 90 | 4k1c0314 91 | 4k1c0315 92 | 4k1c0316 93 | 4k1c0317 94 | 4k1c0318 95 | 4k1c0319 96 | 4k1c031a 97 | 4k1c031b 98 | 4k1c031c 99 | 4k1c031d 100 | 4k1c031e 101 | 4k2c0301 102 | 4k2c0302 103 | 4k2c0303 104 | 4k2c0304 105 | 4k2c0305 106 | 4k2c0306 107 | 4k2c0307 108 | 4k2c0308 109 | 4k2c0309 110 | 4k2c030a 111 | 4k2c030b 112 | 4k2c030c 113 | 4k2c030d 114 | 4k2c030e 115 | 4k2c030f 116 | 4k2c030g 117 | 4k2c030h 118 | 4k2c030i 119 | 4k2c030j 120 | 4k2c030k 121 | 4k2c030l 122 | 4k2c030m 123 | 4k2c030n 124 | 4k2c030o 125 | 4k2c030p 126 | 4k2c030q 127 | 4k2c030r 128 | 4k2c030s 129 | 4k2c030t 130 | 4k2c030u 131 | 4k2c030v 132 | 4k2c030w 133 | 4k2c030x 134 | 4k2c030y 135 | 4k2c030z 136 | 4k2c0310 137 | 4k2c0311 138 | 4k2c0312 139 | 4k2c0313 140 | 4k2c0314 141 | 4k2c0315 142 | 4k2c0316 143 | 4k2c0317 144 | 4k2c0318 145 | 4k2c0319 146 | 4k2c031a 147 | 4k2c031b 148 | 4k2c031c 149 | 4k2c031d 150 | 4k2c031e 151 | 4k2c031f 152 | 4k2c031g 153 | 4k3c0301 154 | 4k3c0302 155 | 4k3c0303 156 | 4k3c0304 157 | 4k3c0305 158 | 4k3c0306 159 | 4k3c0307 160 | 4k3c0308 161 | 4k3c0309 162 | 4k3c030a 163 | 4k3c030b 164 | 4k3c030c 165 | 4k3c030d 166 | 4k3c030e 167 | 4k3c030f 168 | 4k3c030g 169 | 4k3c030h 170 | 4k3c030i 171 | 4k3c030j 172 | 4k3c030k 173 | 4k3c030l 174 | 4k3c030m 175 | 4k3c030n 176 | 4k3c030o 177 | 4k3c030p 178 | 4k3c030q 179 | 4k3c030r 180 | 4k3c030s 181 | 4k3c030t 182 | 4k3c030u 183 | 4k3c030v 184 | 4k3c030w 185 | 4k3c030x 186 | 4k3c030y 187 | 4k3c030z 188 | 4k3c0310 189 | 4k3c0311 190 | 4k3c0312 191 | 4k3c0313 192 | 4k3c0314 193 | 4k3c0315 194 | 4k3c0316 195 | 4k3c0317 196 | 4k3c0318 197 | 4k3c0319 198 | 4k3c031a 199 | 4k3c031b 200 | 4k3c031c 201 | 4k3c031d 202 | 4k3c031e 203 | 4k3c031f 204 | 4k4c0301 205 | 4k4c0302 206 | 4k4c0303 207 | 4k4c0304 208 | 4k4c0305 209 | 4k4c0306 210 | 4k4c0307 211 | 4k4c0308 212 | 4k4c0309 213 | 4k4c030a 214 | 4k4c030b 215 | 4k4c030c 216 | 4k4c030d 217 | 4k4c030e 218 | 4k4c030f 219 | 4k4c030g 220 | 4k4c030h 221 | 4k4c030i 222 | 4k4c030j 223 | 4k4c030k 224 | 4k4c030l 225 | 4k4c030m 226 | 4k4c030n 227 | 4k4c030o 228 | 4k4c030p 229 | 4k4c030q 230 | 4k4c030r 231 | 4k4c030s 232 | 4k4c030t 233 | 4k4c030u 234 | 4k4c030v 235 | 4k4c030w 236 | 4k4c030x 237 | 4k4c030y 238 | 4k4c030z 239 | 4k4c0310 240 | 4k4c0311 241 | 4k4c0312 242 | 4k4c0313 243 | 4k4c0314 244 | 4k4c0315 245 | 4k4c0316 246 | 4k4c0317 247 | 4k4c0318 248 | 4k4c0319 249 | 4k4c031a 250 | 4k4c031b 251 | 4k4c031c 252 | 4k4c031d 253 | 4k4c031e 254 | 4k4c031f 255 | 4k4c031g 256 | 4k4c031h 257 | 4k6c0301 258 | 4k6c0302 259 | 4k6c0303 260 | 4k6c0304 261 | 4k6c0305 262 | 4k6c0306 263 | 4k6c0307 264 | 4k6c0308 265 | 4k6c0309 266 | 4k6c030a 267 | 4k6c030b 268 | 4k6c030c 269 | 4k6c030d 270 | 4k6c030e 271 | 4k6c030f 272 | 4k6c030g 273 | 4k6c030h 274 | 4k6c030i 275 | 4k6c030j 276 | 4k6c030k 277 | 4k6c030l 278 | 4k6c030m 279 | 4k6c030n 280 | 4k6c030o 281 | 4k6c030p 282 | 4k6c030q 283 | 4k6c030r 284 | 4k6c030s 285 | 4k6c030t 286 | 4k6c030u 287 | 4k6c030v 288 | 4k6c030w 289 | 4k6c030x 290 | 4k6c030y 291 | 4k6c030z 292 | 4k6c0310 293 | 4k6c0311 294 | 4k6c0312 295 | 4k6c0313 296 | 4k6c0314 297 | 4k6c0315 298 | 4k6c0316 299 | 4k6c0317 300 | 4k6c0318 301 | 4k6c0319 302 | 4k6c031a 303 | 4k6c031b 304 | 4k6c031c 305 | 4k6c031d 306 | 4k6c031e 307 | 4k7c0301 308 | 4k7c0302 309 | 4k7c0303 310 | 4k7c0304 311 | 4k7c0305 312 | 4k7c0306 313 | 4k7c0307 314 | 4k7c0308 315 | 4k7c0309 316 | 4k7c030a 317 | 4k7c030b 318 | 4k7c030c 319 | 4k7c030d 320 | 4k7c030e 321 | 4k7c030f 322 | 4k7c030g 323 | 4k7c030h 324 | 4k7c030i 325 | 4k7c030j 326 | 4k7c030k 327 | 4k7c030l 328 | 4k7c030m 329 | 4k7c030n 330 | 4k7c030o 331 | 4k7c030p 332 | 4k7c030q 333 | 4k7c030r 334 | 4k7c030s 335 | 4k7c030t 336 | 4k7c030u 337 | 4k7c030v 338 | 4k7c030w 339 | 4k7c030x 340 | 4k7c030y 341 | 4k7c030z 342 | 4k7c0310 343 | 4k7c0311 344 | 4k7c0312 345 | 4k7c0313 346 | 4k7c0314 347 | 4k7c0315 348 | 4k7c0316 349 | 4k7c0317 350 | 4k7c0318 351 | 4k7c0319 352 | 4k7c031a 353 | 4k7c031b 354 | 4k7c031c 355 | 4k7c031d 356 | 4k7c031e 357 | 4k8c0301 358 | 4k8c0302 359 | 4k8c0303 360 | 4k8c0304 361 | 4k8c0305 362 | 4k8c0306 363 | 4k8c0307 364 | 4k8c0308 365 | 4k8c0309 366 | 4k8c030a 367 | 4k8c030b 368 | 4k8c030c 369 | 4k8c030d 370 | 4k8c030e 371 | 4k8c030f 372 | 4k8c030g 373 | 4k8c030h 374 | 4k8c030i 375 | 4k8c030j 376 | 4k8c030k 377 | 4k8c030l 378 | 4k8c030m 379 | 4k8c030n 380 | 4k8c030o 381 | 4k8c030p 382 | 4k8c030q 383 | 4k8c030r 384 | 4k8c030s 385 | 4k8c030t 386 | 4k8c030u 387 | 4k8c030v 388 | 4k8c030w 389 | 4k8c030x 390 | 4k8c030y 391 | 4k8c030z 392 | 4k8c0310 393 | 4k8c0311 394 | 4k8c0312 395 | 4k8c0313 396 | 4k8c0314 397 | 4k8c0315 398 | 4k8c0316 399 | 4k8c0317 400 | 4k8c0318 401 | 4k8c0319 402 | 4k8c031a 403 | 4k8c031b 404 | 4k9c0301 405 | 4k9c0302 406 | 4k9c0303 407 | 4k9c0304 408 | 4k9c0305 409 | 4k9c0306 410 | 4k9c0307 411 | 4k9c0308 412 | 4k9c0309 413 | 4k9c030a 414 | 4k9c030b 415 | 4k9c030c 416 | 4k9c030d 417 | 4k9c030e 418 | 4k9c030f 419 | 4k9c030g 420 | 4k9c030h 421 | 4k9c030i 422 | 4k9c030j 423 | 4k9c030k 424 | 4k9c030l 425 | 4k9c030m 426 | 4k9c030n 427 | 4k9c030o 428 | 4k9c030p 429 | 4k9c030q 430 | 4k9c030r 431 | 4k9c030s 432 | 4k9c030t 433 | 4k9c030u 434 | 4k9c030v 435 | 4k9c030w 436 | 4k9c030x 437 | 4k9c030y 438 | 4k9c030z 439 | 4k9c0310 440 | 4k9c0311 441 | 4k9c0312 442 | 4k9c0313 443 | 4k9c0314 444 | 4k9c0315 445 | 4k9c0316 446 | 4k9c0317 447 | 4k9c0318 448 | 4k9c0319 449 | 4k9c031a 450 | 4k9c031b 451 | 4k9c031c 452 | 4k9c031d 453 | 4kac0301 454 | 4kac0302 455 | 4kac0303 456 | 4kac0304 457 | 4kac0305 458 | 4kac0306 459 | 4kac0307 460 | 4kac0308 461 | 4kac0309 462 | 4kac030a 463 | 4kac030b 464 | 4kac030c 465 | 4kac030d 466 | 4kac030e 467 | 4kac030f 468 | 4kac030g 469 | 4kac030h 470 | 4kac030i 471 | 4kac030j 472 | 4kac030k 473 | 4kac030l 474 | 4kac030m 475 | 4kac030n 476 | 4kac030o 477 | 4kac030p 478 | 4kac030q 479 | 4kac030r 480 | 4kac030s 481 | 4kac030t 482 | 4kac030u 483 | 4kac030v 484 | 4kac030w 485 | 4kac030x 486 | 4kac030y 487 | 4kac030z 488 | 4kac0310 489 | 4kac0311 490 | 4kac0312 491 | 4kac0313 492 | 4kac0314 493 | 4kac0315 494 | 4kac0316 495 | 4kac0317 496 | 4kac0318 497 | 4kac0319 498 | 4kac031a 499 | 4kac031b 500 | 4kac031c 501 | 4kac031d 502 | 4kac031e 503 | 4kac031f 504 | -------------------------------------------------------------------------------- /preprocess/phn_split/phones.txt: -------------------------------------------------------------------------------- 1 | AA 2 | AE 3 | AH 4 | AO 5 | AW 6 | AY 7 | B 8 | CH 9 | D 10 | DH 11 | EH 12 | ER 13 | EY 14 | F 15 | G 16 | HH 17 | IH 18 | IY 19 | JH 20 | K 21 | L 22 | M 23 | N 24 | NG 25 | NSN 26 | OW 27 | OY 28 | P 29 | R 30 | S 31 | SH 32 | SIL 33 | SPN 34 | T 35 | TH 36 | UH 37 | UW 38 | V 39 | W 40 | Y 41 | Z 42 | ZH 43 | -------------------------------------------------------------------------------- /preprocess/phn_split/si284-0.1_phn.csv: -------------------------------------------------------------------------------- 1 | 4avc021c 2 | 4avc021d 3 | 4avc021e 4 | 4avc021f 5 | 4avc0301 6 | 4avc0302 7 | 4avc0303 8 | 4avc0304 9 | 4avc0305 10 | 4avc0306 11 | 4avc0307 12 | 4avc0308 13 | 4avc0309 14 | 4avc030a 15 | 4avc030b 16 | 4avc030c 17 | 4avc030d 18 | 4avc030e 19 | 4avc030f 20 | 4avc030g 21 | 4avc030h 22 | 4avc030i 23 | 4avc030j 24 | 4avc030k 25 | 4avc030l 26 | 4avc030m 27 | 4avc030n 28 | 4avc030o 29 | 4avc030p 30 | 4avc030q 31 | 4avc030r 32 | 4avc030s 33 | 4avc030t 34 | 4avc030u 35 | 4avc030v 36 | 4avc030w 37 | 4avc030x 38 | 4avc030y 39 | 4avc030z 40 | 4avc0310 41 | 4avc0311 42 | 4avc0312 43 | 4avc0313 44 | 4avc0314 45 | 4avc0315 46 | 4avc0316 47 | 4avc0317 48 | 4avc0318 49 | 4avc0319 50 | 4avc031a 51 | 4avc031b 52 | 4avc031c 53 | 4avc031d 54 | 4avc031e 55 | 4avc031f 56 | 4avc0401 57 | 4avc0402 58 | 4avc0403 59 | 4avc0404 60 | 4avc0405 61 | 4avc0406 62 | 4avc0407 63 | 4avc0408 64 | 4avc0409 65 | 4avc040a 66 | 4avc040b 67 | 4avc040c 68 | 4avc040d 69 | 4avc040e 70 | 4avc040f 71 | 4avc040g 72 | 4avc040h 73 | 4avc040i 74 | 4avc040j 75 | 4avc040k 76 | 4avc040l 77 | 4avc040m 78 | 4avc040n 79 | 4avc040o 80 | 4avc040p 81 | 4avc040q 82 | 4avc040r 83 | 4avc040s 84 | 4avc040t 85 | 4avc040u 86 | 4avc040v 87 | 4avc040w 88 | 4avc040x 89 | 4avc040y 90 | 4avc040z 91 | 4avc0410 92 | 4avc0411 93 | 4avc0412 94 | 4avc0413 95 | 4avc0414 96 | 4avc0415 97 | 4avc0416 98 | 4avc0417 99 | 4avc0418 100 | 4avc0419 101 | 4avc041a 102 | 4avc041b 103 | 4avc041c 104 | 4avc041d 105 | 4avc041e 106 | 4awc0201 107 | 4awc0202 108 | 4awc0203 109 | 4awc0204 110 | 4awc0205 111 | 4awc0206 112 | 4awc0207 113 | 4awc0208 114 | 4awc0209 115 | 4awc020a 116 | 4awc020b 117 | 4awc020c 118 | 4awc020d 119 | 4awc020e 120 | 4awc020f 121 | 4awc020g 122 | 4awc020h 123 | 4awc020i 124 | 4awc020j 125 | 4awc020k 126 | 4awc020l 127 | 4awc020m 128 | 4awc020n 129 | 4awc020o 130 | 4awc020p 131 | 4awc020q 132 | 4awc020r 133 | 4awc020s 134 | 4awc020t 135 | 4awc020u 136 | 4awc020v 137 | 4awc020w 138 | 4awc020x 139 | 4awc020y 140 | 4awc020z 141 | 4awc0210 142 | 4awc0211 143 | 4awc0212 144 | 4awc0213 145 | 4awc0214 146 | 4awc0215 147 | 4awc0216 148 | 4awc0217 149 | 4awc0218 150 | 4awc0219 151 | 4awc021a 152 | 4awc021b 153 | 4awc021c 154 | 4awc021d 155 | 4awc021e 156 | 4awc021f 157 | 4awc021g 158 | 4awc021h 159 | 4awc0301 160 | 4awc0302 161 | 4awc0303 162 | 4awc0304 163 | 4awc0305 164 | 4awc0306 165 | 4awc0307 166 | 4awc0308 167 | 4awc0309 168 | 4awc030a 169 | 4awc030b 170 | 4awc030c 171 | 4awc030d 172 | 4awc030e 173 | 4awc030f 174 | 4awc030g 175 | 4awc030h 176 | 4awc030i 177 | 4awc030j 178 | 4awc030k 179 | 4awc030l 180 | 4awc030m 181 | 4awc030n 182 | 4awc030o 183 | 4awc030p 184 | 4awc030q 185 | 4awc030r 186 | 4awc030s 187 | 4awc030t 188 | 4awc030u 189 | 4awc030v 190 | 4awc030w 191 | 4awc030x 192 | 4awc030y 193 | 4awc030z 194 | 4awc0310 195 | 4awc0311 196 | 4awc0312 197 | 4awc0313 198 | 4awc0314 199 | 4awc0315 200 | 4awc0316 201 | 4awc0317 202 | 4awc0318 203 | 4awc0319 204 | 4awc031a 205 | 4awc031b 206 | 4awc0401 207 | 4awc0402 208 | 4awc0403 209 | 4awc0404 210 | 4awc0405 211 | 4awc0406 212 | 4awc0407 213 | 4awc0408 214 | 4awc0409 215 | 4awc040a 216 | 4awc040b 217 | 4awc040c 218 | 4awc040d 219 | 4awc040e 220 | 4awc040f 221 | 4awc040g 222 | 4awc040h 223 | 4awc040i 224 | 4awc040j 225 | 4awc040k 226 | 4awc040l 227 | 4awc040m 228 | 4awc040n 229 | 4awc040o 230 | 4awc040p 231 | 4awc040q 232 | 4awc040r 233 | 4awc040s 234 | 4awc040t 235 | 4awc040u 236 | 4awc040v 237 | 4awc040w 238 | 4awc040x 239 | 4awc040y 240 | 4awc040z 241 | 4awc0410 242 | 4awc0411 243 | 4awc0412 244 | 4awc0413 245 | 4awc0414 246 | 4awc0415 247 | 4awc0416 248 | 4awc0417 249 | 4awc0418 250 | 4awc0419 251 | 4awc041a 252 | 4awc041b 253 | 4awc041c 254 | 4awc041d 255 | 4awc041e 256 | 4awc041f 257 | 4axc0201 258 | 4axc0202 259 | 4axc0203 260 | 4axc0204 261 | 4axc0205 262 | 4axc0206 263 | 4axc0207 264 | 4axc0208 265 | 4axc0209 266 | 4axc020a 267 | 4axc020b 268 | 4axc020c 269 | 4axc020d 270 | 4axc020e 271 | 4axc020f 272 | 4axc020g 273 | 4axc020h 274 | 4axc020i 275 | 4axc020j 276 | 4axc020k 277 | 4axc020l 278 | 4axc020m 279 | 4axc020n 280 | 4axc020o 281 | 4axc020p 282 | 4axc020q 283 | 4axc020r 284 | 4axc020s 285 | 4axc020t 286 | 4axc020u 287 | 4axc020v 288 | 4axc020w 289 | 4axc020x 290 | 4axc020y 291 | 4axc020z 292 | 4axc0210 293 | 4axc0211 294 | 4axc0212 295 | 4axc0213 296 | 4axc0214 297 | 4axc0215 298 | 4axc0216 299 | 4axc0217 300 | 4axc0218 301 | 4axc0219 302 | 4axc021a 303 | 4axc021b 304 | 4axc021c 305 | 4axc021d 306 | 4axc021e 307 | 4axc021f 308 | 4axc021g 309 | 4axc021h 310 | 4axc0301 311 | 4axc0302 312 | 4axc0303 313 | 4axc0304 314 | 4axc0305 315 | 4axc0306 316 | 4axc0307 317 | 4axc0308 318 | 4axc0309 319 | 4axc030a 320 | 4axc030b 321 | 4axc030c 322 | 4axc030d 323 | 4axc030e 324 | 4axc030f 325 | 4axc030g 326 | 4axc030h 327 | 4axc030i 328 | 4axc030j 329 | 4axc030k 330 | 4axc030l 331 | 4axc030m 332 | 4axc030n 333 | 4axc030o 334 | 4axc030p 335 | 4axc030q 336 | 4axc030r 337 | 4axc030s 338 | 4axc030t 339 | 4axc030u 340 | 4axc030v 341 | 4axc030w 342 | 4axc030x 343 | 4axc030y 344 | 4axc030z 345 | 4axc0310 346 | 4axc0311 347 | 4axc0312 348 | 4axc0313 349 | 4axc0314 350 | 4axc0315 351 | 4axc0316 352 | 4axc0317 353 | 4axc0318 354 | 4axc0319 355 | 4axc031a 356 | 4axc031b 357 | 4axc031c 358 | 4axc031d 359 | 4axc0401 360 | 4axc0402 361 | 4axc0403 362 | 4axc0404 363 | 4axc0405 364 | 4axc0406 365 | 4axc0407 366 | 4axc0408 367 | 4axc0409 368 | 4axc040a 369 | 4axc040b 370 | 4axc040c 371 | 4axc040d 372 | 4axc040e 373 | 4axc040f 374 | 4axc040g 375 | 4axc040h 376 | 4axc040i 377 | 4axc040j 378 | 4axc040k 379 | 4axc040l 380 | 4axc040m 381 | 4axc040n 382 | 4axc040o 383 | 4axc040p 384 | 4axc040q 385 | 4axc040r 386 | 4axc040s 387 | 4axc040t 388 | 4axc040u 389 | 4axc040v 390 | 4axc040w 391 | 4axc040x 392 | 4axc040y 393 | 4axc040z 394 | 4axc0410 395 | 4axc0411 396 | 4axc0412 397 | 4axc0413 398 | 4axc0414 399 | 4axc0415 400 | 4axc0416 401 | 4axc0417 402 | 4axc0418 403 | 4axc0419 404 | 4axc041a 405 | 4axc041b 406 | 4axc041c 407 | 4axc041d 408 | 4axc041e 409 | 4ayc0201 410 | 4ayc0202 411 | 4ayc0203 412 | 4ayc0204 413 | 4ayc0205 414 | 4ayc0206 415 | 4ayc0207 416 | 4ayc0208 417 | 4ayc0209 418 | 4ayc020a 419 | 4ayc020b 420 | 4ayc020c 421 | 4ayc020d 422 | 4ayc020e 423 | 4ayc020f 424 | 4ayc020g 425 | 4ayc020h 426 | 4ayc020i 427 | 4ayc020j 428 | 4ayc020k 429 | 4ayc020l 430 | 4ayc020m 431 | 4ayc020n 432 | 4ayc020o 433 | 4ayc020p 434 | 4ayc020q 435 | 4ayc020r 436 | 4ayc020s 437 | 4ayc020t 438 | 4ayc020u 439 | 4ayc020v 440 | 4ayc020w 441 | 4ayc020x 442 | 4ayc020y 443 | 4ayc020z 444 | 4ayc0210 445 | 4ayc0211 446 | 4ayc0212 447 | 4ayc0213 448 | 4ayc0214 449 | 4ayc0215 450 | 4ayc0216 451 | 4ayc0217 452 | 4ayc0218 453 | 4ayc0219 454 | 4ayc021a 455 | 4ayc021b 456 | 4ayc021c 457 | 4ayc021d 458 | 4ayc021e 459 | 4ayc021f 460 | 4ayc0301 461 | 4ayc0302 462 | 4ayc0303 463 | 4ayc0304 464 | 4ayc0305 465 | 4ayc0306 466 | 4ayc0307 467 | 4ayc0308 468 | 4ayc0309 469 | 4ayc030a 470 | 4ayc030b 471 | 4ayc030c 472 | 4ayc030d 473 | 4ayc030e 474 | 4ayc030f 475 | 4ayc030g 476 | 4ayc030h 477 | 4ayc030i 478 | 4ayc030j 479 | 4ayc030k 480 | 4ayc030l 481 | 4ayc030m 482 | 4ayc030n 483 | 4ayc030o 484 | 4ayc030p 485 | 4ayc030q 486 | 4ayc030r 487 | 4ayc030s 488 | 4ayc030t 489 | 4ayc030u 490 | 4ayc030v 491 | 4ayc030w 492 | 4ayc030x 493 | 4ayc030y 494 | 4ayc030z 495 | 4ayc0310 496 | 4ayc0311 497 | 4ayc0312 498 | 4ayc0313 499 | 4ayc0314 500 | 4ayc0315 501 | 4ayc0316 502 | 4ayc0317 503 | 4ayc0318 504 | 4ayc0319 505 | 4ayc031a 506 | 4ayc031b 507 | 4ayc031c 508 | 4ayc031d 509 | 4ayc0401 510 | 4ayc0402 511 | 4ayc0403 512 | 4ayc0404 513 | 4ayc0405 514 | 4ayc0406 515 | 4ayc0407 516 | 4ayc0408 517 | 4ayc0409 518 | 4ayc040a 519 | 4ayc040b 520 | 4ayc040c 521 | 4ayc040d 522 | 4ayc040e 523 | 4ayc040f 524 | 4ayc040g 525 | 4ayc040h 526 | 4ayc040i 527 | 4ayc040j 528 | 4ayc040k 529 | 4ayc040l 530 | 4ayc040m 531 | 4ayc040n 532 | 4ayc040o 533 | 4ayc040p 534 | 4ayc040q 535 | 4ayc040r 536 | 4ayc040s 537 | 4ayc040t 538 | 4ayc040u 539 | 4ayc040v 540 | 4ayc040w 541 | 4ayc040x 542 | 4ayc040y 543 | 4ayc040z 544 | 4ayc0410 545 | 4ayc0411 546 | 4ayc0412 547 | 4ayc0413 548 | 4ayc0414 549 | 4ayc0415 550 | 4ayc0416 551 | 4ayc0417 552 | 4ayc0418 553 | 4ayc0419 554 | 4ayc041a 555 | 4ayc041b 556 | 4ayc041c 557 | 4ayc041d 558 | 4ayc041e 559 | 4azc0201 560 | 4azc0202 561 | 4azc0203 562 | 4azc0204 563 | 4azc0205 564 | 4azc0206 565 | 4azc0207 566 | 4azc0208 567 | 4azc0209 568 | 4azc020a 569 | 4azc020b 570 | 4azc020c 571 | 4azc020d 572 | 4azc020e 573 | 4azc020f 574 | 4azc020g 575 | 4azc020h 576 | 4azc020i 577 | 4azc020j 578 | 4azc020k 579 | 4azc020l 580 | 4azc020m 581 | 4azc020n 582 | 4azc020o 583 | 4azc020p 584 | 4azc020q 585 | 4azc020r 586 | 4azc020s 587 | 4azc020t 588 | 4azc020u 589 | 4azc020v 590 | 4azc020w 591 | 4azc020x 592 | 4azc020y 593 | 4azc020z 594 | 4azc0210 595 | 4azc0211 596 | 4azc0212 597 | 4azc0213 598 | 4azc0214 599 | 4azc0215 600 | 4azc0216 601 | 4azc0217 602 | 4azc0218 603 | 4azc0219 604 | 4azc021a 605 | 4azc021b 606 | 4azc021c 607 | 4azc021d 608 | 4azc021e 609 | 4azc0301 610 | 4azc0302 611 | 4azc0303 612 | 4azc0304 613 | 4azc0305 614 | 4azc0306 615 | 4azc0307 616 | 4azc0308 617 | 4azc0309 618 | 4azc030a 619 | 4azc030b 620 | 4azc030c 621 | 4azc030d 622 | 4azc030e 623 | 4azc030f 624 | 4azc030g 625 | 4azc030h 626 | 4azc030i 627 | 4azc030j 628 | 4azc030k 629 | 4azc030l 630 | 4azc030m 631 | 4azc030n 632 | 4azc030o 633 | 4azc030p 634 | 4azc030q 635 | 4azc030r 636 | 4azc030s 637 | 4azc030t 638 | 4azc030u 639 | 4azc030v 640 | 4azc030w 641 | 4azc030x 642 | 4azc030y 643 | 4azc030z 644 | 4azc0310 645 | 4azc0311 646 | 4azc0312 647 | 4azc0313 648 | 4azc0314 649 | 4azc0315 650 | 4azc0316 651 | 4azc0317 652 | 4azc0318 653 | 4azc0319 654 | 4azc031a 655 | 4azc031b 656 | 4azc031c 657 | 4azc031d 658 | 4azc031e 659 | 4azc031f 660 | 4azc031g 661 | 4azc0401 662 | 4azc0402 663 | 4azc0403 664 | 4azc0404 665 | 4azc0405 666 | 4azc0406 667 | 4azc0407 668 | 4azc0408 669 | 4azc0409 670 | 4azc040a 671 | 4azc040b 672 | 4azc040c 673 | 4azc040d 674 | 4azc040e 675 | 4azc040f 676 | 4azc040g 677 | 4azc040h 678 | 4azc040i 679 | 4azc040j 680 | 4azc040k 681 | 4azc040l 682 | 4azc040m 683 | 4azc040n 684 | 4azc040o 685 | 4azc040p 686 | 4azc040q 687 | 4azc040r 688 | 4azc040s 689 | 4azc040t 690 | 4azc040u 691 | 4azc040v 692 | 4azc040w 693 | 4azc040x 694 | 4azc040y 695 | 4azc040z 696 | 4azc0410 697 | 4azc0411 698 | 4azc0412 699 | 4azc0413 700 | 4azc0414 701 | 4azc0415 702 | 4azc0416 703 | 4azc0417 704 | 4azc0418 705 | 4azc0419 706 | 4azc041a 707 | 4azc041b 708 | 4azc041c 709 | 4b0c0201 710 | 4b0c0202 711 | 4b0c0203 712 | 4b0c0204 713 | 4b0c0205 714 | 4b0c0206 715 | 4b0c0207 716 | 4b0c0208 717 | 4b0c0209 718 | 4b0c020a 719 | 4b0c020b 720 | 4b0c020c 721 | 4b0c020d 722 | 4b0c020e 723 | 4b0c020f 724 | 4b0c020g 725 | 4b0c020h 726 | 4b0c020i 727 | 4b0c020j 728 | 4b0c020k 729 | 4b0c020l 730 | 4b0c020m 731 | 4b0c020n 732 | 4b0c020o 733 | 4b0c020p 734 | 4b0c020q 735 | 4b0c020r 736 | 4b0c020s 737 | 4b0c020t 738 | 4b0c020u 739 | 4b0c020v 740 | 4b0c020w 741 | 4b0c020x 742 | 4b0c020y 743 | 4b0c020z 744 | 4b0c0210 745 | 4b0c0211 746 | 4b0c0212 747 | 4b0c0213 748 | 4b0c0214 749 | 4b0c0215 750 | 4b0c0216 751 | 4b0c0217 752 | 4b0c0218 753 | 4b0c0219 754 | 4b0c021a 755 | 4b0c021b 756 | 4b0c021c 757 | 4b0c021d 758 | 4b0c021e 759 | 4b0c0301 760 | 4b0c0302 761 | 4b0c0303 762 | 4b0c0304 763 | 4b0c0305 764 | 4b0c0306 765 | 4b0c0307 766 | 4b0c0308 767 | 4b0c0309 768 | 4b0c030a 769 | 4b0c030b 770 | 4b0c030c 771 | 4b0c030d 772 | 4b0c030e 773 | 4b0c030f 774 | 4b0c030g 775 | 4b0c030h 776 | 4b0c030i 777 | 4b0c030j 778 | 4b0c030k 779 | 4b0c030l 780 | 4b0c030m 781 | 4b0c030n 782 | 4b0c030o 783 | 4b0c030p 784 | 4b0c030q 785 | 4b0c030r 786 | 4b0c030s 787 | 4b0c030t 788 | 4b0c030u 789 | 4b0c030v 790 | 4b0c030w 791 | 4b0c030x 792 | 4b0c030y 793 | 4b0c030z 794 | 4b0c0310 795 | 4b0c0311 796 | 4b0c0312 797 | 4b0c0313 798 | 4b0c0314 799 | 4b0c0315 800 | 4b0c0316 801 | 4b0c0317 802 | 4b0c0318 803 | 4b0c0319 804 | 4b0c031a 805 | 4b0c031b 806 | 4b0c031c 807 | 4b0c031d 808 | 4b0c031e 809 | 4b0c031f 810 | 4b0c031g 811 | 4b0c0401 812 | 4b0c0402 813 | 4b0c0403 814 | 4b0c0404 815 | 4b0c0405 816 | 4b0c0406 817 | 4b0c0407 818 | 4b0c0408 819 | 4b0c0409 820 | 4b0c040a 821 | 4b0c040b 822 | 4b0c040c 823 | 4b0c040d 824 | 4b0c040e 825 | 4b0c040f 826 | 4b0c040g 827 | 4b0c040h 828 | 4b0c040i 829 | 4b0c040j 830 | 4b0c040k 831 | 4b0c040l 832 | 4b0c040m 833 | 4b0c040n 834 | 4b0c040o 835 | 4b0c040p 836 | 4b0c040q 837 | 4b0c040r 838 | 4b0c040s 839 | 4b0c040t 840 | 4b0c040u 841 | 4b0c040v 842 | 4b0c040w 843 | 4b0c040x 844 | 4b0c040y 845 | 4b0c040z 846 | 4b0c0410 847 | 4b0c0411 848 | 4b0c0412 849 | 4b0c0413 850 | 4b0c0414 851 | 4b0c0415 852 | 4b0c0416 853 | 4b0c0417 854 | 4b0c0418 855 | 4b0c0419 856 | 4b0c041a 857 | 4b0c041b 858 | 4b0c041c 859 | 4b0c041d 860 | 4b0c041e 861 | 4b0c041f 862 | 4b0c041g 863 | 4b0c041h 864 | 4b1c0201 865 | 4b1c0202 866 | 4b1c0203 867 | 4b1c0204 868 | 4b1c0205 869 | 4b1c0206 870 | 4b1c0207 871 | 4b1c0208 872 | 4b1c0209 873 | 4b1c020a 874 | 4b1c020b 875 | 4b1c020c 876 | 4b1c020d 877 | 4b1c020e 878 | 4b1c020f 879 | 4b1c020g 880 | 4b1c020h 881 | 4b1c020i 882 | 4b1c020j 883 | 4b1c020k 884 | 4b1c020l 885 | 4b1c020m 886 | 4b1c020n 887 | 4b1c020o 888 | 4b1c020p 889 | 4b1c020q 890 | 4b1c020r 891 | 4b1c020s 892 | 4b1c020t 893 | 4b1c020u 894 | 4b1c020v 895 | 4b1c020w 896 | 4b1c020x 897 | 4b1c020y 898 | 4b1c020z 899 | 4b1c0210 900 | 4b1c0211 901 | 4b1c0212 902 | 4b1c0213 903 | 4b1c0214 904 | 4b1c0215 905 | 4b1c0216 906 | 4b1c0217 907 | 4b1c0218 908 | 4b1c0219 909 | 4b1c021a 910 | 4b1c021b 911 | 4b1c021c 912 | 4b1c021d 913 | 4b1c021e 914 | 4b1c021f 915 | 4b1c021g 916 | 4b1c0301 917 | 4b1c0302 918 | 4b1c0303 919 | 4b1c0304 920 | 4b1c0305 921 | 4b1c0306 922 | 4b1c0307 923 | 4b1c0308 924 | 4b1c0309 925 | 4b1c030a 926 | 4b1c030b 927 | 4b1c030c 928 | 4b1c030d 929 | 4b1c030e 930 | 4b1c030f 931 | 4b1c030g 932 | 4b1c030h 933 | 4b1c030i 934 | 4b1c030j 935 | 4b1c030k 936 | 4b1c030l 937 | 4b1c030m 938 | 4b1c030n 939 | 4b1c030o 940 | 4b1c030p 941 | 4b1c030q 942 | 4b1c030r 943 | 4b1c030s 944 | 4b1c030t 945 | 4b1c030u 946 | 4b1c030v 947 | 4b1c030w 948 | 4b1c030x 949 | 4b1c030y 950 | 4b1c030z 951 | 4b1c0310 952 | 4b1c0311 953 | 4b1c0312 954 | 4b1c0313 955 | 4b1c0314 956 | 4b1c0315 957 | 4b1c0316 958 | 4b1c0317 959 | 4b1c0318 960 | 4b1c0319 961 | 4b1c031a 962 | 4b1c031b 963 | 4b1c031c 964 | 4b1c0401 965 | 4b1c0402 966 | 4b1c0403 967 | 4b1c0404 968 | 4b1c0405 969 | 4b1c0406 970 | 4b1c0407 971 | 4b1c0408 972 | 4b1c0409 973 | 4b1c040a 974 | 4b1c040b 975 | 4b1c040c 976 | 4b1c040d 977 | 4b1c040e 978 | 4b1c040f 979 | 4b1c040g 980 | 4b1c040h 981 | 4b1c040i 982 | 4b1c040j 983 | 4b1c040k 984 | 4b1c040l 985 | 4b1c040m 986 | 4b1c040n 987 | 4b1c040o 988 | 4b1c040p 989 | 4b1c040q 990 | 4b1c040r 991 | 4b1c040s 992 | 4b1c040t 993 | 4b1c040u 994 | 4b1c040v 995 | 4b1c040w 996 | 4b1c040x 997 | 4b1c040y 998 | 4b1c040z 999 | 4b1c0410 1000 | 4b1c0411 1001 | 4b1c0412 1002 | 4b1c0413 1003 | 4b1c0414 1004 | 4b1c0415 1005 | 4b1c0416 1006 | 4b1c0417 1007 | 4b1c0418 1008 | 4b1c0419 1009 | 4b1c041a 1010 | 4b1c041b 1011 | 4b1c041c 1012 | 4b1c041d 1013 | 4b1c041e 1014 | 4b2c0201 1015 | 4b2c0202 1016 | 4b2c0203 1017 | 4b2c0204 1018 | 4b2c0205 1019 | 4b2c0206 1020 | 4b2c0207 1021 | 4b2c0208 1022 | 4b2c0209 1023 | 4b2c020a 1024 | 4b2c020b 1025 | 4b2c020c 1026 | 4b2c020d 1027 | 4b2c020e 1028 | 4b2c020f 1029 | 4b2c020g 1030 | 4b2c020h 1031 | 4b2c020i 1032 | 4b2c020j 1033 | 4b2c020k 1034 | 4b2c020l 1035 | 4b2c020m 1036 | 4b2c020n 1037 | 4b2c020o 1038 | 4b2c020p 1039 | 4b2c020q 1040 | 4b2c020r 1041 | 4b2c020s 1042 | 4b2c020t 1043 | 4b2c020u 1044 | 4b2c020v 1045 | 4b2c020w 1046 | 4b2c020x 1047 | 4b2c020y 1048 | 4b2c020z 1049 | 4b2c0210 1050 | 4b2c0211 1051 | 4b2c0212 1052 | 4b2c0213 1053 | 4b2c0214 1054 | 4b2c0215 1055 | 4b2c0216 1056 | 4b2c0217 1057 | 4b2c0218 1058 | 4b2c0219 1059 | 4b2c021a 1060 | 4b2c021b 1061 | 4b2c021c 1062 | 4b2c021d 1063 | 4b2c021e 1064 | 4b2c021f 1065 | 4b2c021g 1066 | 4b2c0301 1067 | 4b2c0302 1068 | 4b2c0303 1069 | 4b2c0304 1070 | 4b2c0305 1071 | 4b2c0306 1072 | 4b2c0307 1073 | 4b2c0308 1074 | 4b2c0309 1075 | 4b2c030a 1076 | 4b2c030b 1077 | 4b2c030c 1078 | 4b2c030d 1079 | 4b2c030e 1080 | 4b2c030f 1081 | 4b2c030g 1082 | 4b2c030h 1083 | 4b2c030i 1084 | 4b2c030j 1085 | 4b2c030k 1086 | 4b2c030l 1087 | 4b2c030m 1088 | 4b2c030n 1089 | 4b2c030o 1090 | 4b2c030p 1091 | 4b2c030q 1092 | 4b2c030r 1093 | 4b2c030s 1094 | 4b2c030t 1095 | 4b2c030u 1096 | 4b2c030v 1097 | 4b2c030w 1098 | 4b2c030x 1099 | 4b2c030y 1100 | 4b2c030z 1101 | 4b2c0310 1102 | 4b2c0311 1103 | 4b2c0312 1104 | 4b2c0313 1105 | 4b2c0314 1106 | 4b2c0315 1107 | 4b2c0316 1108 | 4b2c0317 1109 | 4b2c0318 1110 | 4b2c0319 1111 | 4b2c031a 1112 | 4b2c031b 1113 | 4b2c031c 1114 | 4b2c031d 1115 | 4b2c0401 1116 | 4b2c0402 1117 | 4b2c0403 1118 | 4b2c0404 1119 | 4b2c0405 1120 | 4b2c0406 1121 | 4b2c0407 1122 | 4b2c0408 1123 | 4b2c0409 1124 | 4b2c040a 1125 | 4b2c040b 1126 | 4b2c040c 1127 | 4b2c040d 1128 | 4b2c040e 1129 | 4b2c040f 1130 | 4b2c040g 1131 | 4b2c040h 1132 | 4b2c040i 1133 | 4b2c040j 1134 | 4b2c040k 1135 | 4b2c040l 1136 | 4b2c040m 1137 | 4b2c040n 1138 | 4b2c040o 1139 | 4b2c040p 1140 | 4b2c040q 1141 | 4b2c040r 1142 | 4b2c040s 1143 | 4b2c040t 1144 | 4b2c040u 1145 | 4b2c040v 1146 | 4b2c040w 1147 | 4b2c040x 1148 | 4b2c040y 1149 | 4b2c040z 1150 | 4b2c0410 1151 | 4b2c0411 1152 | 4b2c0412 1153 | 4b2c0413 1154 | 4b2c0414 1155 | 4b2c0415 1156 | 4b2c0416 1157 | 4b2c0417 1158 | 4b2c0418 1159 | 4b2c0419 1160 | 4b2c041a 1161 | 4b2c041b 1162 | 4b2c041c 1163 | 4b2c041d 1164 | 4b2c041e 1165 | 4b2c041f 1166 | 4b3c0201 1167 | 4b3c0202 1168 | 4b3c0203 1169 | 4b3c0204 1170 | 4b3c0205 1171 | 4b3c0206 1172 | 4b3c0207 1173 | 4b3c0208 1174 | 4b3c0209 1175 | 4b3c020a 1176 | 4b3c020b 1177 | 4b3c020c 1178 | 4b3c020d 1179 | 4b3c020e 1180 | 4b3c020f 1181 | 4b3c020g 1182 | 4b3c020h 1183 | 4b3c020i 1184 | 4b3c020j 1185 | 4b3c020k 1186 | 4b3c020l 1187 | 4b3c020m 1188 | 4b3c020n 1189 | 4b3c020o 1190 | 4b3c020p 1191 | 4b3c020q 1192 | 4b3c020r 1193 | 4b3c020s 1194 | 4b3c020t 1195 | 4b3c020u 1196 | 4b3c020v 1197 | 4b3c020w 1198 | 4b3c020x 1199 | 4b3c020y 1200 | 4b3c020z 1201 | 4b3c0210 1202 | 4b3c0211 1203 | 4b3c0212 1204 | 4b3c0213 1205 | 4b3c0214 1206 | 4b3c0215 1207 | 4b3c0216 1208 | 4b3c0217 1209 | 4b3c0218 1210 | 4b3c0219 1211 | 4b3c021a 1212 | 4b3c021b 1213 | 4b3c021c 1214 | 4b3c021d 1215 | 4b3c021e 1216 | 4b3c021f 1217 | 4b3c0301 1218 | 4b3c0302 1219 | 4b3c0303 1220 | 4b3c0304 1221 | 4b3c0305 1222 | 4b3c0306 1223 | 4b3c0307 1224 | 4b3c0308 1225 | 4b3c0309 1226 | 4b3c030a 1227 | 4b3c030b 1228 | 4b3c030c 1229 | 4b3c030d 1230 | 4b3c030e 1231 | 4b3c030f 1232 | 4b3c030g 1233 | 4b3c030h 1234 | 4b3c030i 1235 | 4b3c030j 1236 | 4b3c030k 1237 | 4b3c030l 1238 | 4b3c030m 1239 | 4b3c030n 1240 | 4b3c030o 1241 | 4b3c030p 1242 | 4b3c030q 1243 | 4b3c030r 1244 | 4b3c030s 1245 | 4b3c030t 1246 | 4b3c030u 1247 | 4b3c030v 1248 | 4b3c030w 1249 | 4b3c030x 1250 | 4b3c030y 1251 | 4b3c030z 1252 | 4b3c0310 1253 | 4b3c0311 1254 | 4b3c0312 1255 | 4b3c0313 1256 | 4b3c0314 1257 | 4b3c0315 1258 | 4b3c0316 1259 | 4b3c0317 1260 | 4b3c0318 1261 | 4b3c0319 1262 | 4b3c031a 1263 | 4b3c031b 1264 | 4b3c031c 1265 | 4b3c031d 1266 | 4b3c0401 1267 | 4b3c0402 1268 | 4b3c0403 1269 | 4b3c0404 1270 | 4b3c0405 1271 | 4b3c0406 1272 | 4b3c0407 1273 | 4b3c0408 1274 | 4b3c0409 1275 | 4b3c040a 1276 | 4b3c040b 1277 | 4b3c040c 1278 | 4b3c040d 1279 | 4b3c040e 1280 | 4b3c040f 1281 | 4b3c040g 1282 | 4b3c040h 1283 | 4b3c040i 1284 | 4b3c040j 1285 | 4b3c040k 1286 | 4b3c040l 1287 | 4b3c040m 1288 | 4b3c040n 1289 | 4b3c040o 1290 | 4b3c040p 1291 | 4b3c040q 1292 | 4b3c040r 1293 | 4b3c040s 1294 | 4b3c040t 1295 | 4b3c040u 1296 | 4b3c040v 1297 | 4b3c040w 1298 | 4b3c040x 1299 | 4b3c040y 1300 | 4b3c040z 1301 | 4b3c0410 1302 | 4b3c0411 1303 | 4b3c0412 1304 | 4b3c0413 1305 | 4b3c0414 1306 | 4b3c0415 1307 | 4b3c0416 1308 | 4b3c0417 1309 | 4b3c0418 1310 | 4b3c0419 1311 | 4b3c041a 1312 | 4b3c041b 1313 | 4b3c041c 1314 | 4b3c041d 1315 | 4b3c041e 1316 | 4b3c041f 1317 | 4b3c041g 1318 | 4b3c041h 1319 | 4b3c041i 1320 | 4b4c0201 1321 | 4b4c0202 1322 | 4b4c0203 1323 | 4b4c0204 1324 | 4b4c0205 1325 | 4b4c0206 1326 | 4b4c0207 1327 | 4b4c0208 1328 | 4b4c0209 1329 | 4b4c020a 1330 | 4b4c020b 1331 | 4b4c020c 1332 | 4b4c020d 1333 | 4b4c020e 1334 | 4b4c020f 1335 | 4b4c020g 1336 | 4b4c020h 1337 | 4b4c020i 1338 | 4b4c020j 1339 | 4b4c020k 1340 | 4b4c020l 1341 | 4b4c020m 1342 | 4b4c020n 1343 | 4b4c020o 1344 | 4b4c020p 1345 | 4b4c020q 1346 | 4b4c020r 1347 | 4b4c020s 1348 | 4b4c020t 1349 | 4b4c020u 1350 | 4b4c020v 1351 | 4b4c020w 1352 | 4b4c020x 1353 | 4b4c020y 1354 | 4b4c020z 1355 | 4b4c0210 1356 | 4b4c0211 1357 | 4b4c0212 1358 | 4b4c0213 1359 | 4b4c0214 1360 | 4b4c0215 1361 | 4b4c0216 1362 | 4b4c0217 1363 | 4b4c0218 1364 | 4b4c0219 1365 | 4b4c021a 1366 | 4b4c021b 1367 | 4b4c021c 1368 | 4b4c021d 1369 | 4b4c021e 1370 | 4b4c021f 1371 | 4b4c0301 1372 | 4b4c0302 1373 | 4b4c0303 1374 | 4b4c0304 1375 | 4b4c0305 1376 | 4b4c0306 1377 | 4b4c0307 1378 | 4b4c0308 1379 | 4b4c0309 1380 | 4b4c030a 1381 | 4b4c030b 1382 | 4b4c030c 1383 | 4b4c030d 1384 | 4b4c030e 1385 | 4b4c030f 1386 | 4b4c030g 1387 | 4b4c030h 1388 | 4b4c030i 1389 | 4b4c030j 1390 | 4b4c030k 1391 | 4b4c030l 1392 | 4b4c030m 1393 | 4b4c030n 1394 | 4b4c030o 1395 | 4b4c030p 1396 | 4b4c030q 1397 | 4b4c030r 1398 | 4b4c030s 1399 | 4b4c030t 1400 | 4b4c030u 1401 | 4b4c030v 1402 | 4b4c030w 1403 | 4b4c030x 1404 | 4b4c030y 1405 | 4b4c030z 1406 | 4b4c0310 1407 | 4b4c0311 1408 | 4b4c0312 1409 | 4b4c0313 1410 | 4b4c0314 1411 | 4b4c0315 1412 | 4b4c0316 1413 | 4b4c0317 1414 | 4b4c0318 1415 | 4b4c0319 1416 | 4b4c031a 1417 | 4b4c031b 1418 | 4b4c031c 1419 | 4b4c031d 1420 | 4b4c0401 1421 | 4b4c0402 1422 | 4b4c0403 1423 | 4b4c0404 1424 | 4b4c0405 1425 | 4b4c0406 1426 | 4b4c0407 1427 | 4b4c0408 1428 | 4b4c0409 1429 | 4b4c040a 1430 | 4b4c040b 1431 | 4b4c040c 1432 | 4b4c040d 1433 | 4b4c040e 1434 | 4b4c040f 1435 | 4b4c040g 1436 | 4b4c040h 1437 | 4b4c040i 1438 | 4b4c040j 1439 | 4b4c040k 1440 | 4b4c040l 1441 | 4b4c040m 1442 | 4b4c040n 1443 | 4b4c040o 1444 | 4b4c040p 1445 | 4b4c040q 1446 | 4b4c040r 1447 | 4b4c040s 1448 | 4b4c040t 1449 | 4b4c040u 1450 | 4b4c040v 1451 | 4b4c040w 1452 | 4b4c040x 1453 | 4b4c040y 1454 | 4b4c040z 1455 | 4b4c0410 1456 | 4b4c0411 1457 | 4b4c0412 1458 | 4b4c0413 1459 | 4b4c0414 1460 | 4b4c0415 1461 | 4b4c0416 1462 | 4b4c0417 1463 | 4b4c0418 1464 | 4b4c0419 1465 | 4b4c041a 1466 | 4b4c041b 1467 | 4b4c041c 1468 | 4b4c041d 1469 | 4b4c041e 1470 | 4b4c041f 1471 | 4b5c0201 1472 | 4b5c0202 1473 | 4b5c0203 1474 | 4b5c0204 1475 | 4b5c0205 1476 | 4b5c0206 1477 | 4b5c0207 1478 | 4b5c0208 1479 | 4b5c0209 1480 | 4b5c020a 1481 | 4b5c020b 1482 | 4b5c020c 1483 | 4b5c020d 1484 | 4b5c020e 1485 | 4b5c020f 1486 | 4b5c020g 1487 | 4b5c020h 1488 | 4b5c020i 1489 | 4b5c020j 1490 | 4b5c020k 1491 | 4b5c020l 1492 | 4b5c020m 1493 | 4b5c020n 1494 | 4b5c020o 1495 | 4b5c020p 1496 | 4b5c020q 1497 | 4b5c020r 1498 | 4b5c020s 1499 | 4b5c020t 1500 | 4b5c020u 1501 | 4b5c020v 1502 | 4b5c020w 1503 | 4b5c020x 1504 | 4b5c020y 1505 | 4b5c020z 1506 | 4b5c0210 1507 | 4b5c0211 1508 | 4b5c0212 1509 | 4b5c0213 1510 | 4b5c0214 1511 | 4b5c0215 1512 | 4b5c0216 1513 | 4b5c0217 1514 | 4b5c0218 1515 | 4b5c0219 1516 | 4b5c021a 1517 | 4b5c021b 1518 | 4b5c021c 1519 | 4b5c021d 1520 | 4b5c021e 1521 | 4b5c0301 1522 | 4b5c0302 1523 | 4b5c0303 1524 | 4b5c0304 1525 | 4b5c0305 1526 | 4b5c0306 1527 | 4b5c0307 1528 | 4b5c0308 1529 | 4b5c0309 1530 | 4b5c030a 1531 | 4b5c030b 1532 | 4b5c030c 1533 | 4b5c030d 1534 | 4b5c030e 1535 | 4b5c030f 1536 | 4b5c030g 1537 | 4b5c030h 1538 | 4b5c030i 1539 | 4b5c030j 1540 | 4b5c030k 1541 | 4b5c030l 1542 | 4b5c030m 1543 | 4b5c030n 1544 | 4b5c030o 1545 | 4b5c030p 1546 | 4b5c030q 1547 | 4b5c030r 1548 | 4b5c030s 1549 | 4b5c030t 1550 | 4b5c030u 1551 | 4b5c030v 1552 | 4b5c030w 1553 | 4b5c030x 1554 | 4b5c030y 1555 | 4b5c030z 1556 | 4b5c0310 1557 | 4b5c0311 1558 | 4b5c0312 1559 | 4b5c0313 1560 | 4b5c0314 1561 | 4b5c0315 1562 | 4b5c0316 1563 | 4b5c0317 1564 | 4b5c0318 1565 | 4b5c0319 1566 | 4b5c031a 1567 | 4b5c031b 1568 | 4b5c031c 1569 | 4b5c031d 1570 | 4b5c031e 1571 | 4b5c031f 1572 | 4b5c031g 1573 | 4b5c0401 1574 | 4b5c0402 1575 | 4b5c0403 1576 | 4b5c0404 1577 | 4b5c0405 1578 | 4b5c0406 1579 | 4b5c0407 1580 | 4b5c0408 1581 | 4b5c0409 1582 | 4b5c040a 1583 | 4b5c040b 1584 | 4b5c040c 1585 | 4b5c040d 1586 | 4b5c040e 1587 | 4b5c040f 1588 | 4b5c040g 1589 | 4b5c040h 1590 | 4b5c040i 1591 | 4b5c040j 1592 | 4b5c040k 1593 | 4b5c040l 1594 | 4b5c040m 1595 | 4b5c040n 1596 | 4b5c040o 1597 | 4b5c040p 1598 | 4b5c040q 1599 | 4b5c040r 1600 | 4b5c040s 1601 | 4b5c040t 1602 | 4b5c040u 1603 | 4b5c040v 1604 | 4b5c040w 1605 | 4b5c040x 1606 | 4b5c040y 1607 | 4b5c040z 1608 | 4b5c0410 1609 | 4b5c0411 1610 | 4b5c0412 1611 | 4b5c0413 1612 | 4b5c0414 1613 | 4b5c0415 1614 | 4b5c0416 1615 | 4b5c0417 1616 | 4b5c0418 1617 | 4b5c0419 1618 | 4b5c041a 1619 | 4b5c041b 1620 | 4b5c041c 1621 | 4b5c041d 1622 | 4b5c041e 1623 | 4b6c0201 1624 | 4b6c0202 1625 | 4b6c0203 1626 | 4b6c0204 1627 | 4b6c0205 1628 | 4b6c0206 1629 | 4b6c0207 1630 | 4b6c0208 1631 | 4b6c0209 1632 | 4b6c020a 1633 | 4b6c020b 1634 | 4b6c020c 1635 | 4b6c020d 1636 | 4b6c020e 1637 | 4b6c020f 1638 | 4b6c020g 1639 | 4b6c020h 1640 | 4b6c020i 1641 | 4b6c020j 1642 | 4b6c020k 1643 | 4b6c020l 1644 | 4b6c020m 1645 | 4b6c020n 1646 | 4b6c020o 1647 | 4b6c020p 1648 | 4b6c020q 1649 | 4b6c020r 1650 | 4b6c020s 1651 | 4b6c020t 1652 | 4b6c020u 1653 | 4b6c020v 1654 | 4b6c020w 1655 | 4b6c020x 1656 | 4b6c020y 1657 | 4b6c020z 1658 | 4b6c0210 1659 | 4b6c0211 1660 | 4b6c0212 1661 | 4b6c0213 1662 | 4b6c0214 1663 | 4b6c0215 1664 | 4b6c0216 1665 | 4b6c0217 1666 | 4b6c0218 1667 | 4b6c0219 1668 | 4b6c021a 1669 | 4b6c021b 1670 | 4b6c021c 1671 | 4b6c021d 1672 | 4b6c021e 1673 | 4b6c021f 1674 | 4b6c021g 1675 | 4b6c0301 1676 | 4b6c0302 1677 | 4b6c0303 1678 | 4b6c0304 1679 | 4b6c0305 1680 | 4b6c0306 1681 | 4b6c0307 1682 | 4b6c0308 1683 | 4b6c0309 1684 | 4b6c030a 1685 | 4b6c030b 1686 | 4b6c030c 1687 | 4b6c030d 1688 | 4b6c030e 1689 | 4b6c030f 1690 | 4b6c030g 1691 | 4b6c030h 1692 | 4b6c030i 1693 | 4b6c030j 1694 | 4b6c030k 1695 | 4b6c030l 1696 | 4b6c030m 1697 | 4b6c030n 1698 | 4b6c030o 1699 | 4b6c030p 1700 | 4b6c030q 1701 | 4b6c030r 1702 | 4b6c030s 1703 | 4b6c030t 1704 | 4b6c030u 1705 | 4b6c030v 1706 | 4b6c030w 1707 | 4b6c030x 1708 | 4b6c030y 1709 | 4b6c030z 1710 | 4b6c0310 1711 | 4b6c0311 1712 | 4b6c0312 1713 | 4b6c0313 1714 | 4b6c0314 1715 | 4b6c0315 1716 | 4b6c0316 1717 | 4b6c0317 1718 | 4b6c0318 1719 | 4b6c0319 1720 | 4b6c031a 1721 | 4b6c031b 1722 | 4b6c031c 1723 | 4b6c0401 1724 | 4b6c0402 1725 | 4b6c0403 1726 | 4b6c0404 1727 | 4b6c0405 1728 | 4b6c0406 1729 | 4b6c0407 1730 | 4b6c0408 1731 | 4b6c0409 1732 | 4b6c040a 1733 | 4b6c040b 1734 | 4b6c040c 1735 | 4b6c040d 1736 | 4b6c040e 1737 | 4b6c040f 1738 | 4b6c040g 1739 | 4b6c040h 1740 | 4b6c040i 1741 | 4b6c040j 1742 | 4b6c040k 1743 | 4b6c040l 1744 | 4b6c040m 1745 | 4b6c040n 1746 | 4b6c040o 1747 | 4b6c040p 1748 | 4b6c040q 1749 | 4b6c040r 1750 | 4b6c040s 1751 | 4b6c040t 1752 | 4b6c040u 1753 | 4b6c040v 1754 | 4b6c040w 1755 | 4b6c040x 1756 | 4b6c040y 1757 | 4b6c040z 1758 | 4b6c0410 1759 | 4b6c0411 1760 | 4b6c0412 1761 | 4b6c0413 1762 | 4b6c0414 1763 | 4b6c0415 1764 | 4b6c0416 1765 | 4b6c0417 1766 | 4b6c0418 1767 | 4b6c0419 1768 | 4b6c041a 1769 | 4b6c041b 1770 | 4b6c041c 1771 | 4b6c041d 1772 | 4b6c041e 1773 | 4b6c041f 1774 | 4b6c041g 1775 | 4b6c041h 1776 | 4b7c0201 1777 | 4b7c0202 1778 | 4b7c0203 1779 | 4b7c0204 1780 | 4b7c0205 1781 | 4b7c0206 1782 | 4b7c0207 1783 | 4b7c0208 1784 | 4b7c0209 1785 | 4b7c020a 1786 | 4b7c020b 1787 | 4b7c020c 1788 | 4b7c020d 1789 | 4b7c020e 1790 | 4b7c020f 1791 | 4b7c020g 1792 | 4b7c020h 1793 | 4b7c020i 1794 | 4b7c020j 1795 | 4b7c020k 1796 | 4b7c020l 1797 | 4b7c020m 1798 | 4b7c020n 1799 | 4b7c020o 1800 | 4b7c020p 1801 | 4b7c020q 1802 | 4b7c020r 1803 | 4b7c020s 1804 | 4b7c020t 1805 | 4b7c020u 1806 | 4b7c020v 1807 | 4b7c020w 1808 | 4b7c020x 1809 | 4b7c020y 1810 | 4b7c020z 1811 | 4b7c0210 1812 | 4b7c0211 1813 | 4b7c0212 1814 | 4b7c0213 1815 | 4b7c0214 1816 | 4b7c0215 1817 | 4b7c0216 1818 | 4b7c0217 1819 | 4b7c0218 1820 | 4b7c0219 1821 | 4b7c021a 1822 | 4b7c021b 1823 | 4b7c021c 1824 | 4b7c021d 1825 | 4b7c021e 1826 | 4b7c021f 1827 | 4b7c021g 1828 | 4b7c021h 1829 | 4b7c0301 1830 | 4b7c0302 1831 | 4b7c0303 1832 | 4b7c0304 1833 | 4b7c0305 1834 | 4b7c0306 1835 | 4b7c0307 1836 | 4b7c0308 1837 | 4b7c0309 1838 | 4b7c030a 1839 | 4b7c030b 1840 | 4b7c030c 1841 | 4b7c030d 1842 | 4b7c030e 1843 | 4b7c030f 1844 | 4b7c030g 1845 | 4b7c030h 1846 | 4b7c030i 1847 | 4b7c030j 1848 | 4b7c030k 1849 | 4b7c030l 1850 | 4b7c030m 1851 | 4b7c030n 1852 | 4b7c030o 1853 | 4b7c030p 1854 | 4b7c030q 1855 | 4b7c030r 1856 | 4b7c030s 1857 | 4b7c030t 1858 | 4b7c030u 1859 | 4b7c030v 1860 | 4b7c030w 1861 | 4b7c030x 1862 | 4b7c030y 1863 | 4b7c030z 1864 | 4b7c0310 1865 | 4b7c0311 1866 | 4b7c0312 1867 | 4b7c0313 1868 | 4b7c0314 1869 | 4b7c0315 1870 | 4b7c0316 1871 | 4b7c0317 1872 | 4b7c0318 1873 | 4b7c0319 1874 | 4b7c031a 1875 | 4b7c031b 1876 | 4b7c031c 1877 | 4b7c031d 1878 | 4b7c031e 1879 | 4b7c0401 1880 | 4b7c0402 1881 | 4b7c0403 1882 | 4b7c0404 1883 | 4b7c0405 1884 | 4b7c0406 1885 | 4b7c0407 1886 | 4b7c0408 1887 | 4b7c0409 1888 | 4b7c040a 1889 | 4b7c040b 1890 | 4b7c040c 1891 | 4b7c040d 1892 | 4b7c040e 1893 | 4b7c040f 1894 | 4b7c040g 1895 | 4b7c040h 1896 | 4b7c040i 1897 | 4b7c040j 1898 | 4b7c040k 1899 | 4b7c040l 1900 | 4b7c040m 1901 | 4b7c040n 1902 | 4b7c040o 1903 | 4b7c040p 1904 | 4b7c040q 1905 | 4b7c040r 1906 | 4b7c040s 1907 | 4b7c040t 1908 | 4b7c040u 1909 | 4b7c040v 1910 | 4b7c040w 1911 | 4b7c040x 1912 | 4b7c040y 1913 | 4b7c040z 1914 | 4b7c0410 1915 | 4b7c0411 1916 | 4b7c0412 1917 | 4b7c0413 1918 | 4b7c0414 1919 | 4b7c0415 1920 | 4b7c0416 1921 | 4b7c0417 1922 | 4b7c0418 1923 | 4b7c0419 1924 | 4b7c041a 1925 | 4b7c041b 1926 | 4b8c0201 1927 | 4b8c0202 1928 | 4b8c0203 1929 | 4b8c0204 1930 | 4b8c0205 1931 | 4b8c0206 1932 | 4b8c0207 1933 | 4b8c0208 1934 | 4b8c0209 1935 | 4b8c020a 1936 | 4b8c020b 1937 | 4b8c020c 1938 | 4b8c020d 1939 | 4b8c020e 1940 | 4b8c020f 1941 | 4b8c020g 1942 | 4b8c020h 1943 | 4b8c020i 1944 | 4b8c020j 1945 | 4b8c020k 1946 | 4b8c020l 1947 | 4b8c020m 1948 | 4b8c020n 1949 | 4b8c020o 1950 | 4b8c020p 1951 | 4b8c020q 1952 | 4b8c020r 1953 | 4b8c020s 1954 | 4b8c020t 1955 | 4b8c020u 1956 | 4b8c020v 1957 | 4b8c020w 1958 | 4b8c020x 1959 | 4b8c020y 1960 | 4b8c020z 1961 | 4b8c0210 1962 | 4b8c0211 1963 | 4b8c0212 1964 | 4b8c0213 1965 | 4b8c0214 1966 | 4b8c0215 1967 | 4b8c0216 1968 | 4b8c0217 1969 | 4b8c0218 1970 | 4b8c0219 1971 | 4b8c021a 1972 | 4b8c021b 1973 | 4b8c021c 1974 | 4b8c021d 1975 | 4b8c021e 1976 | 4b8c021f 1977 | 4b8c0301 1978 | 4b8c0302 1979 | 4b8c0303 1980 | 4b8c0304 1981 | 4b8c0305 1982 | 4b8c0306 1983 | 4b8c0307 1984 | 4b8c0308 1985 | 4b8c0309 1986 | 4b8c030a 1987 | 4b8c030b 1988 | 4b8c030c 1989 | 4b8c030d 1990 | 4b8c030e 1991 | 4b8c030f 1992 | 4b8c030g 1993 | 4b8c030h 1994 | 4b8c030i 1995 | 4b8c030j 1996 | 4b8c030k 1997 | 4b8c030l 1998 | 4b8c030n 1999 | 4b8c030o 2000 | 4b8c030p 2001 | 4b8c030q 2002 | 4b8c030r 2003 | 4b8c030s 2004 | 4b8c030t 2005 | 4b8c030u 2006 | 4b8c030v 2007 | 4b8c030w 2008 | 4b8c030x 2009 | 4b8c030y 2010 | 4b8c030z 2011 | 4b8c0310 2012 | 4b8c0311 2013 | 4b8c0312 2014 | 4b8c0313 2015 | 4b8c0314 2016 | 4b8c0315 2017 | 4b8c0316 2018 | 4b8c0317 2019 | 4b8c0318 2020 | 4b8c0319 2021 | 4b8c031a 2022 | 4b8c031b 2023 | 4b8c031c 2024 | 4b8c031d 2025 | 4b8c031e 2026 | 4b8c031f 2027 | 4b8c0401 2028 | 4b8c0402 2029 | 4b8c0403 2030 | 4b8c0404 2031 | 4b8c0405 2032 | 4b8c0406 2033 | 4b8c0407 2034 | 4b8c0408 2035 | 4b8c0409 2036 | 4b8c040a 2037 | 4b8c040b 2038 | 4b8c040c 2039 | 4b8c040d 2040 | 4b8c040e 2041 | 4b8c040f 2042 | 4b8c040g 2043 | 4b8c040h 2044 | 4b8c040i 2045 | 4b8c040j 2046 | 4b8c040k 2047 | 4b8c040l 2048 | 4b8c040m 2049 | 4b8c040n 2050 | 4b8c040o 2051 | 4b8c040p 2052 | 4b8c040q 2053 | 4b8c040r 2054 | 4b8c040s 2055 | 4b8c040t 2056 | 4b8c040u 2057 | 4b8c040v 2058 | 4b8c040w 2059 | 4b8c040x 2060 | 4b8c040y 2061 | 4b8c040z 2062 | 4b8c0410 2063 | 4b8c0411 2064 | 4b8c0412 2065 | 4b8c0413 2066 | 4b8c0414 2067 | 4b8c0415 2068 | 4b8c0416 2069 | 4b8c0417 2070 | 4b8c0418 2071 | 4b8c0419 2072 | 4b8c041a 2073 | 4b8c041b 2074 | 4b8c041c 2075 | 4b8c041d 2076 | 4b9c0201 2077 | 4b9c0202 2078 | 4b9c0203 2079 | 4b9c0204 2080 | 4b9c0205 2081 | 4b9c0206 2082 | 4b9c0207 2083 | 4b9c0208 2084 | 4b9c0209 2085 | 4b9c020a 2086 | 4b9c020b 2087 | 4b9c020c 2088 | 4b9c020d 2089 | 4b9c020e 2090 | 4b9c020f 2091 | 4b9c020g 2092 | 4b9c020h 2093 | 4b9c020i 2094 | 4b9c020j 2095 | 4b9c020k 2096 | 4b9c020l 2097 | 4b9c020m 2098 | 4b9c020n 2099 | 4b9c020o 2100 | 4b9c020p 2101 | 4b9c020q 2102 | 4b9c020r 2103 | 4b9c020s 2104 | 4b9c020t 2105 | 4b9c020u 2106 | 4b9c020v 2107 | 4b9c020w 2108 | 4b9c020x 2109 | 4b9c020y 2110 | 4b9c020z 2111 | 4b9c0210 2112 | 4b9c0211 2113 | 4b9c0212 2114 | 4b9c0213 2115 | 4b9c0214 2116 | 4b9c0215 2117 | 4b9c0216 2118 | 4b9c0217 2119 | 4b9c0218 2120 | 4b9c0219 2121 | 4b9c021a 2122 | 4b9c021b 2123 | 4b9c021c 2124 | 4b9c021d 2125 | 4b9c021e 2126 | 4b9c021f 2127 | 4b9c021g 2128 | 4b9c0301 2129 | 4b9c0302 2130 | 4b9c0303 2131 | 4b9c0304 2132 | 4b9c0305 2133 | 4b9c0306 2134 | 4b9c0307 2135 | 4b9c0308 2136 | 4b9c0309 2137 | 4b9c030a 2138 | 4b9c030b 2139 | 4b9c030c 2140 | 4b9c030d 2141 | 4b9c030e 2142 | 4b9c030f 2143 | 4b9c030g 2144 | 4b9c030h 2145 | 4b9c030i 2146 | 4b9c030j 2147 | 4b9c030k 2148 | 4b9c030l 2149 | 4b9c030m 2150 | 4b9c030n 2151 | 4b9c030o 2152 | 4b9c030p 2153 | 4b9c030q 2154 | 4b9c030r 2155 | 4b9c030s 2156 | 4b9c030t 2157 | 4b9c030u 2158 | 4b9c030v 2159 | 4b9c030w 2160 | 4b9c030x 2161 | 4b9c030y 2162 | 4b9c030z 2163 | 4b9c0310 2164 | 4b9c0311 2165 | 4b9c0312 2166 | 4b9c0313 2167 | 4b9c0314 2168 | 4b9c0315 2169 | 4b9c0316 2170 | 4b9c0317 2171 | 4b9c0318 2172 | 4b9c0319 2173 | 4b9c031a 2174 | 4b9c031b 2175 | 4b9c031c 2176 | 4b9c031d 2177 | 4b9c0401 2178 | 4b9c0402 2179 | 4b9c0403 2180 | 4b9c0404 2181 | 4b9c0405 2182 | 4b9c0406 2183 | 4b9c0407 2184 | 4b9c0408 2185 | 4b9c0409 2186 | 4b9c040a 2187 | 4b9c040b 2188 | 4b9c040c 2189 | 4b9c040d 2190 | 4b9c040e 2191 | 4b9c040f 2192 | 4b9c040g 2193 | 4b9c040h 2194 | 4b9c040i 2195 | 4b9c040j 2196 | 4b9c040k 2197 | 4b9c040l 2198 | 4b9c040m 2199 | 4b9c040n 2200 | 4b9c040o 2201 | 4b9c040p 2202 | 4b9c040q 2203 | 4b9c040r 2204 | 4b9c040s 2205 | 4b9c040t 2206 | 4b9c040u 2207 | 4b9c040v 2208 | 4b9c040w 2209 | 4b9c040x 2210 | 4b9c040y 2211 | 4b9c040z 2212 | 4b9c0410 2213 | 4b9c0411 2214 | 4b9c0412 2215 | 4b9c0413 2216 | 4b9c0414 2217 | 4b9c0415 2218 | 4b9c0416 2219 | 4b9c0417 2220 | 4b9c0418 2221 | 4b9c0419 2222 | 4b9c041a 2223 | 4b9c041b 2224 | 4b9c041c 2225 | 4b9c041d 2226 | 4bac0201 2227 | 4bac0202 2228 | 4bac0203 2229 | 4bac0204 2230 | 4bac0205 2231 | 4bac0206 2232 | 4bac0207 2233 | 4bac0208 2234 | 4bac0209 2235 | 4bac020a 2236 | 4bac020b 2237 | 4bac020c 2238 | 4bac020d 2239 | 4bac020e 2240 | 4bac020f 2241 | 4bac020g 2242 | 4bac020h 2243 | 4bac020i 2244 | 4bac020j 2245 | 4bac020k 2246 | 4bac020l 2247 | 4bac020m 2248 | 4bac020n 2249 | 4bac020o 2250 | 4bac020p 2251 | 4bac020q 2252 | 4bac020r 2253 | 4bac020s 2254 | 4bac020t 2255 | 4bac020u 2256 | 4bac020v 2257 | 4bac020w 2258 | 4bac020x 2259 | 4bac020y 2260 | 4bac020z 2261 | 4bac0210 2262 | 4bac0211 2263 | 4bac0212 2264 | 4bac0213 2265 | 4bac0214 2266 | 4bac0215 2267 | 4bac0216 2268 | 4bac0217 2269 | 4bac0218 2270 | 4bac0219 2271 | 4bac021a 2272 | 4bac021b 2273 | 4bac021c 2274 | 4bac021d 2275 | 4bac021e 2276 | 4bac021f 2277 | 4bac0301 2278 | 4bac0302 2279 | 4bac0303 2280 | 4bac0304 2281 | 4bac0305 2282 | 4bac0306 2283 | 4bac0307 2284 | 4bac0308 2285 | 4bac0309 2286 | 4bac030a 2287 | 4bac030b 2288 | 4bac030c 2289 | 4bac030d 2290 | 4bac030e 2291 | 4bac030f 2292 | 4bac030g 2293 | 4bac030h 2294 | 4bac030i 2295 | 4bac030j 2296 | 4bac030k 2297 | 4bac030l 2298 | 4bac030m 2299 | 4bac030n 2300 | 4bac030o 2301 | 4bac030p 2302 | 4bac030q 2303 | 4bac030r 2304 | 4bac030s 2305 | 4bac030t 2306 | 4bac030u 2307 | 4bac030v 2308 | 4bac030w 2309 | 4bac030x 2310 | 4bac030y 2311 | 4bac030z 2312 | 4bac0310 2313 | 4bac0311 2314 | 4bac0312 2315 | 4bac0313 2316 | 4bac0314 2317 | 4bac0315 2318 | 4bac0316 2319 | 4bac0317 2320 | 4bac0318 2321 | 4bac0319 2322 | 4bac031a 2323 | 4bac031b 2324 | 4bac031c 2325 | 4bac031d 2326 | 4bac031e 2327 | 4bac0401 2328 | 4bac0402 2329 | 4bac0403 2330 | 4bac0404 2331 | 4bac0405 2332 | 4bac0406 2333 | 4bac0407 2334 | 4bac0408 2335 | 4bac0409 2336 | 4bac040a 2337 | 4bac040b 2338 | 4bac040c 2339 | 4bac040d 2340 | 4bac040e 2341 | 4bac040f 2342 | 4bac040g 2343 | 4bac040h 2344 | 4bac040i 2345 | 4bac040j 2346 | 4bac040k 2347 | 4bac040l 2348 | 4bac040m 2349 | 4bac040n 2350 | 4bac040o 2351 | 4bac040p 2352 | 4bac040q 2353 | 4bac040r 2354 | 4bac040s 2355 | 4bac040t 2356 | 4bac040u 2357 | 4bac040v 2358 | 4bac040w 2359 | 4bac040x 2360 | 4bac040y 2361 | 4bac040z 2362 | 4bac0410 2363 | 4bac0411 2364 | 4bac0412 2365 | 4bac0413 2366 | 4bac0414 2367 | 4bac0415 2368 | 4bac0416 2369 | 4bac0417 2370 | 4bac0418 2371 | 4bac0419 2372 | 4bac041a 2373 | 4bac041b 2374 | 4bac041c 2375 | 4bac041d 2376 | 4bbc0201 2377 | 4bbc0202 2378 | 4bbc0203 2379 | 4bbc0204 2380 | 4bbc0205 2381 | 4bbc0206 2382 | 4bbc0207 2383 | 4bbc0208 2384 | 4bbc0209 2385 | 4bbc020a 2386 | 4bbc020b 2387 | 4bbc020c 2388 | 4bbc020d 2389 | 4bbc020e 2390 | 4bbc020f 2391 | 4bbc020g 2392 | 4bbc020h 2393 | 4bbc020i 2394 | 4bbc020j 2395 | 4bbc020k 2396 | 4bbc020l 2397 | 4bbc020m 2398 | 4bbc020n 2399 | 4bbc020o 2400 | 4bbc020p 2401 | 4bbc020q 2402 | 4bbc020r 2403 | 4bbc020s 2404 | 4bbc020t 2405 | 4bbc020u 2406 | 4bbc020v 2407 | 4bbc020w 2408 | 4bbc020x 2409 | 4bbc020y 2410 | 4bbc020z 2411 | 4bbc0210 2412 | 4bbc0211 2413 | 4bbc0212 2414 | 4bbc0213 2415 | 4bbc0214 2416 | 4bbc0215 2417 | 4bbc0216 2418 | 4bbc0217 2419 | 4bbc0218 2420 | 4bbc0219 2421 | 4bbc021a 2422 | 4bbc021b 2423 | 4bbc021c 2424 | 4bbc021d 2425 | 4bbc021e 2426 | 4bbc021f 2427 | 4bbc0301 2428 | 4bbc0302 2429 | 4bbc0303 2430 | 4bbc0304 2431 | 4bbc0305 2432 | 4bbc0306 2433 | 4bbc0307 2434 | 4bbc0308 2435 | 4bbc0309 2436 | 4bbc030a 2437 | 4bbc030b 2438 | 4bbc030c 2439 | 4bbc030d 2440 | 4bbc030e 2441 | 4bbc030f 2442 | 4bbc030g 2443 | 4bbc030h 2444 | 4bbc030i 2445 | 4bbc030j 2446 | 4bbc030k 2447 | 4bbc030l 2448 | 4bbc030m 2449 | 4bbc030n 2450 | 4bbc030o 2451 | 4bbc030p 2452 | 4bbc030q 2453 | 4bbc030r 2454 | 4bbc030s 2455 | 4bbc030t 2456 | 4bbc030u 2457 | 4bbc030v 2458 | 4bbc030w 2459 | 4bbc030x 2460 | 4bbc030y 2461 | 4bbc030z 2462 | 4bbc0310 2463 | 4bbc0311 2464 | 4bbc0312 2465 | 4bbc0313 2466 | 4bbc0314 2467 | 4bbc0315 2468 | 4bbc0316 2469 | 4bbc0317 2470 | 4bbc0318 2471 | 4bbc0319 2472 | 4bbc031a 2473 | 4bbc031b 2474 | 4bbc031c 2475 | 4bbc031d 2476 | 4bbc031e 2477 | 4bbc0401 2478 | 4bbc0402 2479 | 4bbc0403 2480 | 4bbc0404 2481 | 4bbc0405 2482 | 4bbc0406 2483 | 4bbc0407 2484 | 4bbc0408 2485 | 4bbc0409 2486 | 4bbc040a 2487 | 4bbc040b 2488 | 4bbc040c 2489 | 4bbc040d 2490 | 4bbc040e 2491 | 4bbc040f 2492 | 4bbc040g 2493 | 4bbc040h 2494 | 4bbc040i 2495 | 4bbc040j 2496 | 4bbc040k 2497 | 4bbc040l 2498 | 4bbc040m 2499 | 4bbc040n 2500 | 4bbc040o 2501 | 4bbc040p 2502 | 4bbc040q 2503 | 4bbc040r 2504 | 4bbc040s 2505 | 4bbc040t 2506 | 4bbc040u 2507 | 4bbc040v 2508 | 4bbc040w 2509 | 4bbc040x 2510 | 4bbc040y 2511 | 4bbc040z 2512 | 4bbc0410 2513 | 4bbc0411 2514 | 4bbc0412 2515 | 4bbc0413 2516 | 4bbc0414 2517 | 4bbc0415 2518 | 4bbc0416 2519 | 4bbc0417 2520 | 4bbc0418 2521 | 4bbc0419 2522 | 4bbc041a 2523 | 4bbc041b 2524 | 4bbc041c 2525 | 4bbc041d 2526 | 4bbc041e 2527 | 4bbc041f 2528 | 4bcc0201 2529 | 4bcc0202 2530 | 4bcc0203 2531 | 4bcc0204 2532 | 4bcc0205 2533 | 4bcc0206 2534 | 4bcc0207 2535 | 4bcc0208 2536 | 4bcc0209 2537 | 4bcc020a 2538 | 4bcc020b 2539 | 4bcc020c 2540 | 4bcc020d 2541 | 4bcc020e 2542 | 4bcc020f 2543 | 4bcc020g 2544 | 4bcc020h 2545 | 4bcc020i 2546 | 4bcc020j 2547 | 4bcc020k 2548 | 4bcc020l 2549 | 4bcc020m 2550 | 4bcc020n 2551 | 4bcc020o 2552 | 4bcc020p 2553 | 4bcc020q 2554 | 4bcc020r 2555 | 4bcc020s 2556 | 4bcc020t 2557 | 4bcc020u 2558 | 4bcc020v 2559 | 4bcc020w 2560 | 4bcc020x 2561 | 4bcc020y 2562 | 4bcc020z 2563 | 4bcc0210 2564 | 4bcc0211 2565 | 4bcc0212 2566 | 4bcc0213 2567 | 4bcc0214 2568 | 4bcc0215 2569 | 4bcc0216 2570 | 4bcc0217 2571 | 4bcc0218 2572 | 4bcc0219 2573 | 4bcc021a 2574 | 4bcc021b 2575 | 4bcc021c 2576 | 4bcc021d 2577 | 4bcc021e 2578 | 4bcc021f 2579 | 4bcc021g 2580 | 4bcc0301 2581 | 4bcc0302 2582 | 4bcc0303 2583 | 4bcc0304 2584 | 4bcc0305 2585 | 4bcc0306 2586 | 4bcc0307 2587 | 4bcc0308 2588 | 4bcc0309 2589 | 4bcc030a 2590 | 4bcc030b 2591 | 4bcc030c 2592 | 4bcc030d 2593 | 4bcc030e 2594 | 4bcc030f 2595 | 4bcc030g 2596 | 4bcc030h 2597 | 4bcc030i 2598 | 4bcc030j 2599 | 4bcc030k 2600 | 4bcc030l 2601 | 4bcc030m 2602 | 4bcc030n 2603 | 4bcc030o 2604 | 4bcc030p 2605 | 4bcc030q 2606 | 4bcc030r 2607 | 4bcc030s 2608 | 4bcc030t 2609 | 4bcc030u 2610 | 4bcc030v 2611 | 4bcc030w 2612 | 4bcc030x 2613 | 4bcc030y 2614 | 4bcc030z 2615 | 4bcc0310 2616 | 4bcc0311 2617 | 4bcc0312 2618 | 4bcc0313 2619 | 4bcc0314 2620 | 4bcc0315 2621 | 4bcc0316 2622 | 4bcc0317 2623 | 4bcc0318 2624 | 4bcc0319 2625 | 4bcc031a 2626 | 4bcc031b 2627 | 4bcc031c 2628 | 4bcc0401 2629 | 4bcc0402 2630 | 4bcc0403 2631 | 4bcc0404 2632 | 4bcc0405 2633 | 4bcc0406 2634 | 4bcc0407 2635 | 4bcc0408 2636 | 4bcc0409 2637 | 4bcc040a 2638 | 4bcc040b 2639 | 4bcc040c 2640 | 4bcc040d 2641 | 4bcc040e 2642 | 4bcc040f 2643 | 4bcc040g 2644 | 4bcc040h 2645 | 4bcc040i 2646 | 4bcc040j 2647 | 4bcc040k 2648 | 4bcc040l 2649 | 4bcc040m 2650 | 4bcc040n 2651 | 4bcc040o 2652 | 4bcc040p 2653 | 4bcc040q 2654 | 4bcc040r 2655 | 4bcc040s 2656 | 4bcc040t 2657 | 4bcc040u 2658 | 4bcc040v 2659 | 4bcc040w 2660 | 4bcc040x 2661 | 4bcc040y 2662 | 4bcc040z 2663 | 4bcc0410 2664 | 4bcc0411 2665 | 4bcc0412 2666 | 4bcc0413 2667 | 4bcc0414 2668 | 4bcc0415 2669 | 4bcc0416 2670 | 4bcc0417 2671 | 4bcc0418 2672 | 4bcc0419 2673 | 4bcc041a 2674 | 4bcc041b 2675 | 4bcc041c 2676 | 4bcc041d 2677 | 4bcc041e 2678 | 4bcc041f 2679 | 4bcc041g 2680 | 4bdc0201 2681 | 4bdc0202 2682 | 4bdc0203 2683 | 4bdc0204 2684 | 4bdc0205 2685 | 4bdc0206 2686 | 4bdc0207 2687 | 4bdc0208 2688 | 4bdc0209 2689 | 4bdc020a 2690 | 4bdc020b 2691 | 4bdc020c 2692 | 4bdc020d 2693 | 4bdc020e 2694 | 4bdc020f 2695 | 4bdc020g 2696 | 4bdc020h 2697 | 4bdc020i 2698 | 4bdc020j 2699 | 4bdc020k 2700 | 4bdc020l 2701 | 4bdc020m 2702 | 4bdc020n 2703 | 4bdc020o 2704 | 4bdc020p 2705 | 4bdc020q 2706 | 4bdc020r 2707 | 4bdc020s 2708 | 4bdc020t 2709 | 4bdc020u 2710 | 4bdc020v 2711 | 4bdc020w 2712 | 4bdc020x 2713 | 4bdc020y 2714 | 4bdc020z 2715 | 4bdc0210 2716 | 4bdc0211 2717 | 4bdc0212 2718 | 4bdc0213 2719 | 4bdc0214 2720 | 4bdc0215 2721 | 4bdc0216 2722 | 4bdc0217 2723 | 4bdc0218 2724 | 4bdc0219 2725 | 4bdc021a 2726 | 4bdc021b 2727 | 4bdc021c 2728 | 4bdc021d 2729 | 4bdc021e 2730 | 4bdc021f 2731 | 4bdc021g 2732 | 4bdc0301 2733 | 4bdc0302 2734 | 4bdc0303 2735 | 4bdc0304 2736 | 4bdc0305 2737 | 4bdc0306 2738 | 4bdc0307 2739 | 4bdc0308 2740 | 4bdc0309 2741 | 4bdc030a 2742 | 4bdc030b 2743 | 4bdc030c 2744 | 4bdc030d 2745 | 4bdc030e 2746 | 4bdc030f 2747 | 4bdc030g 2748 | 4bdc030h 2749 | 4bdc030i 2750 | 4bdc030j 2751 | 4bdc030k 2752 | 4bdc030l 2753 | 4bdc030m 2754 | 4bdc030n 2755 | 4bdc030o 2756 | 4bdc030p 2757 | 4bdc030q 2758 | 4bdc030r 2759 | 4bdc030s 2760 | 4bdc030t 2761 | 4bdc030u 2762 | 4bdc030v 2763 | 4bdc030w 2764 | 4bdc030x 2765 | 4bdc030y 2766 | 4bdc030z 2767 | 4bdc0310 2768 | 4bdc0311 2769 | 4bdc0312 2770 | 4bdc0313 2771 | 4bdc0314 2772 | 4bdc0315 2773 | 4bdc0316 2774 | 4bdc0317 2775 | 4bdc0318 2776 | 4bdc0319 2777 | 4bdc031a 2778 | 4bdc031b 2779 | 4bdc031c 2780 | 4bdc031d 2781 | 4bdc031e 2782 | 4bdc031f 2783 | 4bdc0401 2784 | 4bdc0402 2785 | 4bdc0403 2786 | 4bdc0404 2787 | 4bdc0405 2788 | 4bdc0406 2789 | 4bdc0407 2790 | 4bdc0408 2791 | 4bdc0409 2792 | 4bdc040a 2793 | 4bdc040b 2794 | 4bdc040c 2795 | 4bdc040d 2796 | 4bdc040e 2797 | 4bdc040f 2798 | 4bdc040g 2799 | 4bdc040h 2800 | 4bdc040i 2801 | 4bdc040j 2802 | 4bdc040k 2803 | 4bdc040l 2804 | 4bdc040m 2805 | 4bdc040n 2806 | 4bdc040o 2807 | 4bdc040p 2808 | 4bdc040q 2809 | 4bdc040r 2810 | 4bdc040s 2811 | 4bdc040t 2812 | 4bdc040u 2813 | 4bdc040v 2814 | 4bdc040w 2815 | 4bdc040x 2816 | 4bdc040y 2817 | 4bdc040z 2818 | 4bdc0410 2819 | 4bdc0411 2820 | 4bdc0412 2821 | 4bdc0413 2822 | 4bdc0414 2823 | 4bdc0415 2824 | 4bdc0416 2825 | 4bdc0417 2826 | 4bdc0418 2827 | 4bdc0419 2828 | 4bdc041a 2829 | 4bdc041b 2830 | 4bec0201 2831 | 4bec0202 2832 | 4bec0203 2833 | 4bec0204 2834 | 4bec0205 2835 | 4bec0206 2836 | 4bec0207 2837 | 4bec0208 2838 | 4bec0209 2839 | 4bec020a 2840 | 4bec020b 2841 | 4bec020c 2842 | 4bec020d 2843 | 4bec020e 2844 | 4bec020f 2845 | 4bec020g 2846 | 4bec020h 2847 | 4bec020i 2848 | 4bec020j 2849 | 4bec020k 2850 | 4bec020l 2851 | 4bec020m 2852 | 4bec020n 2853 | 4bec020o 2854 | 4bec020p 2855 | 4bec020q 2856 | 4bec020r 2857 | 4bec020s 2858 | 4bec020t 2859 | 4bec020u 2860 | 4bec020v 2861 | 4bec020w 2862 | 4bec020x 2863 | 4bec020y 2864 | 4bec020z 2865 | 4bec0210 2866 | 4bec0211 2867 | 4bec0212 2868 | 4bec0213 2869 | 4bec0214 2870 | 4bec0215 2871 | 4bec0216 2872 | 4bec0217 2873 | 4bec0218 2874 | 4bec0219 2875 | 4bec021a 2876 | 4bec021b 2877 | 4bec021c 2878 | 4bec021d 2879 | 4bec021e 2880 | 4bec021f 2881 | 4bec0301 2882 | 4bec0303 2883 | 4bec0304 2884 | 4bec0305 2885 | 4bec0306 2886 | 4bec0307 2887 | 4bec0308 2888 | 4bec0309 2889 | 4bec030a 2890 | 4bec030b 2891 | 4bec030c 2892 | 4bec030d 2893 | 4bec030e 2894 | 4bec030f 2895 | 4bec030g 2896 | 4bec030h 2897 | 4bec030i 2898 | 4bec030j 2899 | 4bec030k 2900 | 4bec030l 2901 | 4bec030m 2902 | 4bec030n 2903 | 4bec030o 2904 | 4bec030p 2905 | 4bec030q 2906 | 4bec030r 2907 | 4bec030s 2908 | 4bec030t 2909 | 4bec030u 2910 | 4bec030v 2911 | 4bec030w 2912 | 4bec030x 2913 | 4bec030y 2914 | 4bec030z 2915 | 4bec0310 2916 | 4bec0311 2917 | 4bec0312 2918 | 4bec0313 2919 | 4bec0314 2920 | 4bec0315 2921 | 4bec0316 2922 | 4bec0317 2923 | 4bec0318 2924 | 4bec0319 2925 | 4bec031a 2926 | 4bec031b 2927 | 4bec031c 2928 | 4bec031d 2929 | 4bec0401 2930 | 4bec0402 2931 | 4bec0403 2932 | 4bec0404 2933 | 4bec0405 2934 | 4bec0406 2935 | 4bec0407 2936 | 4bec0408 2937 | 4bec0409 2938 | 4bec040a 2939 | 4bec040b 2940 | 4bec040c 2941 | 4bec040d 2942 | 4bec040e 2943 | 4bec040f 2944 | 4bec040g 2945 | 4bec040h 2946 | 4bec040i 2947 | 4bec040j 2948 | 4bec040k 2949 | 4bec040l 2950 | 4bec040m 2951 | 4bec040n 2952 | 4bec040o 2953 | 4bec040p 2954 | 4bec040q 2955 | 4bec040r 2956 | 4bec040s 2957 | 4bec040t 2958 | 4bec040u 2959 | 4bec040v 2960 | 4bec040w 2961 | 4bec040x 2962 | 4bec040y 2963 | 4bec040z 2964 | 4bec0410 2965 | 4bec0411 2966 | 4bec0412 2967 | 4bec0413 2968 | 4bec0414 2969 | 4bec0415 2970 | 4bec0416 2971 | 4bec0417 2972 | 4bec0418 2973 | 4bec0419 2974 | 4bec041a 2975 | 4bec041b 2976 | 4bec041c 2977 | 4bec041d 2978 | 4bec041e 2979 | 4bec041f 2980 | 4bec041g 2981 | 4bec041h 2982 | 4bfc0201 2983 | 4bfc0202 2984 | 4bfc0203 2985 | 4bfc0204 2986 | 4bfc0205 2987 | 4bfc0206 2988 | 4bfc0207 2989 | 4bfc0208 2990 | 4bfc0209 2991 | 4bfc020a 2992 | 4bfc020b 2993 | 4bfc020c 2994 | 4bfc020d 2995 | 4bfc020e 2996 | 4bfc020f 2997 | 4bfc020g 2998 | 4bfc020h 2999 | 4bfc020i 3000 | 4bfc020j 3001 | 4bfc020k 3002 | 4bfc020l 3003 | 4bfc020m 3004 | 4bfc020n 3005 | 4bfc020o 3006 | 4bfc020p 3007 | 4bfc020q 3008 | 4bfc020r 3009 | 4bfc020s 3010 | 4bfc020t 3011 | 4bfc020u 3012 | 4bfc020v 3013 | 4bfc020w 3014 | 4bfc020x 3015 | 4bfc020y 3016 | 4bfc020z 3017 | 4bfc0210 3018 | 4bfc0211 3019 | 4bfc0212 3020 | 4bfc0213 3021 | 4bfc0214 3022 | 4bfc0215 3023 | 4bfc0216 3024 | 4bfc0217 3025 | 4bfc0218 3026 | 4bfc0219 3027 | 4bfc021a 3028 | 4bfc021b 3029 | 4bfc021c 3030 | 4bfc021d 3031 | 4bfc021e 3032 | 4bfc021f 3033 | 4bfc021g 3034 | 4bfc0301 3035 | 4bfc0302 3036 | 4bfc0303 3037 | 4bfc0304 3038 | 4bfc0305 3039 | 4bfc0306 3040 | 4bfc0307 3041 | 4bfc0308 3042 | 4bfc0309 3043 | 4bfc030a 3044 | 4bfc030b 3045 | 4bfc030c 3046 | 4bfc030d 3047 | 4bfc030e 3048 | 4bfc030f 3049 | 4bfc030g 3050 | 4bfc030h 3051 | 4bfc030i 3052 | 4bfc030j 3053 | 4bfc030k 3054 | 4bfc030l 3055 | 4bfc030m 3056 | 4bfc030n 3057 | 4bfc030o 3058 | 4bfc030p 3059 | 4bfc030q 3060 | 4bfc030r 3061 | 4bfc030s 3062 | 4bfc030t 3063 | 4bfc030u 3064 | 4bfc030v 3065 | 4bfc030w 3066 | 4bfc030x 3067 | 4bfc030y 3068 | 4bfc030z 3069 | 4bfc0310 3070 | 4bfc0311 3071 | 4bfc0312 3072 | 4bfc0313 3073 | 4bfc0314 3074 | 4bfc0315 3075 | 4bfc0316 3076 | 4bfc0317 3077 | 4bfc0318 3078 | 4bfc0319 3079 | 4bfc031a 3080 | 4bfc031b 3081 | 4bfc031c 3082 | 4bfc0401 3083 | 4bfc0402 3084 | 4bfc0403 3085 | 4bfc0404 3086 | 4bfc0405 3087 | 4bfc0406 3088 | 4bfc0407 3089 | 4bfc0408 3090 | 4bfc0409 3091 | 4bfc040a 3092 | 4bfc040b 3093 | 4bfc040c 3094 | 4bfc040d 3095 | 4bfc040e 3096 | 4bfc040f 3097 | 4bfc040g 3098 | 4bfc040h 3099 | 4bfc040i 3100 | 4bfc040j 3101 | 4bfc040k 3102 | 4bfc040l 3103 | 4bfc040m 3104 | 4bfc040n 3105 | 4bfc040o 3106 | 4bfc040p 3107 | 4bfc040q 3108 | 4bfc040r 3109 | 4bfc040s 3110 | 4bfc040t 3111 | 4bfc040u 3112 | 4bfc040v 3113 | 4bfc040w 3114 | 4bfc040x 3115 | 4bfc040y 3116 | 4bfc040z 3117 | 4bfc0410 3118 | 4bfc0411 3119 | 4bfc0412 3120 | 4bfc0413 3121 | 4bfc0414 3122 | 4bfc0415 3123 | 4bfc0416 3124 | 4bfc0417 3125 | 4bfc0418 3126 | 4bfc0419 3127 | 4bfc041a 3128 | 4bfc041b 3129 | 4bfc041c 3130 | 4bfc041d 3131 | 4bfc041e 3132 | 4bfc041f 3133 | 4bfc041g 3134 | 4bgc0201 3135 | 4bgc0202 3136 | 4bgc0203 3137 | 4bgc0204 3138 | 4bgc0205 3139 | 4bgc0206 3140 | 4bgc0207 3141 | 4bgc0208 3142 | 4bgc0209 3143 | 4bgc020a 3144 | 4bgc020b 3145 | 4bgc020c 3146 | 4bgc020d 3147 | 4bgc020e 3148 | 4bgc020f 3149 | 4bgc020g 3150 | 4bgc020h 3151 | 4bgc020i 3152 | 4bgc020j 3153 | 4bgc020k 3154 | 4bgc020l 3155 | 4bgc020m 3156 | 4bgc020n 3157 | 4bgc020o 3158 | 4bgc020p 3159 | 4bgc020q 3160 | 4bgc020r 3161 | 4bgc020s 3162 | 4bgc020t 3163 | 4bgc020u 3164 | 4bgc020v 3165 | 4bgc020w 3166 | 4bgc020x 3167 | 4bgc020y 3168 | 4bgc020z 3169 | 4bgc0210 3170 | 4bgc0211 3171 | 4bgc0212 3172 | 4bgc0213 3173 | 4bgc0214 3174 | 4bgc0215 3175 | 4bgc0216 3176 | 4bgc0217 3177 | 4bgc0218 3178 | 4bgc0219 3179 | 4bgc021a 3180 | 4bgc021b 3181 | 4bgc021c 3182 | 4bgc021d 3183 | 4bgc021e 3184 | 4bgc021f 3185 | 4bgc0301 3186 | 4bgc0302 3187 | 4bgc0303 3188 | 4bgc0304 3189 | 4bgc0305 3190 | 4bgc0306 3191 | 4bgc0307 3192 | 4bgc0308 3193 | 4bgc0309 3194 | 4bgc030a 3195 | 4bgc030b 3196 | 4bgc030c 3197 | 4bgc030d 3198 | 4bgc030e 3199 | 4bgc030f 3200 | 4bgc030g 3201 | 4bgc030h 3202 | 4bgc030i 3203 | 4bgc030j 3204 | 4bgc030k 3205 | 4bgc030l 3206 | 4bgc030m 3207 | 4bgc030n 3208 | 4bgc030o 3209 | 4bgc030p 3210 | 4bgc030q 3211 | 4bgc030r 3212 | 4bgc030s 3213 | 4bgc030t 3214 | 4bgc030u 3215 | 4bgc030v 3216 | 4bgc030w 3217 | 4bgc030x 3218 | 4bgc030y 3219 | 4bgc030z 3220 | 4bgc0310 3221 | 4bgc0311 3222 | 4bgc0312 3223 | 4bgc0313 3224 | 4bgc0314 3225 | 4bgc0315 3226 | 4bgc0316 3227 | 4bgc0317 3228 | 4bgc0318 3229 | 4bgc0319 3230 | 4bgc031a 3231 | 4bgc031b 3232 | 4bgc031c 3233 | 4bgc031d 3234 | 4bgc031e 3235 | 4bgc031f 3236 | 4bgc0401 3237 | 4bgc0402 3238 | 4bgc0403 3239 | 4bgc0404 3240 | 4bgc0405 3241 | 4bgc0406 3242 | 4bgc0407 3243 | 4bgc0408 3244 | 4bgc0409 3245 | 4bgc040a 3246 | 4bgc040b 3247 | 4bgc040c 3248 | 4bgc040d 3249 | 4bgc040e 3250 | 4bgc040f 3251 | 4bgc040g 3252 | 4bgc040h 3253 | 4bgc040i 3254 | 4bgc040j 3255 | 4bgc040k 3256 | 4bgc040l 3257 | 4bgc040m 3258 | 4bgc040n 3259 | 4bgc040o 3260 | 4bgc040p 3261 | 4bgc040q 3262 | 4bgc040r 3263 | 4bgc040s 3264 | 4bgc040t 3265 | 4bgc040u 3266 | 4bgc040v 3267 | 4bgc040w 3268 | 4bgc040x 3269 | 4bgc040y 3270 | 4bgc040z 3271 | 4bgc0410 3272 | 4bgc0411 3273 | 4bgc0412 3274 | 4bgc0413 3275 | 4bgc0414 3276 | 4bgc0415 3277 | 4bgc0416 3278 | 4bgc0417 3279 | 4bgc0418 3280 | 4bgc0419 3281 | 4bgc041a 3282 | 4bgc041b 3283 | 4bgc041c 3284 | 4bgc041d 3285 | 4bgc041e 3286 | 4bhc0201 3287 | 4bhc0202 3288 | 4bhc0203 3289 | 4bhc0204 3290 | 4bhc0205 3291 | 4bhc0206 3292 | 4bhc0207 3293 | 4bhc0208 3294 | 4bhc0209 3295 | 4bhc020a 3296 | 4bhc020b 3297 | 4bhc020c 3298 | 4bhc020d 3299 | 4bhc020e 3300 | 4bhc020f 3301 | 4bhc020g 3302 | 4bhc020h 3303 | 4bhc020i 3304 | 4bhc020j 3305 | 4bhc020k 3306 | 4bhc020l 3307 | 4bhc020m 3308 | 4bhc020n 3309 | 4bhc020o 3310 | 4bhc020p 3311 | 4bhc020q 3312 | 4bhc020r 3313 | 4bhc020s 3314 | 4bhc020t 3315 | 4bhc020u 3316 | 4bhc020v 3317 | 4bhc020w 3318 | 4bhc020x 3319 | 4bhc020y 3320 | 4bhc020z 3321 | 4bhc0210 3322 | 4bhc0211 3323 | 4bhc0212 3324 | 4bhc0213 3325 | 4bhc0214 3326 | 4bhc0215 3327 | 4bhc0216 3328 | 4bhc0217 3329 | 4bhc0218 3330 | 4bhc0219 3331 | 4bhc021a 3332 | 4bhc021b 3333 | 4bhc021c 3334 | 4bhc021d 3335 | 4bhc021e 3336 | 4bhc0301 3337 | 4bhc0302 3338 | 4bhc0303 3339 | 4bhc0304 3340 | 4bhc0305 3341 | 4bhc0306 3342 | 4bhc0307 3343 | 4bhc0308 3344 | 4bhc0309 3345 | 4bhc030a 3346 | 4bhc030b 3347 | 4bhc030c 3348 | 4bhc030d 3349 | 4bhc030e 3350 | 4bhc030f 3351 | 4bhc030g 3352 | 4bhc030h 3353 | 4bhc030i 3354 | 4bhc030j 3355 | 4bhc030k 3356 | 4bhc030l 3357 | 4bhc030m 3358 | 4bhc030n 3359 | 4bhc030o 3360 | 4bhc030p 3361 | 4bhc030q 3362 | 4bhc030r 3363 | 4bhc030s 3364 | 4bhc030t 3365 | 4bhc030u 3366 | 4bhc030v 3367 | 4bhc030w 3368 | 4bhc030x 3369 | 4bhc030y 3370 | 4bhc030z 3371 | 4bhc0310 3372 | 4bhc0311 3373 | 4bhc0312 3374 | 4bhc0313 3375 | 4bhc0314 3376 | 4bhc0315 3377 | 4bhc0316 3378 | 4bhc0317 3379 | 4bhc0318 3380 | 4bhc0319 3381 | 4bhc031a 3382 | 4bhc031b 3383 | 4bhc031c 3384 | 4bhc031d 3385 | 4bhc031e 3386 | 4bhc031f 3387 | 4bhc0401 3388 | 4bhc0402 3389 | 4bhc0403 3390 | 4bhc0404 3391 | 4bhc0405 3392 | 4bhc0406 3393 | 4bhc0407 3394 | 4bhc0408 3395 | 4bhc0409 3396 | 4bhc040a 3397 | 4bhc040b 3398 | 4bhc040c 3399 | 4bhc040d 3400 | 4bhc040e 3401 | 4bhc040f 3402 | 4bhc040g 3403 | 4bhc040h 3404 | 4bhc040i 3405 | 4bhc040j 3406 | 4bhc040k 3407 | 4bhc040l 3408 | 4bhc040m 3409 | 4bhc040n 3410 | 4bhc040o 3411 | 4bhc040p 3412 | 4bhc040q 3413 | 4bhc040r 3414 | 4bhc040s 3415 | 4bhc040t 3416 | 4bhc040u 3417 | 4bhc040v 3418 | 4bhc040w 3419 | 4bhc040x 3420 | 4bhc040y 3421 | 4bhc040z 3422 | 4bhc0410 3423 | 4bhc0411 3424 | 4bhc0412 3425 | 4bhc0413 3426 | 4bhc0414 3427 | 4bhc0415 3428 | 4bhc0416 3429 | 4bhc0417 3430 | 4bhc0418 3431 | 4bhc0419 3432 | 4bhc041a 3433 | 4bhc041b 3434 | 4bhc041c 3435 | 4bhc041d 3436 | 4bhc041e 3437 | 4bic0201 3438 | 4bic0202 3439 | 4bic0203 3440 | 4bic0204 3441 | 4bic0205 3442 | 4bic0206 3443 | 4bic0207 3444 | 4bic0208 3445 | 4bic0209 3446 | 4bic020a 3447 | 4bic020b 3448 | 4bic020c 3449 | 4bic020d 3450 | 4bic020e 3451 | 4bic020f 3452 | 4bic020g 3453 | 4bic020h 3454 | 4bic020i 3455 | 4bic020j 3456 | 4bic020k 3457 | 4bic020l 3458 | 4bic020m 3459 | 4bic020n 3460 | 4bic020o 3461 | 4bic020p 3462 | 4bic020q 3463 | 4bic020r 3464 | 4bic020s 3465 | 4bic020t 3466 | 4bic020u 3467 | 4bic020v 3468 | 4bic020w 3469 | 4bic020x 3470 | 4bic020y 3471 | 4bic020z 3472 | 4bic0210 3473 | 4bic0211 3474 | 4bic0212 3475 | 4bic0213 3476 | 4bic0214 3477 | 4bic0215 3478 | 4bic0216 3479 | 4bic0217 3480 | 4bic0218 3481 | 4bic0219 3482 | 4bic021a 3483 | 4bic021b 3484 | 4bic021c 3485 | 4bic021d 3486 | 4bic021e 3487 | 4bic021f 3488 | 4bic021g 3489 | 4bic0301 3490 | 4bic0302 3491 | 4bic0303 3492 | 4bic0304 3493 | 4bic0305 3494 | 4bic0306 3495 | 4bic0307 3496 | 4bic0308 3497 | 4bic0309 3498 | 4bic030a 3499 | 4bic030b 3500 | 4bic030c 3501 | 4bic030d 3502 | 4bic030e 3503 | 4bic030f 3504 | 4bic030g 3505 | 4bic030h 3506 | 4bic030i 3507 | 4bic030j 3508 | 4bic030k 3509 | 4bic030l 3510 | 4bic030m 3511 | 4bic030n 3512 | 4bic030o 3513 | 4bic030p 3514 | 4bic030q 3515 | 4bic030r 3516 | 4bic030s 3517 | 4bic030t 3518 | 4bic030u 3519 | 4bic030v 3520 | 4bic030w 3521 | 4bic030x 3522 | 4bic030y 3523 | 4bic030z 3524 | 4bic0310 3525 | 4bic0311 3526 | 4bic0312 3527 | 4bic0313 3528 | 4bic0314 3529 | 4bic0315 3530 | 4bic0316 3531 | 4bic0317 3532 | 4bic0318 3533 | 4bic0319 3534 | 4bic031a 3535 | 4bic031b 3536 | 4bic031c 3537 | 4bic031d 3538 | 4bic0401 3539 | 4bic0402 3540 | 4bic0403 3541 | 4bic0404 3542 | 4bic0405 3543 | 4bic0406 3544 | 4bic0407 3545 | 4bic0408 3546 | 4bic0409 3547 | 4bic040a 3548 | 4bic040b 3549 | 4bic040c 3550 | 4bic040d 3551 | 4bic040e 3552 | 4bic040f 3553 | 4bic040g 3554 | 4bic040h 3555 | 4bic040i 3556 | 4bic040j 3557 | 4bic040k 3558 | 4bic040l 3559 | 4bic040m 3560 | 4bic040n 3561 | 4bic040o 3562 | 4bic040p 3563 | 4bic040q 3564 | 4bic040r 3565 | 4bic040s 3566 | 4bic040t 3567 | 4bic040u 3568 | 4bic040v 3569 | 4bic040w 3570 | 4bic040x 3571 | 4bic040y 3572 | 4bic040z 3573 | 4bic0410 3574 | 4bic0411 3575 | 4bic0412 3576 | 4bic0413 3577 | 4bic0414 3578 | 4bic0415 3579 | 4bic0416 3580 | 4bic0417 3581 | 4bic0418 3582 | 4bic0419 3583 | 4bic041a 3584 | 4bic041b 3585 | 4bic041c 3586 | 4bic041d 3587 | 4bic041e 3588 | 4bic041f 3589 | 4bjc0201 3590 | 4bjc0202 3591 | 4bjc0203 3592 | 4bjc0204 3593 | 4bjc0205 3594 | 4bjc0206 3595 | 4bjc0207 3596 | 4bjc0208 3597 | 4bjc0209 3598 | 4bjc020a 3599 | 4bjc020b 3600 | 4bjc020c 3601 | 4bjc020d 3602 | 4bjc020e 3603 | 4bjc020f 3604 | 4bjc020g 3605 | 4bjc020h 3606 | 4bjc020i 3607 | 4bjc020j 3608 | 4bjc020k 3609 | 4bjc020l 3610 | 4bjc020m 3611 | 4bjc020n 3612 | 4bjc020o 3613 | 4bjc020p 3614 | 4bjc020q 3615 | 4bjc020r 3616 | 4bjc020s 3617 | 4bjc020t 3618 | 4bjc020u 3619 | 4bjc020v 3620 | 4bjc020w 3621 | 4bjc020x 3622 | 4bjc020y 3623 | 4bjc020z 3624 | 4bjc0210 3625 | 4bjc0211 3626 | 4bjc0212 3627 | 4bjc0213 3628 | 4bjc0214 3629 | 4bjc0215 3630 | 4bjc0216 3631 | 4bjc0217 3632 | 4bjc0218 3633 | 4bjc0219 3634 | 4bjc021a 3635 | 4bjc021b 3636 | 4bjc021c 3637 | 4bjc021d 3638 | 4bjc021e 3639 | 4bjc021f 3640 | 4bjc0301 3641 | 4bjc0302 3642 | 4bjc0303 3643 | 4bjc0304 3644 | 4bjc0305 3645 | 4bjc0306 3646 | 4bjc0307 3647 | 4bjc0308 3648 | 4bjc0309 3649 | 4bjc030a 3650 | 4bjc030b 3651 | 4bjc030c 3652 | 4bjc030d 3653 | 4bjc030e 3654 | 4bjc030f 3655 | 4bjc030g 3656 | 4bjc030h 3657 | 4bjc030i 3658 | 4bjc030j 3659 | 4bjc030k 3660 | 4bjc030l 3661 | 4bjc030m 3662 | 4bjc030n 3663 | 4bjc030o 3664 | 4bjc030p 3665 | 4bjc030q 3666 | 4bjc030r 3667 | 4bjc030s 3668 | 4bjc030t 3669 | 4bjc030u 3670 | 4bjc030v 3671 | 4bjc030w 3672 | 4bjc030x 3673 | 4bjc030y 3674 | 4bjc030z 3675 | 4bjc0310 3676 | 4bjc0311 3677 | 4bjc0312 3678 | 4bjc0313 3679 | 4bjc0314 3680 | 4bjc0315 3681 | 4bjc0316 3682 | 4bjc0317 3683 | 4bjc0318 3684 | 4bjc0319 3685 | 4bjc031a 3686 | 4bjc031b 3687 | 4bjc031c 3688 | 4bjc031d 3689 | 4bjc031e 3690 | 4bjc0401 3691 | 4bjc0402 3692 | 4bjc0403 3693 | 4bjc0404 3694 | 4bjc0405 3695 | 4bjc0406 3696 | 4bjc0407 3697 | 4bjc0408 3698 | 4bjc0409 3699 | 4bjc040a 3700 | 4bjc040b 3701 | 4bjc040c 3702 | 4bjc040d 3703 | 4bjc040e 3704 | 4bjc040f 3705 | 4bjc040g 3706 | 4bjc040h 3707 | 4bjc040i 3708 | 4bjc040j 3709 | 4bjc040k 3710 | 4bjc040l 3711 | 4bjc040m 3712 | 4bjc040n 3713 | 4bjc040o 3714 | 4bjc040p 3715 | 4bjc040q 3716 | 4bjc040r 3717 | 4bjc040s 3718 | 4bjc040t 3719 | 4bjc040u 3720 | 4bjc040v 3721 | 4bjc040w 3722 | 4bjc040x 3723 | 4bjc040y 3724 | 4bjc040z 3725 | 4bjc0410 3726 | 4bjc0411 3727 | 4bjc0412 3728 | 4bjc0413 3729 | 4bjc0414 3730 | 4bjc0415 3731 | 4bjc0416 3732 | 4bjc0417 3733 | 4bjc0418 3734 | 4bjc0419 3735 | 4bjc041a 3736 | 4bjc041b 3737 | 4bjc041c 3738 | 4bjc041d 3739 | 4bjc041e 3740 | -------------------------------------------------------------------------------- /preprocess/refactor_wsj.py: -------------------------------------------------------------------------------- 1 | import os 2 | import glob 3 | import argparse 4 | from tqdm import tqdm 5 | import multiprocessing 6 | from pathlib import Path 7 | from subprocess import check_call, CalledProcessError 8 | 9 | 10 | # Arguments 11 | parser = argparse.ArgumentParser(description='Build up complete dataset.') 12 | parser.add_argument('--wsj_root', default=None, type=str, 13 | help='Path to downloaded raw WSJ', required=True) 14 | parser.add_argument('--dest', default=None, type=str, 15 | help='Path to store WSJ wav files.', required=True) 16 | parser.add_argument('--sph2pipe_bin', default='sph2pipe_v2.5/sph2pipe', type=str, 17 | help='Path to the binary file of sph2pipe', required=False) 18 | parser.add_argument('--n_workers', default=-1, type=int, 19 | help='Number of workers for multiprocessing', required=False) 20 | parser.add_argument('--debug', action='store_true', 21 | help='Debug mode', required=False) 22 | args = parser.parse_args() 23 | 24 | # Convert function 25 | def wv1_to_wav(file_path): 26 | wav_name = str(file_path).rsplit('/',1)[-1].replace('.wv1','.wav') 27 | outputpath = os.path.join(args.dest,'wav',wav_name) 28 | return check_call([args.sph2pipe_bin, '-f', 'rif', file_path, outputpath]) 29 | 30 | # Script begins 31 | print('===> Listing all WSJ wv1 files...') 32 | wav_dir = os.path.join(args.dest,'wav') 33 | os.makedirs(wav_dir, exist_ok=True) 34 | if 'wsj0' in args.wsj_root: 35 | wv1_regex = os.path.join('wsj/acoustics/cds/*/wsj*/*/*/*.wv1') 36 | else: 37 | wv1_regex = os.path.join('*/*/*/*/*.wv1') 38 | wv1_list = list(Path(args.wsj_root).glob(wv1_regex)) 39 | 40 | print('===> Converting {} wv1 files to wav files...'.format(len(wv1_list))) 41 | assert os.path.isfile(args.sph2pipe_bin) 42 | pool = multiprocessing.Pool( 43 | processes = multiprocessing.cpu_count()-1 if args.n_workers==-1 else args.n_workers) 44 | progress = tqdm(total=len(wv1_list)) 45 | def update(*a): 46 | progress.update() 47 | search_results = [] 48 | for file_path in wv1_list: 49 | search_results.append(pool.apply_async(wv1_to_wav, [file_path], callback=update)) 50 | pool.close() 51 | pool.join() 52 | progress.close() 53 | status = [1 for r in tqdm(search_results, desc='Collecting results') if r.get()!=0] 54 | print('===> Done ({} convertion failed)'.format(sum(status))) 55 | print('') -------------------------------------------------------------------------------- /preprocess/spk_split/dev_spk: -------------------------------------------------------------------------------- 1 | 011o030v 011 2 | 011o030w 011 3 | 011o030x 011 4 | 011o030y 011 5 | 011o030z 011 6 | 011o0310 011 7 | 011o0311 011 8 | 011o0312 011 9 | 011o0313 011 10 | 011o0314 011 11 | 013o030v 013 12 | 013o030w 013 13 | 013o030x 013 14 | 013o030y 013 15 | 013o030z 013 16 | 013o0310 013 17 | 013o0311 013 18 | 013o0312 013 19 | 013o0313 013 20 | 013o0314 013 21 | 014o030t 014 22 | 014o030u 014 23 | 014o030v 014 24 | 014o030w 014 25 | 014o030x 014 26 | 014o030y 014 27 | 014o030z 014 28 | 014o0310 014 29 | 014o0311 014 30 | 014o0312 014 31 | 015o030r 015 32 | 015o030s 015 33 | 015o030t 015 34 | 015o030u 015 35 | 015o030v 015 36 | 015o030w 015 37 | 015o030x 015 38 | 015o030y 015 39 | 015o030z 015 40 | 015o0310 015 41 | 016o030x 016 42 | 016o030y 016 43 | 016o030z 016 44 | 016o0310 016 45 | 016o0311 016 46 | 016o0312 016 47 | 016o0313 016 48 | 016o0314 016 49 | 016o0315 016 50 | 017o030u 017 51 | 017o030v 017 52 | 017o030w 017 53 | 017o030x 017 54 | 017o030y 017 55 | 017o030z 017 56 | 017o0310 017 57 | 017o0311 017 58 | 017o0312 017 59 | 017o0313 017 60 | 018o030t 018 61 | 018o030u 018 62 | 018o030v 018 63 | 018o030w 018 64 | 018o030x 018 65 | 018o030y 018 66 | 018o030z 018 67 | 018o0310 018 68 | 018o0311 018 69 | 018o0312 018 70 | 019o030s 019 71 | 019o030t 019 72 | 019o030u 019 73 | 019o030v 019 74 | 019o030w 019 75 | 019o030x 019 76 | 019o030y 019 77 | 019o030z 019 78 | 019o0310 019 79 | 019o0311 019 80 | 01bo030u 01b 81 | 01bo030v 01b 82 | 01bo030w 01b 83 | 01bo030x 01b 84 | 01bo030y 01b 85 | 01bo030z 01b 86 | 01bo0310 01b 87 | 01bo0311 01b 88 | 01bo0312 01b 89 | 01do030w 01d 90 | 01do030x 01d 91 | 01do030y 01d 92 | 01do030z 01d 93 | 01do0310 01d 94 | 01do0311 01d 95 | 01do0312 01d 96 | 01do0313 01d 97 | 01do0314 01d 98 | 01do0315 01d 99 | 01eo030v 01e 100 | 01eo030w 01e 101 | 01eo030x 01e 102 | 01eo030y 01e 103 | 01eo030z 01e 104 | 01eo0310 01e 105 | 01eo0311 01e 106 | 01eo0312 01e 107 | 01eo0313 01e 108 | 01eo0314 01e 109 | 01fo030t 01f 110 | 01fo030u 01f 111 | 01fo030v 01f 112 | 01fo030w 01f 113 | 01fo030x 01f 114 | 01fo030y 01f 115 | 01fo030z 01f 116 | 01fo0310 01f 117 | 01fo0311 01f 118 | 01go030w 01g 119 | 01go030x 01g 120 | 01go030y 01g 121 | 01go030z 01g 122 | 01go0310 01g 123 | 01go0311 01g 124 | 01go0312 01g 125 | 01go0313 01g 126 | 01go0314 01g 127 | 01go0315 01g 128 | 01io030v 01i 129 | 01io030w 01i 130 | 01io030x 01i 131 | 01io030y 01i 132 | 01io030z 01i 133 | 01io0310 01i 134 | 01io0311 01i 135 | 01io0312 01i 136 | 01io0313 01i 137 | 01io0314 01i 138 | 01jo030t 01j 139 | 01jo030u 01j 140 | 01jo030v 01j 141 | 01jo030w 01j 142 | 01jo030x 01j 143 | 01jo030y 01j 144 | 01jo030z 01j 145 | 01jo0310 01j 146 | 01jo0311 01j 147 | 01jo0312 01j 148 | 01ko030w 01k 149 | 01ko030x 01k 150 | 01ko030y 01k 151 | 01ko030z 01k 152 | 01ko0310 01k 153 | 01ko0311 01k 154 | 01ko0312 01k 155 | 01ko0313 01k 156 | 01ko0314 01k 157 | 01ko0315 01k 158 | 01lo030v 01l 159 | 01lo030w 01l 160 | 01lo030x 01l 161 | 01lo030y 01l 162 | 01lo030z 01l 163 | 01lo0310 01l 164 | 01lo0311 01l 165 | 01lo0312 01l 166 | 01lo0313 01l 167 | 01lo0314 01l 168 | 01mo030v 01m 169 | 01mo030w 01m 170 | 01mo030x 01m 171 | 01mo030y 01m 172 | 01mo030z 01m 173 | 01mo0310 01m 174 | 01mo0311 01m 175 | 01mo0312 01m 176 | 01mo0313 01m 177 | 01po030t 01p 178 | 01po030u 01p 179 | 01po030v 01p 180 | 01po030w 01p 181 | 01po030x 01p 182 | 01po030y 01p 183 | 01po030z 01p 184 | 01po0310 01p 185 | 01po0311 01p 186 | 01qo030x 01q 187 | 01qo030y 01q 188 | 01qo030z 01q 189 | 01qo0310 01q 190 | 01qo0311 01q 191 | 01qo0312 01q 192 | 01qo0313 01q 193 | 01qo0314 01q 194 | 01qo0315 01q 195 | 01ro030t 01r 196 | 01ro030u 01r 197 | 01ro030v 01r 198 | 01ro030w 01r 199 | 01ro030x 01r 200 | 01ro030y 01r 201 | 01ro030z 01r 202 | 01ro0310 01r 203 | 01ro0311 01r 204 | 01ro0312 01r 205 | 01to030w 01t 206 | 01to030x 01t 207 | 01to030y 01t 208 | 01to030z 01t 209 | 01to0310 01t 210 | 01to0311 01t 211 | 01to0312 01t 212 | 01to0313 01t 213 | 01to0314 01t 214 | 01to0315 01t 215 | 01uo030u 01u 216 | 01uo030v 01u 217 | 01uo030w 01u 218 | 01uo030x 01u 219 | 01uo030y 01u 220 | 01uo030z 01u 221 | 01uo0310 01u 222 | 01uo0311 01u 223 | 01uo0312 01u 224 | 01uo0313 01u 225 | 01wo030u 01w 226 | 01wo030v 01w 227 | 01wo030w 01w 228 | 01wo030x 01w 229 | 01wo030y 01w 230 | 01wo030z 01w 231 | 01wo0310 01w 232 | 01wo0311 01w 233 | 01wo0312 01w 234 | 01yo030t 01y 235 | 01yo030u 01y 236 | 01yo030v 01y 237 | 01yo030w 01y 238 | 01yo030x 01y 239 | 01yo030y 01y 240 | 01yo030z 01y 241 | 01yo0310 01y 242 | 01yo0311 01y 243 | 01yo0312 01y 244 | 01zo030w 01z 245 | 01zo030x 01z 246 | 01zo030y 01z 247 | 01zo030z 01z 248 | 01zo0310 01z 249 | 01zo0311 01z 250 | 01zo0312 01z 251 | 01zo0313 01z 252 | 01zo0314 01z 253 | 01zo0315 01z 254 | 020o030t 020 255 | 020o030u 020 256 | 020o030v 020 257 | 020o030y 020 258 | 020o030z 020 259 | 020o0310 020 260 | 020o0311 020 261 | 020o0312 020 262 | 020o0313 020 263 | 021o030v 021 264 | 021o030w 021 265 | 021o030x 021 266 | 021o030y 021 267 | 021o030z 021 268 | 021o0310 021 269 | 021o0311 021 270 | 021o0312 021 271 | 021o0313 021 272 | 021o0314 021 273 | 024o030v 024 274 | 024o030w 024 275 | 024o030x 024 276 | 024o030y 024 277 | 024o030z 024 278 | 024o0310 024 279 | 024o0311 024 280 | 024o0312 024 281 | 024o0313 024 282 | 024o0314 024 283 | 026o030t 026 284 | 026o030u 026 285 | 026o030v 026 286 | 026o030w 026 287 | 026o030x 026 288 | 026o030y 026 289 | 026o030z 026 290 | 026o0310 026 291 | 026o0311 026 292 | 026o0312 026 293 | 027o030s 027 294 | 027o030t 027 295 | 027o030u 027 296 | 027o030v 027 297 | 027o030w 027 298 | 027o030x 027 299 | 027o030y 027 300 | 027o030z 027 301 | 027o0310 027 302 | 027o0311 027 303 | 028o030u 028 304 | 028o030v 028 305 | 028o030w 028 306 | 028o030x 028 307 | 028o030y 028 308 | 028o030z 028 309 | 028o0310 028 310 | 028o0311 028 311 | 028o0312 028 312 | 028o0313 028 313 | 029o030v 029 314 | 029o030w 029 315 | 029o030x 029 316 | 029o030y 029 317 | 029o030z 029 318 | 029o0310 029 319 | 029o0311 029 320 | 029o0312 029 321 | 029o0313 029 322 | 029o0314 029 323 | 02bo030v 02b 324 | 02bo030w 02b 325 | 02bo030x 02b 326 | 02bo030y 02b 327 | 02bo030z 02b 328 | 02bo0310 02b 329 | 02bo0311 02b 330 | 02bo0312 02b 331 | 02bo0313 02b 332 | 02bo0314 02b 333 | 02co030v 02c 334 | 02co030w 02c 335 | 02co030x 02c 336 | 02co030y 02c 337 | 02co030z 02c 338 | 02co0310 02c 339 | 02co0311 02c 340 | 02co0312 02c 341 | 02co0313 02c 342 | 02eo030v 02e 343 | 02eo030w 02e 344 | 02eo030x 02e 345 | 02eo030y 02e 346 | 02eo030z 02e 347 | 02eo0310 02e 348 | 02eo0312 02e 349 | 02eo0313 02e 350 | 02eo0314 02e 351 | 02eo0315 02e 352 | 204o010g 204 353 | 204o010h 204 354 | 204o010i 204 355 | 204o010j 204 356 | 204o010k 204 357 | 205o010h 205 358 | 205o010i 205 359 | 205o010j 205 360 | 205o010k 205 361 | 205o010l 205 362 | 206o010j 206 363 | 206o010k 206 364 | 206o010l 206 365 | 206o010m 206 366 | 206o010n 206 367 | 207o010k 207 368 | 207o010l 207 369 | 207o010m 207 370 | 207o010n 207 371 | 207o010o 207 372 | 208o010h 208 373 | 208o010i 208 374 | 208o010j 208 375 | 208o010k 208 376 | 208o010l 208 377 | 209o010f 209 378 | 209o010g 209 379 | 209o010h 209 380 | 209o010i 209 381 | 209o010j 209 382 | 20ao010h 20a 383 | 20ao010i 20a 384 | 20ao010j 20a 385 | 20ao010k 20a 386 | 20ao010l 20a 387 | 20bo010h 20b 388 | 20bo010i 20b 389 | 20bo010j 20b 390 | 20bo010k 20b 391 | 20bo010l 20b 392 | 20co010g 20c 393 | 20co010h 20c 394 | 20co010i 20c 395 | 20co010j 20c 396 | 20co010k 20c 397 | 20do010h 20d 398 | 20do010i 20d 399 | 20do010j 20d 400 | 20do010k 20d 401 | 20do010l 20d 402 | 20eo010h 20e 403 | 20eo010i 20e 404 | 20eo010j 20e 405 | 20eo010k 20e 406 | 20eo010l 20e 407 | 20fo010f 20f 408 | 20fo010g 20f 409 | 20fo010h 20f 410 | 20fo010i 20f 411 | 20fo010j 20f 412 | 20go010i 20g 413 | 20go010j 20g 414 | 20go010k 20g 415 | 20go010l 20g 416 | 20go010m 20g 417 | 20ho010f 20h 418 | 20ho010g 20h 419 | 20ho010h 20h 420 | 20ho010i 20h 421 | 20ho010j 20h 422 | 20io010h 20i 423 | 20io010i 20i 424 | 20io010j 20i 425 | 20io010k 20i 426 | 20io010l 20i 427 | 20lo010g 20l 428 | 20lo010h 20l 429 | 20lo010i 20l 430 | 20lo010j 20l 431 | 20lo010k 20l 432 | 20mo010i 20m 433 | 20mo010j 20m 434 | 20mo010k 20m 435 | 20mo010l 20m 436 | 20mo010m 20m 437 | 20no010h 20n 438 | 20no010i 20n 439 | 20no010j 20n 440 | 20no010k 20n 441 | 20no010l 20n 442 | 20oo010h 20o 443 | 20oo010i 20o 444 | 20oo010j 20o 445 | 20oo010k 20o 446 | 20oo010l 20o 447 | 20po010g 20p 448 | 20po010h 20p 449 | 20po010i 20p 450 | 20po010j 20p 451 | 20po010k 20p 452 | 20qo010g 20q 453 | 20qo010h 20q 454 | 20qo010i 20q 455 | 20qo010j 20q 456 | 20qo010k 20q 457 | 20ro010i 20r 458 | 20ro010j 20r 459 | 20ro010k 20r 460 | 20ro010l 20r 461 | 20ro010m 20r 462 | 20to010f 20t 463 | 20to010g 20t 464 | 20to010h 20t 465 | 20to010i 20t 466 | 20to010j 20t 467 | 20vo010h 20v 468 | 20vo010i 20v 469 | 20vo010j 20v 470 | 20vo010k 20v 471 | 20vo010l 20v 472 | 401o030t 401 473 | 401o030u 401 474 | 401o030v 401 475 | 401o030w 401 476 | 401o030x 401 477 | 401o030y 401 478 | 401o030z 401 479 | 401o0310 401 480 | 401o0312 401 481 | 403o030s 403 482 | 403o030t 403 483 | 403o030u 403 484 | 403o030v 403 485 | 403o030w 403 486 | 403o030x 403 487 | 403o030y 403 488 | 403o030z 403 489 | 403o0310 403 490 | 404o030u 404 491 | 404o030v 404 492 | 404o030w 404 493 | 404o030x 404 494 | 404o030y 404 495 | 404o030z 404 496 | 404o0310 404 497 | 404o0311 404 498 | 404o0312 404 499 | 404o0313 404 500 | 405o030v 405 501 | 405o030w 405 502 | 405o030x 405 503 | 405o030y 405 504 | 405o030z 405 505 | 405o0310 405 506 | 405o0311 405 507 | 405o0312 405 508 | 405o0313 405 509 | 405o0314 405 510 | 406o030u 406 511 | 406o030v 406 512 | 406o030w 406 513 | 406o030x 406 514 | 406o030y 406 515 | 406o030z 406 516 | 406o0310 406 517 | 406o0311 406 518 | 406o0312 406 519 | 407o030r 407 520 | 407o030s 407 521 | 407o030t 407 522 | 407o030u 407 523 | 407o030v 407 524 | 407o030w 407 525 | 407o030x 407 526 | 407o030y 407 527 | 407o030z 407 528 | 407o0310 407 529 | 408o030t 408 530 | 408o030u 408 531 | 408o030v 408 532 | 408o030w 408 533 | 408o030x 408 534 | 408o030y 408 535 | 408o030z 408 536 | 408o0310 408 537 | 408o0311 408 538 | 408o0312 408 539 | 409o030t 409 540 | 409o030u 409 541 | 409o030v 409 542 | 409o030w 409 543 | 409o030x 409 544 | 409o030y 409 545 | 409o030z 409 546 | 409o0310 409 547 | 409o0311 409 548 | 409o0312 409 549 | 40ao030v 40a 550 | 40ao030w 40a 551 | 40ao030x 40a 552 | 40ao030y 40a 553 | 40ao030z 40a 554 | 40ao0310 40a 555 | 40ao0311 40a 556 | 40ao0312 40a 557 | 40ao0313 40a 558 | 40ao0314 40a 559 | 40bo030r 40b 560 | 40bo030s 40b 561 | 40bo030t 40b 562 | 40bo030u 40b 563 | 40bo030v 40b 564 | 40bo030w 40b 565 | 40bo030x 40b 566 | 40bo030y 40b 567 | 40bo0310 40b 568 | 40co030w 40c 569 | 40co030x 40c 570 | 40co030y 40c 571 | 40co030z 40c 572 | 40co0310 40c 573 | 40co0311 40c 574 | 40co0312 40c 575 | 40co0313 40c 576 | 40co0314 40c 577 | 40co0315 40c 578 | 40do030t 40d 579 | 40do030u 40d 580 | 40do030v 40d 581 | 40do030w 40d 582 | 40do030x 40d 583 | 40do030y 40d 584 | 40do030z 40d 585 | 40do0310 40d 586 | 40do0311 40d 587 | 40do0312 40d 588 | 40eo030s 40e 589 | 40eo030t 40e 590 | 40eo030u 40e 591 | 40eo030v 40e 592 | 40eo030w 40e 593 | 40eo030x 40e 594 | 40eo030y 40e 595 | 40eo030z 40e 596 | 40eo0310 40e 597 | 40eo0311 40e 598 | 40fo030w 40f 599 | 40fo030x 40f 600 | 40fo030y 40f 601 | 40fo030z 40f 602 | 40fo0310 40f 603 | 40fo0311 40f 604 | 40fo0312 40f 605 | 40fo0313 40f 606 | 40fo0314 40f 607 | 40fo0315 40f 608 | 40go030u 40g 609 | 40go030v 40g 610 | 40go030w 40g 611 | 40go030x 40g 612 | 40go030y 40g 613 | 40go030z 40g 614 | 40go0310 40g 615 | 40go0311 40g 616 | 40go0312 40g 617 | 40go0313 40g 618 | 40ho030u 40h 619 | 40ho030v 40h 620 | 40ho030w 40h 621 | 40ho030x 40h 622 | 40ho030y 40h 623 | 40ho030z 40h 624 | 40ho0310 40h 625 | 40ho0311 40h 626 | 40ho0312 40h 627 | 40ho0313 40h 628 | 40io030u 40i 629 | 40io030v 40i 630 | 40io030w 40i 631 | 40io030x 40i 632 | 40io030y 40i 633 | 40io030z 40i 634 | 40io0310 40i 635 | 40io0311 40i 636 | 40io0312 40i 637 | 40io0313 40i 638 | 40jo030v 40j 639 | 40jo030w 40j 640 | 40jo030x 40j 641 | 40jo030y 40j 642 | 40jo030z 40j 643 | 40jo0310 40j 644 | 40jo0311 40j 645 | 40jo0312 40j 646 | 40jo0313 40j 647 | 40jo0314 40j 648 | 40ko030t 40k 649 | 40ko030u 40k 650 | 40ko030v 40k 651 | 40ko030w 40k 652 | 40ko030x 40k 653 | 40ko030y 40k 654 | 40ko030z 40k 655 | 40ko0310 40k 656 | 40ko0311 40k 657 | 40ko0312 40k 658 | 40lo030w 40l 659 | 40lo030x 40l 660 | 40lo030y 40l 661 | 40lo030z 40l 662 | 40lo0310 40l 663 | 40lo0311 40l 664 | 40lo0312 40l 665 | 40lo0313 40l 666 | 40lo0314 40l 667 | 40lo0315 40l 668 | 40mo030t 40m 669 | 40mo030u 40m 670 | 40mo030v 40m 671 | 40mo030w 40m 672 | 40mo030x 40m 673 | 40mo030y 40m 674 | 40mo030z 40m 675 | 40mo0310 40m 676 | 40mo0311 40m 677 | 40mo0312 40m 678 | 40no030t 40n 679 | 40no030u 40n 680 | 40no030v 40n 681 | 40no030w 40n 682 | 40no030x 40n 683 | 40no030y 40n 684 | 40no030z 40n 685 | 40no0310 40n 686 | 40no0311 40n 687 | 40no0312 40n 688 | 40po030u 40p 689 | 40po030v 40p 690 | 40po030w 40p 691 | 40po030x 40p 692 | 40po030y 40p 693 | 40po030z 40p 694 | 40po0310 40p 695 | 40po0311 40p 696 | 40po0312 40p 697 | 40po0313 40p 698 | 460c040n 460 699 | 460c040o 460 700 | 460c040p 460 701 | 460c040q 460 702 | 460c040r 460 703 | 460c040s 460 704 | 460c040t 460 705 | 460c040u 460 706 | 460c040v 460 707 | 460c040w 460 708 | 460c040x 460 709 | 460c040y 460 710 | 460c040z 460 711 | 460c0410 460 712 | 460c0411 460 713 | 461c040i 461 714 | 461c040j 461 715 | 461c040k 461 716 | 461c040l 461 717 | 461c040m 461 718 | 461c040n 461 719 | 461c040o 461 720 | 461c040p 461 721 | 461c040q 461 722 | 461c040r 461 723 | 461c040s 461 724 | 461c040t 461 725 | 461c040u 461 726 | 461c040v 461 727 | 461c040w 461 728 | 462c040j 462 729 | 462c040k 462 730 | 462c040l 462 731 | 462c040m 462 732 | 462c040n 462 733 | 462c040o 462 734 | 462c040p 462 735 | 462c040q 462 736 | 462c040r 462 737 | 462c040s 462 738 | 462c040t 462 739 | 462c040u 462 740 | 462c040v 462 741 | 462c040w 462 742 | 462c040x 462 743 | 463c040l 463 744 | 463c040m 463 745 | 463c040n 463 746 | 463c040o 463 747 | 463c040p 463 748 | 463c040q 463 749 | 463c040r 463 750 | 463c040s 463 751 | 463c040t 463 752 | 463c040u 463 753 | 463c040v 463 754 | 463c040w 463 755 | 463c040x 463 756 | 463c040y 463 757 | 463c040z 463 758 | 464c040j 464 759 | 464c040k 464 760 | 464c040l 464 761 | 464c040m 464 762 | 464c040n 464 763 | 464c040o 464 764 | 464c040p 464 765 | 464c040q 464 766 | 464c040r 464 767 | 464c040s 464 768 | 464c040t 464 769 | 464c040u 464 770 | 464c040v 464 771 | 464c040w 464 772 | 464c040x 464 773 | 465c040l 465 774 | 465c040m 465 775 | 465c040n 465 776 | 465c040o 465 777 | 465c040p 465 778 | 465c040q 465 779 | 465c040r 465 780 | 465c040s 465 781 | 465c040t 465 782 | 465c040u 465 783 | 465c040v 465 784 | 465c040w 465 785 | 465c040x 465 786 | 465c040y 465 787 | 465c040z 465 788 | 466c040l 466 789 | 466c040m 466 790 | 466c040n 466 791 | 466c040o 466 792 | 466c040p 466 793 | 466c040q 466 794 | 466c040r 466 795 | 466c040s 466 796 | 466c040t 466 797 | 466c040u 466 798 | 466c040v 466 799 | 466c040w 466 800 | 466c040x 466 801 | 466c040y 466 802 | 466c040z 466 803 | 467c040h 467 804 | 467c040i 467 805 | 467c040j 467 806 | 467c040k 467 807 | 467c040l 467 808 | 467c040m 467 809 | 467c040n 467 810 | 467c040o 467 811 | 467c040p 467 812 | 467c040q 467 813 | 467c040r 467 814 | 467c040s 467 815 | 467c040t 467 816 | 467c040u 467 817 | 467c040v 467 818 | 468c040n 468 819 | 468c040o 468 820 | 468c040p 468 821 | 468c040q 468 822 | 468c040r 468 823 | 468c040s 468 824 | 468c040t 468 825 | 468c040u 468 826 | 468c040v 468 827 | 468c040w 468 828 | 468c040x 468 829 | 468c040y 468 830 | 468c040z 468 831 | 468c0410 468 832 | 468c0411 468 833 | 469c040k 469 834 | 469c040l 469 835 | 469c040m 469 836 | 469c040n 469 837 | 469c040o 469 838 | 469c040p 469 839 | 469c040q 469 840 | 469c040r 469 841 | 469c040s 469 842 | 469c040t 469 843 | 469c040u 469 844 | 469c040v 469 845 | 469c040w 469 846 | 469c040x 469 847 | 469c040y 469 848 | 46ac040k 46a 849 | 46ac040l 46a 850 | 46ac040m 46a 851 | 46ac040n 46a 852 | 46ac040o 46a 853 | 46ac040p 46a 854 | 46ac040q 46a 855 | 46ac040r 46a 856 | 46ac040s 46a 857 | 46ac040t 46a 858 | 46ac040u 46a 859 | 46ac040v 46a 860 | 46ac040w 46a 861 | 46ac040x 46a 862 | 46ac040y 46a 863 | 46bc040n 46b 864 | 46bc040o 46b 865 | 46bc040p 46b 866 | 46bc040q 46b 867 | 46bc040r 46b 868 | 46bc040s 46b 869 | 46bc040t 46b 870 | 46bc040u 46b 871 | 46bc040v 46b 872 | 46bc040w 46b 873 | 46bc040x 46b 874 | 46bc040y 46b 875 | 46bc040z 46b 876 | 46bc0410 46b 877 | 46bc0411 46b 878 | 46cc040j 46c 879 | 46cc040k 46c 880 | 46cc040l 46c 881 | 46cc040m 46c 882 | 46cc040n 46c 883 | 46cc040o 46c 884 | 46cc040p 46c 885 | 46cc040q 46c 886 | 46cc040r 46c 887 | 46cc040s 46c 888 | 46cc040t 46c 889 | 46cc040u 46c 890 | 46cc040v 46c 891 | 46cc040w 46c 892 | 46cc040x 46c 893 | 46dc040l 46d 894 | 46dc040m 46d 895 | 46dc040n 46d 896 | 46dc040o 46d 897 | 46dc040p 46d 898 | 46dc040q 46d 899 | 46dc040r 46d 900 | 46dc040s 46d 901 | 46dc040t 46d 902 | 46dc040u 46d 903 | 46dc040v 46d 904 | 46dc040w 46d 905 | 46dc040x 46d 906 | 46dc040y 46d 907 | 46dc040z 46d 908 | 46ec040h 46e 909 | 46ec040i 46e 910 | 46ec040j 46e 911 | 46ec040k 46e 912 | 46ec040l 46e 913 | 46ec040m 46e 914 | 46ec040n 46e 915 | 46ec040o 46e 916 | 46ec040p 46e 917 | 46ec040q 46e 918 | 46ec040r 46e 919 | 46ec040s 46e 920 | 46ec040t 46e 921 | 46ec040u 46e 922 | 46ec040v 46e 923 | 46fc040l 46f 924 | 46fc040m 46f 925 | 46fc040n 46f 926 | 46fc040o 46f 927 | 46fc040p 46f 928 | 46fc040q 46f 929 | 46fc040r 46f 930 | 46fc040s 46f 931 | 46fc040t 46f 932 | 46fc040u 46f 933 | 46fc040v 46f 934 | 46fc040w 46f 935 | 46fc040x 46f 936 | 46fc040y 46f 937 | 46fc040z 46f 938 | 46gc040m 46g 939 | 46gc040n 46g 940 | 46gc040o 46g 941 | 46gc040p 46g 942 | 46gc040q 46g 943 | 46gc040r 46g 944 | 46gc040s 46g 945 | 46gc040t 46g 946 | 46gc040u 46g 947 | 46gc040v 46g 948 | 46gc040w 46g 949 | 46gc040x 46g 950 | 46gc040y 46g 951 | 46gc040z 46g 952 | 46gc0410 46g 953 | 46hc040l 46h 954 | 46hc040m 46h 955 | 46hc040n 46h 956 | 46hc040o 46h 957 | 46hc040p 46h 958 | 46hc040q 46h 959 | 46hc040r 46h 960 | 46hc040s 46h 961 | 46hc040t 46h 962 | 46hc040u 46h 963 | 46hc040v 46h 964 | 46hc040w 46h 965 | 46hc040x 46h 966 | 46hc040y 46h 967 | 46hc040z 46h 968 | 46ic040l 46i 969 | 46ic040m 46i 970 | 46ic040n 46i 971 | 46ic040o 46i 972 | 46ic040p 46i 973 | 46ic040q 46i 974 | 46ic040r 46i 975 | 46ic040s 46i 976 | 46ic040t 46i 977 | 46ic040u 46i 978 | 46ic040v 46i 979 | 46ic040w 46i 980 | 46ic040x 46i 981 | 46ic040y 46i 982 | 46ic040z 46i 983 | 46jc040k 46j 984 | 46jc040l 46j 985 | 46jc040m 46j 986 | 46jc040n 46j 987 | 46jc040o 46j 988 | 46jc040p 46j 989 | 46jc040q 46j 990 | 46jc040r 46j 991 | 46jc040s 46j 992 | 46jc040t 46j 993 | 46jc040u 46j 994 | 46jc040v 46j 995 | 46jc040w 46j 996 | 46jc040x 46j 997 | 46jc040y 46j 998 | 46kc040l 46k 999 | 46kc040m 46k 1000 | 46kc040n 46k 1001 | 46kc040o 46k 1002 | 46kc040p 46k 1003 | 46kc040q 46k 1004 | 46kc040r 46k 1005 | 46kc040s 46k 1006 | 46kc040t 46k 1007 | 46kc040u 46k 1008 | 46kc040v 46k 1009 | 46kc040w 46k 1010 | 46kc040x 46k 1011 | 46kc040y 46k 1012 | 46kc040z 46k 1013 | 46lc040k 46l 1014 | 46lc040l 46l 1015 | 46lc040m 46l 1016 | 46lc040n 46l 1017 | 46lc040o 46l 1018 | 46lc040p 46l 1019 | 46lc040q 46l 1020 | 46lc040r 46l 1021 | 46lc040s 46l 1022 | 46lc040t 46l 1023 | 46lc040u 46l 1024 | 46lc040v 46l 1025 | 46lc040w 46l 1026 | 46lc040x 46l 1027 | 46lc040y 46l 1028 | 46mc040i 46m 1029 | 46mc040j 46m 1030 | 46mc040k 46m 1031 | 46mc040l 46m 1032 | 46mc040m 46m 1033 | 46mc040n 46m 1034 | 46mc040o 46m 1035 | 46mc040p 46m 1036 | 46mc040q 46m 1037 | 46mc040r 46m 1038 | 46mc040s 46m 1039 | 46mc040t 46m 1040 | 46mc040u 46m 1041 | 46mc040v 46m 1042 | 46mc040w 46m 1043 | 46nc040j 46n 1044 | 46nc040k 46n 1045 | 46nc040l 46n 1046 | 46nc040m 46n 1047 | 46nc040n 46n 1048 | 46nc040o 46n 1049 | 46nc040p 46n 1050 | 46nc040q 46n 1051 | 46nc040r 46n 1052 | 46nc040s 46n 1053 | 46nc040t 46n 1054 | 46nc040u 46n 1055 | 46nc040v 46n 1056 | 46nc040w 46n 1057 | 46nc040x 46n 1058 | 46oc040k 46o 1059 | 46oc040l 46o 1060 | 46oc040m 46o 1061 | 46oc040n 46o 1062 | 46oc040o 46o 1063 | 46oc040p 46o 1064 | 46oc040q 46o 1065 | 46oc040r 46o 1066 | 46oc040s 46o 1067 | 46oc040t 46o 1068 | 46oc040u 46o 1069 | 46oc040v 46o 1070 | 46oc040w 46o 1071 | 46oc040x 46o 1072 | 46oc040y 46o 1073 | 46pc040l 46p 1074 | 46pc040m 46p 1075 | 46pc040n 46p 1076 | 46pc040o 46p 1077 | 46pc040p 46p 1078 | 46pc040q 46p 1079 | 46pc040r 46p 1080 | 46pc040s 46p 1081 | 46pc040t 46p 1082 | 46pc040u 46p 1083 | 46pc040v 46p 1084 | 46pc040w 46p 1085 | 46pc040x 46p 1086 | 46pc040y 46p 1087 | 46pc040z 46p 1088 | 46qc040m 46q 1089 | 46qc040n 46q 1090 | 46qc040o 46q 1091 | 46qc040p 46q 1092 | 46qc040q 46q 1093 | 46qc040r 46q 1094 | 46qc040s 46q 1095 | 46qc040t 46q 1096 | 46qc040u 46q 1097 | 46qc040v 46q 1098 | 46qc040w 46q 1099 | 46qc040x 46q 1100 | 46qc040y 46q 1101 | 46qc040z 46q 1102 | 46qc0410 46q 1103 | 46rc040j 46r 1104 | 46rc040k 46r 1105 | 46rc040l 46r 1106 | 46rc040m 46r 1107 | 46rc040n 46r 1108 | 46rc040o 46r 1109 | 46rc040p 46r 1110 | 46rc040q 46r 1111 | 46rc040r 46r 1112 | 46rc040s 46r 1113 | 46rc040t 46r 1114 | 46rc040u 46r 1115 | 46rc040v 46r 1116 | 46rc040w 46r 1117 | 46rc040x 46r 1118 | 46sc040k 46s 1119 | 46sc040l 46s 1120 | 46sc040m 46s 1121 | 46sc040n 46s 1122 | 46sc040o 46s 1123 | 46sc040p 46s 1124 | 46sc040q 46s 1125 | 46sc040r 46s 1126 | 46sc040s 46s 1127 | 46sc040t 46s 1128 | 46sc040u 46s 1129 | 46sc040v 46s 1130 | 46sc040w 46s 1131 | 46sc040x 46s 1132 | 46sc040y 46s 1133 | 46tc040k 46t 1134 | 46tc040l 46t 1135 | 46tc040m 46t 1136 | 46tc040n 46t 1137 | 46tc040o 46t 1138 | 46tc040p 46t 1139 | 46tc040q 46t 1140 | 46tc040r 46t 1141 | 46tc040s 46t 1142 | 46tc040t 46t 1143 | 46tc040u 46t 1144 | 46tc040v 46t 1145 | 46tc040w 46t 1146 | 46tc040x 46t 1147 | 46tc040y 46t 1148 | 46uc040k 46u 1149 | 46uc040l 46u 1150 | 46uc040m 46u 1151 | 46uc040n 46u 1152 | 46uc040o 46u 1153 | 46uc040p 46u 1154 | 46uc040q 46u 1155 | 46uc040r 46u 1156 | 46uc040s 46u 1157 | 46uc040t 46u 1158 | 46uc040u 46u 1159 | 46uc040v 46u 1160 | 46uc040w 46u 1161 | 46uc040x 46u 1162 | 46uc040y 46u 1163 | 46vc040l 46v 1164 | 46vc040m 46v 1165 | 46vc040n 46v 1166 | 46vc040o 46v 1167 | 46vc040p 46v 1168 | 46vc040q 46v 1169 | 46vc040r 46v 1170 | 46vc040s 46v 1171 | 46vc040t 46v 1172 | 46vc040u 46v 1173 | 46vc040v 46v 1174 | 46vc040w 46v 1175 | 46vc040x 46v 1176 | 46vc040y 46v 1177 | 46vc040z 46v 1178 | 46wc040k 46w 1179 | 46wc040l 46w 1180 | 46wc040m 46w 1181 | 46wc040n 46w 1182 | 46wc040o 46w 1183 | 46wc040p 46w 1184 | 46wc040q 46w 1185 | 46wc040r 46w 1186 | 46wc040s 46w 1187 | 46wc040t 46w 1188 | 46wc040u 46w 1189 | 46wc040v 46w 1190 | 46wc040w 46w 1191 | 46wc040x 46w 1192 | 46wc040y 46w 1193 | 46xc040l 46x 1194 | 46xc040m 46x 1195 | 46xc040n 46x 1196 | 46xc040o 46x 1197 | 46xc040p 46x 1198 | 46xc040q 46x 1199 | 46xc040r 46x 1200 | 46xc040s 46x 1201 | 46xc040t 46x 1202 | 46xc040u 46x 1203 | 46xc040v 46x 1204 | 46xc040w 46x 1205 | 46xc040x 46x 1206 | 46xc040y 46x 1207 | 46xc040z 46x 1208 | 46yc040k 46y 1209 | 46yc040l 46y 1210 | 46yc040m 46y 1211 | 46yc040n 46y 1212 | 46yc040o 46y 1213 | 46yc040p 46y 1214 | 46yc040q 46y 1215 | 46yc040r 46y 1216 | 46yc040s 46y 1217 | 46yc040t 46y 1218 | 46yc040u 46y 1219 | 46yc040v 46y 1220 | 46yc040w 46y 1221 | 46yc040x 46y 1222 | 46yc040y 46y 1223 | 46zc040i 46z 1224 | 46zc040j 46z 1225 | 46zc040k 46z 1226 | 46zc040l 46z 1227 | 46zc040m 46z 1228 | 46zc040n 46z 1229 | 46zc040o 46z 1230 | 46zc040p 46z 1231 | 46zc040q 46z 1232 | 46zc040r 46z 1233 | 46zc040s 46z 1234 | 46zc040t 46z 1235 | 46zc040u 46z 1236 | 46zc040v 46z 1237 | 46zc040w 46z 1238 | 470c040j 470 1239 | 470c040k 470 1240 | 470c040l 470 1241 | 470c040m 470 1242 | 470c040n 470 1243 | 470c040o 470 1244 | 470c040p 470 1245 | 470c040q 470 1246 | 470c040r 470 1247 | 470c040s 470 1248 | 470c040t 470 1249 | 470c040u 470 1250 | 470c040v 470 1251 | 470c040w 470 1252 | 470c040x 470 1253 | 471c040l 471 1254 | 471c040m 471 1255 | 471c040n 471 1256 | 471c040o 471 1257 | 471c040p 471 1258 | 471c040q 471 1259 | 471c040r 471 1260 | 471c040s 471 1261 | 471c040t 471 1262 | 471c040u 471 1263 | 471c040v 471 1264 | 471c040w 471 1265 | 471c040x 471 1266 | 471c040y 471 1267 | 471c040z 471 1268 | 472c040j 472 1269 | 472c040k 472 1270 | 472c040l 472 1271 | 472c040m 472 1272 | 472c040n 472 1273 | 472c040o 472 1274 | 472c040p 472 1275 | 472c040q 472 1276 | 472c040r 472 1277 | 472c040s 472 1278 | 472c040t 472 1279 | 472c040u 472 1280 | 472c040v 472 1281 | 472c040w 472 1282 | 472c040x 472 1283 | 473c040l 473 1284 | 473c040m 473 1285 | 473c040n 473 1286 | 473c040o 473 1287 | 473c040p 473 1288 | 473c040q 473 1289 | 473c040r 473 1290 | 473c040s 473 1291 | 473c040t 473 1292 | 473c040u 473 1293 | 473c040v 473 1294 | 473c040w 473 1295 | 473c040x 473 1296 | 473c040y 473 1297 | 473c040z 473 1298 | 474c040l 474 1299 | 474c040m 474 1300 | 474c040n 474 1301 | 474c040o 474 1302 | 474c040p 474 1303 | 474c040q 474 1304 | 474c040r 474 1305 | 474c040s 474 1306 | 474c040t 474 1307 | 474c040u 474 1308 | 474c040v 474 1309 | 474c040w 474 1310 | 474c040x 474 1311 | 474c040y 474 1312 | 474c040z 474 1313 | 475c040h 475 1314 | 475c040i 475 1315 | 475c040j 475 1316 | 475c040k 475 1317 | 475c040l 475 1318 | 475c040m 475 1319 | 475c040n 475 1320 | 475c040o 475 1321 | 475c040p 475 1322 | 475c040q 475 1323 | 475c040r 475 1324 | 475c040s 475 1325 | 475c040t 475 1326 | 475c040u 475 1327 | 475c040v 475 1328 | 476c040n 476 1329 | 476c040o 476 1330 | 476c040p 476 1331 | 476c040q 476 1332 | 476c040r 476 1333 | 476c040s 476 1334 | 476c040t 476 1335 | 476c040u 476 1336 | 476c040v 476 1337 | 476c040w 476 1338 | 476c040x 476 1339 | 476c040y 476 1340 | 476c040z 476 1341 | 476c0410 476 1342 | 476c0411 476 1343 | 477c040k 477 1344 | 477c040l 477 1345 | 477c040m 477 1346 | 477c040n 477 1347 | 477c040o 477 1348 | 477c040p 477 1349 | 477c040q 477 1350 | 477c040r 477 1351 | 477c040s 477 1352 | 477c040t 477 1353 | 477c040u 477 1354 | 477c040v 477 1355 | 477c040w 477 1356 | 477c040x 477 1357 | 477c040y 477 1358 | 478c040j 478 1359 | 478c040k 478 1360 | 478c040l 478 1361 | 478c040m 478 1362 | 478c040n 478 1363 | 478c040o 478 1364 | 478c040p 478 1365 | 478c040q 478 1366 | 478c040r 478 1367 | 478c040s 478 1368 | 478c040t 478 1369 | 478c040u 478 1370 | 478c040v 478 1371 | 478c040w 478 1372 | 478c040x 478 1373 | 479c040i 479 1374 | 479c040j 479 1375 | 479c040k 479 1376 | 479c040l 479 1377 | 479c040m 479 1378 | 479c040n 479 1379 | 479c040o 479 1380 | 479c040p 479 1381 | 479c040q 479 1382 | 479c040r 479 1383 | 479c040s 479 1384 | 479c040t 479 1385 | 479c040u 479 1386 | 479c040v 479 1387 | 479c040w 479 1388 | 47ac040j 47a 1389 | 47ac040k 47a 1390 | 47ac040l 47a 1391 | 47ac040m 47a 1392 | 47ac040n 47a 1393 | 47ac040o 47a 1394 | 47ac040p 47a 1395 | 47ac040q 47a 1396 | 47ac040r 47a 1397 | 47ac040s 47a 1398 | 47ac040t 47a 1399 | 47ac040u 47a 1400 | 47ac040v 47a 1401 | 47ac040w 47a 1402 | 47ac040x 47a 1403 | 47bc040l 47b 1404 | 47bc040m 47b 1405 | 47bc040n 47b 1406 | 47bc040o 47b 1407 | 47bc040p 47b 1408 | 47bc040q 47b 1409 | 47bc040r 47b 1410 | 47bc040s 47b 1411 | 47bc040t 47b 1412 | 47bc040u 47b 1413 | 47bc040v 47b 1414 | 47bc040w 47b 1415 | 47bc040x 47b 1416 | 47bc040y 47b 1417 | 47bc040z 47b 1418 | 47cc040h 47c 1419 | 47cc040i 47c 1420 | 47cc040j 47c 1421 | 47cc040k 47c 1422 | 47cc040l 47c 1423 | 47cc040m 47c 1424 | 47cc040n 47c 1425 | 47cc040o 47c 1426 | 47cc040p 47c 1427 | 47cc040q 47c 1428 | 47cc040r 47c 1429 | 47cc040s 47c 1430 | 47cc040t 47c 1431 | 47cc040u 47c 1432 | 47cc040v 47c 1433 | 47dc040l 47d 1434 | 47dc040m 47d 1435 | 47dc040n 47d 1436 | 47dc040o 47d 1437 | 47dc040p 47d 1438 | 47dc040q 47d 1439 | 47dc040r 47d 1440 | 47dc040s 47d 1441 | 47dc040t 47d 1442 | 47dc040u 47d 1443 | 47dc040v 47d 1444 | 47dc040w 47d 1445 | 47dc040x 47d 1446 | 47dc040y 47d 1447 | 47dc040z 47d 1448 | 47ec040m 47e 1449 | 47ec040n 47e 1450 | 47ec040o 47e 1451 | 47ec040p 47e 1452 | 47ec040q 47e 1453 | 47ec040r 47e 1454 | 47ec040s 47e 1455 | 47ec040t 47e 1456 | 47ec040u 47e 1457 | 47ec040v 47e 1458 | 47ec040w 47e 1459 | 47ec040x 47e 1460 | 47ec040y 47e 1461 | 47ec040z 47e 1462 | 47ec0410 47e 1463 | 47fc040l 47f 1464 | 47fc040m 47f 1465 | 47fc040n 47f 1466 | 47fc040o 47f 1467 | 47fc040p 47f 1468 | 47fc040q 47f 1469 | 47fc040r 47f 1470 | 47fc040s 47f 1471 | 47fc040t 47f 1472 | 47fc040u 47f 1473 | 47fc040v 47f 1474 | 47fc040w 47f 1475 | 47fc040x 47f 1476 | 47fc040y 47f 1477 | 47fc040z 47f 1478 | 47gc040l 47g 1479 | 47gc040m 47g 1480 | 47gc040n 47g 1481 | 47gc040o 47g 1482 | 47gc040p 47g 1483 | 47gc040q 47g 1484 | 47gc040r 47g 1485 | 47gc040s 47g 1486 | 47gc040t 47g 1487 | 47gc040u 47g 1488 | 47gc040v 47g 1489 | 47gc040w 47g 1490 | 47gc040x 47g 1491 | 47gc040y 47g 1492 | 47gc040z 47g 1493 | 47hc040k 47h 1494 | 47hc040l 47h 1495 | 47hc040m 47h 1496 | 47hc040n 47h 1497 | 47hc040o 47h 1498 | 47hc040p 47h 1499 | 47hc040q 47h 1500 | 47hc040r 47h 1501 | 47hc040s 47h 1502 | 47hc040t 47h 1503 | 47hc040u 47h 1504 | 47hc040v 47h 1505 | 47hc040w 47h 1506 | 47hc040x 47h 1507 | 47hc040y 47h 1508 | 47ic040l 47i 1509 | 47ic040m 47i 1510 | 47ic040n 47i 1511 | 47ic040o 47i 1512 | 47ic040p 47i 1513 | 47ic040q 47i 1514 | 47ic040r 47i 1515 | 47ic040s 47i 1516 | 47ic040t 47i 1517 | 47ic040u 47i 1518 | 47ic040v 47i 1519 | 47ic040w 47i 1520 | 47ic040x 47i 1521 | 47ic040y 47i 1522 | 47ic040z 47i 1523 | 47jc040k 47j 1524 | 47jc040l 47j 1525 | 47jc040m 47j 1526 | 47jc040n 47j 1527 | 47jc040o 47j 1528 | 47jc040p 47j 1529 | 47jc040q 47j 1530 | 47jc040r 47j 1531 | 47jc040s 47j 1532 | 47jc040t 47j 1533 | 47jc040u 47j 1534 | 47jc040v 47j 1535 | 47jc040w 47j 1536 | 47jc040x 47j 1537 | 47jc040y 47j 1538 | 47kc040i 47k 1539 | 47kc040j 47k 1540 | 47kc040k 47k 1541 | 47kc040l 47k 1542 | 47kc040m 47k 1543 | 47kc040n 47k 1544 | 47kc040o 47k 1545 | 47kc040p 47k 1546 | 47kc040q 47k 1547 | 47kc040r 47k 1548 | 47kc040s 47k 1549 | 47kc040t 47k 1550 | 47kc040u 47k 1551 | 47kc040v 47k 1552 | 47kc040w 47k 1553 | 47lc040j 47l 1554 | 47lc040k 47l 1555 | 47lc040l 47l 1556 | 47lc040m 47l 1557 | 47lc040n 47l 1558 | 47lc040o 47l 1559 | 47lc040p 47l 1560 | 47lc040q 47l 1561 | 47lc040r 47l 1562 | 47lc040s 47l 1563 | 47lc040t 47l 1564 | 47lc040u 47l 1565 | 47lc040v 47l 1566 | 47lc040w 47l 1567 | 47lc040x 47l 1568 | 47mc040k 47m 1569 | 47mc040l 47m 1570 | 47mc040m 47m 1571 | 47mc040n 47m 1572 | 47mc040o 47m 1573 | 47mc040p 47m 1574 | 47mc040q 47m 1575 | 47mc040r 47m 1576 | 47mc040s 47m 1577 | 47mc040t 47m 1578 | 47mc040u 47m 1579 | 47mc040v 47m 1580 | 47mc040w 47m 1581 | 47mc040x 47m 1582 | 47mc040y 47m 1583 | 47nc040l 47n 1584 | 47nc040m 47n 1585 | 47nc040n 47n 1586 | 47nc040o 47n 1587 | 47nc040p 47n 1588 | 47nc040q 47n 1589 | 47nc040r 47n 1590 | 47nc040s 47n 1591 | 47nc040t 47n 1592 | 47nc040u 47n 1593 | 47nc040v 47n 1594 | 47nc040w 47n 1595 | 47nc040x 47n 1596 | 47nc040y 47n 1597 | 47nc040z 47n 1598 | 47oc040m 47o 1599 | 47oc040n 47o 1600 | 47oc040o 47o 1601 | 47oc040p 47o 1602 | 47oc040q 47o 1603 | 47oc040r 47o 1604 | 47oc040s 47o 1605 | 47oc040t 47o 1606 | 47oc040u 47o 1607 | 47oc040v 47o 1608 | 47oc040w 47o 1609 | 47oc040x 47o 1610 | 47oc040y 47o 1611 | 47oc040z 47o 1612 | 47oc0410 47o 1613 | 47pc040j 47p 1614 | 47pc040k 47p 1615 | 47pc040l 47p 1616 | 47pc040m 47p 1617 | 47pc040n 47p 1618 | 47pc040o 47p 1619 | 47pc040p 47p 1620 | 47pc040q 47p 1621 | 47pc040r 47p 1622 | 47pc040s 47p 1623 | 47pc040t 47p 1624 | 47pc040u 47p 1625 | 47pc040v 47p 1626 | 47pc040w 47p 1627 | 47pc040x 47p 1628 | 47qc040k 47q 1629 | 47qc040l 47q 1630 | 47qc040m 47q 1631 | 47qc040n 47q 1632 | 47qc040o 47q 1633 | 47qc040p 47q 1634 | 47qc040q 47q 1635 | 47qc040r 47q 1636 | 47qc040s 47q 1637 | 47qc040t 47q 1638 | 47qc040u 47q 1639 | 47qc040v 47q 1640 | 47qc040w 47q 1641 | 47qc040x 47q 1642 | 47qc040y 47q 1643 | 47rc040k 47r 1644 | 47rc040l 47r 1645 | 47rc040m 47r 1646 | 47rc040n 47r 1647 | 47rc040o 47r 1648 | 47rc040p 47r 1649 | 47rc040q 47r 1650 | 47rc040r 47r 1651 | 47rc040s 47r 1652 | 47rc040t 47r 1653 | 47rc040u 47r 1654 | 47rc040v 47r 1655 | 47rc040w 47r 1656 | 47rc040x 47r 1657 | 47rc040y 47r 1658 | 47sc040k 47s 1659 | 47sc040l 47s 1660 | 47sc040m 47s 1661 | 47sc040n 47s 1662 | 47sc040o 47s 1663 | 47sc040p 47s 1664 | 47sc040q 47s 1665 | 47sc040r 47s 1666 | 47sc040s 47s 1667 | 47sc040t 47s 1668 | 47sc040u 47s 1669 | 47sc040v 47s 1670 | 47sc040w 47s 1671 | 47sc040x 47s 1672 | 47sc040y 47s 1673 | 47tc040l 47t 1674 | 47tc040m 47t 1675 | 47tc040n 47t 1676 | 47tc040o 47t 1677 | 47tc040p 47t 1678 | 47tc040q 47t 1679 | 47tc040r 47t 1680 | 47tc040s 47t 1681 | 47tc040t 47t 1682 | 47tc040u 47t 1683 | 47tc040v 47t 1684 | 47tc040w 47t 1685 | 47tc040x 47t 1686 | 47tc040y 47t 1687 | 47tc040z 47t 1688 | 47uc040k 47u 1689 | 47uc040l 47u 1690 | 47uc040m 47u 1691 | 47uc040n 47u 1692 | 47uc040o 47u 1693 | 47uc040p 47u 1694 | 47uc040q 47u 1695 | 47uc040r 47u 1696 | 47uc040s 47u 1697 | 47uc040t 47u 1698 | 47uc040u 47u 1699 | 47uc040v 47u 1700 | 47uc040w 47u 1701 | 47uc040x 47u 1702 | 47uc040y 47u 1703 | 47vc040l 47v 1704 | 47vc040m 47v 1705 | 47vc040n 47v 1706 | 47vc040o 47v 1707 | 47vc040p 47v 1708 | 47vc040q 47v 1709 | 47vc040r 47v 1710 | 47vc040s 47v 1711 | 47vc040t 47v 1712 | 47vc040u 47v 1713 | 47vc040v 47v 1714 | 47vc040w 47v 1715 | 47vc040x 47v 1716 | 47vc040y 47v 1717 | 47vc040z 47v 1718 | 47wc040k 47w 1719 | 47wc040l 47w 1720 | 47wc040m 47w 1721 | 47wc040n 47w 1722 | 47wc040o 47w 1723 | 47wc040p 47w 1724 | 47wc040q 47w 1725 | 47wc040r 47w 1726 | 47wc040s 47w 1727 | 47wc040t 47w 1728 | 47wc040u 47w 1729 | 47wc040v 47w 1730 | 47wc040w 47w 1731 | 47wc040x 47w 1732 | 47wc040y 47w 1733 | 47xc040i 47x 1734 | 47xc040j 47x 1735 | 47xc040k 47x 1736 | 47xc040l 47x 1737 | 47xc040m 47x 1738 | 47xc040n 47x 1739 | 47xc040o 47x 1740 | 47xc040p 47x 1741 | 47xc040q 47x 1742 | 47xc040r 47x 1743 | 47xc040s 47x 1744 | 47xc040t 47x 1745 | 47xc040u 47x 1746 | 47xc040v 47x 1747 | 47xc040w 47x 1748 | 47yc040k 47y 1749 | 47yc040l 47y 1750 | 47yc040m 47y 1751 | 47yc040n 47y 1752 | 47yc040o 47y 1753 | 47yc040p 47y 1754 | 47yc040q 47y 1755 | 47yc040r 47y 1756 | 47yc040s 47y 1757 | 47yc040t 47y 1758 | 47yc040u 47y 1759 | 47yc040v 47y 1760 | 47yc040w 47y 1761 | 47yc040x 47y 1762 | 47yc040y 47y 1763 | 47zc040l 47z 1764 | 47zc040m 47z 1765 | 47zc040n 47z 1766 | 47zc040o 47z 1767 | 47zc040p 47z 1768 | 47zc040q 47z 1769 | 47zc040r 47z 1770 | 47zc040s 47z 1771 | 47zc040t 47z 1772 | 47zc040u 47z 1773 | 47zc040v 47z 1774 | 47zc040w 47z 1775 | 47zc040x 47z 1776 | 47zc040y 47z 1777 | 47zc040z 47z 1778 | 480c040j 480 1779 | 480c040k 480 1780 | 480c040l 480 1781 | 480c040m 480 1782 | 480c040n 480 1783 | 480c040o 480 1784 | 480c040p 480 1785 | 480c040q 480 1786 | 480c040r 480 1787 | 480c040s 480 1788 | 480c040t 480 1789 | 480c040u 480 1790 | 480c040v 480 1791 | 480c040w 480 1792 | 480c040x 480 1793 | 481c040l 481 1794 | 481c040m 481 1795 | 481c040n 481 1796 | 481c040o 481 1797 | 481c040p 481 1798 | 481c040q 481 1799 | 481c040r 481 1800 | 481c040s 481 1801 | 481c040t 481 1802 | 481c040u 481 1803 | 481c040v 481 1804 | 481c040w 481 1805 | 481c040x 481 1806 | 481c040y 481 1807 | 481c040z 481 1808 | 482c040l 482 1809 | 482c040m 482 1810 | 482c040n 482 1811 | 482c040o 482 1812 | 482c040p 482 1813 | 482c040q 482 1814 | 482c040r 482 1815 | 482c040s 482 1816 | 482c040t 482 1817 | 482c040u 482 1818 | 482c040v 482 1819 | 482c040w 482 1820 | 482c040x 482 1821 | 482c040y 482 1822 | 482c040z 482 1823 | 483c040h 483 1824 | 483c040i 483 1825 | 483c040j 483 1826 | 483c040k 483 1827 | 483c040l 483 1828 | 483c040m 483 1829 | 483c040n 483 1830 | 483c040o 483 1831 | 483c040p 483 1832 | 483c040q 483 1833 | 483c040r 483 1834 | 483c040s 483 1835 | 483c040t 483 1836 | 483c040u 483 1837 | 483c040v 483 1838 | 484c040n 484 1839 | 484c040o 484 1840 | 484c040p 484 1841 | 484c040q 484 1842 | 484c040r 484 1843 | 484c040s 484 1844 | 484c040t 484 1845 | 484c040u 484 1846 | 484c040v 484 1847 | 484c040w 484 1848 | 484c040x 484 1849 | 484c040y 484 1850 | 484c040z 484 1851 | 484c0410 484 1852 | 484c0411 484 1853 | 485c040k 485 1854 | 485c040l 485 1855 | 485c040m 485 1856 | 485c040n 485 1857 | 485c040o 485 1858 | 485c040p 485 1859 | 485c040q 485 1860 | 485c040r 485 1861 | 485c040s 485 1862 | 485c040t 485 1863 | 485c040u 485 1864 | 485c040v 485 1865 | 485c040w 485 1866 | 485c040x 485 1867 | 485c040y 485 1868 | 486c040j 486 1869 | 486c040k 486 1870 | 486c040l 486 1871 | 486c040m 486 1872 | 486c040n 486 1873 | 486c040o 486 1874 | 486c040p 486 1875 | 486c040q 486 1876 | 486c040r 486 1877 | 486c040s 486 1878 | 486c040t 486 1879 | 486c040u 486 1880 | 486c040v 486 1881 | 486c040w 486 1882 | 486c040x 486 1883 | 487c040i 487 1884 | 487c040j 487 1885 | 487c040k 487 1886 | 487c040l 487 1887 | 487c040m 487 1888 | 487c040n 487 1889 | 487c040o 487 1890 | 487c040p 487 1891 | 487c040q 487 1892 | 487c040r 487 1893 | 487c040s 487 1894 | 487c040t 487 1895 | 487c040u 487 1896 | 487c040v 487 1897 | 487c040w 487 1898 | 488c040k 488 1899 | 488c040l 488 1900 | 488c040m 488 1901 | 488c040n 488 1902 | 488c040o 488 1903 | 488c040p 488 1904 | 488c040q 488 1905 | 488c040r 488 1906 | 488c040s 488 1907 | 488c040t 488 1908 | 488c040u 488 1909 | 488c040v 488 1910 | 488c040w 488 1911 | 488c040x 488 1912 | 488c040y 488 1913 | 489c040j 489 1914 | 489c040k 489 1915 | 489c040l 489 1916 | 489c040m 489 1917 | 489c040n 489 1918 | 489c040o 489 1919 | 489c040p 489 1920 | 489c040q 489 1921 | 489c040r 489 1922 | 489c040s 489 1923 | 489c040t 489 1924 | 489c040u 489 1925 | 489c040v 489 1926 | 489c040w 489 1927 | 489c040x 489 1928 | 48ac040h 48a 1929 | 48ac040i 48a 1930 | 48ac040j 48a 1931 | 48ac040k 48a 1932 | 48ac040l 48a 1933 | 48ac040m 48a 1934 | 48ac040n 48a 1935 | 48ac040o 48a 1936 | 48ac040p 48a 1937 | 48ac040q 48a 1938 | 48ac040r 48a 1939 | 48ac040s 48a 1940 | 48ac040t 48a 1941 | 48ac040u 48a 1942 | 48ac040v 48a 1943 | 48bc040l 48b 1944 | 48bc040m 48b 1945 | 48bc040n 48b 1946 | 48bc040o 48b 1947 | 48bc040p 48b 1948 | 48bc040q 48b 1949 | 48bc040r 48b 1950 | 48bc040s 48b 1951 | 48bc040t 48b 1952 | 48bc040u 48b 1953 | 48bc040v 48b 1954 | 48bc040w 48b 1955 | 48bc040x 48b 1956 | 48bc040y 48b 1957 | 48bc040z 48b 1958 | 48cc040m 48c 1959 | 48cc040n 48c 1960 | 48cc040o 48c 1961 | 48cc040p 48c 1962 | 48cc040q 48c 1963 | 48cc040r 48c 1964 | 48cc040s 48c 1965 | 48cc040t 48c 1966 | 48cc040u 48c 1967 | 48cc040v 48c 1968 | 48cc040w 48c 1969 | 48cc040x 48c 1970 | 48cc040y 48c 1971 | 48cc040z 48c 1972 | 48cc0410 48c 1973 | 48dc040l 48d 1974 | 48dc040m 48d 1975 | 48dc040n 48d 1976 | 48dc040o 48d 1977 | 48dc040p 48d 1978 | 48dc040q 48d 1979 | 48dc040r 48d 1980 | 48dc040s 48d 1981 | 48dc040t 48d 1982 | 48dc040u 48d 1983 | 48dc040v 48d 1984 | 48dc040w 48d 1985 | 48dc040x 48d 1986 | 48dc040y 48d 1987 | 48dc040z 48d 1988 | 48ec040l 48e 1989 | 48ec040m 48e 1990 | 48ec040n 48e 1991 | 48ec040o 48e 1992 | 48ec040p 48e 1993 | 48ec040q 48e 1994 | 48ec040r 48e 1995 | 48ec040s 48e 1996 | 48ec040t 48e 1997 | 48ec040u 48e 1998 | 48ec040v 48e 1999 | 48ec040w 48e 2000 | 48ec040x 48e 2001 | 48ec040y 48e 2002 | 48ec040z 48e 2003 | 48fc040k 48f 2004 | 48fc040l 48f 2005 | 48fc040m 48f 2006 | 48fc040n 48f 2007 | 48fc040o 48f 2008 | 48fc040p 48f 2009 | 48fc040q 48f 2010 | 48fc040r 48f 2011 | 48fc040s 48f 2012 | 48fc040t 48f 2013 | 48fc040u 48f 2014 | 48fc040v 48f 2015 | 48fc040w 48f 2016 | 48fc040x 48f 2017 | 48fc040y 48f 2018 | 48gc040l 48g 2019 | 48gc040m 48g 2020 | 48gc040n 48g 2021 | 48gc040o 48g 2022 | 48gc040p 48g 2023 | 48gc040q 48g 2024 | 48gc040r 48g 2025 | 48gc040s 48g 2026 | 48gc040t 48g 2027 | 48gc040u 48g 2028 | 48gc040v 48g 2029 | 48gc040w 48g 2030 | 48gc040x 48g 2031 | 48gc040y 48g 2032 | 48gc040z 48g 2033 | 48hc040k 48h 2034 | 48hc040l 48h 2035 | 48hc040m 48h 2036 | 48hc040n 48h 2037 | 48hc040o 48h 2038 | 48hc040p 48h 2039 | 48hc040q 48h 2040 | 48hc040r 48h 2041 | 48hc040s 48h 2042 | 48hc040t 48h 2043 | 48hc040u 48h 2044 | 48hc040v 48h 2045 | 48hc040w 48h 2046 | 48hc040x 48h 2047 | 48hc040y 48h 2048 | 48ic040i 48i 2049 | 48ic040j 48i 2050 | 48ic040k 48i 2051 | 48ic040l 48i 2052 | 48ic040m 48i 2053 | 48ic040n 48i 2054 | 48ic040o 48i 2055 | 48ic040p 48i 2056 | 48ic040q 48i 2057 | 48ic040r 48i 2058 | 48ic040s 48i 2059 | 48ic040t 48i 2060 | 48ic040u 48i 2061 | 48ic040v 48i 2062 | 48ic040w 48i 2063 | 48jc040j 48j 2064 | 48jc040k 48j 2065 | 48jc040l 48j 2066 | 48jc040m 48j 2067 | 48jc040n 48j 2068 | 48jc040o 48j 2069 | 48jc040p 48j 2070 | 48jc040q 48j 2071 | 48jc040r 48j 2072 | 48jc040s 48j 2073 | 48jc040t 48j 2074 | 48jc040u 48j 2075 | 48jc040v 48j 2076 | 48jc040w 48j 2077 | 48jc040x 48j 2078 | 48kc040k 48k 2079 | 48kc040l 48k 2080 | 48kc040m 48k 2081 | 48kc040n 48k 2082 | 48kc040o 48k 2083 | 48kc040p 48k 2084 | 48kc040q 48k 2085 | 48kc040r 48k 2086 | 48kc040s 48k 2087 | 48kc040t 48k 2088 | 48kc040u 48k 2089 | 48kc040v 48k 2090 | 48kc040w 48k 2091 | 48kc040x 48k 2092 | 48kc040y 48k 2093 | 48lc040l 48l 2094 | 48lc040m 48l 2095 | 48lc040n 48l 2096 | 48lc040o 48l 2097 | 48lc040p 48l 2098 | 48lc040q 48l 2099 | 48lc040r 48l 2100 | 48lc040s 48l 2101 | 48lc040t 48l 2102 | 48lc040u 48l 2103 | 48lc040v 48l 2104 | 48lc040w 48l 2105 | 48lc040x 48l 2106 | 48lc040y 48l 2107 | 48lc040z 48l 2108 | 48mc040m 48m 2109 | 48mc040n 48m 2110 | 48mc040o 48m 2111 | 48mc040p 48m 2112 | 48mc040q 48m 2113 | 48mc040r 48m 2114 | 48mc040s 48m 2115 | 48mc040t 48m 2116 | 48mc040u 48m 2117 | 48mc040v 48m 2118 | 48mc040w 48m 2119 | 48mc040x 48m 2120 | 48mc040y 48m 2121 | 48mc040z 48m 2122 | 48mc0410 48m 2123 | 48nc040j 48n 2124 | 48nc040k 48n 2125 | 48nc040l 48n 2126 | 48nc040m 48n 2127 | 48nc040n 48n 2128 | 48nc040o 48n 2129 | 48nc040p 48n 2130 | 48nc040q 48n 2131 | 48nc040r 48n 2132 | 48nc040s 48n 2133 | 48nc040t 48n 2134 | 48nc040u 48n 2135 | 48nc040v 48n 2136 | 48nc040w 48n 2137 | 48nc040x 48n 2138 | 48oc040k 48o 2139 | 48oc040l 48o 2140 | 48oc040m 48o 2141 | 48oc040n 48o 2142 | 48oc040o 48o 2143 | 48oc040p 48o 2144 | 48oc040q 48o 2145 | 48oc040r 48o 2146 | 48oc040s 48o 2147 | 48oc040t 48o 2148 | 48oc040u 48o 2149 | 48oc040v 48o 2150 | 48oc040w 48o 2151 | 48oc040x 48o 2152 | 48oc040y 48o 2153 | 48pc040k 48p 2154 | 48pc040l 48p 2155 | 48pc040m 48p 2156 | 48pc040n 48p 2157 | 48pc040o 48p 2158 | 48pc040p 48p 2159 | 48pc040q 48p 2160 | 48pc040r 48p 2161 | 48pc040s 48p 2162 | 48pc040t 48p 2163 | 48pc040u 48p 2164 | 48pc040v 48p 2165 | 48pc040w 48p 2166 | 48pc040x 48p 2167 | 48pc040y 48p 2168 | 48qc040k 48q 2169 | 48qc040l 48q 2170 | 48qc040m 48q 2171 | 48qc040n 48q 2172 | 48qc040o 48q 2173 | 48qc040p 48q 2174 | 48qc040q 48q 2175 | 48qc040r 48q 2176 | 48qc040s 48q 2177 | 48qc040t 48q 2178 | 48qc040u 48q 2179 | 48qc040v 48q 2180 | 48qc040w 48q 2181 | 48qc040x 48q 2182 | 48qc040y 48q 2183 | 48rc040l 48r 2184 | 48rc040m 48r 2185 | 48rc040n 48r 2186 | 48rc040o 48r 2187 | 48rc040p 48r 2188 | 48rc040q 48r 2189 | 48rc040r 48r 2190 | 48rc040s 48r 2191 | 48rc040t 48r 2192 | 48rc040u 48r 2193 | 48rc040v 48r 2194 | 48rc040w 48r 2195 | 48rc040x 48r 2196 | 48rc040y 48r 2197 | 48rc040z 48r 2198 | 48sc040k 48s 2199 | 48sc040l 48s 2200 | 48sc040m 48s 2201 | 48sc040n 48s 2202 | 48sc040o 48s 2203 | 48sc040p 48s 2204 | 48sc040q 48s 2205 | 48sc040r 48s 2206 | 48sc040s 48s 2207 | 48sc040t 48s 2208 | 48sc040u 48s 2209 | 48sc040v 48s 2210 | 48sc040w 48s 2211 | 48sc040x 48s 2212 | 48sc040y 48s 2213 | 48tc040l 48t 2214 | 48tc040m 48t 2215 | 48tc040n 48t 2216 | 48tc040o 48t 2217 | 48tc040p 48t 2218 | 48tc040q 48t 2219 | 48tc040r 48t 2220 | 48tc040s 48t 2221 | 48tc040t 48t 2222 | 48tc040u 48t 2223 | 48tc040v 48t 2224 | 48tc040w 48t 2225 | 48tc040x 48t 2226 | 48tc040y 48t 2227 | 48tc040z 48t 2228 | 48uc040k 48u 2229 | 48uc040l 48u 2230 | 48uc040m 48u 2231 | 48uc040n 48u 2232 | 48uc040o 48u 2233 | 48uc040p 48u 2234 | 48uc040q 48u 2235 | 48uc040r 48u 2236 | 48uc040s 48u 2237 | 48uc040t 48u 2238 | 48uc040u 48u 2239 | 48uc040v 48u 2240 | 48uc040w 48u 2241 | 48uc040x 48u 2242 | 48uc040y 48u 2243 | 48vc040i 48v 2244 | 48vc040j 48v 2245 | 48vc040k 48v 2246 | 48vc040l 48v 2247 | 48vc040m 48v 2248 | 48vc040n 48v 2249 | 48vc040o 48v 2250 | 48vc040p 48v 2251 | 48vc040q 48v 2252 | 48vc040r 48v 2253 | 48vc040s 48v 2254 | 48vc040t 48v 2255 | 48vc040u 48v 2256 | 48vc040v 48v 2257 | 48vc040w 48v 2258 | 48wc040k 48w 2259 | 48wc040l 48w 2260 | 48wc040m 48w 2261 | 48wc040n 48w 2262 | 48wc040o 48w 2263 | 48wc040p 48w 2264 | 48wc040q 48w 2265 | 48wc040r 48w 2266 | 48wc040s 48w 2267 | 48wc040t 48w 2268 | 48wc040u 48w 2269 | 48wc040v 48w 2270 | 48wc040w 48w 2271 | 48wc040x 48w 2272 | 48wc040y 48w 2273 | 48xc040l 48x 2274 | 48xc040m 48x 2275 | 48xc040n 48x 2276 | 48xc040o 48x 2277 | 48xc040p 48x 2278 | 48xc040q 48x 2279 | 48xc040r 48x 2280 | 48xc040s 48x 2281 | 48xc040t 48x 2282 | 48xc040u 48x 2283 | 48xc040v 48x 2284 | 48xc040w 48x 2285 | 48xc040x 48x 2286 | 48xc040y 48x 2287 | 48xc040z 48x 2288 | 48yc040l 48y 2289 | 48yc040m 48y 2290 | 48yc040n 48y 2291 | 48yc040o 48y 2292 | 48yc040p 48y 2293 | 48yc040q 48y 2294 | 48yc040r 48y 2295 | 48yc040s 48y 2296 | 48yc040t 48y 2297 | 48yc040u 48y 2298 | 48yc040v 48y 2299 | 48yc040w 48y 2300 | 48yc040x 48y 2301 | 48yc040y 48y 2302 | 48yc040z 48y 2303 | 48zc040k 48z 2304 | 48zc040l 48z 2305 | 48zc040m 48z 2306 | 48zc040n 48z 2307 | 48zc040o 48z 2308 | 48zc040p 48z 2309 | 48zc040q 48z 2310 | 48zc040r 48z 2311 | 48zc040s 48z 2312 | 48zc040t 48z 2313 | 48zc040u 48z 2314 | 48zc040v 48z 2315 | 48zc040w 48z 2316 | 48zc040x 48z 2317 | 48zc040y 48z 2318 | 490c040l 490 2319 | 490c040m 490 2320 | 490c040n 490 2321 | 490c040o 490 2322 | 490c040p 490 2323 | 490c040q 490 2324 | 490c040r 490 2325 | 490c040s 490 2326 | 490c040t 490 2327 | 490c040u 490 2328 | 490c040v 490 2329 | 490c040w 490 2330 | 490c040x 490 2331 | 490c040y 490 2332 | 490c040z 490 2333 | 491c040h 491 2334 | 491c040i 491 2335 | 491c040j 491 2336 | 491c040k 491 2337 | 491c040l 491 2338 | 491c040m 491 2339 | 491c040n 491 2340 | 491c040o 491 2341 | 491c040p 491 2342 | 491c040q 491 2343 | 491c040r 491 2344 | 491c040s 491 2345 | 491c040t 491 2346 | 491c040u 491 2347 | 491c040v 491 2348 | 492c040n 492 2349 | 492c040o 492 2350 | 492c040p 492 2351 | 492c040q 492 2352 | 492c040r 492 2353 | 492c040s 492 2354 | 492c040t 492 2355 | 492c040u 492 2356 | 492c040v 492 2357 | 492c040w 492 2358 | 492c040x 492 2359 | 492c040y 492 2360 | 492c040z 492 2361 | 492c0410 492 2362 | 492c0411 492 2363 | 493c040k 493 2364 | 493c040l 493 2365 | 493c040m 493 2366 | 493c040n 493 2367 | 493c040o 493 2368 | 493c040p 493 2369 | 493c040q 493 2370 | 493c040r 493 2371 | 493c040s 493 2372 | 493c040t 493 2373 | 493c040u 493 2374 | 493c040v 493 2375 | 493c040w 493 2376 | 493c040x 493 2377 | 493c040y 493 2378 | 494c040j 494 2379 | 494c040k 494 2380 | 494c040l 494 2381 | 494c040m 494 2382 | 494c040n 494 2383 | 494c040o 494 2384 | 494c040p 494 2385 | 494c040q 494 2386 | 494c040r 494 2387 | 494c040s 494 2388 | 494c040t 494 2389 | 494c040u 494 2390 | 494c040v 494 2391 | 494c040w 494 2392 | 494c040x 494 2393 | 495c040i 495 2394 | 495c040j 495 2395 | 495c040k 495 2396 | 495c040l 495 2397 | 495c040m 495 2398 | 495c040n 495 2399 | 495c040o 495 2400 | 495c040p 495 2401 | 495c040q 495 2402 | 495c040r 495 2403 | 495c040s 495 2404 | 495c040t 495 2405 | 495c040u 495 2406 | 495c040v 495 2407 | 495c040w 495 2408 | 496c040k 496 2409 | 496c040l 496 2410 | 496c040m 496 2411 | 496c040n 496 2412 | 496c040o 496 2413 | 496c040p 496 2414 | 496c040q 496 2415 | 496c040r 496 2416 | 496c040s 496 2417 | 496c040t 496 2418 | 496c040u 496 2419 | 496c040v 496 2420 | 496c040w 496 2421 | 496c040x 496 2422 | 496c040y 496 2423 | 497c040j 497 2424 | 497c040k 497 2425 | 497c040l 497 2426 | 497c040m 497 2427 | 497c040n 497 2428 | 497c040o 497 2429 | 497c040p 497 2430 | 497c040q 497 2431 | 497c040r 497 2432 | 497c040s 497 2433 | 497c040t 497 2434 | 497c040u 497 2435 | 497c040v 497 2436 | 497c040w 497 2437 | 497c040x 497 2438 | 498c040m 498 2439 | 498c040n 498 2440 | 498c040o 498 2441 | 498c040p 498 2442 | 498c040q 498 2443 | 498c040r 498 2444 | 498c040s 498 2445 | 498c040t 498 2446 | 498c040u 498 2447 | 498c040v 498 2448 | 498c040w 498 2449 | 498c040x 498 2450 | 498c040y 498 2451 | 498c040z 498 2452 | 498c0410 498 2453 | 499c040k 499 2454 | 499c040l 499 2455 | 499c040m 499 2456 | 499c040n 499 2457 | 499c040o 499 2458 | 499c040p 499 2459 | 499c040q 499 2460 | 499c040r 499 2461 | 499c040s 499 2462 | 499c040t 499 2463 | 499c040u 499 2464 | 499c040v 499 2465 | 499c040w 499 2466 | 499c040x 499 2467 | 499c040y 499 2468 | 49ac040m 49a 2469 | 49ac040n 49a 2470 | 49ac040o 49a 2471 | 49ac040p 49a 2472 | 49ac040q 49a 2473 | 49ac040r 49a 2474 | 49ac040s 49a 2475 | 49ac040t 49a 2476 | 49ac040u 49a 2477 | 49ac040v 49a 2478 | 49ac040w 49a 2479 | 49ac040x 49a 2480 | 49ac040y 49a 2481 | 49ac040z 49a 2482 | 49ac0410 49a 2483 | 49bc040l 49b 2484 | 49bc040m 49b 2485 | 49bc040n 49b 2486 | 49bc040o 49b 2487 | 49bc040p 49b 2488 | 49bc040q 49b 2489 | 49bc040r 49b 2490 | 49bc040s 49b 2491 | 49bc040t 49b 2492 | 49bc040u 49b 2493 | 49bc040v 49b 2494 | 49bc040w 49b 2495 | 49bc040x 49b 2496 | 49bc040y 49b 2497 | 49bc040z 49b 2498 | 49cc040l 49c 2499 | 49cc040m 49c 2500 | 49cc040n 49c 2501 | 49cc040o 49c 2502 | 49cc040p 49c 2503 | 49cc040q 49c 2504 | 49cc040r 49c 2505 | 49cc040s 49c 2506 | 49cc040t 49c 2507 | 49cc040u 49c 2508 | 49cc040v 49c 2509 | 49cc040w 49c 2510 | 49cc040x 49c 2511 | 49cc040y 49c 2512 | 49cc040z 49c 2513 | 49dc040k 49d 2514 | 49dc040l 49d 2515 | 49dc040m 49d 2516 | 49dc040n 49d 2517 | 49dc040o 49d 2518 | 49dc040p 49d 2519 | 49dc040q 49d 2520 | 49dc040r 49d 2521 | 49dc040s 49d 2522 | 49dc040t 49d 2523 | 49dc040u 49d 2524 | 49dc040v 49d 2525 | 49dc040w 49d 2526 | 49dc040x 49d 2527 | 49dc040y 49d 2528 | 49ec040l 49e 2529 | 49ec040m 49e 2530 | 49ec040n 49e 2531 | 49ec040o 49e 2532 | 49ec040p 49e 2533 | 49ec040q 49e 2534 | 49ec040r 49e 2535 | 49ec040s 49e 2536 | 49ec040t 49e 2537 | 49ec040u 49e 2538 | 49ec040v 49e 2539 | 49ec040w 49e 2540 | 49ec040x 49e 2541 | 49ec040y 49e 2542 | 49ec040z 49e 2543 | 49fc040k 49f 2544 | 49fc040l 49f 2545 | 49fc040m 49f 2546 | 49fc040n 49f 2547 | 49fc040o 49f 2548 | 49fc040p 49f 2549 | 49fc040q 49f 2550 | 49fc040r 49f 2551 | 49fc040s 49f 2552 | 49fc040t 49f 2553 | 49fc040u 49f 2554 | 49fc040v 49f 2555 | 49fc040w 49f 2556 | 49fc040x 49f 2557 | 49fc040y 49f 2558 | 49gc040i 49g 2559 | 49gc040j 49g 2560 | 49gc040k 49g 2561 | 49gc040l 49g 2562 | 49gc040m 49g 2563 | 49gc040n 49g 2564 | 49gc040o 49g 2565 | 49gc040p 49g 2566 | 49gc040q 49g 2567 | 49gc040r 49g 2568 | 49gc040s 49g 2569 | 49gc040t 49g 2570 | 49gc040u 49g 2571 | 49gc040v 49g 2572 | 49gc040w 49g 2573 | 49hc040j 49h 2574 | 49hc040k 49h 2575 | 49hc040l 49h 2576 | 49hc040m 49h 2577 | 49hc040n 49h 2578 | 49hc040o 49h 2579 | 49hc040p 49h 2580 | 49hc040q 49h 2581 | 49hc040r 49h 2582 | 49hc040s 49h 2583 | 49hc040t 49h 2584 | 49hc040u 49h 2585 | 49hc040v 49h 2586 | 49hc040w 49h 2587 | 49hc040x 49h 2588 | 49ic040k 49i 2589 | 49ic040l 49i 2590 | 49ic040m 49i 2591 | 49ic040n 49i 2592 | 49ic040o 49i 2593 | 49ic040p 49i 2594 | 49ic040q 49i 2595 | 49ic040r 49i 2596 | 49ic040s 49i 2597 | 49ic040t 49i 2598 | 49ic040u 49i 2599 | 49ic040v 49i 2600 | 49ic040w 49i 2601 | 49ic040x 49i 2602 | 49ic040y 49i 2603 | 49jc040l 49j 2604 | 49jc040m 49j 2605 | 49jc040n 49j 2606 | 49jc040o 49j 2607 | 49jc040p 49j 2608 | 49jc040q 49j 2609 | 49jc040r 49j 2610 | 49jc040s 49j 2611 | 49jc040t 49j 2612 | 49jc040u 49j 2613 | 49jc040v 49j 2614 | 49jc040w 49j 2615 | 49jc040x 49j 2616 | 49jc040y 49j 2617 | 49jc040z 49j 2618 | 49kc040m 49k 2619 | 49kc040n 49k 2620 | 49kc040o 49k 2621 | 49kc040p 49k 2622 | 49kc040q 49k 2623 | 49kc040r 49k 2624 | 49kc040s 49k 2625 | 49kc040t 49k 2626 | 49kc040u 49k 2627 | 49kc040v 49k 2628 | 49kc040w 49k 2629 | 49kc040x 49k 2630 | 49kc040y 49k 2631 | 49kc040z 49k 2632 | 49kc0410 49k 2633 | 49lc040j 49l 2634 | 49lc040k 49l 2635 | 49lc040l 49l 2636 | 49lc040m 49l 2637 | 49lc040n 49l 2638 | 49lc040o 49l 2639 | 49lc040p 49l 2640 | 49lc040q 49l 2641 | 49lc040r 49l 2642 | 49lc040s 49l 2643 | 49lc040t 49l 2644 | 49lc040u 49l 2645 | 49lc040v 49l 2646 | 49lc040w 49l 2647 | 49lc040x 49l 2648 | 49mc040k 49m 2649 | 49mc040l 49m 2650 | 49mc040m 49m 2651 | 49mc040n 49m 2652 | 49mc040o 49m 2653 | 49mc040p 49m 2654 | 49mc040q 49m 2655 | 49mc040r 49m 2656 | 49mc040s 49m 2657 | 49mc040t 49m 2658 | 49mc040u 49m 2659 | 49mc040v 49m 2660 | 49mc040w 49m 2661 | 49mc040x 49m 2662 | 49mc040y 49m 2663 | 49nc040k 49n 2664 | 49nc040l 49n 2665 | 49nc040m 49n 2666 | 49nc040n 49n 2667 | 49nc040o 49n 2668 | 49nc040p 49n 2669 | 49nc040q 49n 2670 | 49nc040r 49n 2671 | 49nc040s 49n 2672 | 49nc040t 49n 2673 | 49nc040u 49n 2674 | 49nc040v 49n 2675 | 49nc040w 49n 2676 | 49nc040x 49n 2677 | 49nc040y 49n 2678 | 49oc040k 49o 2679 | 49oc040l 49o 2680 | 49oc040m 49o 2681 | 49oc040n 49o 2682 | 49oc040o 49o 2683 | 49oc040p 49o 2684 | 49oc040q 49o 2685 | 49oc040r 49o 2686 | 49oc040s 49o 2687 | 49oc040t 49o 2688 | 49oc040u 49o 2689 | 49oc040v 49o 2690 | 49oc040w 49o 2691 | 49oc040x 49o 2692 | 49oc040y 49o 2693 | 49pc040l 49p 2694 | 49pc040m 49p 2695 | 49pc040n 49p 2696 | 49pc040o 49p 2697 | 49pc040p 49p 2698 | 49pc040q 49p 2699 | 49pc040r 49p 2700 | 49pc040s 49p 2701 | 49pc040t 49p 2702 | 49pc040u 49p 2703 | 49pc040v 49p 2704 | 49pc040w 49p 2705 | 49pc040x 49p 2706 | 49pc040y 49p 2707 | 49pc040z 49p 2708 | 49qc040k 49q 2709 | 49qc040l 49q 2710 | 49qc040m 49q 2711 | 49qc040n 49q 2712 | 49qc040o 49q 2713 | 49qc040p 49q 2714 | 49qc040q 49q 2715 | 49qc040r 49q 2716 | 49qc040s 49q 2717 | 49qc040t 49q 2718 | 49qc040u 49q 2719 | 49qc040v 49q 2720 | 49qc040w 49q 2721 | 49qc040x 49q 2722 | 49qc040y 49q 2723 | 49rc040l 49r 2724 | 49rc040m 49r 2725 | 49rc040n 49r 2726 | 49rc040o 49r 2727 | 49rc040p 49r 2728 | 49rc040q 49r 2729 | 49rc040r 49r 2730 | 49rc040s 49r 2731 | 49rc040t 49r 2732 | 49rc040u 49r 2733 | 49rc040v 49r 2734 | 49rc040w 49r 2735 | 49rc040x 49r 2736 | 49rc040y 49r 2737 | 49rc040z 49r 2738 | 49sc040k 49s 2739 | 49sc040l 49s 2740 | 49sc040m 49s 2741 | 49sc040n 49s 2742 | 49sc040o 49s 2743 | 49sc040p 49s 2744 | 49sc040q 49s 2745 | 49sc040r 49s 2746 | 49sc040s 49s 2747 | 49sc040t 49s 2748 | 49sc040u 49s 2749 | 49sc040v 49s 2750 | 49sc040w 49s 2751 | 49sc040x 49s 2752 | 49sc040y 49s 2753 | 49tc040i 49t 2754 | 49tc040j 49t 2755 | 49tc040k 49t 2756 | 49tc040l 49t 2757 | 49tc040m 49t 2758 | 49tc040n 49t 2759 | 49tc040o 49t 2760 | 49tc040p 49t 2761 | 49tc040q 49t 2762 | 49tc040r 49t 2763 | 49tc040s 49t 2764 | 49tc040t 49t 2765 | 49tc040u 49t 2766 | 49tc040v 49t 2767 | 49tc040w 49t 2768 | 49uc040k 49u 2769 | 49uc040l 49u 2770 | 49uc040m 49u 2771 | 49uc040n 49u 2772 | 49uc040o 49u 2773 | 49uc040p 49u 2774 | 49uc040q 49u 2775 | 49uc040r 49u 2776 | 49uc040s 49u 2777 | 49uc040t 49u 2778 | 49uc040u 49u 2779 | 49uc040v 49u 2780 | 49uc040w 49u 2781 | 49uc040x 49u 2782 | 49uc040y 49u 2783 | 49vc040l 49v 2784 | 49vc040m 49v 2785 | 49vc040n 49v 2786 | 49vc040o 49v 2787 | 49vc040p 49v 2788 | 49vc040q 49v 2789 | 49vc040r 49v 2790 | 49vc040s 49v 2791 | 49vc040t 49v 2792 | 49vc040u 49v 2793 | 49vc040v 49v 2794 | 49vc040w 49v 2795 | 49vc040x 49v 2796 | 49vc040y 49v 2797 | 49vc040z 49v 2798 | 49wc040l 49w 2799 | 49wc040m 49w 2800 | 49wc040n 49w 2801 | 49wc040o 49w 2802 | 49wc040p 49w 2803 | 49wc040q 49w 2804 | 49wc040r 49w 2805 | 49wc040s 49w 2806 | 49wc040t 49w 2807 | 49wc040u 49w 2808 | 49wc040v 49w 2809 | 49wc040w 49w 2810 | 49wc040x 49w 2811 | 49wc040y 49w 2812 | 49wc040z 49w 2813 | 49xc040k 49x 2814 | 49xc040l 49x 2815 | 49xc040m 49x 2816 | 49xc040n 49x 2817 | 49xc040o 49x 2818 | 49xc040p 49x 2819 | 49xc040q 49x 2820 | 49xc040r 49x 2821 | 49xc040s 49x 2822 | 49xc040t 49x 2823 | 49xc040u 49x 2824 | 49xc040v 49x 2825 | 49xc040w 49x 2826 | 49xc040x 49x 2827 | 49xc040y 49x 2828 | 49yc040l 49y 2829 | 49yc040m 49y 2830 | 49yc040n 49y 2831 | 49yc040o 49y 2832 | 49yc040p 49y 2833 | 49yc040q 49y 2834 | 49yc040r 49y 2835 | 49yc040s 49y 2836 | 49yc040t 49y 2837 | 49yc040u 49y 2838 | 49yc040v 49y 2839 | 49yc040w 49y 2840 | 49yc040x 49y 2841 | 49yc040y 49y 2842 | 49yc040z 49y 2843 | 49zc040l 49z 2844 | 49zc040m 49z 2845 | 49zc040n 49z 2846 | 49zc040o 49z 2847 | 49zc040p 49z 2848 | 49zc040q 49z 2849 | 49zc040r 49z 2850 | 49zc040s 49z 2851 | 49zc040t 49z 2852 | 49zc040u 49z 2853 | 49zc040v 49z 2854 | 49zc040w 49z 2855 | 49zc040x 49z 2856 | 49zc040y 49z 2857 | 49zc040z 49z 2858 | 4a0c040k 4a0 2859 | 4a0c040l 4a0 2860 | 4a0c040m 4a0 2861 | 4a0c040n 4a0 2862 | 4a0c040o 4a0 2863 | 4a0c040p 4a0 2864 | 4a0c040q 4a0 2865 | 4a0c040r 4a0 2866 | 4a0c040s 4a0 2867 | 4a0c040t 4a0 2868 | 4a0c040u 4a0 2869 | 4a0c040v 4a0 2870 | 4a0c040w 4a0 2871 | 4a0c040x 4a0 2872 | 4a0c040y 4a0 2873 | 4a1c040j 4a1 2874 | 4a1c040k 4a1 2875 | 4a1c040l 4a1 2876 | 4a1c040m 4a1 2877 | 4a1c040n 4a1 2878 | 4a1c040o 4a1 2879 | 4a1c040p 4a1 2880 | 4a1c040q 4a1 2881 | 4a1c040r 4a1 2882 | 4a1c040s 4a1 2883 | 4a1c040t 4a1 2884 | 4a1c040u 4a1 2885 | 4a1c040v 4a1 2886 | 4a1c040w 4a1 2887 | 4a1c040x 4a1 2888 | 4a2c040n 4a2 2889 | 4a2c040o 4a2 2890 | 4a2c040p 4a2 2891 | 4a2c040q 4a2 2892 | 4a2c040r 4a2 2893 | 4a2c040s 4a2 2894 | 4a2c040t 4a2 2895 | 4a2c040u 4a2 2896 | 4a2c040v 4a2 2897 | 4a2c040w 4a2 2898 | 4a2c040x 4a2 2899 | 4a2c040y 4a2 2900 | 4a2c040z 4a2 2901 | 4a2c0410 4a2 2902 | 4a2c0411 4a2 2903 | 4a3c040l 4a3 2904 | 4a3c040m 4a3 2905 | 4a3c040n 4a3 2906 | 4a3c040o 4a3 2907 | 4a3c040p 4a3 2908 | 4a3c040q 4a3 2909 | 4a3c040r 4a3 2910 | 4a3c040s 4a3 2911 | 4a3c040t 4a3 2912 | 4a3c040u 4a3 2913 | 4a3c040v 4a3 2914 | 4a3c040w 4a3 2915 | 4a3c040x 4a3 2916 | 4a3c040y 4a3 2917 | 4a3c040z 4a3 2918 | 4a4c040l 4a4 2919 | 4a4c040m 4a4 2920 | 4a4c040n 4a4 2921 | 4a4c040o 4a4 2922 | 4a4c040p 4a4 2923 | 4a4c040q 4a4 2924 | 4a4c040r 4a4 2925 | 4a4c040s 4a4 2926 | 4a4c040t 4a4 2927 | 4a4c040u 4a4 2928 | 4a4c040v 4a4 2929 | 4a4c040w 4a4 2930 | 4a4c040x 4a4 2931 | 4a4c040y 4a4 2932 | 4a4c040z 4a4 2933 | 4a5c040o 4a5 2934 | 4a5c040p 4a5 2935 | 4a5c040q 4a5 2936 | 4a5c040r 4a5 2937 | 4a5c040s 4a5 2938 | 4a5c040t 4a5 2939 | 4a5c040u 4a5 2940 | 4a5c040v 4a5 2941 | 4a5c040w 4a5 2942 | 4a5c040x 4a5 2943 | 4a5c040y 4a5 2944 | 4a5c040z 4a5 2945 | 4a5c0410 4a5 2946 | 4a5c0411 4a5 2947 | 4a5c0412 4a5 2948 | 4a6c040l 4a6 2949 | 4a6c040m 4a6 2950 | 4a6c040n 4a6 2951 | 4a6c040o 4a6 2952 | 4a6c040p 4a6 2953 | 4a6c040q 4a6 2954 | 4a6c040r 4a6 2955 | 4a6c040s 4a6 2956 | 4a6c040t 4a6 2957 | 4a6c040u 4a6 2958 | 4a6c040v 4a6 2959 | 4a6c040w 4a6 2960 | 4a6c040x 4a6 2961 | 4a6c040y 4a6 2962 | 4a6c040z 4a6 2963 | 4a7c040k 4a7 2964 | 4a7c040l 4a7 2965 | 4a7c040m 4a7 2966 | 4a7c040n 4a7 2967 | 4a7c040o 4a7 2968 | 4a7c040p 4a7 2969 | 4a7c040q 4a7 2970 | 4a7c040r 4a7 2971 | 4a7c040s 4a7 2972 | 4a7c040t 4a7 2973 | 4a7c040u 4a7 2974 | 4a7c040v 4a7 2975 | 4a7c040w 4a7 2976 | 4a7c040x 4a7 2977 | 4a7c040y 4a7 2978 | 4a8c040n 4a8 2979 | 4a8c040o 4a8 2980 | 4a8c040p 4a8 2981 | 4a8c040q 4a8 2982 | 4a8c040r 4a8 2983 | 4a8c040s 4a8 2984 | 4a8c040t 4a8 2985 | 4a8c040u 4a8 2986 | 4a8c040v 4a8 2987 | 4a8c040w 4a8 2988 | 4a8c040x 4a8 2989 | 4a8c040y 4a8 2990 | 4a8c040z 4a8 2991 | 4a8c0410 4a8 2992 | 4a8c0411 4a8 2993 | 4a9c040i 4a9 2994 | 4a9c040j 4a9 2995 | 4a9c040k 4a9 2996 | 4a9c040l 4a9 2997 | 4a9c040m 4a9 2998 | 4a9c040n 4a9 2999 | 4a9c040o 4a9 3000 | 4a9c040p 4a9 3001 | 4a9c040q 4a9 3002 | 4a9c040r 4a9 3003 | 4a9c040s 4a9 3004 | 4a9c040t 4a9 3005 | 4a9c040u 4a9 3006 | 4a9c040v 4a9 3007 | 4a9c040w 4a9 3008 | 4aac040l 4aa 3009 | 4aac040m 4aa 3010 | 4aac040n 4aa 3011 | 4aac040o 4aa 3012 | 4aac040p 4aa 3013 | 4aac040q 4aa 3014 | 4aac040r 4aa 3015 | 4aac040s 4aa 3016 | 4aac040t 4aa 3017 | 4aac040u 4aa 3018 | 4aac040v 4aa 3019 | 4aac040w 4aa 3020 | 4aac040x 4aa 3021 | 4aac040y 4aa 3022 | 4aac040z 4aa 3023 | 4abc040j 4ab 3024 | 4abc040k 4ab 3025 | 4abc040l 4ab 3026 | 4abc040m 4ab 3027 | 4abc040n 4ab 3028 | 4abc040o 4ab 3029 | 4abc040p 4ab 3030 | 4abc040q 4ab 3031 | 4abc040r 4ab 3032 | 4abc040s 4ab 3033 | 4abc040t 4ab 3034 | 4abc040u 4ab 3035 | 4abc040v 4ab 3036 | 4abc040w 4ab 3037 | 4abc040x 4ab 3038 | 4acc040k 4ac 3039 | 4acc040l 4ac 3040 | 4acc040m 4ac 3041 | 4acc040n 4ac 3042 | 4acc040o 4ac 3043 | 4acc040p 4ac 3044 | 4acc040q 4ac 3045 | 4acc040r 4ac 3046 | 4acc040s 4ac 3047 | 4acc040t 4ac 3048 | 4acc040u 4ac 3049 | 4acc040v 4ac 3050 | 4acc040w 4ac 3051 | 4acc040x 4ac 3052 | 4acc040y 4ac 3053 | 4adc040l 4ad 3054 | 4adc040m 4ad 3055 | 4adc040n 4ad 3056 | 4adc040o 4ad 3057 | 4adc040p 4ad 3058 | 4adc040q 4ad 3059 | 4adc040r 4ad 3060 | 4adc040s 4ad 3061 | 4adc040t 4ad 3062 | 4adc040u 4ad 3063 | 4adc040v 4ad 3064 | 4adc040w 4ad 3065 | 4adc040x 4ad 3066 | 4adc040y 4ad 3067 | 4adc040z 4ad 3068 | 4aec040m 4ae 3069 | 4aec040n 4ae 3070 | 4aec040o 4ae 3071 | 4aec040p 4ae 3072 | 4aec040q 4ae 3073 | 4aec040r 4ae 3074 | 4aec040s 4ae 3075 | 4aec040t 4ae 3076 | 4aec040u 4ae 3077 | 4aec040v 4ae 3078 | 4aec040w 4ae 3079 | 4aec040x 4ae 3080 | 4aec040y 4ae 3081 | 4aec040z 4ae 3082 | 4aec0410 4ae 3083 | 4afc040i 4af 3084 | 4afc040j 4af 3085 | 4afc040k 4af 3086 | 4afc040l 4af 3087 | 4afc040m 4af 3088 | 4afc040n 4af 3089 | 4afc040o 4af 3090 | 4afc040p 4af 3091 | 4afc040q 4af 3092 | 4afc040r 4af 3093 | 4afc040s 4af 3094 | 4afc040t 4af 3095 | 4afc040u 4af 3096 | 4afc040v 4af 3097 | 4afc040w 4af 3098 | 4agc040n 4ag 3099 | 4agc040o 4ag 3100 | 4agc040p 4ag 3101 | 4agc040q 4ag 3102 | 4agc040r 4ag 3103 | 4agc040s 4ag 3104 | 4agc040t 4ag 3105 | 4agc040u 4ag 3106 | 4agc040v 4ag 3107 | 4agc040w 4ag 3108 | 4agc040x 4ag 3109 | 4agc040y 4ag 3110 | 4agc040z 4ag 3111 | 4agc0410 4ag 3112 | 4agc0411 4ag 3113 | 4ahc040m 4ah 3114 | 4ahc040n 4ah 3115 | 4ahc040o 4ah 3116 | 4ahc040p 4ah 3117 | 4ahc040q 4ah 3118 | 4ahc040r 4ah 3119 | 4ahc040s 4ah 3120 | 4ahc040t 4ah 3121 | 4ahc040u 4ah 3122 | 4ahc040v 4ah 3123 | 4ahc040w 4ah 3124 | 4ahc040x 4ah 3125 | 4ahc040y 4ah 3126 | 4ahc040z 4ah 3127 | 4ahc0410 4ah 3128 | 4aic040k 4ai 3129 | 4aic040l 4ai 3130 | 4aic040m 4ai 3131 | 4aic040n 4ai 3132 | 4aic040o 4ai 3133 | 4aic040p 4ai 3134 | 4aic040q 4ai 3135 | 4aic040r 4ai 3136 | 4aic040s 4ai 3137 | 4aic040t 4ai 3138 | 4aic040u 4ai 3139 | 4aic040v 4ai 3140 | 4aic040w 4ai 3141 | 4aic040x 4ai 3142 | 4aic040y 4ai 3143 | 4ajc040k 4aj 3144 | 4ajc040l 4aj 3145 | 4ajc040m 4aj 3146 | 4ajc040n 4aj 3147 | 4ajc040o 4aj 3148 | 4ajc040p 4aj 3149 | 4ajc040q 4aj 3150 | 4ajc040r 4aj 3151 | 4ajc040s 4aj 3152 | 4ajc040t 4aj 3153 | 4ajc040u 4aj 3154 | 4ajc040v 4aj 3155 | 4ajc040w 4aj 3156 | 4ajc040x 4aj 3157 | 4ajc040y 4aj 3158 | 4akc040l 4ak 3159 | 4akc040m 4ak 3160 | 4akc040n 4ak 3161 | 4akc040o 4ak 3162 | 4akc040p 4ak 3163 | 4akc040q 4ak 3164 | 4akc040r 4ak 3165 | 4akc040s 4ak 3166 | 4akc040t 4ak 3167 | 4akc040u 4ak 3168 | 4akc040v 4ak 3169 | 4akc040w 4ak 3170 | 4akc040x 4ak 3171 | 4akc040y 4ak 3172 | 4akc040z 4ak 3173 | 4alc040k 4al 3174 | 4alc040l 4al 3175 | 4alc040m 4al 3176 | 4alc040n 4al 3177 | 4alc040o 4al 3178 | 4alc040p 4al 3179 | 4alc040q 4al 3180 | 4alc040r 4al 3181 | 4alc040s 4al 3182 | 4alc040t 4al 3183 | 4alc040u 4al 3184 | 4alc040v 4al 3185 | 4alc040w 4al 3186 | 4alc040x 4al 3187 | 4alc040y 4al 3188 | 4amc040l 4am 3189 | 4amc040m 4am 3190 | 4amc040n 4am 3191 | 4amc040o 4am 3192 | 4amc040p 4am 3193 | 4amc040q 4am 3194 | 4amc040r 4am 3195 | 4amc040s 4am 3196 | 4amc040t 4am 3197 | 4amc040u 4am 3198 | 4amc040v 4am 3199 | 4amc040w 4am 3200 | 4amc040x 4am 3201 | 4amc040y 4am 3202 | 4amc040z 4am 3203 | 4anc040j 4an 3204 | 4anc040k 4an 3205 | 4anc040l 4an 3206 | 4anc040m 4an 3207 | 4anc040n 4an 3208 | 4anc040o 4an 3209 | 4anc040p 4an 3210 | 4anc040q 4an 3211 | 4anc040r 4an 3212 | 4anc040s 4an 3213 | 4anc040t 4an 3214 | 4anc040u 4an 3215 | 4anc040v 4an 3216 | 4anc040w 4an 3217 | 4anc040x 4an 3218 | 4aoc040j 4ao 3219 | 4aoc040k 4ao 3220 | 4aoc040l 4ao 3221 | 4aoc040m 4ao 3222 | 4aoc040n 4ao 3223 | 4aoc040o 4ao 3224 | 4aoc040p 4ao 3225 | 4aoc040q 4ao 3226 | 4aoc040r 4ao 3227 | 4aoc040s 4ao 3228 | 4aoc040t 4ao 3229 | 4aoc040u 4ao 3230 | 4aoc040v 4ao 3231 | 4aoc040w 4ao 3232 | 4aoc040x 4ao 3233 | 4apc040j 4ap 3234 | 4apc040k 4ap 3235 | 4apc040l 4ap 3236 | 4apc040m 4ap 3237 | 4apc040n 4ap 3238 | 4apc040o 4ap 3239 | 4apc040p 4ap 3240 | 4apc040q 4ap 3241 | 4apc040r 4ap 3242 | 4apc040s 4ap 3243 | 4apc040t 4ap 3244 | 4apc040u 4ap 3245 | 4apc040v 4ap 3246 | 4apc040w 4ap 3247 | 4apc040x 4ap 3248 | 4aqc040k 4aq 3249 | 4aqc040l 4aq 3250 | 4aqc040m 4aq 3251 | 4aqc040n 4aq 3252 | 4aqc040o 4aq 3253 | 4aqc040p 4aq 3254 | 4aqc040q 4aq 3255 | 4aqc040r 4aq 3256 | 4aqc040s 4aq 3257 | 4aqc040t 4aq 3258 | 4aqc040u 4aq 3259 | 4aqc040v 4aq 3260 | 4aqc040w 4aq 3261 | 4aqc040x 4aq 3262 | 4aqc040y 4aq 3263 | 4arc040k 4ar 3264 | 4arc040l 4ar 3265 | 4arc040m 4ar 3266 | 4arc040n 4ar 3267 | 4arc040o 4ar 3268 | 4arc040p 4ar 3269 | 4arc040q 4ar 3270 | 4arc040r 4ar 3271 | 4arc040s 4ar 3272 | 4arc040t 4ar 3273 | 4arc040u 4ar 3274 | 4arc040v 4ar 3275 | 4arc040w 4ar 3276 | 4arc040x 4ar 3277 | 4arc040y 4ar 3278 | 4asc040l 4as 3279 | 4asc040m 4as 3280 | 4asc040n 4as 3281 | 4asc040o 4as 3282 | 4asc040p 4as 3283 | 4asc040q 4as 3284 | 4asc040r 4as 3285 | 4asc040s 4as 3286 | 4asc040t 4as 3287 | 4asc040u 4as 3288 | 4asc040v 4as 3289 | 4asc040w 4as 3290 | 4asc040x 4as 3291 | 4asc040y 4as 3292 | 4asc040z 4as 3293 | 4atc040j 4at 3294 | 4atc040k 4at 3295 | 4atc040l 4at 3296 | 4atc040m 4at 3297 | 4atc040n 4at 3298 | 4atc040o 4at 3299 | 4atc040p 4at 3300 | 4atc040q 4at 3301 | 4atc040r 4at 3302 | 4atc040s 4at 3303 | 4atc040t 4at 3304 | 4atc040u 4at 3305 | 4atc040v 4at 3306 | 4atc040w 4at 3307 | 4atc040x 4at 3308 | 4auc040j 4au 3309 | 4auc040k 4au 3310 | 4auc040l 4au 3311 | 4auc040m 4au 3312 | 4auc040n 4au 3313 | 4auc040o 4au 3314 | 4auc040p 4au 3315 | 4auc040q 4au 3316 | 4auc040r 4au 3317 | 4auc040s 4au 3318 | 4auc040t 4au 3319 | 4auc040u 4au 3320 | 4auc040v 4au 3321 | 4auc040w 4au 3322 | 4auc040x 4au 3323 | 4avc040k 4av 3324 | 4avc040l 4av 3325 | 4avc040m 4av 3326 | 4avc040n 4av 3327 | 4avc040o 4av 3328 | 4avc040p 4av 3329 | 4avc040q 4av 3330 | 4avc040r 4av 3331 | 4avc040s 4av 3332 | 4avc040t 4av 3333 | 4avc040u 4av 3334 | 4avc040v 4av 3335 | 4avc040w 4av 3336 | 4avc040x 4av 3337 | 4avc040y 4av 3338 | 4awc040l 4aw 3339 | 4awc040m 4aw 3340 | 4awc040n 4aw 3341 | 4awc040o 4aw 3342 | 4awc040p 4aw 3343 | 4awc040q 4aw 3344 | 4awc040r 4aw 3345 | 4awc040s 4aw 3346 | 4awc040t 4aw 3347 | 4awc040u 4aw 3348 | 4awc040v 4aw 3349 | 4awc040w 4aw 3350 | 4awc040x 4aw 3351 | 4awc040y 4aw 3352 | 4awc040z 4aw 3353 | 4axc040k 4ax 3354 | 4axc040l 4ax 3355 | 4axc040m 4ax 3356 | 4axc040n 4ax 3357 | 4axc040o 4ax 3358 | 4axc040p 4ax 3359 | 4axc040q 4ax 3360 | 4axc040r 4ax 3361 | 4axc040s 4ax 3362 | 4axc040t 4ax 3363 | 4axc040u 4ax 3364 | 4axc040v 4ax 3365 | 4axc040w 4ax 3366 | 4axc040x 4ax 3367 | 4axc040y 4ax 3368 | 4ayc040l 4ay 3369 | 4ayc040m 4ay 3370 | 4ayc040n 4ay 3371 | 4ayc040o 4ay 3372 | 4ayc040p 4ay 3373 | 4ayc040q 4ay 3374 | 4ayc040r 4ay 3375 | 4ayc040s 4ay 3376 | 4ayc040t 4ay 3377 | 4ayc040u 4ay 3378 | 4ayc040v 4ay 3379 | 4ayc040w 4ay 3380 | 4ayc040x 4ay 3381 | 4ayc040y 4ay 3382 | 4ayc040z 4ay 3383 | 4azc040j 4az 3384 | 4azc040k 4az 3385 | 4azc040l 4az 3386 | 4azc040m 4az 3387 | 4azc040n 4az 3388 | 4azc040o 4az 3389 | 4azc040p 4az 3390 | 4azc040q 4az 3391 | 4azc040r 4az 3392 | 4azc040s 4az 3393 | 4azc040t 4az 3394 | 4azc040u 4az 3395 | 4azc040v 4az 3396 | 4azc040w 4az 3397 | 4azc040x 4az 3398 | 4b0c040n 4b0 3399 | 4b0c040o 4b0 3400 | 4b0c040p 4b0 3401 | 4b0c040q 4b0 3402 | 4b0c040r 4b0 3403 | 4b0c040s 4b0 3404 | 4b0c040t 4b0 3405 | 4b0c040u 4b0 3406 | 4b0c040v 4b0 3407 | 4b0c040w 4b0 3408 | 4b0c040x 4b0 3409 | 4b0c040y 4b0 3410 | 4b0c040z 4b0 3411 | 4b0c0410 4b0 3412 | 4b0c0411 4b0 3413 | 4b1c040l 4b1 3414 | 4b1c040m 4b1 3415 | 4b1c040n 4b1 3416 | 4b1c040o 4b1 3417 | 4b1c040p 4b1 3418 | 4b1c040q 4b1 3419 | 4b1c040r 4b1 3420 | 4b1c040s 4b1 3421 | 4b1c040t 4b1 3422 | 4b1c040u 4b1 3423 | 4b1c040v 4b1 3424 | 4b1c040w 4b1 3425 | 4b1c040x 4b1 3426 | 4b1c040y 4b1 3427 | 4b1c040z 4b1 3428 | 4b2c040l 4b2 3429 | 4b2c040m 4b2 3430 | 4b2c040n 4b2 3431 | 4b2c040o 4b2 3432 | 4b2c040p 4b2 3433 | 4b2c040q 4b2 3434 | 4b2c040r 4b2 3435 | 4b2c040s 4b2 3436 | 4b2c040t 4b2 3437 | 4b2c040u 4b2 3438 | 4b2c040v 4b2 3439 | 4b2c040w 4b2 3440 | 4b2c040x 4b2 3441 | 4b2c040y 4b2 3442 | 4b2c040z 4b2 3443 | 4b3c040o 4b3 3444 | 4b3c040p 4b3 3445 | 4b3c040q 4b3 3446 | 4b3c040r 4b3 3447 | 4b3c040s 4b3 3448 | 4b3c040t 4b3 3449 | 4b3c040u 4b3 3450 | 4b3c040v 4b3 3451 | 4b3c040w 4b3 3452 | 4b3c040x 4b3 3453 | 4b3c040y 4b3 3454 | 4b3c040z 4b3 3455 | 4b3c0410 4b3 3456 | 4b3c0411 4b3 3457 | 4b3c0412 4b3 3458 | 4b4c040l 4b4 3459 | 4b4c040m 4b4 3460 | 4b4c040n 4b4 3461 | 4b4c040o 4b4 3462 | 4b4c040p 4b4 3463 | 4b4c040q 4b4 3464 | 4b4c040r 4b4 3465 | 4b4c040s 4b4 3466 | 4b4c040t 4b4 3467 | 4b4c040u 4b4 3468 | 4b4c040v 4b4 3469 | 4b4c040w 4b4 3470 | 4b4c040x 4b4 3471 | 4b4c040y 4b4 3472 | 4b4c040z 4b4 3473 | 4b5c040k 4b5 3474 | 4b5c040l 4b5 3475 | 4b5c040m 4b5 3476 | 4b5c040n 4b5 3477 | 4b5c040o 4b5 3478 | 4b5c040p 4b5 3479 | 4b5c040q 4b5 3480 | 4b5c040r 4b5 3481 | 4b5c040s 4b5 3482 | 4b5c040t 4b5 3483 | 4b5c040u 4b5 3484 | 4b5c040v 4b5 3485 | 4b5c040w 4b5 3486 | 4b5c040x 4b5 3487 | 4b5c040y 4b5 3488 | 4b6c040n 4b6 3489 | 4b6c040o 4b6 3490 | 4b6c040p 4b6 3491 | 4b6c040q 4b6 3492 | 4b6c040r 4b6 3493 | 4b6c040s 4b6 3494 | 4b6c040t 4b6 3495 | 4b6c040u 4b6 3496 | 4b6c040v 4b6 3497 | 4b6c040w 4b6 3498 | 4b6c040x 4b6 3499 | 4b6c040y 4b6 3500 | 4b6c040z 4b6 3501 | 4b6c0410 4b6 3502 | 4b6c0411 4b6 3503 | 4b7c040i 4b7 3504 | 4b7c040j 4b7 3505 | 4b7c040k 4b7 3506 | 4b7c040l 4b7 3507 | 4b7c040m 4b7 3508 | 4b7c040n 4b7 3509 | 4b7c040o 4b7 3510 | 4b7c040p 4b7 3511 | 4b7c040q 4b7 3512 | 4b7c040r 4b7 3513 | 4b7c040s 4b7 3514 | 4b7c040t 4b7 3515 | 4b7c040u 4b7 3516 | 4b7c040v 4b7 3517 | 4b7c040w 4b7 3518 | 4b8c040j 4b8 3519 | 4b8c040k 4b8 3520 | 4b8c040l 4b8 3521 | 4b8c040m 4b8 3522 | 4b8c040n 4b8 3523 | 4b8c040o 4b8 3524 | 4b8c040p 4b8 3525 | 4b8c040q 4b8 3526 | 4b8c040r 4b8 3527 | 4b8c040s 4b8 3528 | 4b8c040t 4b8 3529 | 4b8c040u 4b8 3530 | 4b8c040v 4b8 3531 | 4b8c040w 4b8 3532 | 4b8c040x 4b8 3533 | 4b9c040k 4b9 3534 | 4b9c040l 4b9 3535 | 4b9c040m 4b9 3536 | 4b9c040n 4b9 3537 | 4b9c040o 4b9 3538 | 4b9c040p 4b9 3539 | 4b9c040q 4b9 3540 | 4b9c040r 4b9 3541 | 4b9c040s 4b9 3542 | 4b9c040t 4b9 3543 | 4b9c040u 4b9 3544 | 4b9c040v 4b9 3545 | 4b9c040w 4b9 3546 | 4b9c040x 4b9 3547 | 4b9c040y 4b9 3548 | 4bac040k 4ba 3549 | 4bac040l 4ba 3550 | 4bac040m 4ba 3551 | 4bac040n 4ba 3552 | 4bac040o 4ba 3553 | 4bac040p 4ba 3554 | 4bac040q 4ba 3555 | 4bac040r 4ba 3556 | 4bac040s 4ba 3557 | 4bac040t 4ba 3558 | 4bac040u 4ba 3559 | 4bac040v 4ba 3560 | 4bac040w 4ba 3561 | 4bac040x 4ba 3562 | 4bac040y 4ba 3563 | 4bbc040l 4bb 3564 | 4bbc040m 4bb 3565 | 4bbc040n 4bb 3566 | 4bbc040o 4bb 3567 | 4bbc040p 4bb 3568 | 4bbc040q 4bb 3569 | 4bbc040r 4bb 3570 | 4bbc040s 4bb 3571 | 4bbc040t 4bb 3572 | 4bbc040u 4bb 3573 | 4bbc040v 4bb 3574 | 4bbc040w 4bb 3575 | 4bbc040x 4bb 3576 | 4bbc040y 4bb 3577 | 4bbc040z 4bb 3578 | 4bcc040m 4bc 3579 | 4bcc040n 4bc 3580 | 4bcc040o 4bc 3581 | 4bcc040p 4bc 3582 | 4bcc040q 4bc 3583 | 4bcc040r 4bc 3584 | 4bcc040s 4bc 3585 | 4bcc040t 4bc 3586 | 4bcc040u 4bc 3587 | 4bcc040v 4bc 3588 | 4bcc040w 4bc 3589 | 4bcc040x 4bc 3590 | 4bcc040y 4bc 3591 | 4bcc040z 4bc 3592 | 4bcc0410 4bc 3593 | 4bdc040i 4bd 3594 | 4bdc040j 4bd 3595 | 4bdc040k 4bd 3596 | 4bdc040l 4bd 3597 | 4bdc040m 4bd 3598 | 4bdc040n 4bd 3599 | 4bdc040o 4bd 3600 | 4bdc040p 4bd 3601 | 4bdc040q 4bd 3602 | 4bdc040r 4bd 3603 | 4bdc040s 4bd 3604 | 4bdc040t 4bd 3605 | 4bdc040u 4bd 3606 | 4bdc040v 4bd 3607 | 4bdc040w 4bd 3608 | 4bec040n 4be 3609 | 4bec040o 4be 3610 | 4bec040p 4be 3611 | 4bec040q 4be 3612 | 4bec040r 4be 3613 | 4bec040s 4be 3614 | 4bec040t 4be 3615 | 4bec040u 4be 3616 | 4bec040v 4be 3617 | 4bec040w 4be 3618 | 4bec040x 4be 3619 | 4bec040y 4be 3620 | 4bec040z 4be 3621 | 4bec0410 4be 3622 | 4bec0411 4be 3623 | 4bfc040m 4bf 3624 | 4bfc040n 4bf 3625 | 4bfc040o 4bf 3626 | 4bfc040p 4bf 3627 | 4bfc040q 4bf 3628 | 4bfc040r 4bf 3629 | 4bfc040s 4bf 3630 | 4bfc040t 4bf 3631 | 4bfc040u 4bf 3632 | 4bfc040v 4bf 3633 | 4bfc040w 4bf 3634 | 4bfc040x 4bf 3635 | 4bfc040y 4bf 3636 | 4bfc040z 4bf 3637 | 4bfc0410 4bf 3638 | 4bgc040k 4bg 3639 | 4bgc040l 4bg 3640 | 4bgc040m 4bg 3641 | 4bgc040n 4bg 3642 | 4bgc040o 4bg 3643 | 4bgc040p 4bg 3644 | 4bgc040q 4bg 3645 | 4bgc040r 4bg 3646 | 4bgc040s 4bg 3647 | 4bgc040t 4bg 3648 | 4bgc040u 4bg 3649 | 4bgc040v 4bg 3650 | 4bgc040w 4bg 3651 | 4bgc040x 4bg 3652 | 4bgc040y 4bg 3653 | 4bhc040k 4bh 3654 | 4bhc040l 4bh 3655 | 4bhc040m 4bh 3656 | 4bhc040n 4bh 3657 | 4bhc040o 4bh 3658 | 4bhc040p 4bh 3659 | 4bhc040q 4bh 3660 | 4bhc040r 4bh 3661 | 4bhc040s 4bh 3662 | 4bhc040t 4bh 3663 | 4bhc040u 4bh 3664 | 4bhc040v 4bh 3665 | 4bhc040w 4bh 3666 | 4bhc040x 4bh 3667 | 4bhc040y 4bh 3668 | 4bic040l 4bi 3669 | 4bic040m 4bi 3670 | 4bic040n 4bi 3671 | 4bic040o 4bi 3672 | 4bic040p 4bi 3673 | 4bic040q 4bi 3674 | 4bic040r 4bi 3675 | 4bic040s 4bi 3676 | 4bic040t 4bi 3677 | 4bic040u 4bi 3678 | 4bic040v 4bi 3679 | 4bic040w 4bi 3680 | 4bic040x 4bi 3681 | 4bic040y 4bi 3682 | 4bic040z 4bi 3683 | 4bjc040k 4bj 3684 | 4bjc040l 4bj 3685 | 4bjc040m 4bj 3686 | 4bjc040n 4bj 3687 | 4bjc040o 4bj 3688 | 4bjc040p 4bj 3689 | 4bjc040q 4bj 3690 | 4bjc040r 4bj 3691 | 4bjc040s 4bj 3692 | 4bjc040t 4bj 3693 | 4bjc040u 4bj 3694 | 4bjc040v 4bj 3695 | 4bjc040w 4bj 3696 | 4bjc040x 4bj 3697 | 4bjc040y 4bj 3698 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | tqdm 2 | torch 3 | scipy 4 | pyyaml 5 | pandas 6 | torchaudio 7 | tensorboard 8 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Alexander-H-Liu/NPC/0d7d189faa21176c5e2d0e2fe3ff986717b1e926/src/__init__.py -------------------------------------------------------------------------------- /src/audio.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torchaudio 4 | from torchaudio.compliance import kaldi 5 | from config.default_hparas import WINDOW_TYPE 6 | 7 | 8 | class CMVN(torch.jit.ScriptModule): 9 | ''' 10 | Utterence-wised zero mean unit variance from 11 | https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/master/src/audio.py 12 | implemented by Tzu-Wei Sung (https://github.com/WindQAQ) 13 | ''' 14 | __constants__ = ["mode", "dim", "eps"] 15 | 16 | def __init__(self, mode="global", dim=2, eps=1e-10): 17 | # `torchaudio.load()` loads audio with shape [channel, feature_dim, time] 18 | # so perform normalization on dim=2 by default 19 | super(CMVN, self).__init__() 20 | 21 | if mode != "global": 22 | raise NotImplementedError( 23 | "Only support global mean variance normalization.") 24 | 25 | self.mode = mode 26 | self.dim = dim 27 | self.eps = eps 28 | 29 | @torch.jit.script_method 30 | def forward(self, x): 31 | if self.mode == "global": 32 | return (x - x.mean(self.dim, keepdim=True)) \ 33 | / (self.eps + x.std(self.dim, keepdim=True)) 34 | 35 | def extra_repr(self): 36 | return "mode={}, dim={}, eps={}".format(self.mode, self.dim, self.eps) 37 | 38 | class FeatureExtractor(nn.Module): 39 | ''' Feature extractor, transforming file path to Mel spectrogram ''' 40 | def __init__(self, mode="fbank", num_mel_bins=80, decode_wav=False, 41 | apply_cmvn=True, **kwargs): 42 | super(FeatureExtractor, self).__init__() 43 | # ToDo: Other surface representation 44 | assert mode=="fbank", "Only Mel-spectrogram implemented" 45 | self.mode = mode 46 | self.extract_fn = kaldi.fbank 47 | self.apply_cmvn = apply_cmvn 48 | if self.apply_cmvn: 49 | self.cmvn = CMVN() 50 | self.num_mel_bins = num_mel_bins 51 | self.kwargs = kwargs 52 | self.decode_wav = decode_wav 53 | if self.decode_wav: 54 | # HACK: sox cannot deal with wav with incorrect file length 55 | torchaudio.set_audio_backend('soundfile') 56 | 57 | def _load_file(self, filepath): 58 | if self.decode_wav: 59 | waveform, sample_rate = torchaudio.load_wav(filepath) 60 | else: 61 | waveform, sample_rate = torchaudio.load(filepath) 62 | return waveform, sample_rate 63 | 64 | def forward(self, filepath): 65 | # Load waveform 66 | waveform, sr = self._load_file(filepath) 67 | # feature extraction 68 | y = self.extract_fn(waveform, 69 | num_mel_bins=self.num_mel_bins, 70 | sample_frequency=sr, 71 | window_type = WINDOW_TYPE, 72 | **self.kwargs) 73 | # CMVN 74 | if self.apply_cmvn: 75 | y = y.transpose(0, 1).unsqueeze(0) # TxD -> 1xDxT 76 | y = self.cmvn(y) 77 | y = y.squeeze(0).transpose(0,1) # 1xDxT -> TxD 78 | return y 79 | 80 | def extra_repr(self): 81 | return "mode={}, num_mel_bins={}".format(self.mode, self.num_mel_bins) 82 | 83 | def create_msg(self): 84 | ''' List msg for verbose function ''' 85 | msg = 'Audio spec.| Audio feat. = {}\t\t| feat. dim = {}\t| CMVN = {}'\ 86 | .format(self.mode, self.num_mel_bins, self.apply_cmvn) 87 | return [msg] 88 | 89 | def create_transform(audio_config): 90 | feat_type = audio_config.pop("feat_type") 91 | feat_dim = audio_config.pop("feat_dim") 92 | decode_wav = audio_config.pop("decode_wav",False) 93 | apply_cmvn = audio_config.pop("cmvn",True) 94 | transforms = FeatureExtractor(feat_type, feat_dim, decode_wav, 95 | apply_cmvn, **audio_config) 96 | return transforms, feat_dim -------------------------------------------------------------------------------- /src/data.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from functools import partial 3 | from src.audio import create_transform 4 | from torch.utils.data import DataLoader 5 | 6 | 7 | def create_dataset(name, path, batch_size, audio_max_frames, 8 | train_split, dev_split, test_split=None): 9 | ''' Interface for creating dataset ''' 10 | 11 | # Import dataset & collect function from target dataset 12 | try: 13 | ds = __import__(".".join(["dataset",name.lower()]), 14 | fromlist=['DSet','collect_batch']) 15 | Dataset = ds.DSet 16 | collect_fn = ds.collect_batch 17 | except: 18 | raise NotImplementedError 19 | # Create dataset (tr/dv set should always be provided) 20 | tr_set = Dataset(path, train_split) 21 | dv_set = Dataset(path, dev_split) 22 | # Messages to show 23 | msg_list = _data_msg(name, train_split.__str__(), tr_set, 24 | dev_split.__str__(), dv_set, audio_max_frames, 25 | batch_size) 26 | # Test set of downstream task included if specified 27 | if test_split is None: 28 | tt_set = None 29 | else: 30 | tt_set = Dataset(path, test_split) 31 | msg_list.append(' | Test sets = {}\t| Size = {}'\ 32 | .format(test_split.__str__(),len(tt_set))) 33 | 34 | return tr_set, dv_set, tt_set, batch_size, \ 35 | msg_list, collect_fn, audio_max_frames 36 | 37 | 38 | def prepare_data(n_jobs, dev_n_jobs, use_gpu, pin_memory, dataset, audio): 39 | ''' Prepare dataloader for training/validation''' 40 | 41 | # Audio feature extractor 42 | audio_transform, audio_dim = create_transform(audio.copy()) 43 | data_msg = audio_transform.create_msg() 44 | 45 | # Create dataset 46 | tr_set, dv_set, tt_set, batch_size, msg, collect_fn, audio_max_frames =\ 47 | create_dataset( **dataset) 48 | data_msg += msg 49 | 50 | # Collect function 51 | collect_tr = partial(collect_fn, audio_max_frames=audio_max_frames, 52 | audio_transform=audio_transform, mode='train') 53 | collect_dv = partial(collect_fn, audio_max_frames=audio_max_frames, 54 | audio_transform=audio_transform, mode='dev') 55 | # Create data loader 56 | tr_set = DataLoader(tr_set, batch_size=batch_size, shuffle=True, 57 | drop_last=True, collate_fn=collect_tr, 58 | num_workers=n_jobs, pin_memory=use_gpu) 59 | dv_set = DataLoader(dv_set, batch_size=batch_size, shuffle=False, 60 | drop_last=False, collate_fn=collect_dv, 61 | num_workers=dev_n_jobs, pin_memory=pin_memory) 62 | 63 | # Prepare testset if needed 64 | if tt_set is not None: 65 | collect_tt = partial(collect_fn, audio_max_frames=audio_max_frames, 66 | audio_transform=audio_transform, mode='test') 67 | tt_set = DataLoader(tt_set, batch_size=batch_size, shuffle=False, 68 | drop_last=False, collate_fn=collect_tt, 69 | num_workers=dev_n_jobs, pin_memory=pin_memory) 70 | 71 | return tr_set, dv_set, tt_set, audio_dim, data_msg 72 | 73 | def _data_msg(name, tr_spt, tr_set, dv_spt, dv_set, audio_max_frames, bs): 74 | ''' List msg for verbose function ''' 75 | msg_list = [] 76 | msg_list.append('Data spec. | Dataset = {}\t| Max Frame = {}\t| Batch size = {}'\ 77 | .format(name, audio_max_frames, bs)) 78 | msg_list.append(' | Train sets = {}\t| Size = {}'\ 79 | .format(tr_spt, len(tr_set))) 80 | msg_list.append(' | Dev sets = {}\t| Size = {}'\ 81 | .format(dv_spt, len(dv_set))) 82 | return msg_list -------------------------------------------------------------------------------- /src/optim.py: -------------------------------------------------------------------------------- 1 | 2 | import torch 3 | import numpy as np 4 | 5 | MIN_LR = 1e-4 6 | 7 | class Optimizer(): 8 | def __init__(self,parameters,optimizer,lr,decay=1.0,**kwargs): 9 | # Setup torch optimizer 10 | self.n_steps = 0 11 | self.opt_type = optimizer 12 | self.init_lr = lr 13 | opt = getattr(torch.optim, optimizer) 14 | if optimizer == 'SGD': 15 | self.opt = opt(parameters, lr=lr, momentum=0.9, weight_decay=0.0001) 16 | else: 17 | self.opt = opt(parameters, lr=lr) 18 | self.decay_rate = decay 19 | 20 | def get_opt_state_dict(self): 21 | return self.opt.state_dict() 22 | 23 | def load_opt_state_dict(self, state_dict): 24 | self.opt.load_state_dict(state_dict) 25 | 26 | def pre_step(self, step): 27 | self.opt.zero_grad() 28 | 29 | def decay(self): 30 | if self.decay_rate<1.0: 31 | for param_group in self.opt.param_groups: 32 | param_group['lr'] = max(param_group['lr']*self.decay_rate,MIN_LR) 33 | 34 | def step(self): 35 | self.opt.step() 36 | self.n_steps += 1 37 | 38 | def create_msg(self): 39 | return ['Optim.spec.| Algo. = {}\t| Lr = {}\t' 40 | .format(self.opt_type, self.init_lr)] 41 | -------------------------------------------------------------------------------- /src/solver.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import abc 4 | import math 5 | import yaml 6 | import torch 7 | from torch.utils.tensorboard import SummaryWriter 8 | from config.default_hparas import default_hparas 9 | from src.util import human_format, Timer 10 | 11 | class BaseSolver(): 12 | ''' 13 | Prototype Solver for all kinds of tasks 14 | Arguments 15 | config - yaml-styled config 16 | paras - argparse outcome 17 | mode - string that specifies training/testing 18 | ''' 19 | 20 | def __init__(self, config, paras): 21 | # General Settings 22 | self.config = config 23 | self.paras = paras 24 | for k, v in default_hparas.items(): 25 | setattr(self, k, v) 26 | if self.paras.gpu and torch.cuda.is_available(): 27 | self.gpu = True 28 | self.device = torch.device('cuda') 29 | else: 30 | self.gpu = False 31 | self.device = torch.device('cpu') 32 | 33 | # Settings for training/testing 34 | self.mode = self.paras.mode # legacy, should be removed 35 | 36 | # Name experiment 37 | self.exp_name = paras.name 38 | if self.exp_name is None: 39 | # By default, exp is named after config file 40 | self.exp_name = paras.config.split('/')[-1].split('.y')[0] 41 | self.exp_name += '_sd{}'.format(paras.seed) 42 | 43 | # Filepath setup 44 | os.makedirs(paras.ckpdir, exist_ok=True) 45 | self.ckpdir = os.path.join(paras.ckpdir, self.exp_name) 46 | os.makedirs(self.ckpdir, exist_ok=True) 47 | 48 | # Logger settings 49 | self.logdir = os.path.join(paras.logdir, self.exp_name) 50 | self.log = SummaryWriter( 51 | self.logdir, flush_secs=self.TB_FLUSH_FREQ) 52 | self.timer = Timer() 53 | 54 | # Hyperparameters 55 | self.step = 0 56 | self.epoch = config['hparas']['epoch'] 57 | 58 | self.verbose('Exp. name : {}'.format(self.exp_name)) 59 | self.verbose('Loading data...') 60 | 61 | def backward(self, loss): 62 | ''' 63 | Standard backward step with timer and debugger 64 | Arguments 65 | loss - the loss to perform loss.backward() 66 | ''' 67 | self.timer.set() 68 | loss.backward() 69 | grad_norm = torch.nn.utils.clip_grad_norm_( 70 | self.model.parameters(), self.GRAD_CLIP) 71 | if math.isnan(grad_norm): 72 | self.verbose('Error : grad norm is NaN @ step '+str(self.step)) 73 | else: 74 | self.optimizer.step() 75 | self.timer.cnt('bw') 76 | return grad_norm 77 | 78 | def load_ckpt(self): 79 | ''' Load ckpt if --load option is specified ''' 80 | if self.paras.load: 81 | # Load weights 82 | ckpt = torch.load( self.paras.load, 83 | map_location=self.device if self.paras.mode == 'train' else 'cpu') 84 | ckpt['model'] = {k.replace('module.','',1):v \ 85 | for k,v in ckpt['model'].items()} 86 | self.model.load_state_dict(ckpt['model']) 87 | 88 | # Load task-dependent items 89 | metric = "None" 90 | score = 0.0 91 | for k, v in ckpt.items(): 92 | if type(v) is float: 93 | metric, score = k, v 94 | if self.paras.mode == 'train': 95 | self.cur_epoch = ckpt['epoch'] 96 | self.step = ckpt['global_step'] 97 | self.optimizer.load_opt_state_dict(ckpt['optimizer']) 98 | msg = \ 99 | 'Load ckpt from {}, restarting at step {} \ 100 | (recorded {} = {:.2f} %)'\ 101 | .format(self.paras.load, self.step, metric, score) 102 | self.verbose(msg) 103 | else: 104 | # Inference 105 | msg = 'Evaluation target = {} (recorded {} = {:.2f} %)'\ 106 | .format(self.paras.load, metric, score) 107 | self.verbose(msg) 108 | 109 | def verbose(self, msg, display_step=False): 110 | ''' Verbose function for print information to stdout''' 111 | header = '['+human_format(self.step)+']' if display_step else '[INFO]' 112 | if self.paras.verbose: 113 | if type(msg) == list: 114 | for m in msg: 115 | print(header, m.ljust(100)) 116 | else: 117 | print(header, msg.ljust(100)) 118 | 119 | def progress(self, msg): 120 | ''' Verbose function for updating progress on stdout 121 | Do not include newline in msg ''' 122 | if self.paras.verbose: 123 | sys.stdout.write("\033[K") # Clear line 124 | print('[Ep {}] {}'.format(human_format(self.cur_epoch), msg), end='\r') 125 | 126 | def write_log(self, log_name, log_dict, bins=None): 127 | ''' Write log to TensorBoard 128 | log_name - Name of tensorboard variable 129 | log_dict - / Value of variable (e.g. dict of losses) 130 | ''' 131 | if log_dict is not None: 132 | if type(log_dict) is dict: 133 | log_dict = {key: val for key, val in log_dict.items() if ( 134 | val is not None and not math.isnan(val))} 135 | self.log.add_scalars(log_name, log_dict, self.step) 136 | elif 'Hist.' in log_name or 'Spec' in log_name: 137 | img, form = log_dict 138 | self.log.add_image(log_name,img, global_step=self.step, dataformats=form) 139 | else: 140 | raise NotImplementedError 141 | 142 | def save_checkpoint(self, f_name, metric, score, show_msg=True): 143 | '''' pt saver 144 | f_name - the name of ckpt (w/o prefix), overwrite if existed 145 | score - The value of metric used to evaluate model 146 | ''' 147 | ckpt_path = os.path.join(self.ckpdir, f_name) 148 | full_dict = { 149 | "model": self.model.state_dict(), 150 | "optimizer": self.optimizer.get_opt_state_dict(), 151 | "global_step": self.step, 152 | "epoch": self.cur_epoch, 153 | metric: score 154 | } 155 | torch.save(full_dict, ckpt_path) 156 | if show_msg: 157 | msg = "Saved checkpoint (epoch = {}, {} = {:.2f}) and status @ {}" 158 | self.verbose(msg.format( 159 | human_format(self.cur_epoch), metric, score, ckpt_path)) 160 | return ckpt_path 161 | 162 | 163 | # ----------------------------------- Abtract Methods ------------------- # 164 | @abc.abstractmethod 165 | def load_data(self): 166 | ''' 167 | Called by main to load all data 168 | After this call, data related attributes should be setup 169 | (e.g. self.tr_set, self.dev_set) 170 | No return value 171 | ''' 172 | raise NotImplementedError 173 | 174 | @abc.abstractmethod 175 | def set_model(self): 176 | ''' 177 | Called by main to set models 178 | After this call, model related attributes should be setup 179 | The followings MUST be setup 180 | - self.model (torch.nn.Module) 181 | - self.optimizer (src.Optimizer), 182 | Loading pre-trained model should also be performed here 183 | No return value 184 | ''' 185 | raise NotImplementedError 186 | 187 | @abc.abstractmethod 188 | def exec(self): 189 | ''' 190 | Called by main to execute training/inference 191 | ''' 192 | raise NotImplementedError -------------------------------------------------------------------------------- /src/util.py: -------------------------------------------------------------------------------- 1 | import time 2 | import torch 3 | import numpy as np 4 | import matplotlib 5 | matplotlib.use('Agg') 6 | import matplotlib.pyplot as plt 7 | 8 | class Timer(): 9 | ''' Timer for recording training time distribution. ''' 10 | 11 | def __init__(self): 12 | self.prev_t = time.time() 13 | self.clear() 14 | 15 | def set(self): 16 | self.prev_t = time.time() 17 | 18 | def cnt(self, mode): 19 | self.time_table[mode] += time.time()-self.prev_t 20 | self.set() 21 | if mode == 'bw': 22 | self.click += 1 23 | 24 | def show(self): 25 | total_time = sum(self.time_table.values()) 26 | self.time_table['avg'] = total_time/self.click 27 | self.time_table['rd'] = 100*self.time_table['rd']/total_time 28 | self.time_table['fw'] = 100*self.time_table['fw']/total_time 29 | self.time_table['bw'] = 100*self.time_table['bw']/total_time 30 | msg = '{avg:.3f} sec/step (rd {rd:.1f}% | fw {fw:.1f}% | bw {bw:.1f}%)\ 31 | '.format(**self.time_table) 32 | self.clear() 33 | return msg 34 | 35 | def clear(self): 36 | self.time_table = {'rd': 0, 'fw': 0, 'bw': 0} 37 | self.click = 0 38 | 39 | 40 | def human_format(num): 41 | ''' Convert number to human readable format 42 | Reference : 43 | https://stackoverflow.com/questions/579310/\ 44 | formatting-long-numbers-as-strings-in-python''' 45 | magnitude = 0 46 | while num >= 1000: 47 | magnitude += 1 48 | num /= 1000.0 49 | # add more suffixes if you need them 50 | return '{:3.1f}{}'.format(num, [' ', 'K', 'M', 'G', 'T', 'P'][magnitude]) 51 | 52 | def draw(data, hist=False): 53 | if data is None: 54 | return None 55 | if hist: 56 | data = _save_canvas( data, hist) 57 | else: 58 | data = _save_canvas(data.detach().cpu().numpy().T, hist) 59 | return torch.FloatTensor(data),"HWC" 60 | 61 | def _save_canvas(data, hist): 62 | fig, ax = plt.subplots(figsize=(20, 8)) 63 | if not hist: 64 | ax.imshow(data, aspect="auto", origin="lower") 65 | else: 66 | # Bar for distribution 67 | ax.bar(list(range(len(data))),data) 68 | fig.canvas.draw() 69 | # Note : torch tb add_image takes color as [0,1] 70 | data = np.array(fig.canvas.renderer._renderer)[:,:,:-1]/255.0 71 | plt.close(fig) 72 | return data 73 | 74 | def cal_per(pred, label, seq_len): 75 | # BxCxT -> BxT 76 | pred = pred.argmax(dim=1).detach().cpu() 77 | label = label.cpu() 78 | hit, total = 0,0 79 | for idx,l in enumerate(seq_len): 80 | hit += sum(pred[idx,:l] == label[idx,:l]) 81 | total += l 82 | return 1-(float(hit)/float(total)), hit, total 83 | 84 | --------------------------------------------------------------------------------